diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index e944cc8..cfa67a7 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -24,12 +24,6 @@ jobs: with: submodules: true - - name: Setup Hugo - uses: peaceiris/actions-hugo@v3 - with: - hugo-version: '0.124.1' - extended: true - - name: Build run: bash build.sh diff --git a/.gitignore b/.gitignore index a48cf0d..fc1666b 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ public +node_modules \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 4e65345..232d8f7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM klakegg/hugo:ext-alpine +FROM floryn90/hugo:ext-alpine RUN apk add git && \ git config --global --add safe.directory /src diff --git a/README.md b/README.md index 28d21dd..481ce67 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,19 @@ # Docsy Example -## 🚧 WARNING: `main` is unstable! For details, see [#207](https://github.com/google/docsy-example/issues/207) 🚧 [Docsy][] is a [Hugo theme module][] for technical documentation sites, providing easy site navigation, structure, and more. This **Docsy Example Project** uses the Docsy theme component as a hugo module and provides a skeleton documentation structure for you to use. You can clone/copy this project and edit it with your own content, or use it as an example. -In this project, the Docsy theme component is pulled in as a Hugo module, together with other module dependencies: +In this project, the Docsy theme is pulled in as a Hugo module, together with +its dependencies: ```console $ hugo mod graph -hugo: collected modules in 566 ms -hugo: collected modules in 578 ms -github.com/google/docsy-example github.com/google/docsy@v0.7.0 -github.com/google/docsy-example github.com/google/docsy/dependencies@v0.7.0 -github.com/google/docsy/dependencies@v0.7.0 github.com/twbs/bootstrap@v5.2.3+incompatible -github.com/google/docsy/dependencies@v0.7.0 github.com/FortAwesome/Font-Awesome@v0.0.0-20230327165841-0698449d50f2 +... ``` -You can find detailed theme instructions in the [Docsy user guide][]. +For Docsy documentation, see [Docsy user guide][]. This Docsy Example Project is hosted on [Netlify][] at [example.docsy.dev][]. You can view deploy logs from the [deploy section of the project's Netlify @@ -30,11 +25,13 @@ This is not an officially supported Google product. This project is currently ma A simple way to get started is to use this project as a template, which gives you a site project that is set up and ready to use. To do this: -1. Click **Use this template**. +1. Use the dropdown for switching branches/tags to change to the **latest** released tag. -2. Select a name for your new project and click **Create repository from template**. +2. Click **Use this template**. -3. Make your own local working copy of your new repo using git clone, replacing https://github.com/me/example.git with your repo’s web URL: +3. Select a name for your new project and click **Create repository from template**. + +4. Make your own local working copy of your new repo using git clone, replacing https://github.com/me/example.git with your repo’s web URL: ```bash git clone --depth 1 https://github.com/me/example.git @@ -99,13 +96,60 @@ To remove the produced images run: ```bash docker-compose rm ``` -For more information see the [Docker Compose -documentation](https://docs.docker.com/compose/gettingstarted/). +For more information see the [Docker Compose documentation][]. 
+ +## Using a local Docsy clone + +Make sure your installed Go version is `1.18` or higher. + +Clone the latest version of the Docsy theme into the parent folder of your project. The newly created repo should now reside in a sibling folder of your site's root folder. + +```shell +cd root-of-your-site +git clone --branch v0.7.2 https://github.com/google/docsy.git ../docsy +``` + +Now run: + +```shell +HUGO_MODULE_WORKSPACE=docsy.work hugo server --ignoreVendorPaths "**" +``` + +or, when using npm, prepend `local` to the script you want to invoke, e.g.: + +```shell +npm run local serve +``` + +By using the `HUGO_MODULE_WORKSPACE` directive (either directly or via the `local` prefix when using npm), the server now watches all files and directories inside the sibling directory `../docsy`, too. Any changes inside the local `docsy` theme clone are picked up immediately (hot reload), so you can instantly see the effect of your local edits. + +In the command above, we used the environment variable `HUGO_MODULE_WORKSPACE` to tell Hugo about the local workspace file `docsy.work`. Alternatively, you can declare the workspace file inside your settings file `hugo.toml`: + +```toml +[module] +  workspace = "docsy.work" +``` + +Your project's `hugo.toml` file already contains these lines; the workspace directive is commented out, however. Remove the leading comment character '#' so that this line takes effect. ## Troubleshooting As you run the website locally, you may run into the following error: +```console +$ hugo server +WARN 2023/06/27 16:59:06 Module "project" is not compatible with this Hugo version; run "hugo mod graph" for more information. +Start building sites … +hugo v0.101.0-466fa43c16709b4483689930a4f9ac8add5c9f66+extended windows/amd64 BuildDate=2022-06-16T07:09:16Z VendorInfo=gohugoio +Error: Error building site: "C:\Users\foo\path\to\docsy-example\content\en\_index.md:5:1": failed to extract shortcode: template for shortcode "blocks/cover" not found +Built in 27 ms +``` + +This error occurs if you are running an outdated version of Hugo. As of Docsy theme version `v0.7.0`, Hugo version `0.110.0` or higher is required. +See this [section](https://www.docsy.dev/docs/get-started/docsy-as-module/installation-prerequisites/#install-hugo) of the user guide for instructions on how to install Hugo. + +Or you may be confronted with the following error: + ```console $ hugo server @@ -137,3 +181,4 @@ See this [section](https://www.docsy.dev/docs/get-started/docsy-as-module/instal [example.docsy.dev]: https://example.docsy.dev [Hugo theme module]: https://gohugo.io/hugo-modules/use-modules/#use-a-module-for-a-theme [Netlify]: https://netlify.com +[Docker Compose documentation]: https://docs.docker.com/compose/gettingstarted/ diff --git a/build.sh b/build.sh old mode 100644 new mode 100755 index 59504ea..1794af6 --- a/build.sh +++ b/build.sh @@ -3,10 +3,7 @@ set -e set -u -git submodule update --init --recursive npm install -cd themes/docsy -npm install -cd ../.. + export HUGO_ENV="production" -hugo --gc --minify +./node_modules/.bin/hugo-extended --gc --minify diff --git a/config.yaml b/config.yaml new file mode 100644 index 0000000..9070e38 --- /dev/null +++ b/config.yaml @@ -0,0 +1,15 @@ +# THIS IS A TEST CONFIG ONLY! +# FOR THE CONFIGURATION OF YOUR SITE USE hugo.yaml. +# +# As of Docsy 0.7.0, Hugo 0.110.0 or later must be used. +# +# The sole purpose of this config file is to detect Hugo-module builds that use +# an older version of Hugo. 
+# +# DO NOT add any config parameters to this file. You can safely delete this file +# if your project is using the required Hugo version. + +module: + hugoVersion: + extended: true + min: 0.110.0 diff --git a/docker-compose.yaml b/docker-compose.yaml index e8f211a..9069dc0 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,4 +1,4 @@ -version: "3.3" +version: "3.8" services: diff --git a/docsy.work b/docsy.work new file mode 100644 index 0000000..074dc2a --- /dev/null +++ b/docsy.work @@ -0,0 +1,5 @@ +go 1.19 + +use . +use ../docsy/ // Local docsy clone resides in sibling folder to this project +// use ./themes/docsy/ // Local docsy clone resides in themes folder diff --git a/docsy.work.sum b/docsy.work.sum new file mode 100644 index 0000000..e69de29 diff --git a/go.mod b/go.mod index 56f6887..00890a6 100644 --- a/go.mod +++ b/go.mod @@ -2,8 +2,4 @@ module github.com/google/docsy-example go 1.12 -require ( - github.com/FortAwesome/Font-Awesome v0.0.0-20230327165841-0698449d50f2 // indirect - github.com/google/docsy v0.7.0 // indirect - github.com/twbs/bootstrap v5.2.3+incompatible // indirect -) +require github.com/google/docsy v0.10.0 // indirect diff --git a/go.sum b/go.sum index bf4a2c8..78bc934 100644 --- a/go.sum +++ b/go.sum @@ -1,19 +1,4 @@ -github.com/FortAwesome/Font-Awesome v0.0.0-20220831210243-d3a7818c253f h1:bvkUptSRPZBr3Kxuk+bnWCEmQ5MtEJX5fjezyV0bC3g= -github.com/FortAwesome/Font-Awesome v0.0.0-20220831210243-d3a7818c253f/go.mod h1:IUgezN/MFpCDIlFezw3L8j83oeiIuYoj28Miwr/KUYo= -github.com/FortAwesome/Font-Awesome v0.0.0-20221115183454-96cafbd73ec4 h1:xfr9SidRCMEh4A8fdkLhFPcHAVbrdv3Ua0Jp/nSmhhQ= -github.com/FortAwesome/Font-Awesome v0.0.0-20221115183454-96cafbd73ec4/go.mod h1:IUgezN/MFpCDIlFezw3L8j83oeiIuYoj28Miwr/KUYo= -github.com/FortAwesome/Font-Awesome v0.0.0-20230327165841-0698449d50f2 h1:Uv1z5EqCfmiK4IHUwT0m3h/u/WCk+kpRfxvAZhpC7Gc= -github.com/FortAwesome/Font-Awesome v0.0.0-20230327165841-0698449d50f2/go.mod h1:IUgezN/MFpCDIlFezw3L8j83oeiIuYoj28Miwr/KUYo= -github.com/google/docsy v0.5.1 h1:D/ZdFKiE29xM/gwPwQzmkyXhcbQGkReRS6aGrF7lnYk= -github.com/google/docsy v0.5.1/go.mod h1:maoUAQU5H/d+FrZIB4xg1EVWAx7RyFMGSDJyWghm31E= -github.com/google/docsy v0.6.0 h1:43bVF18t2JihAamelQjjGzx1vO2ljCilVrBgetCA8oI= -github.com/google/docsy v0.6.0/go.mod h1:VKKLqD8PQ7AglJc98yBorATfW7GrNVsn0kGXVYF6G+M= -github.com/google/docsy v0.7.0 h1:JaeZ0/KufX/BJ3SyATb/fmZa1DFI7o5d9KU+i6+lLJY= -github.com/google/docsy v0.7.0/go.mod h1:5WhIFchr5BfH6agjcInhpLRz7U7map0bcmKSpcrg6BE= -github.com/google/docsy/dependencies v0.5.1/go.mod h1:EDGc2znMbGUw0RW5kWwy2oGgLt0iVXBmoq4UOqstuNE= -github.com/google/docsy/dependencies v0.6.0/go.mod h1:EDGc2znMbGUw0RW5kWwy2oGgLt0iVXBmoq4UOqstuNE= -github.com/google/docsy/dependencies v0.7.0/go.mod h1:gihhs5gmgeO+wuoay4FwOzob+jYJVyQbNaQOh788lD4= -github.com/twbs/bootstrap v4.6.2+incompatible h1:TDa+R51BTiy1wEHSYjmqDb8LxNl/zaEjAOpRE9Hwh/o= -github.com/twbs/bootstrap v4.6.2+incompatible/go.mod h1:fZTSrkpSf0/HkL0IIJzvVspTt1r9zuf7XlZau8kpcY0= -github.com/twbs/bootstrap v5.2.3+incompatible h1:lOmsJx587qfF7/gE7Vv4FxEofegyJlEACeVV+Mt7cgc= -github.com/twbs/bootstrap v5.2.3+incompatible/go.mod h1:fZTSrkpSf0/HkL0IIJzvVspTt1r9zuf7XlZau8kpcY0= +github.com/FortAwesome/Font-Awesome v0.0.0-20240402185447-c0f460dca7f7/go.mod h1:IUgezN/MFpCDIlFezw3L8j83oeiIuYoj28Miwr/KUYo= +github.com/google/docsy v0.10.0 h1:6tMDacPwAyRWNCfvsn/9qGOZDQ8b0aRzjRZvnZPY5dg= +github.com/google/docsy v0.10.0/go.mod h1:c0nIAqmRTOuJ01F85U/wJPQtc3Zj9N58Kea9bOT2AJc= +github.com/twbs/bootstrap 
v5.3.3+incompatible/go.mod h1:fZTSrkpSf0/HkL0IIJzvVspTt1r9zuf7XlZau8kpcY0= diff --git a/hugo.toml b/hugo.toml index 3b7bf8a..dfb50f2 100644 --- a/hugo.toml +++ b/hugo.toml @@ -1,9 +1,9 @@ -baseURL = "/" -title = "Tkkrlab" +baseURL = "https://example.docsy.dev/" +title = "Goldydocs" # Language settings -contentDir = "content/nl" -defaultContentLanguage = "nl" +contentDir = "content/en" +defaultContentLanguage = "en" defaultContentLanguageInSubdir = false # Useful when translating. enableMissingTranslationPlaceholders = true @@ -51,33 +51,34 @@ resampleFilter = "CatmullRom" quality = 75 anchor = "smart" -[services] -[services.googleAnalytics] -# Comment out the next line to disable GA tracking. Also disables the feature described in [params.ui.feedback]. -id = "UA-00000000-0" - # Language configuration -# english has been removed for now un-comment to add english again -#[languages] -#[languages.en] -#languageName ="English" -#contentDir = "content/en" - # Weight used for sorting. -#weight = 1 -#[languages.en.params] -#title = "Tkkrlab" -#description = "Create, inovate" - -[languages.nl] -languageName ="Dutch/Nederlands" -contentDir = "content/nl" -[languages.nl.params] -title = "Tkkrlab" -description = "Create, inovate" +[languages] +[languages.en] +languageName ="English" +# Weight used for sorting. +weight = 1 +[languages.en.params] +title = "Goldydocs" +description = "A Docsy example site" + +[languages.no] +languageName ="Norsk" +contentDir = "content/no" +[languages.no.params] +title = "Goldydocs" +description = "Docsy er operativsystem for skyen" time_format_default = "02.01.2006" time_format_blog = "02.01.2006" +[languages.fa] +languageName ="فارسی" +contentDir = "content/fa" +[languages.fa.params] +title = "اسناد گلدی" +description = "یک نمونه برای پوسته داکسی" +time_format_default = "2006.01.02" +time_format_blog = "2006.01.02" [markup] [markup.goldmark] @@ -98,7 +99,6 @@ time_format_blog = "02.01.2006" section = ["HTML", "print", "RSS"] [params] -copyright = "The Docsy Authors" privacy_policy = "https://policies.google.com/privacy" # First one is picked as the Twitter card image if not set on page. @@ -137,27 +137,26 @@ github_branch= "main" # Google Custom Search Engine ID. Remove or comment out to disable search. gcs_engine_id = "d72aa9b2712488cc3" -# Enable Algolia DocSearch -algolia_docsearch = false - # Enable Lunr.js offline search offlineSearch = false # Enable syntax highlighting and copy buttons on code blocks with Prism prism_syntax_highlighting = false +[params.copyright] + authors = "Docsy Authors | [CC BY 4.0](https://creativecommons.org/licenses/by/4.0) | " + from_year = 2018 + # User interface configuration [params.ui] # Set to true to disable breadcrumb navigation. breadcrumb_disable = false -# Set to true to disable the About link in the site footer -footer_about_disable = false # Set to false if you don't want to display a logo (/assets/icons/logo.svg) in the top navbar navbar_logo = true # Set to true if you don't want the top navbar to be translucent when over a `block/cover`, like on the homepage. navbar_translucent_over_cover_disable = false # Enable to show the side bar menu in its compact state. 
-sidebar_menu_compact = true +sidebar_menu_compact = false # Set to true to hide the sidebar search box (the top nav search box will still be displayed if search is enabled) sidebar_search_disable = false @@ -214,14 +213,11 @@ enable = false # hugo module configuration [module] - # uncomment line below for temporary local development of module - # replacements = "github.com/google/docsy -> ../../docsy" + # Uncomment the next line to build and serve using local docsy clone declared in the named Hugo workspace: + # workspace = "docsy.work" [module.hugoVersion] extended = true min = "0.110.0" [[module.imports]] path = "github.com/google/docsy" disable = false - [[module.imports]] - path = "github.com/google/docsy/dependencies" - disable = false diff --git a/layouts/_default/_markup/render-heading.html b/layouts/_default/_markup/render-heading.html new file mode 100644 index 0000000..7f8e974 --- /dev/null +++ b/layouts/_default/_markup/render-heading.html @@ -0,0 +1 @@ +{{ template "_default/_markup/td-render-heading.html" . }} diff --git a/netlify.toml b/netlify.toml index 60839e2..7898ef8 100644 --- a/netlify.toml +++ b/netlify.toml @@ -6,7 +6,7 @@ command = "npm run build:preview" publish = "public" [build.environment] -GO_VERSION = "1.20.5" +GO_VERSION = "1.22.2" [context.production] command = "npm run build:production" diff --git a/node_modules/.bin/autoprefixer b/node_modules/.bin/autoprefixer new file mode 120000 index 0000000..e876d81 --- /dev/null +++ b/node_modules/.bin/autoprefixer @@ -0,0 +1 @@ +../autoprefixer/bin/autoprefixer \ No newline at end of file diff --git a/node_modules/.bin/browserslist b/node_modules/.bin/browserslist new file mode 120000 index 0000000..3cd991b --- /dev/null +++ b/node_modules/.bin/browserslist @@ -0,0 +1 @@ +../browserslist/cli.js \ No newline at end of file diff --git a/node_modules/.bin/cross-env b/node_modules/.bin/cross-env new file mode 120000 index 0000000..7c86606 --- /dev/null +++ b/node_modules/.bin/cross-env @@ -0,0 +1 @@ +../cross-env/src/bin/cross-env.js \ No newline at end of file diff --git a/node_modules/.bin/cross-env-shell b/node_modules/.bin/cross-env-shell new file mode 120000 index 0000000..a9149d1 --- /dev/null +++ b/node_modules/.bin/cross-env-shell @@ -0,0 +1 @@ +../cross-env/src/bin/cross-env-shell.js \ No newline at end of file diff --git a/node_modules/.bin/hugo b/node_modules/.bin/hugo new file mode 120000 index 0000000..f463409 --- /dev/null +++ b/node_modules/.bin/hugo @@ -0,0 +1 @@ +../hugo-extended/lib/cli.js \ No newline at end of file diff --git a/node_modules/.bin/hugo-extended b/node_modules/.bin/hugo-extended new file mode 120000 index 0000000..f463409 --- /dev/null +++ b/node_modules/.bin/hugo-extended @@ -0,0 +1 @@ +../hugo-extended/lib/cli.js \ No newline at end of file diff --git a/node_modules/.bin/nanoid b/node_modules/.bin/nanoid new file mode 120000 index 0000000..e2be547 --- /dev/null +++ b/node_modules/.bin/nanoid @@ -0,0 +1 @@ +../nanoid/bin/nanoid.cjs \ No newline at end of file diff --git a/node_modules/.bin/node-which b/node_modules/.bin/node-which new file mode 120000 index 0000000..6f8415e --- /dev/null +++ b/node_modules/.bin/node-which @@ -0,0 +1 @@ +../which/bin/node-which \ No newline at end of file diff --git a/node_modules/.bin/postcss b/node_modules/.bin/postcss new file mode 120000 index 0000000..236af7c --- /dev/null +++ b/node_modules/.bin/postcss @@ -0,0 +1 @@ +../postcss-cli/index.js \ No newline at end of file diff --git a/node_modules/.bin/seek-bunzip b/node_modules/.bin/seek-bunzip new 
file mode 120000 index 0000000..7bda56e --- /dev/null +++ b/node_modules/.bin/seek-bunzip @@ -0,0 +1 @@ +../seek-bzip/bin/seek-bunzip \ No newline at end of file diff --git a/node_modules/.bin/seek-table b/node_modules/.bin/seek-table new file mode 120000 index 0000000..7721710 --- /dev/null +++ b/node_modules/.bin/seek-table @@ -0,0 +1 @@ +../seek-bzip/bin/seek-bzip-table \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 120000 index 0000000..5aaadf4 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver.js \ No newline at end of file diff --git a/node_modules/.bin/update-browserslist-db b/node_modules/.bin/update-browserslist-db new file mode 120000 index 0000000..b11e16f --- /dev/null +++ b/node_modules/.bin/update-browserslist-db @@ -0,0 +1 @@ +../update-browserslist-db/cli.js \ No newline at end of file diff --git a/node_modules/.bin/yaml b/node_modules/.bin/yaml new file mode 120000 index 0000000..0368324 --- /dev/null +++ b/node_modules/.bin/yaml @@ -0,0 +1 @@ +../yaml/bin.mjs \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..8b9429c --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,2476 @@ +{ + "name": "docsy-example-site", + "version": "0.10.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@babel/code-frame": { + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.6.tgz", + "integrity": "sha512-ZJhac6FkEd1yhG2AHOmfcXG4ceoLltoCVJjN5XsWN9BifBQr+cHJbWi0h68HZuSORq+3WtJ2z0hwF2NG1b5kcA==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.24.6", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.6.tgz", + "integrity": "sha512-4yA7s865JHaqUdRbnaxarZREuPTHrjpDT+pXoAZ1yhyo6uFnIEpS8VMu16siFOHDpZNKYv5BObhsB//ycbICyw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.6.tgz", + "integrity": "sha512-2YnuOp4HAk2BsBrJJvYCbItHx0zWscI1C3zgWkz+wDyD9I7GIVrfnLyrR4Y1VR+7p+chAEcrgRQYZAGIKMV7vQ==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.24.6", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": 
"1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@sindresorhus/is": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.6.0.tgz", + "integrity": "sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@szmarczak/http-timer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", + "integrity": "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", + "dev": true, + "dependencies": { + "defer-to-connect": "^2.0.1" + }, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/@types/http-cache-semantics": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", + "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", + "dev": true + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", + "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "dev": true + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.19", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", + "integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001599", + "fraction.js": "^4.3.7", + "normalize-range": "^0.1.2", + "picocolors": "^1.0.0", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bl": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "dev": true, + "dependencies": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.23.0", 
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", + "node-releases": "^2.0.14", + "update-browserslist-db": "^1.0.13" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-alloc": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz", + "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", + "dev": true, + "dependencies": { + "buffer-alloc-unsafe": "^1.1.0", + "buffer-fill": "^1.0.0" + } + }, + "node_modules/buffer-alloc-unsafe": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", + "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==", + "dev": true + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/buffer-fill": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz", + "integrity": "sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==", + "dev": true + }, + "node_modules/cacheable-lookup": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", + "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", + "dev": true, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/cacheable-request": { + "version": "10.2.14", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-10.2.14.tgz", + "integrity": "sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==", + "dev": true, + "dependencies": { + "@types/http-cache-semantics": "^4.0.2", + "get-stream": "^6.0.1", + "http-cache-semantics": "^4.1.1", + "keyv": "^4.5.3", + "mimic-response": "^4.0.0", + "normalize-url": "^8.0.0", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/cacheable-request/node_modules/get-stream": { + "version": "6.0.1", + "resolved": 
"https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001625", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001625.tgz", + "integrity": "sha512-4KE9N2gcRH+HQhpeiRZXd+1niLB/XNLAhSy4z7fI8EzcbcPoAqjNInxVHTiTwWfTIV4w096XG8OtCOCQQKPv3w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/careful-downloader": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/careful-downloader/-/careful-downloader-3.0.0.tgz", + "integrity": "sha512-5KMIPa0Yoj+2tY6OK9ewdwcPebp+4XS0dMYvvF9/8fkFEfvnEpWmHWYs9JNcZ7RZUvY/v6oPzLpmmTzSIbroSA==", + "dev": true, + "dependencies": { + "debug": "^4.3.4", + "decompress": "^4.2.1", + "fs-extra": "^11.1.1", + "got": "^12.6.0", + "is-path-inside": "^4.0.0", + "tempy": "^3.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": 
"sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cross-env": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", + "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "bin": { + "cross-env": "src/bin/cross-env.js", + "cross-env-shell": "src/bin/cross-env-shell.js" + }, + "engines": { + "node": ">=10.14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/crypto-random-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", + "integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==", + "dev": true, + "dependencies": { + "type-fest": "^1.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/crypto-random-string/node_modules/type-fest": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", + "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decompress": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz", + "integrity": "sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==", + "dev": true, + "dependencies": { + "decompress-tar": "^4.0.0", + "decompress-tarbz2": "^4.0.0", + "decompress-targz": "^4.0.0", + "decompress-unzip": "^4.0.1", + "graceful-fs": "^4.1.10", + "make-dir": "^1.0.0", + "pify": "^2.3.0", + "strip-dirs": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decompress-tar": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", + "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", + "dev": true, + "dependencies": { + "file-type": "^5.2.0", + "is-stream": "^1.1.0", + "tar-stream": "^1.5.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-tarbz2": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", + "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", + "dev": true, + "dependencies": { + "decompress-tar": "^4.1.0", + "file-type": "^6.1.0", + "is-stream": "^1.1.0", + "seek-bzip": "^1.0.5", + "unbzip2-stream": "^1.0.9" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-tarbz2/node_modules/file-type": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", + "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-targz": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", + "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", + "dev": true, + "dependencies": { + "decompress-tar": "^4.1.1", + "file-type": "^5.2.0", + "is-stream": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-unzip": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", + "integrity": "sha512-1fqeluvxgnn86MOh66u8FjbtJpAFv5wgCT9Iw8rcBqQcCo5tO8eiJw7NNTrvt9n4CRBVq7CstiS922oPgyGLrw==", + "dev": true, + "dependencies": { + "file-type": "^3.8.0", + "get-stream": "^2.2.0", + "pify": "^2.3.0", + "yauzl": "^2.4.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-unzip/node_modules/file-type": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", + "integrity": "sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/dependency-graph": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/dependency-graph/-/dependency-graph-0.11.0.tgz", + "integrity": "sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==", + "dev": true, + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.4.787", + "resolved": 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.787.tgz", + "integrity": "sha512-d0EFmtLPjctczO3LogReyM2pbBiiZbnsKnGF+cdZhsYzHm/A0GV7W94kqzLD8SN4O3f3iHlgLUChqghgyznvCQ==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "6.3.0", + "resolved": 
"https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", + "integrity": "sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", + "dev": true, + "dependencies": { + "locate-path": "^7.1.0", + "path-exists": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/form-data-encoder": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", + "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", + "dev": true, + "engines": { + "node": ">= 14.17" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "dev": true, + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "dev": true + }, + "node_modules/fs-extra": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", + "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stdin": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-9.0.0.tgz", + "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-stream": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", + "integrity": "sha512-AUGhbbemXxrZJRD5cDvKtQxLuYaIbNtDTK8YqupCI393Q2KSTreEsLUN3ZxAWFGiKTzL6nKuzfcIvieflUX9qA==", + "dev": true, + "dependencies": { + "object-assign": "^4.0.1", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + 
"node": ">= 6" + } + }, + "node_modules/globby": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.1.tgz", + "integrity": "sha512-jOMLD2Z7MAhyG8aJpNOpmziMOP4rPLcc95oQPKXBazW82z+CEgPFBQvEpRUa1KeIMUJo4Wsm+q6uzO/Q/4BksQ==", + "dev": true, + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.2", + "ignore": "^5.2.4", + "path-type": "^5.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/got": { + "version": "12.6.1", + "resolved": "https://registry.npmjs.org/got/-/got-12.6.1.tgz", + "integrity": "sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==", + "dev": true, + "dependencies": { + "@sindresorhus/is": "^5.2.0", + "@szmarczak/http-timer": "^5.0.1", + "cacheable-lookup": "^7.0.0", + "cacheable-request": "^10.2.8", + "decompress-response": "^6.0.0", + "form-data-encoder": "^2.1.2", + "get-stream": "^6.0.1", + "http2-wrapper": "^2.1.10", + "lowercase-keys": "^3.0.0", + "p-cancelable": "^3.0.0", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/got?sponsor=1" + } + }, + "node_modules/got/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "node_modules/http2-wrapper": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.1.tgz", + "integrity": "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==", + "dev": true, + "dependencies": { + 
"quick-lru": "^5.1.1", + "resolve-alpn": "^1.2.0" + }, + "engines": { + "node": ">=10.19.0" + } + }, + "node_modules/hugo-extended": { + "version": "0.125.4", + "resolved": "https://registry.npmjs.org/hugo-extended/-/hugo-extended-0.125.4.tgz", + "integrity": "sha512-t2P9kz/CN6UQCX/XConZLb4Xxlx/YU8A11L4ysGYdPWUZB4XxOOe/1jcY23KChIqH3joEeBcz5DnzmgmwXrmcA==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "careful-downloader": "^3.0.0", + "log-symbols": "^5.1.0", + "read-pkg-up": "^9.1.0" + }, + "bin": { + "hugo": "lib/cli.js", + "hugo-extended": "lib/cli.js" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": 
"sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-natural-number": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", + "integrity": "sha512-Y4LTamMe0DDQIIAlaer9eKebAlDSV6huy+TWhJVPlzZh2o4tRP5SQWFlLn5N0To4mDD22/qdOq+veo1cSISLgQ==", + "dev": true + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-4.0.0.tgz", + "integrity": "sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-unicode-supported": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", + "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dev": true, + "dependencies": { + "universalify": "^2.0.0" + }, + 
"optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/lilconfig": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/locate-path": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", + "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", + "dev": true, + "dependencies": { + "p-locate": "^6.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", + "integrity": "sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==", + "dev": true, + "dependencies": { + "chalk": "^5.0.0", + "is-unicode-supported": "^1.1.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lowercase-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", + "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "dev": true, + "dependencies": { + "pify": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/make-dir/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + 
"dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", + "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-response": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", + "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "peer": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", + "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", + "dev": true + }, + "node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-url": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.0.1.tgz", + "integrity": "sha512-IO9QvjUMWxPQQhs60oOu10CRkWCiZzSUkzbXGGV9pviYl1fXYcvkzQ5jV9z8Y6un8ARoVRl4EtC6v6jNqbaJ/w==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": 
"sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-cancelable": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", + "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==", + "dev": true, + "engines": { + "node": ">=12.20" + } + }, + "node_modules/p-limit": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", + "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz", + "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", + "dev": true, + "dependencies": { + "p-limit": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", + "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-type": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", + "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + 
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", + "dev": true, + "dependencies": { + "pinkie": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postcss": { + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "peer": true, + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.0", + "source-map-js": "^1.2.0" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-cli": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/postcss-cli/-/postcss-cli-11.0.0.tgz", + "integrity": "sha512-xMITAI7M0u1yolVcXJ9XTZiO9aO49mcoKQy6pCDFdMh9kGqhzLVpWxeD/32M/QBmkhcGypZFFOLNLmIW4Pg4RA==", + "dev": true, + "dependencies": { + "chokidar": "^3.3.0", + "dependency-graph": "^0.11.0", + "fs-extra": "^11.0.0", + "get-stdin": "^9.0.0", + "globby": "^14.0.0", + "picocolors": "^1.0.0", + "postcss-load-config": "^5.0.0", + "postcss-reporter": "^7.0.0", + "pretty-hrtime": "^1.0.3", + "read-cache": "^1.0.0", + "slash": "^5.0.0", + "yargs": "^17.0.0" + }, + "bin": { + "postcss": "index.js" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-load-config": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-5.1.0.tgz", + "integrity": "sha512-G5AJ+IX0aD0dygOE0yFZQ/huFFMSNneyfp0e3/bT05a8OfPC5FUoZRPfGijUdGOJNMewJiwzcHJXFafFzeKFVA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "lilconfig": "^3.1.1", + "yaml": "^2.4.2" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1" + }, + 
"peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + } + } + }, + "node_modules/postcss-reporter": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/postcss-reporter/-/postcss-reporter-7.1.0.tgz", + "integrity": "sha512-/eoEylGWyy6/DOiMP5lmFRdmDKThqgn7D6hP2dXKJI/0rJSO1ADFNngZfDzxL0YAxFvws+Rtpuji1YIHj4mySA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "picocolors": "^1.0.0", + "thenby": "^1.3.4" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true + }, + "node_modules/pretty-hrtime": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz", + "integrity": "sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/read-pkg": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-7.1.0.tgz", + "integrity": "sha512-5iOehe+WF75IccPc30bWTbpdDQLOCc3Uu8bi3Dte3Eueij81yx1Mrufk8qBx/YAbR4uL1FdUr+7BKXDwEtisXg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.1", + "normalize-package-data": "^3.0.2", + "parse-json": "^5.2.0", + "type-fest": "^2.0.0" + }, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-9.1.0.tgz", + "integrity": 
"sha512-vaMRR1AC1nrd5CQM0PhlRsO5oc2AAigqr7cCrZ/MW/Rsaflz4RlgzkpL4qoU/z1F6wrbd85iFv1OQj/y5RdGvg==", + "dev": true, + "dependencies": { + "find-up": "^6.3.0", + "read-pkg": "^7.1.0", + "type-fest": "^2.5.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "dev": true + }, + "node_modules/responselike": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", + "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", + "dev": true, + "dependencies": { + "lowercase-keys": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + 
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/seek-bzip": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz", + "integrity": "sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==", + "dev": true, + "dependencies": { + "commander": "^2.8.1" + }, + "bin": { + "seek-bunzip": "bin/seek-bunzip", + "seek-table": "bin/seek-bzip-table" + } + }, + "node_modules/semver": { + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/source-map-js": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.18", 
+ "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.18.tgz", + "integrity": "sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==", + "dev": true + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-dirs": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", + "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", + "dev": true, + "dependencies": { + "is-natural-number": "^4.0.1" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "dev": true, + "dependencies": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/temp-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-3.0.0.tgz", + "integrity": "sha512-nHc6S/bwIilKHNRgK/3jlhDoIHcp45YgyiwcAk46Tr0LfEqGBVpmiAyuiuxeVE44m3mXnEeVhaipLOEWmH+Njw==", + "dev": true, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/tempy": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tempy/-/tempy-3.1.0.tgz", + "integrity": "sha512-7jDLIdD2Zp0bDe5r3D2qtkd1QOCacylBuL7oa4udvN6v2pqr4+LcCr67C8DR1zkpaZ8XosF5m1yQSabKAW6f2g==", + "dev": true, + "dependencies": { + "is-stream": "^3.0.0", + "temp-dir": "^3.0.0", + "type-fest": "^2.12.2", + "unique-string": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/tempy/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/thenby": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/thenby/-/thenby-1.3.4.tgz", + "integrity": "sha512-89Gi5raiWA3QZ4b2ePcEwswC3me9JIg+ToSgtE0JWeCynLnLxNr/f9G+xfo9K+Oj4AFdom8YNJjibIARTJmapQ==", + "dev": true + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true + }, + "node_modules/to-buffer": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", + "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==", + "dev": true + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/type-fest": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unbzip2-stream": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", + "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", + "dev": true, + "dependencies": { + "buffer": "^5.2.1", + "through": "^2.3.8" + } + }, + "node_modules/unicorn-magic": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", + "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unique-string": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz", + "integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==", + "dev": true, + "dependencies": { + "crypto-random-string": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.16", + "resolved": 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz", + "integrity": "sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yaml": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz", + "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==", + "dev": true, + "bin": { + 
"yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "node_modules/yocto-queue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/node_modules/@babel/code-frame/LICENSE b/node_modules/@babel/code-frame/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/node_modules/@babel/code-frame/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@babel/code-frame/README.md b/node_modules/@babel/code-frame/README.md new file mode 100644 index 0000000..7160755 --- /dev/null +++ b/node_modules/@babel/code-frame/README.md @@ -0,0 +1,19 @@ +# @babel/code-frame + +> Generate errors that contain a code frame that point to source locations. + +See our website [@babel/code-frame](https://babeljs.io/docs/babel-code-frame) for more information. 
+ +## Install + +Using npm: + +```sh +npm install --save-dev @babel/code-frame +``` + +or using yarn: + +```sh +yarn add @babel/code-frame --dev +``` diff --git a/node_modules/@babel/code-frame/lib/index.js b/node_modules/@babel/code-frame/lib/index.js new file mode 100644 index 0000000..53461aa --- /dev/null +++ b/node_modules/@babel/code-frame/lib/index.js @@ -0,0 +1,156 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.codeFrameColumns = codeFrameColumns; +exports.default = _default; +var _highlight = require("@babel/highlight"); +var _picocolors = _interopRequireWildcard(require("picocolors"), true); +function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } +function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } +const colors = typeof process === "object" && (process.env.FORCE_COLOR === "0" || process.env.FORCE_COLOR === "false") ? (0, _picocolors.createColors)(false) : _picocolors.default; +const compose = (f, g) => v => f(g(v)); +let pcWithForcedColor = undefined; +function getColors(forceColor) { + if (forceColor) { + var _pcWithForcedColor; + (_pcWithForcedColor = pcWithForcedColor) != null ? 
_pcWithForcedColor : pcWithForcedColor = (0, _picocolors.createColors)(true); + return pcWithForcedColor; + } + return colors; +} +let deprecationWarningShown = false; +function getDefs(colors) { + return { + gutter: colors.gray, + marker: compose(colors.red, colors.bold), + message: compose(colors.red, colors.bold) + }; +} +const NEWLINE = /\r\n|[\n\r\u2028\u2029]/; +function getMarkerLines(loc, source, opts) { + const startLoc = Object.assign({ + column: 0, + line: -1 + }, loc.start); + const endLoc = Object.assign({}, startLoc, loc.end); + const { + linesAbove = 2, + linesBelow = 3 + } = opts || {}; + const startLine = startLoc.line; + const startColumn = startLoc.column; + const endLine = endLoc.line; + const endColumn = endLoc.column; + let start = Math.max(startLine - (linesAbove + 1), 0); + let end = Math.min(source.length, endLine + linesBelow); + if (startLine === -1) { + start = 0; + } + if (endLine === -1) { + end = source.length; + } + const lineDiff = endLine - startLine; + const markerLines = {}; + if (lineDiff) { + for (let i = 0; i <= lineDiff; i++) { + const lineNumber = i + startLine; + if (!startColumn) { + markerLines[lineNumber] = true; + } else if (i === 0) { + const sourceLength = source[lineNumber - 1].length; + markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1]; + } else if (i === lineDiff) { + markerLines[lineNumber] = [0, endColumn]; + } else { + const sourceLength = source[lineNumber - i].length; + markerLines[lineNumber] = [0, sourceLength]; + } + } + } else { + if (startColumn === endColumn) { + if (startColumn) { + markerLines[startLine] = [startColumn, 0]; + } else { + markerLines[startLine] = true; + } + } else { + markerLines[startLine] = [startColumn, endColumn - startColumn]; + } + } + return { + start, + end, + markerLines + }; +} +function codeFrameColumns(rawLines, loc, opts = {}) { + const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts); + const colors = getColors(opts.forceColor); + const defs = getDefs(colors); + const maybeHighlight = (fmt, string) => { + return highlighted ? fmt(string) : string; + }; + const lines = rawLines.split(NEWLINE); + const { + start, + end, + markerLines + } = getMarkerLines(loc, lines, opts); + const hasColumns = loc.start && typeof loc.start.column === "number"; + const numberMaxWidth = String(end).length; + const highlightedLines = highlighted ? (0, _highlight.default)(rawLines, opts) : rawLines; + let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => { + const number = start + 1 + index; + const paddedNumber = ` ${number}`.slice(-numberMaxWidth); + const gutter = ` ${paddedNumber} |`; + const hasMarker = markerLines[number]; + const lastMarkerLine = !markerLines[number + 1]; + if (hasMarker) { + let markerLine = ""; + if (Array.isArray(hasMarker)) { + const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " "); + const numberOfMarkers = hasMarker[1] || 1; + markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), " ", markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join(""); + if (lastMarkerLine && opts.message) { + markerLine += " " + maybeHighlight(defs.message, opts.message); + } + } + return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line.length > 0 ? ` ${line}` : "", markerLine].join(""); + } else { + return ` ${maybeHighlight(defs.gutter, gutter)}${line.length > 0 ? 
` ${line}` : ""}`; + } + }).join("\n"); + if (opts.message && !hasColumns) { + frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`; + } + if (highlighted) { + return colors.reset(frame); + } else { + return frame; + } +} +function _default(rawLines, lineNumber, colNumber, opts = {}) { + if (!deprecationWarningShown) { + deprecationWarningShown = true; + const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`."; + if (process.emitWarning) { + process.emitWarning(message, "DeprecationWarning"); + } else { + const deprecationError = new Error(message); + deprecationError.name = "DeprecationWarning"; + console.warn(new Error(message)); + } + } + colNumber = Math.max(colNumber, 0); + const location = { + start: { + column: colNumber, + line: lineNumber + } + }; + return codeFrameColumns(rawLines, location, opts); +} + +//# sourceMappingURL=index.js.map diff --git a/node_modules/@babel/code-frame/lib/index.js.map b/node_modules/@babel/code-frame/lib/index.js.map new file mode 100644 index 0000000..268f2c5 --- /dev/null +++ b/node_modules/@babel/code-frame/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"names":["_highlight","require","_picocolors","_interopRequireWildcard","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","colors","process","env","FORCE_COLOR","createColors","_colors","compose","f","g","v","pcWithForcedColor","undefined","getColors","forceColor","_pcWithForcedColor","deprecationWarningShown","getDefs","gutter","gray","marker","red","bold","message","NEWLINE","getMarkerLines","loc","source","opts","startLoc","assign","column","line","start","endLoc","end","linesAbove","linesBelow","startLine","startColumn","endLine","endColumn","Math","max","min","length","lineDiff","markerLines","lineNumber","sourceLength","codeFrameColumns","rawLines","highlighted","highlightCode","shouldHighlight","defs","maybeHighlight","fmt","string","lines","split","hasColumns","numberMaxWidth","String","highlightedLines","highlight","frame","slice","map","index","number","paddedNumber","hasMarker","lastMarkerLine","markerLine","Array","isArray","markerSpacing","replace","numberOfMarkers","repeat","join","reset","_default","colNumber","emitWarning","deprecationError","Error","name","console","warn","location"],"sources":["../src/index.ts"],"sourcesContent":["import highlight, { shouldHighlight } from \"@babel/highlight\";\n\nimport _colors, { createColors } from \"picocolors\";\nimport type { Colors, Formatter } from \"picocolors/types\";\n// See https://github.com/alexeyraspopov/picocolors/issues/62\nconst colors =\n typeof process === \"object\" &&\n (process.env.FORCE_COLOR === \"0\" || process.env.FORCE_COLOR === \"false\")\n ? createColors(false)\n : _colors;\n\nconst compose: (f: (gv: U) => V, g: (v: T) => U) => (v: T) => V =\n (f, g) => v =>\n f(g(v));\n\nlet pcWithForcedColor: Colors = undefined;\nfunction getColors(forceColor: boolean) {\n if (forceColor) {\n pcWithForcedColor ??= createColors(true);\n return pcWithForcedColor;\n }\n return colors;\n}\n\nlet deprecationWarningShown = false;\n\ntype Location = {\n column: number;\n line: number;\n};\n\ntype NodeLocation = {\n end?: Location;\n start: Location;\n};\n\nexport interface Options {\n /** Syntax highlight the code as JavaScript for terminals. 
default: false */\n highlightCode?: boolean;\n /** The number of lines to show above the error. default: 2 */\n linesAbove?: number;\n /** The number of lines to show below the error. default: 3 */\n linesBelow?: number;\n /**\n * Forcibly syntax highlight the code as JavaScript (for non-terminals);\n * overrides highlightCode.\n * default: false\n */\n forceColor?: boolean;\n /**\n * Pass in a string to be displayed inline (if possible) next to the\n * highlighted location in the code. If it can't be positioned inline,\n * it will be placed above the code frame.\n * default: nothing\n */\n message?: string;\n}\n\n/**\n * Styles for code frame token types.\n */\nfunction getDefs(colors: Colors) {\n return {\n gutter: colors.gray,\n marker: compose(colors.red, colors.bold),\n message: compose(colors.red, colors.bold),\n };\n}\n\n/**\n * RegExp to test for newlines in terminal.\n */\n\nconst NEWLINE = /\\r\\n|[\\n\\r\\u2028\\u2029]/;\n\n/**\n * Extract what lines should be marked and highlighted.\n */\n\ntype MarkerLines = Record;\n\nfunction getMarkerLines(\n loc: NodeLocation,\n source: Array,\n opts: Options,\n): {\n start: number;\n end: number;\n markerLines: MarkerLines;\n} {\n const startLoc: Location = {\n column: 0,\n line: -1,\n ...loc.start,\n };\n const endLoc: Location = {\n ...startLoc,\n ...loc.end,\n };\n const { linesAbove = 2, linesBelow = 3 } = opts || {};\n const startLine = startLoc.line;\n const startColumn = startLoc.column;\n const endLine = endLoc.line;\n const endColumn = endLoc.column;\n\n let start = Math.max(startLine - (linesAbove + 1), 0);\n let end = Math.min(source.length, endLine + linesBelow);\n\n if (startLine === -1) {\n start = 0;\n }\n\n if (endLine === -1) {\n end = source.length;\n }\n\n const lineDiff = endLine - startLine;\n const markerLines: MarkerLines = {};\n\n if (lineDiff) {\n for (let i = 0; i <= lineDiff; i++) {\n const lineNumber = i + startLine;\n\n if (!startColumn) {\n markerLines[lineNumber] = true;\n } else if (i === 0) {\n const sourceLength = source[lineNumber - 1].length;\n\n markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];\n } else if (i === lineDiff) {\n markerLines[lineNumber] = [0, endColumn];\n } else {\n const sourceLength = source[lineNumber - i].length;\n\n markerLines[lineNumber] = [0, sourceLength];\n }\n }\n } else {\n if (startColumn === endColumn) {\n if (startColumn) {\n markerLines[startLine] = [startColumn, 0];\n } else {\n markerLines[startLine] = true;\n }\n } else {\n markerLines[startLine] = [startColumn, endColumn - startColumn];\n }\n }\n\n return { start, end, markerLines };\n}\n\nexport function codeFrameColumns(\n rawLines: string,\n loc: NodeLocation,\n opts: Options = {},\n): string {\n const highlighted =\n (opts.highlightCode || opts.forceColor) && shouldHighlight(opts);\n const colors = getColors(opts.forceColor);\n const defs = getDefs(colors);\n const maybeHighlight = (fmt: Formatter, string: string) => {\n return highlighted ? fmt(string) : string;\n };\n const lines = rawLines.split(NEWLINE);\n const { start, end, markerLines } = getMarkerLines(loc, lines, opts);\n const hasColumns = loc.start && typeof loc.start.column === \"number\";\n\n const numberMaxWidth = String(end).length;\n\n const highlightedLines = highlighted ? 
highlight(rawLines, opts) : rawLines;\n\n let frame = highlightedLines\n .split(NEWLINE, end)\n .slice(start, end)\n .map((line, index) => {\n const number = start + 1 + index;\n const paddedNumber = ` ${number}`.slice(-numberMaxWidth);\n const gutter = ` ${paddedNumber} |`;\n const hasMarker = markerLines[number];\n const lastMarkerLine = !markerLines[number + 1];\n if (hasMarker) {\n let markerLine = \"\";\n if (Array.isArray(hasMarker)) {\n const markerSpacing = line\n .slice(0, Math.max(hasMarker[0] - 1, 0))\n .replace(/[^\\t]/g, \" \");\n const numberOfMarkers = hasMarker[1] || 1;\n\n markerLine = [\n \"\\n \",\n maybeHighlight(defs.gutter, gutter.replace(/\\d/g, \" \")),\n \" \",\n markerSpacing,\n maybeHighlight(defs.marker, \"^\").repeat(numberOfMarkers),\n ].join(\"\");\n\n if (lastMarkerLine && opts.message) {\n markerLine += \" \" + maybeHighlight(defs.message, opts.message);\n }\n }\n return [\n maybeHighlight(defs.marker, \">\"),\n maybeHighlight(defs.gutter, gutter),\n line.length > 0 ? ` ${line}` : \"\",\n markerLine,\n ].join(\"\");\n } else {\n return ` ${maybeHighlight(defs.gutter, gutter)}${\n line.length > 0 ? ` ${line}` : \"\"\n }`;\n }\n })\n .join(\"\\n\");\n\n if (opts.message && !hasColumns) {\n frame = `${\" \".repeat(numberMaxWidth + 1)}${opts.message}\\n${frame}`;\n }\n\n if (highlighted) {\n return colors.reset(frame);\n } else {\n return frame;\n }\n}\n\n/**\n * Create a code frame, adding line numbers, code highlighting, and pointing to a given position.\n */\n\nexport default function (\n rawLines: string,\n lineNumber: number,\n colNumber?: number | null,\n opts: Options = {},\n): string {\n if (!deprecationWarningShown) {\n deprecationWarningShown = true;\n\n const message =\n \"Passing lineNumber and colNumber is deprecated to @babel/code-frame. 
Please use `codeFrameColumns`.\";\n\n if (process.emitWarning) {\n // A string is directly supplied to emitWarning, because when supplying an\n // Error object node throws in the tests because of different contexts\n process.emitWarning(message, \"DeprecationWarning\");\n } else {\n const deprecationError = new Error(message);\n deprecationError.name = \"DeprecationWarning\";\n console.warn(new Error(message));\n }\n }\n\n colNumber = Math.max(colNumber, 0);\n\n const location: NodeLocation = {\n start: { column: colNumber, line: lineNumber },\n };\n\n return codeFrameColumns(rawLines, location, opts);\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,UAAA,GAAAC,OAAA;AAEA,IAAAC,WAAA,GAAAC,uBAAA,CAAAF,OAAA;AAAmD,SAAAG,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAF,wBAAAE,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAI,GAAA,CAAAP,CAAA,OAAAQ,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAd,CAAA,oBAAAc,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAhB,CAAA,EAAAc,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAb,CAAA,EAAAc,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAd,CAAA,CAAAc,CAAA,YAAAN,CAAA,CAAAH,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAe,GAAA,CAAAlB,CAAA,EAAAQ,CAAA,GAAAA,CAAA;AAGnD,MAAMW,MAAM,GACV,OAAOC,OAAO,KAAK,QAAQ,KAC1BA,OAAO,CAACC,GAAG,CAACC,WAAW,KAAK,GAAG,IAAIF,OAAO,CAACC,GAAG,CAACC,WAAW,KAAK,OAAO,CAAC,GACpE,IAAAC,wBAAY,EAAC,KAAK,CAAC,GACnBC,mBAAO;AAEb,MAAMC,OAAkE,GACtEA,CAACC,CAAC,EAAEC,CAAC,KAAKC,CAAC,IACTF,CAAC,CAACC,CAAC,CAACC,CAAC,CAAC,CAAC;AAEX,IAAIC,iBAAyB,GAAGC,SAAS;AACzC,SAASC,SAASA,CAACC,UAAmB,EAAE;EACtC,IAAIA,UAAU,EAAE;IAAA,IAAAC,kBAAA;IACd,CAAAA,kBAAA,GAAAJ,iBAAiB,YAAAI,kBAAA,GAAjBJ,iBAAiB,GAAK,IAAAN,wBAAY,EAAC,IAAI,CAAC;IACxC,OAAOM,iBAAiB;EAC1B;EACA,OAAOV,MAAM;AACf;AAEA,IAAIe,uBAAuB,GAAG,KAAK;AAqCnC,SAASC,OAAOA,CAAChB,MAAc,EAAE;EAC/B,OAAO;IACLiB,MAAM,EAAEjB,MAAM,CAACkB,IAAI;IACnBC,MAAM,EAAEb,OAAO,CAACN,MAAM,CAACoB,GAAG,EAAEpB,MAAM,CAACqB,IAAI,CAAC;IACxCC,OAAO,EAAEhB,OAAO,CAACN,MAAM,CAACoB,GAAG,EAAEpB,MAAM,CAACqB,IAAI;EAC1C,CAAC;AACH;AAMA,MAAME,OAAO,GAAG,yBAAyB;AAQzC,SAASC,cAAcA,CACrBC,GAAiB,EACjBC,MAAqB,EACrBC,IAAa,EAKb;EACA,MAAMC,QAAkB,GAAApC,MAAA,CAAAqC,MAAA;IACtBC,MAAM,EAAE,CAAC;IACTC,IAAI,EAAE,CAAC;EAAC,GACLN,GAAG,CAACO,KAAK,CACb;EACD,MAAMC,MAAgB,GAAAzC,MAAA,CAAAqC,MAAA,KACjBD,QAAQ,EACRH,GAAG,CAACS,GAAG,CACX;EACD,MAAM;IAAEC,UAAU,GAAG,CAAC;IAAEC,UAAU,GAAG;EAAE,CAAC,GAAGT,IAAI,IAAI,CAAC,CAAC;EACrD,MAAMU,SAAS,GAAGT,QAAQ,CAACG,IAAI;EAC/B,MAAMO,WAAW,GAAGV,QAAQ,CAACE,MAAM;EACnC,MAAMS,OAAO,GAAGN,MAAM,CAACF,IAAI;EAC3B,MAAMS,SAAS,GAAGP,MAAM,CAACH,MAAM;EAE/B,IAAIE,KAAK,GAAGS,IAAI,CAACC,GAAG,CAACL,SAAS,IAAIF,UAAU,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;EACrD,IAAID,GAAG,GAAGO,IAAI,CAACE,GAAG,CAACjB,MAAM,CAACkB,MAAM,EAAEL,OAAO,GAAGH,UAAU,CAAC;EAEvD,IAAIC,SAAS,KAAK,CAAC,CAAC,EAAE;IACpBL,KAAK,GAAG,CAAC;EACX;EAEA,IAAIO,OAAO,KAAK,CAAC,CAAC,EAAE;IAClBL,GAAG,GAAGR,MAAM,CAACkB,MAAM;EACrB;EAEA,MAAMC,QAAQ,GAAGN,OAAO,GAAGF,SAAS;EACpC,MAAMS,WAAwB,GAAG,CAAC,CAAC;EAEnC,IAAID,QAAQ,EAAE;IACZ,KAAK,IAAI/C,CAAC,GAAG,CAAC,EAAEA,CAAC,IAAI+C,QAAQ,EAAE/C,CAAC,EAAE,EAAE;MAClC,MAAMiD,UAAU,GAAGjD,CAAC,GAAGuC,SAAS;MAEhC,IAAI,CAACC,WAAW,EAAE;QAChBQ,WAAW,CAACC,UAAU,CAAC,GAAG,IAAI;MAChC,CAAC,MAAM,IAAIjD,CAAC,KAAK,CAAC,EAAE;QAClB,M
AAMkD,YAAY,GAAGtB,MAAM,CAACqB,UAAU,GAAG,CAAC,CAAC,CAACH,MAAM;QAElDE,WAAW,CAACC,UAAU,CAAC,GAAG,CAACT,WAAW,EAAEU,YAAY,GAAGV,WAAW,GAAG,CAAC,CAAC;MACzE,CAAC,MAAM,IAAIxC,CAAC,KAAK+C,QAAQ,EAAE;QACzBC,WAAW,CAACC,UAAU,CAAC,GAAG,CAAC,CAAC,EAAEP,SAAS,CAAC;MAC1C,CAAC,MAAM;QACL,MAAMQ,YAAY,GAAGtB,MAAM,CAACqB,UAAU,GAAGjD,CAAC,CAAC,CAAC8C,MAAM;QAElDE,WAAW,CAACC,UAAU,CAAC,GAAG,CAAC,CAAC,EAAEC,YAAY,CAAC;MAC7C;IACF;EACF,CAAC,MAAM;IACL,IAAIV,WAAW,KAAKE,SAAS,EAAE;MAC7B,IAAIF,WAAW,EAAE;QACfQ,WAAW,CAACT,SAAS,CAAC,GAAG,CAACC,WAAW,EAAE,CAAC,CAAC;MAC3C,CAAC,MAAM;QACLQ,WAAW,CAACT,SAAS,CAAC,GAAG,IAAI;MAC/B;IACF,CAAC,MAAM;MACLS,WAAW,CAACT,SAAS,CAAC,GAAG,CAACC,WAAW,EAAEE,SAAS,GAAGF,WAAW,CAAC;IACjE;EACF;EAEA,OAAO;IAAEN,KAAK;IAAEE,GAAG;IAAEY;EAAY,CAAC;AACpC;AAEO,SAASG,gBAAgBA,CAC9BC,QAAgB,EAChBzB,GAAiB,EACjBE,IAAa,GAAG,CAAC,CAAC,EACV;EACR,MAAMwB,WAAW,GACf,CAACxB,IAAI,CAACyB,aAAa,IAAIzB,IAAI,CAACd,UAAU,KAAK,IAAAwC,0BAAe,EAAC1B,IAAI,CAAC;EAClE,MAAM3B,MAAM,GAAGY,SAAS,CAACe,IAAI,CAACd,UAAU,CAAC;EACzC,MAAMyC,IAAI,GAAGtC,OAAO,CAAChB,MAAM,CAAC;EAC5B,MAAMuD,cAAc,GAAGA,CAACC,GAAc,EAAEC,MAAc,KAAK;IACzD,OAAON,WAAW,GAAGK,GAAG,CAACC,MAAM,CAAC,GAAGA,MAAM;EAC3C,CAAC;EACD,MAAMC,KAAK,GAAGR,QAAQ,CAACS,KAAK,CAACpC,OAAO,CAAC;EACrC,MAAM;IAAES,KAAK;IAAEE,GAAG;IAAEY;EAAY,CAAC,GAAGtB,cAAc,CAACC,GAAG,EAAEiC,KAAK,EAAE/B,IAAI,CAAC;EACpE,MAAMiC,UAAU,GAAGnC,GAAG,CAACO,KAAK,IAAI,OAAOP,GAAG,CAACO,KAAK,CAACF,MAAM,KAAK,QAAQ;EAEpE,MAAM+B,cAAc,GAAGC,MAAM,CAAC5B,GAAG,CAAC,CAACU,MAAM;EAEzC,MAAMmB,gBAAgB,GAAGZ,WAAW,GAAG,IAAAa,kBAAS,EAACd,QAAQ,EAAEvB,IAAI,CAAC,GAAGuB,QAAQ;EAE3E,IAAIe,KAAK,GAAGF,gBAAgB,CACzBJ,KAAK,CAACpC,OAAO,EAAEW,GAAG,CAAC,CACnBgC,KAAK,CAAClC,KAAK,EAAEE,GAAG,CAAC,CACjBiC,GAAG,CAAC,CAACpC,IAAI,EAAEqC,KAAK,KAAK;IACpB,MAAMC,MAAM,GAAGrC,KAAK,GAAG,CAAC,GAAGoC,KAAK;IAChC,MAAME,YAAY,GAAI,IAAGD,MAAO,EAAC,CAACH,KAAK,CAAC,CAACL,cAAc,CAAC;IACxD,MAAM5C,MAAM,GAAI,IAAGqD,YAAa,IAAG;IACnC,MAAMC,SAAS,GAAGzB,WAAW,CAACuB,MAAM,CAAC;IACrC,MAAMG,cAAc,GAAG,CAAC1B,WAAW,CAACuB,MAAM,GAAG,CAAC,CAAC;IAC/C,IAAIE,SAAS,EAAE;MACb,IAAIE,UAAU,GAAG,EAAE;MACnB,IAAIC,KAAK,CAACC,OAAO,CAACJ,SAAS,CAAC,EAAE;QAC5B,MAAMK,aAAa,GAAG7C,IAAI,CACvBmC,KAAK,CAAC,CAAC,EAAEzB,IAAI,CAACC,GAAG,CAAC6B,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CACvCM,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC;QACzB,MAAMC,eAAe,GAAGP,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;QAEzCE,UAAU,GAAG,CACX,KAAK,EACLlB,cAAc,CAACD,IAAI,CAACrC,MAAM,EAAEA,MAAM,CAAC4D,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,EACvD,GAAG,EACHD,aAAa,EACbrB,cAAc,CAACD,IAAI,CAACnC,MAAM,EAAE,GAAG,CAAC,CAAC4D,MAAM,CAACD,eAAe,CAAC,CACzD,CAACE,IAAI,CAAC,EAAE,CAAC;QAEV,IAAIR,cAAc,IAAI7C,IAAI,CAACL,OAAO,EAAE;UAClCmD,UAAU,IAAI,GAAG,GAAGlB,cAAc,CAACD,IAAI,CAAChC,OAAO,EAAEK,IAAI,CAACL,OAAO,CAAC;QAChE;MACF;MACA,OAAO,CACLiC,cAAc,CAACD,IAAI,CAACnC,MAAM,EAAE,GAAG,CAAC,EAChCoC,cAAc,CAACD,IAAI,CAACrC,MAAM,EAAEA,MAAM,CAAC,EACnCc,IAAI,CAACa,MAAM,GAAG,CAAC,GAAI,IAAGb,IAAK,EAAC,GAAG,EAAE,EACjC0C,UAAU,CACX,CAACO,IAAI,CAAC,EAAE,CAAC;IACZ,CAAC,MAAM;MACL,OAAQ,IAAGzB,cAAc,CAACD,IAAI,CAACrC,MAAM,EAAEA,MAAM,CAAE,GAC7Cc,IAAI,CAACa,MAAM,GAAG,CAAC,GAAI,IAAGb,IAAK,EAAC,GAAG,EAChC,EAAC;IACJ;EACF,CAAC,CAAC,CACDiD,IAAI,CAAC,IAAI,CAAC;EAEb,IAAIrD,IAAI,CAACL,OAAO,IAAI,CAACsC,UAAU,EAAE;IAC/BK,KAAK,GAAI,GAAE,GAAG,CAACc,MAAM,CAAClB,cAAc,GAAG,CAAC,CAAE,GAAElC,IAAI,CAACL,OAAQ,KAAI2C,KAAM,EAAC;EACtE;EAEA,IAAId,WAAW,EAAE;IACf,OAAOnD,MAAM,CAACiF,KAAK,CAAChB,KAAK,CAAC;EAC5B,CAAC,MAAM;IACL,OAAOA,KAAK;EACd;AACF;AAMe,SAAAiB,SACbhC,QAAgB,EAChBH,UAAkB,EAClBoC,SAAyB,EACzBxD,IAAa,GAAG,CAAC,CAAC,EACV;EACR,IAAI,CAACZ,uBAAuB,EAAE;IAC5BA,uBAAuB,GAAG,IAAI;IAE9B,MAAMO,OAAO,GACX,qGAAqG;IAEvG,IAAIrB,OAAO,CAACmF,WAAW,EAAE;MAGvBnF,OA
AO,CAACmF,WAAW,CAAC9D,OAAO,EAAE,oBAAoB,CAAC;IACpD,CAAC,MAAM;MACL,MAAM+D,gBAAgB,GAAG,IAAIC,KAAK,CAAChE,OAAO,CAAC;MAC3C+D,gBAAgB,CAACE,IAAI,GAAG,oBAAoB;MAC5CC,OAAO,CAACC,IAAI,CAAC,IAAIH,KAAK,CAAChE,OAAO,CAAC,CAAC;IAClC;EACF;EAEA6D,SAAS,GAAG1C,IAAI,CAACC,GAAG,CAACyC,SAAS,EAAE,CAAC,CAAC;EAElC,MAAMO,QAAsB,GAAG;IAC7B1D,KAAK,EAAE;MAAEF,MAAM,EAAEqD,SAAS;MAAEpD,IAAI,EAAEgB;IAAW;EAC/C,CAAC;EAED,OAAOE,gBAAgB,CAACC,QAAQ,EAAEwC,QAAQ,EAAE/D,IAAI,CAAC;AACnD","ignoreList":[]} \ No newline at end of file diff --git a/node_modules/@babel/code-frame/package.json b/node_modules/@babel/code-frame/package.json new file mode 100644 index 0000000..0be9f68 --- /dev/null +++ b/node_modules/@babel/code-frame/package.json @@ -0,0 +1,30 @@ +{ + "name": "@babel/code-frame", + "version": "7.24.6", + "description": "Generate errors that contain a code frame that point to source locations.", + "author": "The Babel Team (https://babel.dev/team)", + "homepage": "https://babel.dev/docs/en/next/babel-code-frame", + "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen", + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-code-frame" + }, + "main": "./lib/index.js", + "dependencies": { + "@babel/highlight": "^7.24.6", + "picocolors": "^1.0.0" + }, + "devDependencies": { + "import-meta-resolve": "^4.1.0", + "strip-ansi": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "type": "commonjs" +} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/LICENSE b/node_modules/@babel/helper-validator-identifier/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@babel/helper-validator-identifier/README.md b/node_modules/@babel/helper-validator-identifier/README.md new file mode 100644 index 0000000..05c19e6 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/README.md @@ -0,0 +1,19 @@ +# @babel/helper-validator-identifier + +> Validate identifier/keywords name + +See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/babel-helper-validator-identifier) for more information. 
+ +## Install + +Using npm: + +```sh +npm install --save @babel/helper-validator-identifier +``` + +or using yarn: + +```sh +yarn add @babel/helper-validator-identifier +``` diff --git a/node_modules/@babel/helper-validator-identifier/lib/identifier.js b/node_modules/@babel/helper-validator-identifier/lib/identifier.js new file mode 100644 index 0000000..8ef8303 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/identifier.js @@ -0,0 +1,70 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isIdentifierChar = isIdentifierChar; +exports.isIdentifierName = isIdentifierName; +exports.isIdentifierStart = isIdentifierStart; +let nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u0870-\u0887\u0889-\u088e\u08a0-\u08c9\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u09fc\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c5d\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e86-\u0e8a\u0e8c-\u0ea3\u0ea5\u0ea7-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u1711\u171f-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4c\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5\u1cf6\u1cfa\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u211
5\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31bf\u31f0-\u31ff\u3400-\u4dbf\u4e00-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ca\ua7d0\ua7d1\ua7d3\ua7d5-\ua7d9\ua7f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab69\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc"; +let nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u0898-\u089f\u08ca-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u09fe\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0afa-\u0aff\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b55-\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c04\u0c3c\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0cf3\u0d00-\u0d03\u0d3b\u0d3c\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d81-\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0ebc\u0ec8-\u0ece\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1715\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u180f-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1abf-\u1ace\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf4\u1cf7-\u1cf9\u1dc0-\u1dff\u200c\u200d\u203f\u2040\u2054\u20d0-\u20dc\u2
0e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\u30fb\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua82c\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua8ff-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f\uff65"; +const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]"); +const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]"); +nonASCIIidentifierStartChars = nonASCIIidentifierChars = null; +const astralIdentifierStartCodes = [0, 11, 2, 25, 2, 18, 2, 1, 2, 14, 3, 13, 35, 122, 70, 52, 268, 28, 4, 48, 48, 31, 14, 29, 6, 37, 11, 29, 3, 35, 5, 7, 2, 4, 43, 157, 19, 35, 5, 35, 5, 39, 9, 51, 13, 10, 2, 14, 2, 6, 2, 1, 2, 10, 2, 14, 2, 6, 2, 1, 68, 310, 10, 21, 11, 7, 25, 5, 2, 41, 2, 8, 70, 5, 3, 0, 2, 43, 2, 1, 4, 0, 3, 22, 11, 22, 10, 30, 66, 18, 2, 1, 11, 21, 11, 25, 71, 55, 7, 1, 65, 0, 16, 3, 2, 2, 2, 28, 43, 28, 4, 28, 36, 7, 2, 27, 28, 53, 11, 21, 11, 18, 14, 17, 111, 72, 56, 50, 14, 50, 14, 35, 349, 41, 7, 1, 79, 28, 11, 0, 9, 21, 43, 17, 47, 20, 28, 22, 13, 52, 58, 1, 3, 0, 14, 44, 33, 24, 27, 35, 30, 0, 3, 0, 9, 34, 4, 0, 13, 47, 15, 3, 22, 0, 2, 0, 36, 17, 2, 24, 20, 1, 64, 6, 2, 0, 2, 3, 2, 14, 2, 9, 8, 46, 39, 7, 3, 1, 3, 21, 2, 6, 2, 1, 2, 4, 4, 0, 19, 0, 13, 4, 159, 52, 19, 3, 21, 2, 31, 47, 21, 1, 2, 0, 185, 46, 42, 3, 37, 47, 21, 0, 60, 42, 14, 0, 72, 26, 38, 6, 186, 43, 117, 63, 32, 7, 3, 0, 3, 7, 2, 1, 2, 23, 16, 0, 2, 0, 95, 7, 3, 38, 17, 0, 2, 0, 29, 0, 11, 39, 8, 0, 22, 0, 12, 45, 20, 0, 19, 72, 264, 8, 2, 36, 18, 0, 50, 29, 113, 6, 2, 1, 2, 37, 22, 0, 26, 5, 2, 1, 2, 31, 15, 0, 328, 18, 16, 0, 2, 12, 2, 33, 125, 0, 80, 921, 103, 110, 18, 195, 2637, 96, 16, 1071, 18, 5, 4026, 582, 8634, 568, 8, 30, 18, 78, 18, 29, 19, 47, 17, 3, 32, 20, 6, 18, 689, 63, 129, 74, 6, 0, 67, 12, 65, 1, 2, 0, 29, 6135, 9, 1237, 43, 8, 8936, 3, 2, 6, 2, 1, 2, 290, 16, 0, 30, 2, 3, 0, 15, 3, 9, 395, 2309, 106, 6, 12, 4, 8, 8, 9, 5991, 84, 2, 70, 2, 1, 3, 0, 3, 1, 3, 3, 2, 11, 2, 0, 2, 6, 2, 64, 2, 3, 3, 7, 2, 6, 2, 27, 2, 3, 2, 4, 2, 0, 4, 6, 2, 339, 3, 24, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 7, 1845, 30, 7, 5, 262, 61, 147, 44, 11, 6, 17, 0, 322, 29, 19, 43, 485, 27, 757, 6, 2, 3, 2, 1, 2, 14, 2, 196, 60, 67, 8, 0, 1205, 3, 2, 26, 2, 1, 2, 0, 3, 0, 2, 9, 2, 3, 2, 0, 2, 0, 7, 0, 5, 0, 2, 0, 2, 0, 2, 2, 2, 1, 2, 0, 3, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 3, 3, 2, 6, 2, 3, 2, 3, 2, 0, 2, 9, 2, 16, 6, 2, 2, 4, 2, 16, 4421, 42719, 33, 4153, 7, 221, 3, 5761, 15, 7472, 16, 621, 2467, 541, 1507, 4938, 6, 4191]; +const astralIdentifierCodes = [509, 0, 227, 0, 150, 4, 294, 9, 1368, 2, 2, 1, 6, 3, 41, 2, 5, 0, 166, 1, 574, 3, 9, 9, 370, 1, 81, 2, 71, 10, 50, 3, 123, 2, 54, 14, 32, 10, 3, 1, 11, 3, 46, 10, 8, 0, 46, 9, 7, 2, 37, 13, 2, 9, 6, 1, 45, 0, 13, 2, 49, 13, 9, 3, 2, 11, 83, 11, 7, 0, 3, 0, 158, 11, 6, 9, 7, 3, 56, 1, 2, 6, 3, 1, 3, 2, 10, 0, 11, 1, 3, 6, 4, 4, 193, 17, 10, 9, 5, 0, 82, 19, 13, 9, 214, 6, 3, 8, 28, 1, 83, 16, 16, 9, 82, 12, 9, 9, 84, 14, 5, 9, 243, 14, 166, 9, 71, 5, 2, 1, 3, 3, 2, 0, 2, 1, 13, 9, 120, 6, 3, 6, 4, 0, 29, 9, 41, 6, 2, 3, 9, 0, 10, 10, 47, 15, 406, 7, 2, 7, 17, 9, 57, 21, 2, 13, 123, 5, 4, 0, 2, 1, 2, 6, 2, 
0, 9, 9, 49, 4, 2, 1, 2, 4, 9, 9, 330, 3, 10, 1, 2, 0, 49, 6, 4, 4, 14, 9, 5351, 0, 7, 14, 13835, 9, 87, 9, 39, 4, 60, 6, 26, 9, 1014, 0, 2, 54, 8, 3, 82, 0, 12, 1, 19628, 1, 4706, 45, 3, 22, 543, 4, 4, 5, 9, 7, 3, 6, 31, 3, 149, 2, 1418, 49, 513, 54, 5, 49, 9, 0, 15, 0, 23, 4, 2, 14, 1361, 6, 2, 16, 3, 6, 2, 1, 2, 4, 101, 0, 161, 6, 10, 9, 357, 0, 62, 13, 499, 13, 983, 6, 110, 6, 6, 9, 4759, 9, 787719, 239]; +function isInAstralSet(code, set) { + let pos = 0x10000; + for (let i = 0, length = set.length; i < length; i += 2) { + pos += set[i]; + if (pos > code) return false; + pos += set[i + 1]; + if (pos >= code) return true; + } + return false; +} +function isIdentifierStart(code) { + if (code < 65) return code === 36; + if (code <= 90) return true; + if (code < 97) return code === 95; + if (code <= 122) return true; + if (code <= 0xffff) { + return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code)); + } + return isInAstralSet(code, astralIdentifierStartCodes); +} +function isIdentifierChar(code) { + if (code < 48) return code === 36; + if (code < 58) return true; + if (code < 65) return false; + if (code <= 90) return true; + if (code < 97) return code === 95; + if (code <= 122) return true; + if (code <= 0xffff) { + return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code)); + } + return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes); +} +function isIdentifierName(name) { + let isFirst = true; + for (let i = 0; i < name.length; i++) { + let cp = name.charCodeAt(i); + if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) { + const trail = name.charCodeAt(++i); + if ((trail & 0xfc00) === 0xdc00) { + cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff); + } + } + if (isFirst) { + isFirst = false; + if (!isIdentifierStart(cp)) { + return false; + } + } else if (!isIdentifierChar(cp)) { + return false; + } + } + return !isFirst; +} + +//# sourceMappingURL=identifier.js.map diff --git a/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map b/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map new file mode 100644 index 0000000..dc8aece --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map @@ -0,0 +1 @@ +{"version":3,"names":["nonASCIIidentifierStartChars","nonASCIIidentifierChars","nonASCIIidentifierStart","RegExp","nonASCIIidentifier","astralIdentifierStartCodes","astralIdentifierCodes","isInAstralSet","code","set","pos","i","length","isIdentifierStart","test","String","fromCharCode","isIdentifierChar","isIdentifierName","name","isFirst","cp","charCodeAt","trail"],"sources":["../src/identifier.ts"],"sourcesContent":["import * as charCodes from \"charcodes\";\n\n// ## Character categories\n\n// Big ugly regular expressions that match characters in the\n// whitespace, identifier, and identifier-start categories. 
These\n// are only applied when a character is found to actually have a\n// code point between 0x80 and 0xffff.\n// Generated by `scripts/generate-identifier-regex.js`.\n\n/* prettier-ignore */\nlet nonASCIIidentifierStartChars = \"\\xaa\\xb5\\xba\\xc0-\\xd6\\xd8-\\xf6\\xf8-\\u02c1\\u02c6-\\u02d1\\u02e0-\\u02e4\\u02ec\\u02ee\\u0370-\\u0374\\u0376\\u0377\\u037a-\\u037d\\u037f\\u0386\\u0388-\\u038a\\u038c\\u038e-\\u03a1\\u03a3-\\u03f5\\u03f7-\\u0481\\u048a-\\u052f\\u0531-\\u0556\\u0559\\u0560-\\u0588\\u05d0-\\u05ea\\u05ef-\\u05f2\\u0620-\\u064a\\u066e\\u066f\\u0671-\\u06d3\\u06d5\\u06e5\\u06e6\\u06ee\\u06ef\\u06fa-\\u06fc\\u06ff\\u0710\\u0712-\\u072f\\u074d-\\u07a5\\u07b1\\u07ca-\\u07ea\\u07f4\\u07f5\\u07fa\\u0800-\\u0815\\u081a\\u0824\\u0828\\u0840-\\u0858\\u0860-\\u086a\\u0870-\\u0887\\u0889-\\u088e\\u08a0-\\u08c9\\u0904-\\u0939\\u093d\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098c\\u098f\\u0990\\u0993-\\u09a8\\u09aa-\\u09b0\\u09b2\\u09b6-\\u09b9\\u09bd\\u09ce\\u09dc\\u09dd\\u09df-\\u09e1\\u09f0\\u09f1\\u09fc\\u0a05-\\u0a0a\\u0a0f\\u0a10\\u0a13-\\u0a28\\u0a2a-\\u0a30\\u0a32\\u0a33\\u0a35\\u0a36\\u0a38\\u0a39\\u0a59-\\u0a5c\\u0a5e\\u0a72-\\u0a74\\u0a85-\\u0a8d\\u0a8f-\\u0a91\\u0a93-\\u0aa8\\u0aaa-\\u0ab0\\u0ab2\\u0ab3\\u0ab5-\\u0ab9\\u0abd\\u0ad0\\u0ae0\\u0ae1\\u0af9\\u0b05-\\u0b0c\\u0b0f\\u0b10\\u0b13-\\u0b28\\u0b2a-\\u0b30\\u0b32\\u0b33\\u0b35-\\u0b39\\u0b3d\\u0b5c\\u0b5d\\u0b5f-\\u0b61\\u0b71\\u0b83\\u0b85-\\u0b8a\\u0b8e-\\u0b90\\u0b92-\\u0b95\\u0b99\\u0b9a\\u0b9c\\u0b9e\\u0b9f\\u0ba3\\u0ba4\\u0ba8-\\u0baa\\u0bae-\\u0bb9\\u0bd0\\u0c05-\\u0c0c\\u0c0e-\\u0c10\\u0c12-\\u0c28\\u0c2a-\\u0c39\\u0c3d\\u0c58-\\u0c5a\\u0c5d\\u0c60\\u0c61\\u0c80\\u0c85-\\u0c8c\\u0c8e-\\u0c90\\u0c92-\\u0ca8\\u0caa-\\u0cb3\\u0cb5-\\u0cb9\\u0cbd\\u0cdd\\u0cde\\u0ce0\\u0ce1\\u0cf1\\u0cf2\\u0d04-\\u0d0c\\u0d0e-\\u0d10\\u0d12-\\u0d3a\\u0d3d\\u0d4e\\u0d54-\\u0d56\\u0d5f-\\u0d61\\u0d7a-\\u0d7f\\u0d85-\\u0d96\\u0d9a-\\u0db1\\u0db3-\\u0dbb\\u0dbd\\u0dc0-\\u0dc6\\u0e01-\\u0e30\\u0e32\\u0e33\\u0e40-\\u0e46\\u0e81\\u0e82\\u0e84\\u0e86-\\u0e8a\\u0e8c-\\u0ea3\\u0ea5\\u0ea7-\\u0eb0\\u0eb2\\u0eb3\\u0ebd\\u0ec0-\\u0ec4\\u0ec6\\u0edc-\\u0edf\\u0f00\\u0f40-\\u0f47\\u0f49-\\u0f6c\\u0f88-\\u0f8c\\u1000-\\u102a\\u103f\\u1050-\\u1055\\u105a-\\u105d\\u1061\\u1065\\u1066\\u106e-\\u1070\\u1075-\\u1081\\u108e\\u10a0-\\u10c5\\u10c7\\u10cd\\u10d0-\\u10fa\\u10fc-\\u1248\\u124a-\\u124d\\u1250-\\u1256\\u1258\\u125a-\\u125d\\u1260-\\u1288\\u128a-\\u128d\\u1290-\\u12b0\\u12b2-\\u12b5\\u12b8-\\u12be\\u12c0\\u12c2-\\u12c5\\u12c8-\\u12d6\\u12d8-\\u1310\\u1312-\\u1315\\u1318-\\u135a\\u1380-\\u138f\\u13a0-\\u13f5\\u13f8-\\u13fd\\u1401-\\u166c\\u166f-\\u167f\\u1681-\\u169a\\u16a0-\\u16ea\\u16ee-\\u16f8\\u1700-\\u1711\\u171f-\\u1731\\u1740-\\u1751\\u1760-\\u176c\\u176e-\\u1770\\u1780-\\u17b3\\u17d7\\u17dc\\u1820-\\u1878\\u1880-\\u18a8\\u18aa\\u18b0-\\u18f5\\u1900-\\u191e\\u1950-\\u196d\\u1970-\\u1974\\u1980-\\u19ab\\u19b0-\\u19c9\\u1a00-\\u1a16\\u1a20-\\u1a54\\u1aa7\\u1b05-\\u1b33\\u1b45-\\u1b4c\\u1b83-\\u1ba0\\u1bae\\u1baf\\u1bba-\\u1be5\\u1c00-\\u1c23\\u1c4d-\\u1c4f\\u1c5a-\\u1c7d\\u1c80-\\u1c88\\u1c90-\\u1cba\\u1cbd-\\u1cbf\\u1ce9-\\u1cec\\u1cee-\\u1cf3\\u1cf5\\u1cf6\\u1cfa\\u1d00-\\u1dbf\\u1e00-\\u1f15\\u1f18-\\u1f1d\\u1f20-\\u1f45\\u1f48-\\u1f4d\\u1f50-\\u1f57\\u1f59\\u1f5b\\u1f5d\\u1f5f-\\u1f7d\\u1f80-\\u1fb4\\u1fb6-\\u1fbc\\u1fbe\\u1fc2-\\u1fc4\\u1fc6-\\u1fcc\\u1fd0-\\u1fd3\\u1fd6-\\u1fdb\\u1fe0-\\u1fec\\u1ff2-\\u1ff4\\u1ff6-\\u1ffc\\u2071\\u207f\\u2090-\\u209c\\u2102\\u2107\\u210a-\\u2113\\u2115\\u2118-\\u211d\\u2124\\u2126\\u2128\\u212a-\\u21
39\\u213c-\\u213f\\u2145-\\u2149\\u214e\\u2160-\\u2188\\u2c00-\\u2ce4\\u2ceb-\\u2cee\\u2cf2\\u2cf3\\u2d00-\\u2d25\\u2d27\\u2d2d\\u2d30-\\u2d67\\u2d6f\\u2d80-\\u2d96\\u2da0-\\u2da6\\u2da8-\\u2dae\\u2db0-\\u2db6\\u2db8-\\u2dbe\\u2dc0-\\u2dc6\\u2dc8-\\u2dce\\u2dd0-\\u2dd6\\u2dd8-\\u2dde\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303c\\u3041-\\u3096\\u309b-\\u309f\\u30a1-\\u30fa\\u30fc-\\u30ff\\u3105-\\u312f\\u3131-\\u318e\\u31a0-\\u31bf\\u31f0-\\u31ff\\u3400-\\u4dbf\\u4e00-\\ua48c\\ua4d0-\\ua4fd\\ua500-\\ua60c\\ua610-\\ua61f\\ua62a\\ua62b\\ua640-\\ua66e\\ua67f-\\ua69d\\ua6a0-\\ua6ef\\ua717-\\ua71f\\ua722-\\ua788\\ua78b-\\ua7ca\\ua7d0\\ua7d1\\ua7d3\\ua7d5-\\ua7d9\\ua7f2-\\ua801\\ua803-\\ua805\\ua807-\\ua80a\\ua80c-\\ua822\\ua840-\\ua873\\ua882-\\ua8b3\\ua8f2-\\ua8f7\\ua8fb\\ua8fd\\ua8fe\\ua90a-\\ua925\\ua930-\\ua946\\ua960-\\ua97c\\ua984-\\ua9b2\\ua9cf\\ua9e0-\\ua9e4\\ua9e6-\\ua9ef\\ua9fa-\\ua9fe\\uaa00-\\uaa28\\uaa40-\\uaa42\\uaa44-\\uaa4b\\uaa60-\\uaa76\\uaa7a\\uaa7e-\\uaaaf\\uaab1\\uaab5\\uaab6\\uaab9-\\uaabd\\uaac0\\uaac2\\uaadb-\\uaadd\\uaae0-\\uaaea\\uaaf2-\\uaaf4\\uab01-\\uab06\\uab09-\\uab0e\\uab11-\\uab16\\uab20-\\uab26\\uab28-\\uab2e\\uab30-\\uab5a\\uab5c-\\uab69\\uab70-\\uabe2\\uac00-\\ud7a3\\ud7b0-\\ud7c6\\ud7cb-\\ud7fb\\uf900-\\ufa6d\\ufa70-\\ufad9\\ufb00-\\ufb06\\ufb13-\\ufb17\\ufb1d\\ufb1f-\\ufb28\\ufb2a-\\ufb36\\ufb38-\\ufb3c\\ufb3e\\ufb40\\ufb41\\ufb43\\ufb44\\ufb46-\\ufbb1\\ufbd3-\\ufd3d\\ufd50-\\ufd8f\\ufd92-\\ufdc7\\ufdf0-\\ufdfb\\ufe70-\\ufe74\\ufe76-\\ufefc\\uff21-\\uff3a\\uff41-\\uff5a\\uff66-\\uffbe\\uffc2-\\uffc7\\uffca-\\uffcf\\uffd2-\\uffd7\\uffda-\\uffdc\";\n/* prettier-ignore */\nlet nonASCIIidentifierChars = \"\\u200c\\u200d\\xb7\\u0300-\\u036f\\u0387\\u0483-\\u0487\\u0591-\\u05bd\\u05bf\\u05c1\\u05c2\\u05c4\\u05c5\\u05c7\\u0610-\\u061a\\u064b-\\u0669\\u0670\\u06d6-\\u06dc\\u06df-\\u06e4\\u06e7\\u06e8\\u06ea-\\u06ed\\u06f0-\\u06f9\\u0711\\u0730-\\u074a\\u07a6-\\u07b0\\u07c0-\\u07c9\\u07eb-\\u07f3\\u07fd\\u0816-\\u0819\\u081b-\\u0823\\u0825-\\u0827\\u0829-\\u082d\\u0859-\\u085b\\u0898-\\u089f\\u08ca-\\u08e1\\u08e3-\\u0903\\u093a-\\u093c\\u093e-\\u094f\\u0951-\\u0957\\u0962\\u0963\\u0966-\\u096f\\u0981-\\u0983\\u09bc\\u09be-\\u09c4\\u09c7\\u09c8\\u09cb-\\u09cd\\u09d7\\u09e2\\u09e3\\u09e6-\\u09ef\\u09fe\\u0a01-\\u0a03\\u0a3c\\u0a3e-\\u0a42\\u0a47\\u0a48\\u0a4b-\\u0a4d\\u0a51\\u0a66-\\u0a71\\u0a75\\u0a81-\\u0a83\\u0abc\\u0abe-\\u0ac5\\u0ac7-\\u0ac9\\u0acb-\\u0acd\\u0ae2\\u0ae3\\u0ae6-\\u0aef\\u0afa-\\u0aff\\u0b01-\\u0b03\\u0b3c\\u0b3e-\\u0b44\\u0b47\\u0b48\\u0b4b-\\u0b4d\\u0b55-\\u0b57\\u0b62\\u0b63\\u0b66-\\u0b6f\\u0b82\\u0bbe-\\u0bc2\\u0bc6-\\u0bc8\\u0bca-\\u0bcd\\u0bd7\\u0be6-\\u0bef\\u0c00-\\u0c04\\u0c3c\\u0c3e-\\u0c44\\u0c46-\\u0c48\\u0c4a-\\u0c4d\\u0c55\\u0c56\\u0c62\\u0c63\\u0c66-\\u0c6f\\u0c81-\\u0c83\\u0cbc\\u0cbe-\\u0cc4\\u0cc6-\\u0cc8\\u0cca-\\u0ccd\\u0cd5\\u0cd6\\u0ce2\\u0ce3\\u0ce6-\\u0cef\\u0cf3\\u0d00-\\u0d03\\u0d3b\\u0d3c\\u0d3e-\\u0d44\\u0d46-\\u0d48\\u0d4a-\\u0d4d\\u0d57\\u0d62\\u0d63\\u0d66-\\u0d6f\\u0d81-\\u0d83\\u0dca\\u0dcf-\\u0dd4\\u0dd6\\u0dd8-\\u0ddf\\u0de6-\\u0def\\u0df2\\u0df3\\u0e31\\u0e34-\\u0e3a\\u0e47-\\u0e4e\\u0e50-\\u0e59\\u0eb1\\u0eb4-\\u0ebc\\u0ec8-\\u0ece\\u0ed0-\\u0ed9\\u0f18\\u0f19\\u0f20-\\u0f29\\u0f35\\u0f37\\u0f39\\u0f3e\\u0f3f\\u0f71-\\u0f84\\u0f86\\u0f87\\u0f8d-\\u0f97\\u0f99-\\u0fbc\\u0fc6\\u102b-\\u103e\\u1040-\\u1049\\u1056-\\u1059\\u105e-\\u1060\\u1062-\\u1064\\u1067-\\u106d\\u1071-\\u1074\\u1082-\\u108d\\u108f-\\u109d\\u135d-\\u135f\\u1369-\\u1371\\u1712-\\u1715\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17b
4-\\u17d3\\u17dd\\u17e0-\\u17e9\\u180b-\\u180d\\u180f-\\u1819\\u18a9\\u1920-\\u192b\\u1930-\\u193b\\u1946-\\u194f\\u19d0-\\u19da\\u1a17-\\u1a1b\\u1a55-\\u1a5e\\u1a60-\\u1a7c\\u1a7f-\\u1a89\\u1a90-\\u1a99\\u1ab0-\\u1abd\\u1abf-\\u1ace\\u1b00-\\u1b04\\u1b34-\\u1b44\\u1b50-\\u1b59\\u1b6b-\\u1b73\\u1b80-\\u1b82\\u1ba1-\\u1bad\\u1bb0-\\u1bb9\\u1be6-\\u1bf3\\u1c24-\\u1c37\\u1c40-\\u1c49\\u1c50-\\u1c59\\u1cd0-\\u1cd2\\u1cd4-\\u1ce8\\u1ced\\u1cf4\\u1cf7-\\u1cf9\\u1dc0-\\u1dff\\u200c\\u200d\\u203f\\u2040\\u2054\\u20d0-\\u20dc\\u20e1\\u20e5-\\u20f0\\u2cef-\\u2cf1\\u2d7f\\u2de0-\\u2dff\\u302a-\\u302f\\u3099\\u309a\\u30fb\\ua620-\\ua629\\ua66f\\ua674-\\ua67d\\ua69e\\ua69f\\ua6f0\\ua6f1\\ua802\\ua806\\ua80b\\ua823-\\ua827\\ua82c\\ua880\\ua881\\ua8b4-\\ua8c5\\ua8d0-\\ua8d9\\ua8e0-\\ua8f1\\ua8ff-\\ua909\\ua926-\\ua92d\\ua947-\\ua953\\ua980-\\ua983\\ua9b3-\\ua9c0\\ua9d0-\\ua9d9\\ua9e5\\ua9f0-\\ua9f9\\uaa29-\\uaa36\\uaa43\\uaa4c\\uaa4d\\uaa50-\\uaa59\\uaa7b-\\uaa7d\\uaab0\\uaab2-\\uaab4\\uaab7\\uaab8\\uaabe\\uaabf\\uaac1\\uaaeb-\\uaaef\\uaaf5\\uaaf6\\uabe3-\\uabea\\uabec\\uabed\\uabf0-\\uabf9\\ufb1e\\ufe00-\\ufe0f\\ufe20-\\ufe2f\\ufe33\\ufe34\\ufe4d-\\ufe4f\\uff10-\\uff19\\uff3f\\uff65\";\n\nconst nonASCIIidentifierStart = new RegExp(\n \"[\" + nonASCIIidentifierStartChars + \"]\",\n);\nconst nonASCIIidentifier = new RegExp(\n \"[\" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + \"]\",\n);\n\nnonASCIIidentifierStartChars = nonASCIIidentifierChars = null;\n\n// These are a run-length and offset-encoded representation of the\n// >0xffff code points that are a valid part of identifiers. The\n// offset starts at 0x10000, and each pair of numbers represents an\n// offset to the next range, and then a size of the range. They were\n// generated by `scripts/generate-identifier-regex.js`.\n/* prettier-ignore */\nconst astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,14,29,6,37,11,29,3,35,5,7,2,4,43,157,19,35,5,35,5,39,9,51,13,10,2,14,2,6,2,1,2,10,2,14,2,6,2,1,68,310,10,21,11,7,25,5,2,41,2,8,70,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,66,18,2,1,11,21,11,25,71,55,7,1,65,0,16,3,2,2,2,28,43,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,56,50,14,50,14,35,349,41,7,1,79,28,11,0,9,21,43,17,47,20,28,22,13,52,58,1,3,0,14,44,33,24,27,35,30,0,3,0,9,34,4,0,13,47,15,3,22,0,2,0,36,17,2,24,20,1,64,6,2,0,2,3,2,14,2,9,8,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,19,0,13,4,159,52,19,3,21,2,31,47,21,1,2,0,185,46,42,3,37,47,21,0,60,42,14,0,72,26,38,6,186,43,117,63,32,7,3,0,3,7,2,1,2,23,16,0,2,0,95,7,3,38,17,0,2,0,29,0,11,39,8,0,22,0,12,45,20,0,19,72,264,8,2,36,18,0,50,29,113,6,2,1,2,37,22,0,26,5,2,1,2,31,15,0,328,18,16,0,2,12,2,33,125,0,80,921,103,110,18,195,2637,96,16,1071,18,5,4026,582,8634,568,8,30,18,78,18,29,19,47,17,3,32,20,6,18,689,63,129,74,6,0,67,12,65,1,2,0,29,6135,9,1237,43,8,8936,3,2,6,2,1,2,290,16,0,30,2,3,0,15,3,9,395,2309,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,1845,30,7,5,262,61,147,44,11,6,17,0,322,29,19,43,485,27,757,6,2,3,2,1,2,14,2,196,60,67,8,0,1205,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42719,33,4153,7,221,3,5761,15,7472,16,621,2467,541,1507,4938,6,4191];\n/* prettier-ignore */\nconst astralIdentifierCodes = 
[509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,574,3,9,9,370,1,81,2,71,10,50,3,123,2,54,14,32,10,3,1,11,3,46,10,8,0,46,9,7,2,37,13,2,9,6,1,45,0,13,2,49,13,9,3,2,11,83,11,7,0,3,0,158,11,6,9,7,3,56,1,2,6,3,1,3,2,10,0,11,1,3,6,4,4,193,17,10,9,5,0,82,19,13,9,214,6,3,8,28,1,83,16,16,9,82,12,9,9,84,14,5,9,243,14,166,9,71,5,2,1,3,3,2,0,2,1,13,9,120,6,3,6,4,0,29,9,41,6,2,3,9,0,10,10,47,15,406,7,2,7,17,9,57,21,2,13,123,5,4,0,2,1,2,6,2,0,9,9,49,4,2,1,2,4,9,9,330,3,10,1,2,0,49,6,4,4,14,9,5351,0,7,14,13835,9,87,9,39,4,60,6,26,9,1014,0,2,54,8,3,82,0,12,1,19628,1,4706,45,3,22,543,4,4,5,9,7,3,6,31,3,149,2,1418,49,513,54,5,49,9,0,15,0,23,4,2,14,1361,6,2,16,3,6,2,1,2,4,101,0,161,6,10,9,357,0,62,13,499,13,983,6,110,6,6,9,4759,9,787719,239];\n\n// This has a complexity linear to the value of the code. The\n// assumption is that looking up astral identifier characters is\n// rare.\nfunction isInAstralSet(code: number, set: readonly number[]): boolean {\n let pos = 0x10000;\n for (let i = 0, length = set.length; i < length; i += 2) {\n pos += set[i];\n if (pos > code) return false;\n\n pos += set[i + 1];\n if (pos >= code) return true;\n }\n return false;\n}\n\n// Test whether a given character code starts an identifier.\n\nexport function isIdentifierStart(code: number): boolean {\n if (code < charCodes.uppercaseA) return code === charCodes.dollarSign;\n if (code <= charCodes.uppercaseZ) return true;\n if (code < charCodes.lowercaseA) return code === charCodes.underscore;\n if (code <= charCodes.lowercaseZ) return true;\n if (code <= 0xffff) {\n return (\n code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code))\n );\n }\n return isInAstralSet(code, astralIdentifierStartCodes);\n}\n\n// Test whether a given character is part of an identifier.\n\nexport function isIdentifierChar(code: number): boolean {\n if (code < charCodes.digit0) return code === charCodes.dollarSign;\n if (code < charCodes.colon) return true;\n if (code < charCodes.uppercaseA) return false;\n if (code <= charCodes.uppercaseZ) return true;\n if (code < charCodes.lowercaseA) return code === charCodes.underscore;\n if (code <= charCodes.lowercaseZ) return true;\n if (code <= 0xffff) {\n return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));\n }\n return (\n isInAstralSet(code, astralIdentifierStartCodes) ||\n isInAstralSet(code, astralIdentifierCodes)\n );\n}\n\n// Test whether a given string is a valid identifier name\n\nexport function isIdentifierName(name: string): boolean {\n let isFirst = true;\n for (let i = 0; i < name.length; i++) {\n // The implementation is based on\n // https://source.chromium.org/chromium/chromium/src/+/master:v8/src/builtins/builtins-string-gen.cc;l=1455;drc=221e331b49dfefadbc6fa40b0c68e6f97606d0b3;bpv=0;bpt=1\n // We reimplement `codePointAt` because `codePointAt` is a V8 builtin which is not inlined by TurboFan (as of M91)\n // since `name` is mostly ASCII, an inlined `charCodeAt` wins here\n let cp = name.charCodeAt(i);\n if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) {\n const trail = name.charCodeAt(++i);\n if ((trail & 0xfc00) === 0xdc00) {\n cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff);\n }\n }\n if (isFirst) {\n isFirst = false;\n if (!isIdentifierStart(cp)) {\n return false;\n }\n } else if (!isIdentifierChar(cp)) {\n return false;\n }\n }\n return 
!isFirst;\n}\n"],"mappings":";;;;;;;;AAWA,IAAIA,4BAA4B,GAAG,8qIAA8qI;AAEjtI,IAAIC,uBAAuB,GAAG,2lFAA2lF;AAEznF,MAAMC,uBAAuB,GAAG,IAAIC,MAAM,CACxC,GAAG,GAAGH,4BAA4B,GAAG,GACvC,CAAC;AACD,MAAMI,kBAAkB,GAAG,IAAID,MAAM,CACnC,GAAG,GAAGH,4BAA4B,GAAGC,uBAAuB,GAAG,GACjE,CAAC;AAEDD,4BAA4B,GAAGC,uBAAuB,GAAG,IAAI;AAQ7D,MAAMI,0BAA0B,GAAG,CAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,GAAG,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,IAAI,EAAC,EAAE,EAAC,EAAE
,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,GAAG,EAAC,IAAI,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,IAAI,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,IAAI,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,IAAI,EAAC,KAAK,EAAC,EAAE,EAAC,IAAI,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,IAAI,EAAC,EAAE,EAAC,GAAG,EAAC,IAAI,EAAC,GAAG,EAAC,IAAI,EAAC,IAAI,EAAC,CAAC,EAAC,IAAI,CAAC;AAEx+C,MAAMC,qBAAqB,GAAG,CAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC
,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,KAAK,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,KAAK,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,MAAM,EAAC,GAAG,CAAC;AAKjwB,SAASC,aAAaA,CAACC,IAAY,EAAEC,GAAsB,EAAW;EACpE,IAAIC,GAAG,GAAG,OAAO;EACjB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEC,MAAM,GAAGH,GAAG,CAACG,MAAM,EAAED,CAAC,GAAGC,MAAM,EAAED,CAAC,IAAI,CAAC,EAAE;IACvDD,GAAG,IAAID,GAAG,CAACE,CAAC,CAAC;IACb,IAAID,GAAG,GAAGF,IAAI,EAAE,OAAO,KAAK;IAE5BE,GAAG,IAAID,GAAG,CAACE,CAAC,GAAG,CAAC,CAAC;IACjB,IAAID,GAAG,IAAIF,IAAI,EAAE,OAAO,IAAI;EAC9B;EACA,OAAO,KAAK;AACd;AAIO,SAASK,iBAAiBA,CAACL,IAAY,EAAW;EACvD,IAAIA,IAAI,KAAuB,EAAE,OAAOA,IAAI,OAAyB;EACrE,IAAIA,IAAI,MAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,KAAuB,EAAE,OAAOA,IAAI,OAAyB;EACrE,IAAIA,IAAI,OAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,IAAI,MAAM,EAAE;IAClB,OACEA,IAAI,IAAI,IAAI,IAAIN,uBAAuB,CAACY,IAAI,CAACC,MAAM,CAACC,YAAY,CAACR,IAAI,CAAC,CAAC;EAE3E;EACA,OAAOD,aAAa,CAACC,IAAI,EAAEH,0BAA0B,CAAC;AACxD;AAIO,SAASY,gBAAgBA,CAACT,IAAY,EAAW;EACtD,IAAIA,IAAI,KAAmB,EAAE,OAAOA,IAAI,OAAyB;EACjE,IAAIA,IAAI,KAAkB,EAAE,OAAO,IAAI;EACvC,IAAIA,IAAI,KAAuB,EAAE,OAAO,KAAK;EAC7C,IAAIA,IAAI,MAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,KAAuB,EAAE,OAAOA,IAAI,OAAyB;EACrE,IAAIA,IAAI,OAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,IAAI,MAAM,EAAE;IAClB,OAAOA,IAAI,IAAI,IAAI,IAAIJ,kBAAkB,CAACU,IAAI,CAACC,MAAM,CAACC,YAAY,CAACR,IAAI,CAAC,CAAC;EAC3E;EACA,OACED,aAAa,CAACC,IAAI,EAAEH,0BAA0B,CAAC,IAC/CE,aAAa,CAACC,IAAI,EAAEF,qBAAqB,CAAC;AAE9C;AAIO,SAASY,gBAAgBA,CAACC,IAAY,EAAW;EACtD,IAAIC,OAAO,GAAG,IAAI;EAClB,KAAK,IAAIT,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGQ,IAAI,CAACP,MAAM,EAAED,CAAC,EAAE,EAAE;IAKpC,IAAIU,EAAE,GAAGF,IAAI,CAACG,UAAU,CAACX,CAAC,CAAC;IAC3B,IAAI,CAACU,EAAE,GAAG,MAAM,MAAM,MAAM,IAAIV,CAAC,GAAG,CAAC,GAAGQ,IAAI,CAACP,MAAM,EAAE;MACnD,MAAMW,KAAK,GAAGJ,IAAI,CAACG,UAAU,CAAC,EAAEX,CAAC,CAAC;MAClC,IAAI,CAACY,KAAK,GAAG,MAAM,MAAM,MAAM,EAAE;QAC/BF,EAAE,GAAG,OAAO,IAAI,CAACA,EAAE,GAAG,KAAK,KAAK,EAAE,CAAC,IAAIE,KAAK,GAAG,KAAK,CAAC;MACvD;IACF;IACA,IAAIH,OAAO,EAAE;MACXA,OAAO,G
AAG,KAAK;MACf,IAAI,CAACP,iBAAiB,CAACQ,EAAE,CAAC,EAAE;QAC1B,OAAO,KAAK;MACd;IACF,CAAC,MAAM,IAAI,CAACJ,gBAAgB,CAACI,EAAE,CAAC,EAAE;MAChC,OAAO,KAAK;IACd;EACF;EACA,OAAO,CAACD,OAAO;AACjB","ignoreList":[]} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/lib/index.js b/node_modules/@babel/helper-validator-identifier/lib/index.js new file mode 100644 index 0000000..76b2282 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/index.js @@ -0,0 +1,57 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "isIdentifierChar", { + enumerable: true, + get: function () { + return _identifier.isIdentifierChar; + } +}); +Object.defineProperty(exports, "isIdentifierName", { + enumerable: true, + get: function () { + return _identifier.isIdentifierName; + } +}); +Object.defineProperty(exports, "isIdentifierStart", { + enumerable: true, + get: function () { + return _identifier.isIdentifierStart; + } +}); +Object.defineProperty(exports, "isKeyword", { + enumerable: true, + get: function () { + return _keyword.isKeyword; + } +}); +Object.defineProperty(exports, "isReservedWord", { + enumerable: true, + get: function () { + return _keyword.isReservedWord; + } +}); +Object.defineProperty(exports, "isStrictBindOnlyReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictBindOnlyReservedWord; + } +}); +Object.defineProperty(exports, "isStrictBindReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictBindReservedWord; + } +}); +Object.defineProperty(exports, "isStrictReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictReservedWord; + } +}); +var _identifier = require("./identifier.js"); +var _keyword = require("./keyword.js"); + +//# sourceMappingURL=index.js.map diff --git a/node_modules/@babel/helper-validator-identifier/lib/index.js.map b/node_modules/@babel/helper-validator-identifier/lib/index.js.map new file mode 100644 index 0000000..d985f3b --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"names":["_identifier","require","_keyword"],"sources":["../src/index.ts"],"sourcesContent":["export {\n isIdentifierName,\n isIdentifierChar,\n isIdentifierStart,\n} from \"./identifier.ts\";\nexport {\n isReservedWord,\n isStrictBindOnlyReservedWord,\n isStrictBindReservedWord,\n isStrictReservedWord,\n isKeyword,\n} from \"./keyword.ts\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,IAAAA,WAAA,GAAAC,OAAA;AAKA,IAAAC,QAAA,GAAAD,OAAA","ignoreList":[]} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/lib/keyword.js b/node_modules/@babel/helper-validator-identifier/lib/keyword.js new file mode 100644 index 0000000..054cf84 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/keyword.js @@ -0,0 +1,35 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isKeyword = isKeyword; +exports.isReservedWord = isReservedWord; +exports.isStrictBindOnlyReservedWord = isStrictBindOnlyReservedWord; +exports.isStrictBindReservedWord = isStrictBindReservedWord; +exports.isStrictReservedWord = isStrictReservedWord; +const reservedWords = { + keyword: ["break", "case", "catch", "continue", "debugger", "default", "do", "else", "finally", "for", "function", "if", "return", "switch", "throw", "try", "var", "const", "while", "with", "new", "this", 
"super", "class", "extends", "export", "import", "null", "true", "false", "in", "instanceof", "typeof", "void", "delete"], + strict: ["implements", "interface", "let", "package", "private", "protected", "public", "static", "yield"], + strictBind: ["eval", "arguments"] +}; +const keywords = new Set(reservedWords.keyword); +const reservedWordsStrictSet = new Set(reservedWords.strict); +const reservedWordsStrictBindSet = new Set(reservedWords.strictBind); +function isReservedWord(word, inModule) { + return inModule && word === "await" || word === "enum"; +} +function isStrictReservedWord(word, inModule) { + return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word); +} +function isStrictBindOnlyReservedWord(word) { + return reservedWordsStrictBindSet.has(word); +} +function isStrictBindReservedWord(word, inModule) { + return isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word); +} +function isKeyword(word) { + return keywords.has(word); +} + +//# sourceMappingURL=keyword.js.map diff --git a/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map b/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map new file mode 100644 index 0000000..3471f78 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map @@ -0,0 +1 @@ +{"version":3,"names":["reservedWords","keyword","strict","strictBind","keywords","Set","reservedWordsStrictSet","reservedWordsStrictBindSet","isReservedWord","word","inModule","isStrictReservedWord","has","isStrictBindOnlyReservedWord","isStrictBindReservedWord","isKeyword"],"sources":["../src/keyword.ts"],"sourcesContent":["const reservedWords = {\n keyword: [\n \"break\",\n \"case\",\n \"catch\",\n \"continue\",\n \"debugger\",\n \"default\",\n \"do\",\n \"else\",\n \"finally\",\n \"for\",\n \"function\",\n \"if\",\n \"return\",\n \"switch\",\n \"throw\",\n \"try\",\n \"var\",\n \"const\",\n \"while\",\n \"with\",\n \"new\",\n \"this\",\n \"super\",\n \"class\",\n \"extends\",\n \"export\",\n \"import\",\n \"null\",\n \"true\",\n \"false\",\n \"in\",\n \"instanceof\",\n \"typeof\",\n \"void\",\n \"delete\",\n ],\n strict: [\n \"implements\",\n \"interface\",\n \"let\",\n \"package\",\n \"private\",\n \"protected\",\n \"public\",\n \"static\",\n \"yield\",\n ],\n strictBind: [\"eval\", \"arguments\"],\n};\nconst keywords = new Set(reservedWords.keyword);\nconst reservedWordsStrictSet = new Set(reservedWords.strict);\nconst reservedWordsStrictBindSet = new Set(reservedWords.strictBind);\n\n/**\n * Checks if word is a reserved word in non-strict mode\n */\nexport function isReservedWord(word: string, inModule: boolean): boolean {\n return (inModule && word === \"await\") || word === \"enum\";\n}\n\n/**\n * Checks if word is a reserved word in non-binding strict mode\n *\n * Includes non-strict reserved words\n */\nexport function isStrictReservedWord(word: string, inModule: boolean): boolean {\n return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode, but it is allowed as\n * a normal identifier.\n */\nexport function isStrictBindOnlyReservedWord(word: string): boolean {\n return reservedWordsStrictBindSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode\n *\n * Includes non-strict reserved words and non-binding strict reserved words\n */\nexport function isStrictBindReservedWord(\n word: string,\n inModule: boolean,\n): boolean {\n return (\n isStrictReservedWord(word, 
inModule) || isStrictBindOnlyReservedWord(word)\n );\n}\n\nexport function isKeyword(word: string): boolean {\n return keywords.has(word);\n}\n"],"mappings":";;;;;;;;;;AAAA,MAAMA,aAAa,GAAG;EACpBC,OAAO,EAAE,CACP,OAAO,EACP,MAAM,EACN,OAAO,EACP,UAAU,EACV,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,EACN,SAAS,EACT,KAAK,EACL,UAAU,EACV,IAAI,EACJ,QAAQ,EACR,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,OAAO,EACP,OAAO,EACP,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,EACP,OAAO,EACP,SAAS,EACT,QAAQ,EACR,QAAQ,EACR,MAAM,EACN,MAAM,EACN,OAAO,EACP,IAAI,EACJ,YAAY,EACZ,QAAQ,EACR,MAAM,EACN,QAAQ,CACT;EACDC,MAAM,EAAE,CACN,YAAY,EACZ,WAAW,EACX,KAAK,EACL,SAAS,EACT,SAAS,EACT,WAAW,EACX,QAAQ,EACR,QAAQ,EACR,OAAO,CACR;EACDC,UAAU,EAAE,CAAC,MAAM,EAAE,WAAW;AAClC,CAAC;AACD,MAAMC,QAAQ,GAAG,IAAIC,GAAG,CAACL,aAAa,CAACC,OAAO,CAAC;AAC/C,MAAMK,sBAAsB,GAAG,IAAID,GAAG,CAACL,aAAa,CAACE,MAAM,CAAC;AAC5D,MAAMK,0BAA0B,GAAG,IAAIF,GAAG,CAACL,aAAa,CAACG,UAAU,CAAC;AAK7D,SAASK,cAAcA,CAACC,IAAY,EAAEC,QAAiB,EAAW;EACvE,OAAQA,QAAQ,IAAID,IAAI,KAAK,OAAO,IAAKA,IAAI,KAAK,MAAM;AAC1D;AAOO,SAASE,oBAAoBA,CAACF,IAAY,EAAEC,QAAiB,EAAW;EAC7E,OAAOF,cAAc,CAACC,IAAI,EAAEC,QAAQ,CAAC,IAAIJ,sBAAsB,CAACM,GAAG,CAACH,IAAI,CAAC;AAC3E;AAMO,SAASI,4BAA4BA,CAACJ,IAAY,EAAW;EAClE,OAAOF,0BAA0B,CAACK,GAAG,CAACH,IAAI,CAAC;AAC7C;AAOO,SAASK,wBAAwBA,CACtCL,IAAY,EACZC,QAAiB,EACR;EACT,OACEC,oBAAoB,CAACF,IAAI,EAAEC,QAAQ,CAAC,IAAIG,4BAA4B,CAACJ,IAAI,CAAC;AAE9E;AAEO,SAASM,SAASA,CAACN,IAAY,EAAW;EAC/C,OAAOL,QAAQ,CAACQ,GAAG,CAACH,IAAI,CAAC;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/package.json b/node_modules/@babel/helper-validator-identifier/package.json new file mode 100644 index 0000000..55eb2c9 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/package.json @@ -0,0 +1,31 @@ +{ + "name": "@babel/helper-validator-identifier", + "version": "7.24.6", + "description": "Validate identifier/keywords name", + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-helper-validator-identifier" + }, + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "main": "./lib/index.js", + "exports": { + ".": { + "types": "./lib/index.d.ts", + "default": "./lib/index.js" + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@unicode/unicode-15.1.0": "^1.5.2", + "charcodes": "^0.2.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "author": "The Babel Team (https://babel.dev/team)", + "type": "commonjs" +} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js b/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js new file mode 100644 index 0000000..f3fd9e9 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js @@ -0,0 +1,73 @@ +"use strict"; + +// Always use the latest available version of Unicode! 
+// https://tc39.github.io/ecma262/#sec-conformance +const version = "15.1.0"; + +const start = require( + "@unicode/unicode-" + version + "/Binary_Property/ID_Start/code-points.js" +).filter(function (ch) { + return ch > 0x7f; +}); +let last = -1; +const cont = require( + "@unicode/unicode-" + version + "/Binary_Property/ID_Continue/code-points.js" +).filter(function (ch) { + return ch > 0x7f && search(start, ch, last + 1) === -1; +}); + +function search(arr, ch, starting) { + for (let i = starting; arr[i] <= ch && i < arr.length; last = i++) { + if (arr[i] === ch) return i; + } + return -1; +} + +function pad(str, width) { + while (str.length < width) str = "0" + str; + return str; +} + +function esc(code) { + const hex = code.toString(16); + if (hex.length <= 2) return "\\x" + pad(hex, 2); + else return "\\u" + pad(hex, 4); +} + +function generate(chars) { + const astral = []; + let re = ""; + for (let i = 0, at = 0x10000; i < chars.length; i++) { + const from = chars[i]; + let to = from; + while (i < chars.length - 1 && chars[i + 1] === to + 1) { + i++; + to++; + } + if (to <= 0xffff) { + if (from === to) re += esc(from); + else if (from + 1 === to) re += esc(from) + esc(to); + else re += esc(from) + "-" + esc(to); + } else { + astral.push(from - at, to - from); + at = to; + } + } + return { nonASCII: re, astral: astral }; +} + +const startData = generate(start); +const contData = generate(cont); + +console.log("/* prettier-ignore */"); +console.log('let nonASCIIidentifierStartChars = "' + startData.nonASCII + '";'); +console.log("/* prettier-ignore */"); +console.log('let nonASCIIidentifierChars = "' + contData.nonASCII + '";'); +console.log("/* prettier-ignore */"); +console.log( + "const astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";" +); +console.log("/* prettier-ignore */"); +console.log( + "const astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";" +); diff --git a/node_modules/@babel/highlight/LICENSE b/node_modules/@babel/highlight/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/node_modules/@babel/highlight/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
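The hunks above vendor `@babel/helper-validator-identifier` into `node_modules`. Its public API is the set of re-exports in `lib/index.js`: the identifier checks from `identifier.js` (`isIdentifierName`, `isIdentifierStart`, `isIdentifierChar`) and the keyword and reserved-word checks from `keyword.js`. The following is only an orientation sketch, not part of the vendored files, and assumes nothing beyond the exports shown in those hunks:

```js
// Minimal sketch: exercising the vendored @babel/helper-validator-identifier API.
const {
  isIdentifierName,
  isIdentifierStart,
  isIdentifierChar,
  isKeyword,
  isReservedWord,
  isStrictReservedWord,
} = require("@babel/helper-validator-identifier");

// Whole-string check: the first code point must be an identifier start,
// every following code point an identifier char.
console.log(isIdentifierName("fooBar"));  // true
console.log(isIdentifierName("3foo"));    // false, "3" cannot start an identifier
console.log(isIdentifierName("日本語"));   // true, non-ASCII identifiers are valid

// Per-code-point checks take a numeric code point.
console.log(isIdentifierStart("$".codePointAt(0))); // true
console.log(isIdentifierChar("-".codePointAt(0)));  // false

// Keyword / reserved-word checks; the boolean flags module code,
// where "await" is additionally reserved.
console.log(isKeyword("class"));                   // true
console.log(isReservedWord("await", true));        // true
console.log(isStrictReservedWord("yield", false)); // true
```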
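The `astralIdentifierStartCodes` and `astralIdentifierCodes` tables above (produced by `scripts/generate-identifier-regex.js`) are run-length and offset encoded: the cursor starts at 0x10000, and each pair is an offset to the start of the next range followed by the size of that range, which is exactly how `isInAstralSet` walks them. A small decoding sketch (illustrative only, mirroring that interpretation) makes the layout concrete:

```js
// Illustrative decoder for the (offset, size) pair encoding used by the
// astral code point tables; mirrors the cumulative walk in isInAstralSet.
function decodeAstralRanges(pairs) {
  const ranges = [];
  let pos = 0x10000;
  for (let i = 0; i < pairs.length; i += 2) {
    pos += pairs[i];            // offset to the start of the next range
    const start = pos;
    pos += pairs[i + 1];        // size of the range
    ranges.push([start, pos]);  // inclusive range [start, start + size]
  }
  return ranges;
}

// The first two pairs of astralIdentifierStartCodes are [0, 11, 2, 25]:
console.log(
  decodeAstralRanges([0, 11, 2, 25]).map(([a, b]) => [a.toString(16), b.toString(16)])
);
// => [ [ '10000', '1000b' ], [ '1000d', '10026' ] ]
```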
diff --git a/node_modules/@babel/highlight/README.md b/node_modules/@babel/highlight/README.md new file mode 100644 index 0000000..4c2ec87 --- /dev/null +++ b/node_modules/@babel/highlight/README.md @@ -0,0 +1,19 @@ +# @babel/highlight + +> Syntax highlight JavaScript strings for output in terminals. + +See our website [@babel/highlight](https://babeljs.io/docs/babel-highlight) for more information. + +## Install + +Using npm: + +```sh +npm install --save-dev @babel/highlight +``` + +or using yarn: + +```sh +yarn add @babel/highlight --dev +``` diff --git a/node_modules/@babel/highlight/lib/index.js b/node_modules/@babel/highlight/lib/index.js new file mode 100644 index 0000000..944a043 --- /dev/null +++ b/node_modules/@babel/highlight/lib/index.js @@ -0,0 +1,119 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = highlight; +exports.shouldHighlight = shouldHighlight; +var _jsTokens = require("js-tokens"); +var _helperValidatorIdentifier = require("@babel/helper-validator-identifier"); +var _picocolors = _interopRequireWildcard(require("picocolors"), true); +function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } +function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } +const colors = typeof process === "object" && (process.env.FORCE_COLOR === "0" || process.env.FORCE_COLOR === "false") ? (0, _picocolors.createColors)(false) : _picocolors.default; +const compose = (f, g) => v => f(g(v)); +const sometimesKeywords = new Set(["as", "async", "from", "get", "of", "set"]); +function getDefs(colors) { + return { + keyword: colors.cyan, + capitalized: colors.yellow, + jsxIdentifier: colors.yellow, + punctuator: colors.yellow, + number: colors.magenta, + string: colors.green, + regex: colors.magenta, + comment: colors.gray, + invalid: compose(compose(colors.white, colors.bgRed), colors.bold) + }; +} +const NEWLINE = /\r\n|[\n\r\u2028\u2029]/; +const BRACKET = /^[()[\]{}]$/; +let tokenize; +{ + const JSX_TAG = /^[a-z][\w-]*$/i; + const getTokenType = function (token, offset, text) { + if (token.type === "name") { + if ((0, _helperValidatorIdentifier.isKeyword)(token.value) || (0, _helperValidatorIdentifier.isStrictReservedWord)(token.value, true) || sometimesKeywords.has(token.value)) { + return "keyword"; + } + if (JSX_TAG.test(token.value) && (text[offset - 1] === "<" || text.slice(offset - 2, offset) === " colorize(str)).join("\n"); + } else { + highlighted += value; + } + } + return highlighted; +} +function shouldHighlight(options) { + return colors.isColorSupported || options.forceColor; +} +let pcWithForcedColor = undefined; +function getColors(forceColor) { + if (forceColor) { + var _pcWithForcedColor; + (_pcWithForcedColor = pcWithForcedColor) != null ? 
_pcWithForcedColor : pcWithForcedColor = (0, _picocolors.createColors)(true); + return pcWithForcedColor; + } + return colors; +} +function highlight(code, options = {}) { + if (code !== "" && shouldHighlight(options)) { + const defs = getDefs(getColors(options.forceColor)); + return highlightTokens(defs, code); + } else { + return code; + } +} +{ + let chalk, chalkWithForcedColor; + exports.getChalk = ({ + forceColor + }) => { + var _chalk; + (_chalk = chalk) != null ? _chalk : chalk = require("chalk"); + if (forceColor) { + var _chalkWithForcedColor; + (_chalkWithForcedColor = chalkWithForcedColor) != null ? _chalkWithForcedColor : chalkWithForcedColor = new chalk.constructor({ + enabled: true, + level: 1 + }); + return chalkWithForcedColor; + } + return chalk; + }; +} + +//# sourceMappingURL=index.js.map diff --git a/node_modules/@babel/highlight/lib/index.js.map b/node_modules/@babel/highlight/lib/index.js.map new file mode 100644 index 0000000..1672cd5 --- /dev/null +++ b/node_modules/@babel/highlight/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"names":["_jsTokens","require","_helperValidatorIdentifier","_picocolors","_interopRequireWildcard","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","colors","process","env","FORCE_COLOR","createColors","_colors","compose","f","g","v","sometimesKeywords","Set","getDefs","keyword","cyan","capitalized","yellow","jsxIdentifier","punctuator","number","magenta","string","green","regex","comment","gray","invalid","white","bgRed","bold","NEWLINE","BRACKET","tokenize","JSX_TAG","getTokenType","token","offset","text","type","isKeyword","value","isStrictReservedWord","test","slice","toLowerCase","match","jsTokens","exec","matchToToken","index","highlightTokens","defs","highlighted","colorize","split","map","str","join","shouldHighlight","options","isColorSupported","forceColor","pcWithForcedColor","undefined","getColors","_pcWithForcedColor","highlight","code","chalk","chalkWithForcedColor","exports","getChalk","_chalk","_chalkWithForcedColor","constructor","enabled","level"],"sources":["../src/index.ts"],"sourcesContent":["import type { Token as JSToken, JSXToken } from \"js-tokens\";\nimport jsTokens from \"js-tokens\";\n\nimport {\n isStrictReservedWord,\n isKeyword,\n} from \"@babel/helper-validator-identifier\";\n\nimport _colors, { createColors } from \"picocolors\";\nimport type { Colors, Formatter } from \"picocolors/types\";\n// See https://github.com/alexeyraspopov/picocolors/issues/62\nconst colors =\n typeof process === \"object\" &&\n (process.env.FORCE_COLOR === \"0\" || process.env.FORCE_COLOR === \"false\")\n ? 
createColors(false)\n : _colors;\n\nconst compose: (f: (gv: U) => V, g: (v: T) => U) => (v: T) => V =\n (f, g) => v =>\n f(g(v));\n\n/**\n * Names that are always allowed as identifiers, but also appear as keywords\n * within certain syntactic productions.\n *\n * https://tc39.es/ecma262/#sec-keywords-and-reserved-words\n *\n * `target` has been omitted since it is very likely going to be a false\n * positive.\n */\nconst sometimesKeywords = new Set([\"as\", \"async\", \"from\", \"get\", \"of\", \"set\"]);\n\ntype InternalTokenType =\n | \"keyword\"\n | \"capitalized\"\n | \"jsxIdentifier\"\n | \"punctuator\"\n | \"number\"\n | \"string\"\n | \"regex\"\n | \"comment\"\n | \"invalid\";\n\ntype Token = {\n type: InternalTokenType | \"uncolored\";\n value: string;\n};\n/**\n * Styles for token types.\n */\nfunction getDefs(colors: Colors): Record {\n return {\n keyword: colors.cyan,\n capitalized: colors.yellow,\n jsxIdentifier: colors.yellow,\n punctuator: colors.yellow,\n number: colors.magenta,\n string: colors.green,\n regex: colors.magenta,\n comment: colors.gray,\n invalid: compose(compose(colors.white, colors.bgRed), colors.bold),\n };\n}\n\n/**\n * RegExp to test for newlines in terminal.\n */\nconst NEWLINE = /\\r\\n|[\\n\\r\\u2028\\u2029]/;\n\n/**\n * RegExp to test for the three types of brackets.\n */\nconst BRACKET = /^[()[\\]{}]$/;\n\nlet tokenize: (\n text: string,\n) => Generator<{ type: InternalTokenType | \"uncolored\"; value: string }>;\n\nif (process.env.BABEL_8_BREAKING) {\n /**\n * Get the type of token, specifying punctuator type.\n */\n const getTokenType = function (\n token: JSToken | JSXToken,\n ): InternalTokenType | \"uncolored\" {\n if (token.type === \"IdentifierName\") {\n if (\n isKeyword(token.value) ||\n isStrictReservedWord(token.value, true) ||\n sometimesKeywords.has(token.value)\n ) {\n return \"keyword\";\n }\n\n if (token.value[0] !== token.value[0].toLowerCase()) {\n return \"capitalized\";\n }\n }\n\n if (token.type === \"Punctuator\" && BRACKET.test(token.value)) {\n return \"uncolored\";\n }\n\n if (token.type === \"Invalid\" && token.value === \"@\") {\n return \"punctuator\";\n }\n\n switch (token.type) {\n case \"NumericLiteral\":\n return \"number\";\n\n case \"StringLiteral\":\n case \"JSXString\":\n case \"NoSubstitutionTemplate\":\n return \"string\";\n\n case \"RegularExpressionLiteral\":\n return \"regex\";\n\n case \"Punctuator\":\n case \"JSXPunctuator\":\n return \"punctuator\";\n\n case \"MultiLineComment\":\n case \"SingleLineComment\":\n return \"comment\";\n\n case \"Invalid\":\n case \"JSXInvalid\":\n return \"invalid\";\n\n case \"JSXIdentifier\":\n return \"jsxIdentifier\";\n\n default:\n return \"uncolored\";\n }\n };\n\n /**\n * Turn a string of JS into an array of objects.\n */\n tokenize = function* (text: string): Generator {\n for (const token of jsTokens(text, { jsx: true })) {\n switch (token.type) {\n case \"TemplateHead\":\n yield { type: \"string\", value: token.value.slice(0, -2) };\n yield { type: \"punctuator\", value: \"${\" };\n break;\n\n case \"TemplateMiddle\":\n yield { type: \"punctuator\", value: \"}\" };\n yield { type: \"string\", value: token.value.slice(1, -2) };\n yield { type: \"punctuator\", value: \"${\" };\n break;\n\n case \"TemplateTail\":\n yield { type: \"punctuator\", value: \"}\" };\n yield { type: \"string\", value: token.value.slice(1) };\n break;\n\n default:\n yield {\n type: getTokenType(token),\n value: token.value,\n };\n }\n }\n };\n} else {\n /**\n * RegExp to test for what seems to 
be a JSX tag name.\n */\n const JSX_TAG = /^[a-z][\\w-]*$/i;\n\n // The token here is defined in js-tokens@4. However we don't bother\n // typing it since the whole block will be removed in Babel 8\n const getTokenType = function (token: any, offset: number, text: string) {\n if (token.type === \"name\") {\n if (\n isKeyword(token.value) ||\n isStrictReservedWord(token.value, true) ||\n sometimesKeywords.has(token.value)\n ) {\n return \"keyword\";\n }\n\n if (\n JSX_TAG.test(token.value) &&\n (text[offset - 1] === \"<\" || text.slice(offset - 2, offset) === \", text: string) {\n let highlighted = \"\";\n\n for (const { type, value } of tokenize(text)) {\n const colorize = defs[type];\n if (colorize) {\n highlighted += value\n .split(NEWLINE)\n .map(str => colorize(str))\n .join(\"\\n\");\n } else {\n highlighted += value;\n }\n }\n\n return highlighted;\n}\n\n/**\n * Highlight `text` using the token definitions in `defs`.\n */\n\ntype Options = {\n forceColor?: boolean;\n};\n\n/**\n * Whether the code should be highlighted given the passed options.\n */\nexport function shouldHighlight(options: Options): boolean {\n return colors.isColorSupported || options.forceColor;\n}\n\nlet pcWithForcedColor: Colors = undefined;\nfunction getColors(forceColor: boolean) {\n if (forceColor) {\n pcWithForcedColor ??= createColors(true);\n return pcWithForcedColor;\n }\n return colors;\n}\n\n/**\n * Highlight `code`.\n */\nexport default function highlight(code: string, options: Options = {}): string {\n if (code !== \"\" && shouldHighlight(options)) {\n const defs = getDefs(getColors(options.forceColor));\n return highlightTokens(defs, code);\n } else {\n return code;\n }\n}\n\nif (!process.env.BABEL_8_BREAKING && !USE_ESM && !IS_STANDALONE) {\n let chalk: any, chalkWithForcedColor: any;\n // eslint-disable-next-line no-restricted-globals\n exports.getChalk = ({ forceColor }: Options) => {\n // eslint-disable-next-line no-restricted-globals\n chalk ??= require(\"chalk\");\n if (forceColor) {\n chalkWithForcedColor ??= new chalk.constructor({\n enabled: true,\n level: 1,\n });\n return chalkWithForcedColor;\n }\n return chalk;\n 
};\n}\n"],"mappings":";;;;;;;AACA,IAAAA,SAAA,GAAAC,OAAA;AAEA,IAAAC,0BAAA,GAAAD,OAAA;AAKA,IAAAE,WAAA,GAAAC,uBAAA,CAAAH,OAAA;AAAmD,SAAAI,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAF,wBAAAE,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAI,GAAA,CAAAP,CAAA,OAAAQ,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAd,CAAA,oBAAAc,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAhB,CAAA,EAAAc,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAb,CAAA,EAAAc,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAd,CAAA,CAAAc,CAAA,YAAAN,CAAA,CAAAH,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAe,GAAA,CAAAlB,CAAA,EAAAQ,CAAA,GAAAA,CAAA;AAGnD,MAAMW,MAAM,GACV,OAAOC,OAAO,KAAK,QAAQ,KAC1BA,OAAO,CAACC,GAAG,CAACC,WAAW,KAAK,GAAG,IAAIF,OAAO,CAACC,GAAG,CAACC,WAAW,KAAK,OAAO,CAAC,GACpE,IAAAC,wBAAY,EAAC,KAAK,CAAC,GACnBC,mBAAO;AAEb,MAAMC,OAAkE,GACtEA,CAACC,CAAC,EAAEC,CAAC,KAAKC,CAAC,IACTF,CAAC,CAACC,CAAC,CAACC,CAAC,CAAC,CAAC;AAWX,MAAMC,iBAAiB,GAAG,IAAIC,GAAG,CAAC,CAAC,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;AAoB9E,SAASC,OAAOA,CAACZ,MAAc,EAAwC;EACrE,OAAO;IACLa,OAAO,EAAEb,MAAM,CAACc,IAAI;IACpBC,WAAW,EAAEf,MAAM,CAACgB,MAAM;IAC1BC,aAAa,EAAEjB,MAAM,CAACgB,MAAM;IAC5BE,UAAU,EAAElB,MAAM,CAACgB,MAAM;IACzBG,MAAM,EAAEnB,MAAM,CAACoB,OAAO;IACtBC,MAAM,EAAErB,MAAM,CAACsB,KAAK;IACpBC,KAAK,EAAEvB,MAAM,CAACoB,OAAO;IACrBI,OAAO,EAAExB,MAAM,CAACyB,IAAI;IACpBC,OAAO,EAAEpB,OAAO,CAACA,OAAO,CAACN,MAAM,CAAC2B,KAAK,EAAE3B,MAAM,CAAC4B,KAAK,CAAC,EAAE5B,MAAM,CAAC6B,IAAI;EACnE,CAAC;AACH;AAKA,MAAMC,OAAO,GAAG,yBAAyB;AAKzC,MAAMC,OAAO,GAAG,aAAa;AAE7B,IAAIC,QAEoE;AA6FjE;EAIL,MAAMC,OAAO,GAAG,gBAAgB;EAIhC,MAAMC,YAAY,GAAG,SAAAA,CAAUC,KAAU,EAAEC,MAAc,EAAEC,IAAY,EAAE;IACvE,IAAIF,KAAK,CAACG,IAAI,KAAK,MAAM,EAAE;MACzB,IACE,IAAAC,oCAAS,EAACJ,KAAK,CAACK,KAAK,CAAC,IACtB,IAAAC,+CAAoB,EAACN,KAAK,CAACK,KAAK,EAAE,IAAI,CAAC,IACvC9B,iBAAiB,CAACvB,GAAG,CAACgD,KAAK,CAACK,KAAK,CAAC,EAClC;QACA,OAAO,SAAS;MAClB;MAEA,IACEP,OAAO,CAACS,IAAI,CAACP,KAAK,CAACK,KAAK,CAAC,KACxBH,IAAI,CAACD,MAAM,GAAG,CAAC,CAAC,KAAK,GAAG,IAAIC,IAAI,CAACM,KAAK,CAACP,MAAM,GAAG,CAAC,EAAEA,MAAM,CAAC,KAAK,IAAI,CAAC,EACrE;QACA,OAAO,eAAe;MACxB;MAEA,IAAID,KAAK,CAACK,KAAK,CAAC,CAAC,CAAC,KAAKL,KAAK,CAACK,KAAK,CAAC,CAAC,CAAC,CAACI,WAAW,CAAC,CAAC,EAAE;QACnD,OAAO,aAAa;MACtB;IACF;IAEA,IAAIT,KAAK,CAACG,IAAI,KAAK,YAAY,IAAIP,OAAO,CAACW,IAAI,CAACP,KAAK,CAACK,KAAK,CAAC,EAAE;MAC5D,OAAO,SAAS;IAClB;IAEA,IACEL,KAAK,CAACG,IAAI,KAAK,SAAS,KACvBH,KAAK,CAACK,KAAK,KAAK,GAAG,IAAIL,KAAK,CAACK,KAAK,KAAK,GAAG,CAAC,EAC5C;MACA,OAAO,YAAY;IACrB;IAEA,OAAOL,KAAK,CAACG,IAAI;EACnB,CAAC;EAEDN,QAAQ,GAAG,UAAAA,CAAWK,IAAY,EAAE;IAClC,IAAIQ,KAAK;IACT,OAAQA,KAAK,GAAIC,SAAQ,CAAS5D,OAAO,CAAC6D,IAAI,CAACV,IAAI,CAAC,EAAG;MACrD,MAAMF,KAAK,GAAIW,SAAQ,CAASE,YAAY,CAACH,KAAK,CAAC;MAEnD,MAAM;QACJP,IAAI,EAAEJ,YAAY,CAACC,KAAK,EAAEU,KAAK,CAACI,KAAK,EAAEZ,IAAI,CAAC;QAC5CG,KAAK,EAAEL,KAAK,CAACK;MACf,CAAC;IACH;EACF,CAAC;AACH;AAKA,SAASU,eAAeA,CAACC,IAA+B,EAAEd,IAAY,EAAE;EACtE,IAAIe,WAAW,GAAG,EAAE;EAEpB,KAAK,MAAM;IAAEd,IAAI;IAAEE;EAAM,CAAC,IAAIR,QAAQ,CAACK,IAAI,CAAC,EAAE;IAC5C,MAAMgB,QAAQ,GAAGF,IAAI,CAACb,IAAI,CAAC;IAC3B,IAAIe,QAAQ,EAAE;MACZD,WAAW,IAAIZ,KAAK,CACjBc,KAAK,CAACxB,OAAO,CAAC,CACdyB,GAAG,CAACC,GAAG,IAAIH,QAAQ,CAACG,GAAG,CAAC,CAAC,CACzBC,IAAI,CAAC,IAAI,CAAC;
IACf,CAAC,MAAM;MACLL,WAAW,IAAIZ,KAAK;IACtB;EACF;EAEA,OAAOY,WAAW;AACpB;AAaO,SAASM,eAAeA,CAACC,OAAgB,EAAW;EACzD,OAAO3D,MAAM,CAAC4D,gBAAgB,IAAID,OAAO,CAACE,UAAU;AACtD;AAEA,IAAIC,iBAAyB,GAAGC,SAAS;AACzC,SAASC,SAASA,CAACH,UAAmB,EAAE;EACtC,IAAIA,UAAU,EAAE;IAAA,IAAAI,kBAAA;IACd,CAAAA,kBAAA,GAAAH,iBAAiB,YAAAG,kBAAA,GAAjBH,iBAAiB,GAAK,IAAA1D,wBAAY,EAAC,IAAI,CAAC;IACxC,OAAO0D,iBAAiB;EAC1B;EACA,OAAO9D,MAAM;AACf;AAKe,SAASkE,SAASA,CAACC,IAAY,EAAER,OAAgB,GAAG,CAAC,CAAC,EAAU;EAC7E,IAAIQ,IAAI,KAAK,EAAE,IAAIT,eAAe,CAACC,OAAO,CAAC,EAAE;IAC3C,MAAMR,IAAI,GAAGvC,OAAO,CAACoD,SAAS,CAACL,OAAO,CAACE,UAAU,CAAC,CAAC;IACnD,OAAOX,eAAe,CAACC,IAAI,EAAEgB,IAAI,CAAC;EACpC,CAAC,MAAM;IACL,OAAOA,IAAI;EACb;AACF;AAEiE;EAC/D,IAAIC,KAAU,EAAEC,oBAAyB;EAEzCC,OAAO,CAACC,QAAQ,GAAG,CAAC;IAAEV;EAAoB,CAAC,KAAK;IAAA,IAAAW,MAAA;IAE9C,CAAAA,MAAA,GAAAJ,KAAK,YAAAI,MAAA,GAALJ,KAAK,GAAK5F,OAAO,CAAC,OAAO,CAAC;IAC1B,IAAIqF,UAAU,EAAE;MAAA,IAAAY,qBAAA;MACd,CAAAA,qBAAA,GAAAJ,oBAAoB,YAAAI,qBAAA,GAApBJ,oBAAoB,GAAK,IAAID,KAAK,CAACM,WAAW,CAAC;QAC7CC,OAAO,EAAE,IAAI;QACbC,KAAK,EAAE;MACT,CAAC,CAAC;MACF,OAAOP,oBAAoB;IAC7B;IACA,OAAOD,KAAK;EACd,CAAC;AACH","ignoreList":[]} \ No newline at end of file diff --git a/node_modules/@babel/highlight/node_modules/ansi-styles/index.js b/node_modules/@babel/highlight/node_modules/ansi-styles/index.js new file mode 100644 index 0000000..90a871c --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/ansi-styles/index.js @@ -0,0 +1,165 @@ +'use strict'; +const colorConvert = require('color-convert'); + +const wrapAnsi16 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${code + offset}m`; +}; + +const wrapAnsi256 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};5;${code}m`; +}; + +const wrapAnsi16m = (fn, offset) => function () { + const rgb = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; +}; + +function assembleStyles() { + const codes = new Map(); + const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + gray: [90, 39], + + // Bright color + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } + }; + + // Fix humans + styles.color.grey = styles.color.gray; + + for (const groupName of Object.keys(styles)) { + const group = styles[groupName]; + + for (const styleName of Object.keys(group)) { + const style = group[styleName]; + + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m` + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, 
+ enumerable: false + }); + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false + }); + } + + const ansi2ansi = n => n; + const rgb2rgb = (r, g, b) => [r, g, b]; + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + styles.color.ansi = { + ansi: wrapAnsi16(ansi2ansi, 0) + }; + styles.color.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 0) + }; + styles.color.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 0) + }; + + styles.bgColor.ansi = { + ansi: wrapAnsi16(ansi2ansi, 10) + }; + styles.bgColor.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 10) + }; + styles.bgColor.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 10) + }; + + for (let key of Object.keys(colorConvert)) { + if (typeof colorConvert[key] !== 'object') { + continue; + } + + const suite = colorConvert[key]; + + if (key === 'ansi16') { + key = 'ansi'; + } + + if ('ansi16' in suite) { + styles.color.ansi[key] = wrapAnsi16(suite.ansi16, 0); + styles.bgColor.ansi[key] = wrapAnsi16(suite.ansi16, 10); + } + + if ('ansi256' in suite) { + styles.color.ansi256[key] = wrapAnsi256(suite.ansi256, 0); + styles.bgColor.ansi256[key] = wrapAnsi256(suite.ansi256, 10); + } + + if ('rgb' in suite) { + styles.color.ansi16m[key] = wrapAnsi16m(suite.rgb, 0); + styles.bgColor.ansi16m[key] = wrapAnsi16m(suite.rgb, 10); + } + } + + return styles; +} + +// Make the export immutable +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/@babel/highlight/node_modules/ansi-styles/license b/node_modules/@babel/highlight/node_modules/ansi-styles/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/ansi-styles/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
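For reference, the `wrapAnsi16`, `wrapAnsi256`, and `wrapAnsi16m` helpers at the top of the `ansi-styles` index above differ only in how they frame a converted color value inside an SGR escape sequence, with offset `0` for foreground and `10` for background codes. A rough illustration of the sequence each shape produces, using hand-picked example values rather than output captured from this package:

```js
const ESC = '\u001B[';

// wrapAnsi16 shape: a single SGR code, e.g. 32 = green foreground, 42 = green background.
console.log(ESC + (32 + 0) + 'm' + 'green text' + ESC + '39m');

// wrapAnsi256 shape: 38;5;<n> for foreground, 48;5;<n> for background.
console.log(ESC + (38 + 0) + ';5;208m' + '256-color text' + ESC + '39m');

// wrapAnsi16m shape: 38;2;<r>;<g>;<b> for foreground, 48;2;<r>;<g>;<b> for background.
console.log(ESC + (38 + 10) + ';2;255;131;0m' + 'truecolor background' + ESC + '49m');
```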
diff --git a/node_modules/@babel/highlight/node_modules/ansi-styles/package.json b/node_modules/@babel/highlight/node_modules/ansi-styles/package.json new file mode 100644 index 0000000..65edb48 --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/ansi-styles/package.json @@ -0,0 +1,56 @@ +{ + "name": "ansi-styles", + "version": "3.2.1", + "description": "ANSI escape codes for styling strings in the terminal", + "license": "MIT", + "repository": "chalk/ansi-styles", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava", + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" + }, + "files": [ + "index.js" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "color-convert": "^1.9.0" + }, + "devDependencies": { + "ava": "*", + "babel-polyfill": "^6.23.0", + "svg-term-cli": "^2.1.1", + "xo": "*" + }, + "ava": { + "require": "babel-polyfill" + } +} diff --git a/node_modules/@babel/highlight/node_modules/ansi-styles/readme.md b/node_modules/@babel/highlight/node_modules/ansi-styles/readme.md new file mode 100644 index 0000000..3158e2d --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/ansi-styles/readme.md @@ -0,0 +1,147 @@ +# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + + + + +## Install + +``` +$ npm install ansi-styles +``` + + +## Usage + +```js +const style = require('ansi-styles'); + +console.log(`${style.green.open}Hello world!${style.green.close}`); + + +// Color conversion between 16/256/truecolor +// NOTE: If conversion goes to 16 colors or 256 colors, the original color +// may be degraded to fit that color palette. This means terminals +// that do not support 16 million colors will best-match the +// original color. +console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); +console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); +console.log(style.color.ansi16m.hex('#ABCDEF') + 'Hello world!' + style.color.close); +``` + +## API + +Each style has an `open` and `close` property. 
+ + +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `gray` ("bright black") +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + + +## Advanced usage + +By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `style.modifier` +- `style.color` +- `style.bgColor` + +###### Example + +```js +console.log(style.color.green.open); +``` + +Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. + +###### Example + +```js +console.log(style.codes.get(36)); +//=> 39 +``` + + +## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) + +`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. + +To use these, call the associated conversion function with the intended output, for example: + +```js +style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code +style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code + +style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code +style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code + +style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code +style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code +``` + + +## Related + +- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/@babel/highlight/node_modules/chalk/index.js b/node_modules/@babel/highlight/node_modules/chalk/index.js new file mode 100644 index 0000000..1cc5fa8 --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/chalk/index.js @@ -0,0 +1,228 @@ +'use strict'; +const escapeStringRegexp = require('escape-string-regexp'); +const ansiStyles = require('ansi-styles'); +const stdoutColor = require('supports-color').stdout; + +const template = require('./templates.js'); + +const isSimpleWindowsTerm = process.platform === 'win32' && !(process.env.TERM || '').toLowerCase().startsWith('xterm'); + +// `supportsColor.level` → `ansiStyles.color[name]` mapping +const levelMapping = ['ansi', 'ansi', 'ansi256', 'ansi16m']; + +// `color-convert` models to exclude from the Chalk API due to conflicts and such +const skipModels = new Set(['gray']); + +const styles = Object.create(null); + +function applyOptions(obj, options) { + options = options || {}; + + // Detect level if not set 
manually + const scLevel = stdoutColor ? stdoutColor.level : 0; + obj.level = options.level === undefined ? scLevel : options.level; + obj.enabled = 'enabled' in options ? options.enabled : obj.level > 0; +} + +function Chalk(options) { + // We check for this.template here since calling `chalk.constructor()` + // by itself will have a `this` of a previously constructed chalk object + if (!this || !(this instanceof Chalk) || this.template) { + const chalk = {}; + applyOptions(chalk, options); + + chalk.template = function () { + const args = [].slice.call(arguments); + return chalkTag.apply(null, [chalk.template].concat(args)); + }; + + Object.setPrototypeOf(chalk, Chalk.prototype); + Object.setPrototypeOf(chalk.template, chalk); + + chalk.template.constructor = Chalk; + + return chalk.template; + } + + applyOptions(this, options); +} + +// Use bright blue on Windows as the normal blue color is illegible +if (isSimpleWindowsTerm) { + ansiStyles.blue.open = '\u001B[94m'; +} + +for (const key of Object.keys(ansiStyles)) { + ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + + styles[key] = { + get() { + const codes = ansiStyles[key]; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, key); + } + }; +} + +styles.visible = { + get() { + return build.call(this, this._styles || [], true, 'visible'); + } +}; + +ansiStyles.color.closeRe = new RegExp(escapeStringRegexp(ansiStyles.color.close), 'g'); +for (const model of Object.keys(ansiStyles.color.ansi)) { + if (skipModels.has(model)) { + continue; + } + + styles[model] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.color[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.color.close, + closeRe: ansiStyles.color.closeRe + }; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +ansiStyles.bgColor.closeRe = new RegExp(escapeStringRegexp(ansiStyles.bgColor.close), 'g'); +for (const model of Object.keys(ansiStyles.bgColor.ansi)) { + if (skipModels.has(model)) { + continue; + } + + const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1); + styles[bgModel] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.bgColor[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.bgColor.close, + closeRe: ansiStyles.bgColor.closeRe + }; + return build.call(this, this._styles ? 
this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +const proto = Object.defineProperties(() => {}, styles); + +function build(_styles, _empty, key) { + const builder = function () { + return applyStyle.apply(builder, arguments); + }; + + builder._styles = _styles; + builder._empty = _empty; + + const self = this; + + Object.defineProperty(builder, 'level', { + enumerable: true, + get() { + return self.level; + }, + set(level) { + self.level = level; + } + }); + + Object.defineProperty(builder, 'enabled', { + enumerable: true, + get() { + return self.enabled; + }, + set(enabled) { + self.enabled = enabled; + } + }); + + // See below for fix regarding invisible grey/dim combination on Windows + builder.hasGrey = this.hasGrey || key === 'gray' || key === 'grey'; + + // `__proto__` is used because we must return a function, but there is + // no way to create a function with a different prototype + builder.__proto__ = proto; // eslint-disable-line no-proto + + return builder; +} + +function applyStyle() { + // Support varags, but simply cast to string in case there's only one arg + const args = arguments; + const argsLen = args.length; + let str = String(arguments[0]); + + if (argsLen === 0) { + return ''; + } + + if (argsLen > 1) { + // Don't slice `arguments`, it prevents V8 optimizations + for (let a = 1; a < argsLen; a++) { + str += ' ' + args[a]; + } + } + + if (!this.enabled || this.level <= 0 || !str) { + return this._empty ? '' : str; + } + + // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe, + // see https://github.com/chalk/chalk/issues/58 + // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop. + const originalDim = ansiStyles.dim.open; + if (isSimpleWindowsTerm && this.hasGrey) { + ansiStyles.dim.open = ''; + } + + for (const code of this._styles.slice().reverse()) { + // Replace any instances already present with a re-opening code + // otherwise only the part of the string until said closing code + // will be colored, and the rest will simply be 'plain'. + str = code.open + str.replace(code.closeRe, code.open) + code.close; + + // Close the styling before a linebreak and reopen + // after next line to fix a bleed issue on macOS + // https://github.com/chalk/chalk/pull/92 + str = str.replace(/\r?\n/g, `${code.close}$&${code.open}`); + } + + // Reset the original `dim` if we changed it to work around the Windows dimmed gray issue + ansiStyles.dim.open = originalDim; + + return str; +} + +function chalkTag(chalk, strings) { + if (!Array.isArray(strings)) { + // If chalk() was called by itself or with a string, + // return the string itself as a string. 
+ return [].slice.call(arguments, 1).join(' '); + } + + const args = [].slice.call(arguments, 2); + const parts = [strings.raw[0]]; + + for (let i = 1; i < strings.length; i++) { + parts.push(String(args[i - 1]).replace(/[{}\\]/g, '\\$&')); + parts.push(String(strings.raw[i])); + } + + return template(chalk, parts.join('')); +} + +Object.defineProperties(Chalk.prototype, styles); + +module.exports = Chalk(); // eslint-disable-line new-cap +module.exports.supportsColor = stdoutColor; +module.exports.default = module.exports; // For TypeScript diff --git a/node_modules/@babel/highlight/node_modules/chalk/index.js.flow b/node_modules/@babel/highlight/node_modules/chalk/index.js.flow new file mode 100644 index 0000000..622caaa --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/chalk/index.js.flow @@ -0,0 +1,93 @@ +// @flow strict + +type TemplateStringsArray = $ReadOnlyArray; + +export type Level = $Values<{ + None: 0, + Basic: 1, + Ansi256: 2, + TrueColor: 3 +}>; + +export type ChalkOptions = {| + enabled?: boolean, + level?: Level +|}; + +export type ColorSupport = {| + level: Level, + hasBasic: boolean, + has256: boolean, + has16m: boolean +|}; + +export interface Chalk { + (...text: string[]): string, + (text: TemplateStringsArray, ...placeholders: string[]): string, + constructor(options?: ChalkOptions): Chalk, + enabled: boolean, + level: Level, + rgb(r: number, g: number, b: number): Chalk, + hsl(h: number, s: number, l: number): Chalk, + hsv(h: number, s: number, v: number): Chalk, + hwb(h: number, w: number, b: number): Chalk, + bgHex(color: string): Chalk, + bgKeyword(color: string): Chalk, + bgRgb(r: number, g: number, b: number): Chalk, + bgHsl(h: number, s: number, l: number): Chalk, + bgHsv(h: number, s: number, v: number): Chalk, + bgHwb(h: number, w: number, b: number): Chalk, + hex(color: string): Chalk, + keyword(color: string): Chalk, + + +reset: Chalk, + +bold: Chalk, + +dim: Chalk, + +italic: Chalk, + +underline: Chalk, + +inverse: Chalk, + +hidden: Chalk, + +strikethrough: Chalk, + + +visible: Chalk, + + +black: Chalk, + +red: Chalk, + +green: Chalk, + +yellow: Chalk, + +blue: Chalk, + +magenta: Chalk, + +cyan: Chalk, + +white: Chalk, + +gray: Chalk, + +grey: Chalk, + +blackBright: Chalk, + +redBright: Chalk, + +greenBright: Chalk, + +yellowBright: Chalk, + +blueBright: Chalk, + +magentaBright: Chalk, + +cyanBright: Chalk, + +whiteBright: Chalk, + + +bgBlack: Chalk, + +bgRed: Chalk, + +bgGreen: Chalk, + +bgYellow: Chalk, + +bgBlue: Chalk, + +bgMagenta: Chalk, + +bgCyan: Chalk, + +bgWhite: Chalk, + +bgBlackBright: Chalk, + +bgRedBright: Chalk, + +bgGreenBright: Chalk, + +bgYellowBright: Chalk, + +bgBlueBright: Chalk, + +bgMagentaBright: Chalk, + +bgCyanBright: Chalk, + +bgWhiteBrigh: Chalk, + + supportsColor: ColorSupport +}; + +declare module.exports: Chalk; diff --git a/node_modules/@babel/highlight/node_modules/chalk/license b/node_modules/@babel/highlight/node_modules/chalk/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/chalk/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is 
furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@babel/highlight/node_modules/chalk/package.json b/node_modules/@babel/highlight/node_modules/chalk/package.json new file mode 100644 index 0000000..bc32468 --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/chalk/package.json @@ -0,0 +1,71 @@ +{ + "name": "chalk", + "version": "2.4.2", + "description": "Terminal string styling done right", + "license": "MIT", + "repository": "chalk/chalk", + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && tsc --project types && flow --max-warnings=0 && nyc ava", + "bench": "matcha benchmark.js", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "files": [ + "index.js", + "templates.js", + "types/index.d.ts", + "index.js.flow" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "devDependencies": { + "ava": "*", + "coveralls": "^3.0.0", + "execa": "^0.9.0", + "flow-bin": "^0.68.0", + "import-fresh": "^2.0.0", + "matcha": "^0.7.0", + "nyc": "^11.0.2", + "resolve-from": "^4.0.0", + "typescript": "^2.5.3", + "xo": "*" + }, + "types": "types/index.d.ts", + "xo": { + "envs": [ + "node", + "mocha" + ], + "ignores": [ + "test/_flow.js" + ] + } +} diff --git a/node_modules/@babel/highlight/node_modules/chalk/readme.md b/node_modules/@babel/highlight/node_modules/chalk/readme.md new file mode 100644 index 0000000..d298e2c --- /dev/null +++ b/node_modules/@babel/highlight/node_modules/chalk/readme.md @@ -0,0 +1,314 @@ +

+ Chalk

+ +> Terminal string styling done right + +[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk) [![Coverage Status](https://coveralls.io/repos/github/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/github/chalk/chalk?branch=master) [![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/xojs/xo) [![Mentioned in Awesome Node.js](https://awesome.re/mentioned-badge.svg)](https://github.com/sindresorhus/awesome-nodejs) + +### [See what's new in Chalk 2](https://github.com/chalk/chalk/releases/tag/v2.0.0) + + + + +## Highlights + +- Expressive API +- Highly performant +- Ability to nest styles +- [256/Truecolor color support](#256-and-truecolor-color-support) +- Auto-detects color support +- Doesn't extend `String.prototype` +- Clean and focused +- Actively maintained +- [Used by ~23,000 packages](https://www.npmjs.com/browse/depended/chalk) as of December 31, 2017 + + +## Install + +```console +$ npm install chalk +``` + + + + + + +## Usage + +```js +const chalk = require('chalk'); + +console.log(chalk.blue('Hello world!')); +``` + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +const chalk = require('chalk'); +const log = console.log; + +// Combine styled and normal strings +log(chalk.blue('Hello') + ' World' + chalk.red('!')); + +// Compose multiple styles using the chainable API +log(chalk.blue.bgRed.bold('Hello world!')); + +// Pass in multiple arguments +log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz')); + +// Nest styles +log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!')); + +// Nest styles of the same type even (color, underline, background) +log(chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +)); + +// ES2015 template literal +log(` +CPU: ${chalk.red('90%')} +RAM: ${chalk.green('40%')} +DISK: ${chalk.yellow('70%')} +`); + +// ES2015 tagged template literal +log(chalk` +CPU: {red ${cpu.totalPercent}%} +RAM: {green ${ram.used / ram.total * 100}%} +DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} +`); + +// Use RGB colors in terminal emulators that support it. +log(chalk.keyword('orange')('Yay for orange colored text!')); +log(chalk.rgb(123, 45, 67).underline('Underlined reddish color')); +log(chalk.hex('#DEADED').bold('Bold gray!')); +``` + +Easily define your own themes: + +```js +const chalk = require('chalk'); + +const error = chalk.bold.red; +const warning = chalk.keyword('orange'); + +console.log(error('Error!')); +console.log(warning('Warning!')); +``` + +Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args): + +```js +const name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> 'Hello Sindre' +``` + + +## API + +### chalk.`' + * ) + * document.type //=> 'document' + * document.nodes.length //=> 2 + * ``` + */ +declare class Document_ extends Container { + nodes: Root[] + parent: undefined + type: 'document' + + constructor(defaults?: Document.DocumentProps) + + assign(overrides: Document.DocumentProps | object): this + clone(overrides?: Partial): Document + cloneAfter(overrides?: Partial): Document + cloneBefore(overrides?: Partial): Document + + /** + * Returns a `Result` instance representing the document’s CSS roots. 
+ * + * ```js + * const root1 = postcss.parse(css1, { from: 'a.css' }) + * const root2 = postcss.parse(css2, { from: 'b.css' }) + * const document = postcss.document() + * document.append(root1) + * document.append(root2) + * const result = document.toResult({ to: 'all.css', map: true }) + * ``` + * + * @param opts Options. + * @return Result with current document’s CSS. + */ + toResult(options?: ProcessOptions): Result +} + +declare class Document extends Document_ {} + +export = Document diff --git a/node_modules/postcss/lib/document.js b/node_modules/postcss/lib/document.js new file mode 100644 index 0000000..4468991 --- /dev/null +++ b/node_modules/postcss/lib/document.js @@ -0,0 +1,33 @@ +'use strict' + +let Container = require('./container') + +let LazyResult, Processor + +class Document extends Container { + constructor(defaults) { + // type needs to be passed to super, otherwise child roots won't be normalized correctly + super({ type: 'document', ...defaults }) + + if (!this.nodes) { + this.nodes = [] + } + } + + toResult(opts = {}) { + let lazy = new LazyResult(new Processor(), this, opts) + + return lazy.stringify() + } +} + +Document.registerLazyResult = dependant => { + LazyResult = dependant +} + +Document.registerProcessor = dependant => { + Processor = dependant +} + +module.exports = Document +Document.default = Document diff --git a/node_modules/postcss/lib/fromJSON.d.ts b/node_modules/postcss/lib/fromJSON.d.ts new file mode 100644 index 0000000..e1deedb --- /dev/null +++ b/node_modules/postcss/lib/fromJSON.d.ts @@ -0,0 +1,9 @@ +import { JSONHydrator } from './postcss.js' + +interface FromJSON extends JSONHydrator { + default: FromJSON +} + +declare const fromJSON: FromJSON + +export = fromJSON diff --git a/node_modules/postcss/lib/fromJSON.js b/node_modules/postcss/lib/fromJSON.js new file mode 100644 index 0000000..09f2b89 --- /dev/null +++ b/node_modules/postcss/lib/fromJSON.js @@ -0,0 +1,54 @@ +'use strict' + +let Declaration = require('./declaration') +let PreviousMap = require('./previous-map') +let Comment = require('./comment') +let AtRule = require('./at-rule') +let Input = require('./input') +let Root = require('./root') +let Rule = require('./rule') + +function fromJSON(json, inputs) { + if (Array.isArray(json)) return json.map(n => fromJSON(n)) + + let { inputs: ownInputs, ...defaults } = json + if (ownInputs) { + inputs = [] + for (let input of ownInputs) { + let inputHydrated = { ...input, __proto__: Input.prototype } + if (inputHydrated.map) { + inputHydrated.map = { + ...inputHydrated.map, + __proto__: PreviousMap.prototype + } + } + inputs.push(inputHydrated) + } + } + if (defaults.nodes) { + defaults.nodes = json.nodes.map(n => fromJSON(n, inputs)) + } + if (defaults.source) { + let { inputId, ...source } = defaults.source + defaults.source = source + if (inputId != null) { + defaults.source.input = inputs[inputId] + } + } + if (defaults.type === 'root') { + return new Root(defaults) + } else if (defaults.type === 'decl') { + return new Declaration(defaults) + } else if (defaults.type === 'rule') { + return new Rule(defaults) + } else if (defaults.type === 'comment') { + return new Comment(defaults) + } else if (defaults.type === 'atrule') { + return new AtRule(defaults) + } else { + throw new Error('Unknown node type: ' + json.type) + } +} + +module.exports = fromJSON +fromJSON.default = fromJSON diff --git a/node_modules/postcss/lib/input.d.ts b/node_modules/postcss/lib/input.d.ts new file mode 100644 index 0000000..c718bd1 --- /dev/null +++ 
b/node_modules/postcss/lib/input.d.ts @@ -0,0 +1,194 @@ +import { CssSyntaxError, ProcessOptions } from './postcss.js' +import PreviousMap from './previous-map.js' + +declare namespace Input { + export interface FilePosition { + /** + * Column of inclusive start position in source file. + */ + column: number + + /** + * Column of exclusive end position in source file. + */ + endColumn?: number + + /** + * Line of exclusive end position in source file. + */ + endLine?: number + + /** + * Absolute path to the source file. + */ + file?: string + + /** + * Line of inclusive start position in source file. + */ + line: number + + /** + * Source code. + */ + source?: string + + /** + * URL for the source file. + */ + url: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Input_ as default } +} + +/** + * Represents the source CSS. + * + * ```js + * const root = postcss.parse(css, { from: file }) + * const input = root.source.input + * ``` + */ +declare class Input_ { + /** + * Input CSS source. + * + * ```js + * const input = postcss.parse('a{}', { from: file }).input + * input.css //=> "a{}" + * ``` + */ + css: string + + /** + * The absolute path to the CSS source file defined + * with the `from` option. + * + * ```js + * const root = postcss.parse(css, { from: 'a.css' }) + * root.source.input.file //=> '/home/ai/a.css' + * ``` + */ + file?: string + + /** + * The flag to indicate whether or not the source code has Unicode BOM. + */ + hasBOM: boolean + + /** + * The unique ID of the CSS source. It will be created if `from` option + * is not provided (because PostCSS does not know the file path). + * + * ```js + * const root = postcss.parse(css) + * root.source.input.file //=> undefined + * root.source.input.id //=> "" + * ``` + */ + id?: string + + /** + * The input source map passed from a compilation step before PostCSS + * (for example, from Sass compiler). + * + * ```js + * root.source.input.map.consumer().sources //=> ['a.sass'] + * ``` + */ + map: PreviousMap + + /** + * @param css Input CSS source. + * @param opts Process options. + */ + constructor(css: string, opts?: ProcessOptions) + + error( + message: string, + start: + | { + column: number + line: number + } + | { + offset: number + }, + end: + | { + column: number + line: number + } + | { + offset: number + }, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + + /** + * Returns `CssSyntaxError` with information about the error and its position. + */ + error( + message: string, + line: number, + column: number, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + + error( + message: string, + offset: number, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + + /** + * Converts source offset to line and column. + * + * @param offset Source offset. + */ + fromOffset(offset: number): { col: number; line: number } | null + /** + * Reads the input source map and returns a symbol position + * in the input source (e.g., in a Sass file that was compiled + * to CSS before being passed to PostCSS). Optionally takes an + * end position, exclusive. + * + * ```js + * root.source.input.origin(1, 1) //=> { file: 'a.css', line: 3, column: 1 } + * root.source.input.origin(1, 1, 1, 4) + * //=> { file: 'a.css', line: 3, column: 1, endLine: 3, endColumn: 4 } + * ``` + * + * @param line Line for inclusive start position in input CSS. + * @param column Column for inclusive start position in input CSS. 
+ * @param endLine Line for exclusive end position in input CSS. + * @param endColumn Column for exclusive end position in input CSS. + * + * @return Position in input source. + */ + origin( + line: number, + column: number, + endLine?: number, + endColumn?: number + ): false | Input.FilePosition + /** + * The CSS source identifier. Contains `Input#file` if the user + * set the `from` option, or `Input#id` if they did not. + * + * ```js + * const root = postcss.parse(css, { from: 'a.css' }) + * root.source.input.from //=> "/home/ai/a.css" + * + * const root = postcss.parse(css) + * root.source.input.from //=> "" + * ``` + */ + get from(): string +} + +declare class Input extends Input_ {} + +export = Input diff --git a/node_modules/postcss/lib/input.js b/node_modules/postcss/lib/input.js new file mode 100644 index 0000000..4b5ee5e --- /dev/null +++ b/node_modules/postcss/lib/input.js @@ -0,0 +1,248 @@ +'use strict' + +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') +let { fileURLToPath, pathToFileURL } = require('url') +let { isAbsolute, resolve } = require('path') +let { nanoid } = require('nanoid/non-secure') + +let terminalHighlight = require('./terminal-highlight') +let CssSyntaxError = require('./css-syntax-error') +let PreviousMap = require('./previous-map') + +let fromOffsetCache = Symbol('fromOffsetCache') + +let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) +let pathAvailable = Boolean(resolve && isAbsolute) + +class Input { + constructor(css, opts = {}) { + if ( + css === null || + typeof css === 'undefined' || + (typeof css === 'object' && !css.toString) + ) { + throw new Error(`PostCSS received ${css} instead of CSS string`) + } + + this.css = css.toString() + + if (this.css[0] === '\uFEFF' || this.css[0] === '\uFFFE') { + this.hasBOM = true + this.css = this.css.slice(1) + } else { + this.hasBOM = false + } + + if (opts.from) { + if ( + !pathAvailable || + /^\w+:\/\//.test(opts.from) || + isAbsolute(opts.from) + ) { + this.file = opts.from + } else { + this.file = resolve(opts.from) + } + } + + if (pathAvailable && sourceMapAvailable) { + let map = new PreviousMap(this.css, opts) + if (map.text) { + this.map = map + let file = map.consumer().file + if (!this.file && file) this.file = this.mapResolve(file) + } + } + + if (!this.file) { + this.id = '' + } + if (this.map) this.map.file = this.from + } + + error(message, line, column, opts = {}) { + let result, endLine, endColumn + + if (line && typeof line === 'object') { + let start = line + let end = column + if (typeof start.offset === 'number') { + let pos = this.fromOffset(start.offset) + line = pos.line + column = pos.col + } else { + line = start.line + column = start.column + } + if (typeof end.offset === 'number') { + let pos = this.fromOffset(end.offset) + endLine = pos.line + endColumn = pos.col + } else { + endLine = end.line + endColumn = end.column + } + } else if (!column) { + let pos = this.fromOffset(line) + line = pos.line + column = pos.col + } + + let origin = this.origin(line, column, endLine, endColumn) + if (origin) { + result = new CssSyntaxError( + message, + origin.endLine === undefined + ? origin.line + : { column: origin.column, line: origin.line }, + origin.endLine === undefined + ? origin.column + : { column: origin.endColumn, line: origin.endLine }, + origin.source, + origin.file, + opts.plugin + ) + } else { + result = new CssSyntaxError( + message, + endLine === undefined ? line : { column, line }, + endLine === undefined ? 
column : { column: endColumn, line: endLine }, + this.css, + this.file, + opts.plugin + ) + } + + result.input = { column, endColumn, endLine, line, source: this.css } + if (this.file) { + if (pathToFileURL) { + result.input.url = pathToFileURL(this.file).toString() + } + result.input.file = this.file + } + + return result + } + + fromOffset(offset) { + let lastLine, lineToIndex + if (!this[fromOffsetCache]) { + let lines = this.css.split('\n') + lineToIndex = new Array(lines.length) + let prevIndex = 0 + + for (let i = 0, l = lines.length; i < l; i++) { + lineToIndex[i] = prevIndex + prevIndex += lines[i].length + 1 + } + + this[fromOffsetCache] = lineToIndex + } else { + lineToIndex = this[fromOffsetCache] + } + lastLine = lineToIndex[lineToIndex.length - 1] + + let min = 0 + if (offset >= lastLine) { + min = lineToIndex.length - 1 + } else { + let max = lineToIndex.length - 2 + let mid + while (min < max) { + mid = min + ((max - min) >> 1) + if (offset < lineToIndex[mid]) { + max = mid - 1 + } else if (offset >= lineToIndex[mid + 1]) { + min = mid + 1 + } else { + min = mid + break + } + } + } + return { + col: offset - lineToIndex[min] + 1, + line: min + 1 + } + } + + mapResolve(file) { + if (/^\w+:\/\//.test(file)) { + return file + } + return resolve(this.map.consumer().sourceRoot || this.map.root || '.', file) + } + + origin(line, column, endLine, endColumn) { + if (!this.map) return false + let consumer = this.map.consumer() + + let from = consumer.originalPositionFor({ column, line }) + if (!from.source) return false + + let to + if (typeof endLine === 'number') { + to = consumer.originalPositionFor({ column: endColumn, line: endLine }) + } + + let fromUrl + + if (isAbsolute(from.source)) { + fromUrl = pathToFileURL(from.source) + } else { + fromUrl = new URL( + from.source, + this.map.consumer().sourceRoot || pathToFileURL(this.map.mapFile) + ) + } + + let result = { + column: from.column, + endColumn: to && to.column, + endLine: to && to.line, + line: from.line, + url: fromUrl.toString() + } + + if (fromUrl.protocol === 'file:') { + if (fileURLToPath) { + result.file = fileURLToPath(fromUrl) + } else { + /* c8 ignore next 2 */ + throw new Error(`file: protocol is not available in this PostCSS build`) + } + } + + let source = consumer.sourceContentFor(from.source) + if (source) result.source = source + + return result + } + + toJSON() { + let json = {} + for (let name of ['hasBOM', 'css', 'file', 'id']) { + if (this[name] != null) { + json[name] = this[name] + } + } + if (this.map) { + json.map = { ...this.map } + if (json.map.consumerCache) { + json.map.consumerCache = undefined + } + } + return json + } + + get from() { + return this.file || this.id + } +} + +module.exports = Input +Input.default = Input + +if (terminalHighlight && terminalHighlight.registerInput) { + terminalHighlight.registerInput(Input) +} diff --git a/node_modules/postcss/lib/lazy-result.d.ts b/node_modules/postcss/lib/lazy-result.d.ts new file mode 100644 index 0000000..dd291aa --- /dev/null +++ b/node_modules/postcss/lib/lazy-result.d.ts @@ -0,0 +1,190 @@ +import Document from './document.js' +import { SourceMap } from './postcss.js' +import Processor from './processor.js' +import Result, { Message, ResultOptions } from './result.js' +import Root from './root.js' +import Warning from './warning.js' + +declare namespace LazyResult { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { LazyResult_ as default } +} + +/** + * A Promise proxy for the result of PostCSS 
transformations. + * + * A `LazyResult` instance is returned by `Processor#process`. + * + * ```js + * const lazy = postcss([autoprefixer]).process(css) + * ``` + */ +declare class LazyResult_ + implements PromiseLike> +{ + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls onRejected for each error thrown in any plugin. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css).then(result => { + * console.log(result.css) + * }).catch(error => { + * console.error(error) + * }) + * ``` + */ + catch: Promise>['catch'] + + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls onFinally on any error or when all plugins will finish work. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css).finally(() => { + * console.log('processing ended') + * }) + * ``` + */ + finally: Promise>['finally'] + + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls `onFulfilled` with a Result instance. If a plugin throws + * an error, the `onRejected` callback will be executed. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css, { from: cssPath }).then(result => { + * console.log(result.css) + * }) + * ``` + */ + then: Promise>['then'] + + /** + * @param processor Processor used for this transformation. + * @param css CSS to parse and transform. + * @param opts Options from the `Processor#process` or `Root#toResult`. + */ + constructor(processor: Processor, css: string, opts: ResultOptions) + + /** + * Run plugin in async way and return `Result`. + * + * @return Result with output content. + */ + async(): Promise> + + /** + * Run plugin in sync way and return `Result`. + * + * @return Result with output content. + */ + sync(): Result + + /** + * Alias for the `LazyResult#css` property. + * + * ```js + * lazy + '' === lazy.css + * ``` + * + * @return Output CSS. + */ + toString(): string + + /** + * Processes input CSS through synchronous plugins + * and calls `Result#warnings`. + * + * @return Warnings from plugins. + */ + warnings(): Warning[] + + /** + * An alias for the `css` property. Use it with syntaxes + * that generate non-CSS output. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get content(): string + + /** + * Processes input CSS through synchronous plugins, converts `Root` + * to a CSS string and returns `Result#css`. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get css(): string + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#map`. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get map(): SourceMap + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#messages`. + * + * This property will only work with synchronous plugins. If the processor + * contains any asynchronous plugins it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. 
+ */ + get messages(): Message[] + + /** + * Options from the `Processor#process` call. + */ + get opts(): ResultOptions + + /** + * Returns a `Processor` instance, which will be used + * for CSS transformations. + */ + get processor(): Processor + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#root`. + * + * This property will only work with synchronous plugins. If the processor + * contains any asynchronous plugins it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get root(): RootNode + + /** + * Returns the default string description of an object. + * Required to implement the Promise interface. + */ + get [Symbol.toStringTag](): string +} + +declare class LazyResult< + RootNode = Document | Root +> extends LazyResult_ {} + +export = LazyResult diff --git a/node_modules/postcss/lib/lazy-result.js b/node_modules/postcss/lib/lazy-result.js new file mode 100644 index 0000000..126f40c --- /dev/null +++ b/node_modules/postcss/lib/lazy-result.js @@ -0,0 +1,550 @@ +'use strict' + +let { isClean, my } = require('./symbols') +let MapGenerator = require('./map-generator') +let stringify = require('./stringify') +let Container = require('./container') +let Document = require('./document') +let warnOnce = require('./warn-once') +let Result = require('./result') +let parse = require('./parse') +let Root = require('./root') + +const TYPE_TO_CLASS_NAME = { + atrule: 'AtRule', + comment: 'Comment', + decl: 'Declaration', + document: 'Document', + root: 'Root', + rule: 'Rule' +} + +const PLUGIN_PROPS = { + AtRule: true, + AtRuleExit: true, + Comment: true, + CommentExit: true, + Declaration: true, + DeclarationExit: true, + Document: true, + DocumentExit: true, + Once: true, + OnceExit: true, + postcssPlugin: true, + prepare: true, + Root: true, + RootExit: true, + Rule: true, + RuleExit: true +} + +const NOT_VISITORS = { + Once: true, + postcssPlugin: true, + prepare: true +} + +const CHILDREN = 0 + +function isPromise(obj) { + return typeof obj === 'object' && typeof obj.then === 'function' +} + +function getEvents(node) { + let key = false + let type = TYPE_TO_CLASS_NAME[node.type] + if (node.type === 'decl') { + key = node.prop.toLowerCase() + } else if (node.type === 'atrule') { + key = node.name.toLowerCase() + } + + if (key && node.append) { + return [ + type, + type + '-' + key, + CHILDREN, + type + 'Exit', + type + 'Exit-' + key + ] + } else if (key) { + return [type, type + '-' + key, type + 'Exit', type + 'Exit-' + key] + } else if (node.append) { + return [type, CHILDREN, type + 'Exit'] + } else { + return [type, type + 'Exit'] + } +} + +function toStack(node) { + let events + if (node.type === 'document') { + events = ['Document', CHILDREN, 'DocumentExit'] + } else if (node.type === 'root') { + events = ['Root', CHILDREN, 'RootExit'] + } else { + events = getEvents(node) + } + + return { + eventIndex: 0, + events, + iterator: 0, + node, + visitorIndex: 0, + visitors: [] + } +} + +function cleanMarks(node) { + node[isClean] = false + if (node.nodes) node.nodes.forEach(i => cleanMarks(i)) + return node +} + +let postcss = {} + +class LazyResult { + constructor(processor, css, opts) { + this.stringified = false + this.processed = false + + let root + if ( + typeof css === 'object' && + css !== null && + (css.type === 'root' || css.type === 'document') + ) { + root = cleanMarks(css) + } else if (css instanceof LazyResult || css instanceof Result) { + root = cleanMarks(css.root) + if (css.map) { + if (typeof opts.map 
=== 'undefined') opts.map = {} + if (!opts.map.inline) opts.map.inline = false + opts.map.prev = css.map + } + } else { + let parser = parse + if (opts.syntax) parser = opts.syntax.parse + if (opts.parser) parser = opts.parser + if (parser.parse) parser = parser.parse + + try { + root = parser(css, opts) + } catch (error) { + this.processed = true + this.error = error + } + + if (root && !root[my]) { + /* c8 ignore next 2 */ + Container.rebuild(root) + } + } + + this.result = new Result(processor, root, opts) + this.helpers = { ...postcss, postcss, result: this.result } + this.plugins = this.processor.plugins.map(plugin => { + if (typeof plugin === 'object' && plugin.prepare) { + return { ...plugin, ...plugin.prepare(this.result) } + } else { + return plugin + } + }) + } + + async() { + if (this.error) return Promise.reject(this.error) + if (this.processed) return Promise.resolve(this.result) + if (!this.processing) { + this.processing = this.runAsync() + } + return this.processing + } + + catch(onRejected) { + return this.async().catch(onRejected) + } + + finally(onFinally) { + return this.async().then(onFinally, onFinally) + } + + getAsyncError() { + throw new Error('Use process(css).then(cb) to work with async plugins') + } + + handleError(error, node) { + let plugin = this.result.lastPlugin + try { + if (node) node.addToError(error) + this.error = error + if (error.name === 'CssSyntaxError' && !error.plugin) { + error.plugin = plugin.postcssPlugin + error.setMessage() + } else if (plugin.postcssVersion) { + if (process.env.NODE_ENV !== 'production') { + let pluginName = plugin.postcssPlugin + let pluginVer = plugin.postcssVersion + let runtimeVer = this.result.processor.version + let a = pluginVer.split('.') + let b = runtimeVer.split('.') + + if (a[0] !== b[0] || parseInt(a[1]) > parseInt(b[1])) { + // eslint-disable-next-line no-console + console.error( + 'Unknown error from PostCSS plugin. Your current PostCSS ' + + 'version is ' + + runtimeVer + + ', but ' + + pluginName + + ' uses ' + + pluginVer + + '. Perhaps this is the source of the error below.' + ) + } + } + } + } catch (err) { + /* c8 ignore next 3 */ + // eslint-disable-next-line no-console + if (console && console.error) console.error(err) + } + return error + } + + prepareVisitors() { + this.listeners = {} + let add = (plugin, type, cb) => { + if (!this.listeners[type]) this.listeners[type] = [] + this.listeners[type].push([plugin, cb]) + } + for (let plugin of this.plugins) { + if (typeof plugin === 'object') { + for (let event in plugin) { + if (!PLUGIN_PROPS[event] && /^[A-Z]/.test(event)) { + throw new Error( + `Unknown event ${event} in ${plugin.postcssPlugin}. 
` + + `Try to update PostCSS (${this.processor.version} now).` + ) + } + if (!NOT_VISITORS[event]) { + if (typeof plugin[event] === 'object') { + for (let filter in plugin[event]) { + if (filter === '*') { + add(plugin, event, plugin[event][filter]) + } else { + add( + plugin, + event + '-' + filter.toLowerCase(), + plugin[event][filter] + ) + } + } + } else if (typeof plugin[event] === 'function') { + add(plugin, event, plugin[event]) + } + } + } + } + } + this.hasListener = Object.keys(this.listeners).length > 0 + } + + async runAsync() { + this.plugin = 0 + for (let i = 0; i < this.plugins.length; i++) { + let plugin = this.plugins[i] + let promise = this.runOnRoot(plugin) + if (isPromise(promise)) { + try { + await promise + } catch (error) { + throw this.handleError(error) + } + } + } + + this.prepareVisitors() + if (this.hasListener) { + let root = this.result.root + while (!root[isClean]) { + root[isClean] = true + let stack = [toStack(root)] + while (stack.length > 0) { + let promise = this.visitTick(stack) + if (isPromise(promise)) { + try { + await promise + } catch (e) { + let node = stack[stack.length - 1].node + throw this.handleError(e, node) + } + } + } + } + + if (this.listeners.OnceExit) { + for (let [plugin, visitor] of this.listeners.OnceExit) { + this.result.lastPlugin = plugin + try { + if (root.type === 'document') { + let roots = root.nodes.map(subRoot => + visitor(subRoot, this.helpers) + ) + + await Promise.all(roots) + } else { + await visitor(root, this.helpers) + } + } catch (e) { + throw this.handleError(e) + } + } + } + } + + this.processed = true + return this.stringify() + } + + runOnRoot(plugin) { + this.result.lastPlugin = plugin + try { + if (typeof plugin === 'object' && plugin.Once) { + if (this.result.root.type === 'document') { + let roots = this.result.root.nodes.map(root => + plugin.Once(root, this.helpers) + ) + + if (isPromise(roots[0])) { + return Promise.all(roots) + } + + return roots + } + + return plugin.Once(this.result.root, this.helpers) + } else if (typeof plugin === 'function') { + return plugin(this.result.root, this.result) + } + } catch (error) { + throw this.handleError(error) + } + } + + stringify() { + if (this.error) throw this.error + if (this.stringified) return this.result + this.stringified = true + + this.sync() + + let opts = this.result.opts + let str = stringify + if (opts.syntax) str = opts.syntax.stringify + if (opts.stringifier) str = opts.stringifier + if (str.stringify) str = str.stringify + + let map = new MapGenerator(str, this.result.root, this.result.opts) + let data = map.generate() + this.result.css = data[0] + this.result.map = data[1] + + return this.result + } + + sync() { + if (this.error) throw this.error + if (this.processed) return this.result + this.processed = true + + if (this.processing) { + throw this.getAsyncError() + } + + for (let plugin of this.plugins) { + let promise = this.runOnRoot(plugin) + if (isPromise(promise)) { + throw this.getAsyncError() + } + } + + this.prepareVisitors() + if (this.hasListener) { + let root = this.result.root + while (!root[isClean]) { + root[isClean] = true + this.walkSync(root) + } + if (this.listeners.OnceExit) { + if (root.type === 'document') { + for (let subRoot of root.nodes) { + this.visitSync(this.listeners.OnceExit, subRoot) + } + } else { + this.visitSync(this.listeners.OnceExit, root) + } + } + } + + return this.result + } + + then(onFulfilled, onRejected) { + if (process.env.NODE_ENV !== 'production') { + if (!('from' in this.opts)) { + warnOnce( + 
'Without `from` option PostCSS could generate wrong source map ' + + 'and will not find Browserslist config. Set it to CSS file path ' + + 'or to `undefined` to prevent this warning.' + ) + } + } + return this.async().then(onFulfilled, onRejected) + } + + toString() { + return this.css + } + + visitSync(visitors, node) { + for (let [plugin, visitor] of visitors) { + this.result.lastPlugin = plugin + let promise + try { + promise = visitor(node, this.helpers) + } catch (e) { + throw this.handleError(e, node.proxyOf) + } + if (node.type !== 'root' && node.type !== 'document' && !node.parent) { + return true + } + if (isPromise(promise)) { + throw this.getAsyncError() + } + } + } + + visitTick(stack) { + let visit = stack[stack.length - 1] + let { node, visitors } = visit + + if (node.type !== 'root' && node.type !== 'document' && !node.parent) { + stack.pop() + return + } + + if (visitors.length > 0 && visit.visitorIndex < visitors.length) { + let [plugin, visitor] = visitors[visit.visitorIndex] + visit.visitorIndex += 1 + if (visit.visitorIndex === visitors.length) { + visit.visitors = [] + visit.visitorIndex = 0 + } + this.result.lastPlugin = plugin + try { + return visitor(node.toProxy(), this.helpers) + } catch (e) { + throw this.handleError(e, node) + } + } + + if (visit.iterator !== 0) { + let iterator = visit.iterator + let child + while ((child = node.nodes[node.indexes[iterator]])) { + node.indexes[iterator] += 1 + if (!child[isClean]) { + child[isClean] = true + stack.push(toStack(child)) + return + } + } + visit.iterator = 0 + delete node.indexes[iterator] + } + + let events = visit.events + while (visit.eventIndex < events.length) { + let event = events[visit.eventIndex] + visit.eventIndex += 1 + if (event === CHILDREN) { + if (node.nodes && node.nodes.length) { + node[isClean] = true + visit.iterator = node.getIterator() + } + return + } else if (this.listeners[event]) { + visit.visitors = this.listeners[event] + return + } + } + stack.pop() + } + + walkSync(node) { + node[isClean] = true + let events = getEvents(node) + for (let event of events) { + if (event === CHILDREN) { + if (node.nodes) { + node.each(child => { + if (!child[isClean]) this.walkSync(child) + }) + } + } else { + let visitors = this.listeners[event] + if (visitors) { + if (this.visitSync(visitors, node.toProxy())) return + } + } + } + } + + warnings() { + return this.sync().warnings() + } + + get content() { + return this.stringify().content + } + + get css() { + return this.stringify().css + } + + get map() { + return this.stringify().map + } + + get messages() { + return this.sync().messages + } + + get opts() { + return this.result.opts + } + + get processor() { + return this.result.processor + } + + get root() { + return this.sync().root + } + + get [Symbol.toStringTag]() { + return 'LazyResult' + } +} + +LazyResult.registerPostcss = dependant => { + postcss = dependant +} + +module.exports = LazyResult +LazyResult.default = LazyResult + +Root.registerLazyResult(LazyResult) +Document.registerLazyResult(LazyResult) diff --git a/node_modules/postcss/lib/list.d.ts b/node_modules/postcss/lib/list.d.ts new file mode 100644 index 0000000..1a74d74 --- /dev/null +++ b/node_modules/postcss/lib/list.d.ts @@ -0,0 +1,57 @@ +declare namespace list { + type List = { + /** + * Safely splits comma-separated values (such as those for `transition-*` + * and `background` properties). 
+ * + * ```js + * Once (root, { list }) { + * list.comma('black, linear-gradient(white, black)') + * //=> ['black', 'linear-gradient(white, black)'] + * } + * ``` + * + * @param str Comma-separated values. + * @return Split values. + */ + comma(str: string): string[] + + default: List + + /** + * Safely splits space-separated values (such as those for `background`, + * `border-radius`, and other shorthand properties). + * + * ```js + * Once (root, { list }) { + * list.space('1px calc(10% + 1px)') //=> ['1px', 'calc(10% + 1px)'] + * } + * ``` + * + * @param str Space-separated values. + * @return Split values. + */ + space(str: string): string[] + + /** + * Safely splits values. + * + * ```js + * Once (root, { list }) { + * list.split('1px calc(10% + 1px)', [' ', '\n', '\t']) //=> ['1px', 'calc(10% + 1px)'] + * } + * ``` + * + * @param string separated values. + * @param separators array of separators. + * @param last boolean indicator. + * @return Split values. + */ + split(string: string, separators: string[], last: boolean): string[] + } +} + +// eslint-disable-next-line @typescript-eslint/no-redeclare +declare const list: list.List + +export = list diff --git a/node_modules/postcss/lib/list.js b/node_modules/postcss/lib/list.js new file mode 100644 index 0000000..1b31f98 --- /dev/null +++ b/node_modules/postcss/lib/list.js @@ -0,0 +1,58 @@ +'use strict' + +let list = { + comma(string) { + return list.split(string, [','], true) + }, + + space(string) { + let spaces = [' ', '\n', '\t'] + return list.split(string, spaces) + }, + + split(string, separators, last) { + let array = [] + let current = '' + let split = false + + let func = 0 + let inQuote = false + let prevQuote = '' + let escape = false + + for (let letter of string) { + if (escape) { + escape = false + } else if (letter === '\\') { + escape = true + } else if (inQuote) { + if (letter === prevQuote) { + inQuote = false + } + } else if (letter === '"' || letter === "'") { + inQuote = true + prevQuote = letter + } else if (letter === '(') { + func += 1 + } else if (letter === ')') { + if (func > 0) func -= 1 + } else if (func === 0) { + if (separators.includes(letter)) split = true + } + + if (split) { + if (current !== '') array.push(current.trim()) + current = '' + split = false + } else { + current += letter + } + } + + if (last || current !== '') array.push(current.trim()) + return array + } +} + +module.exports = list +list.default = list diff --git a/node_modules/postcss/lib/map-generator.js b/node_modules/postcss/lib/map-generator.js new file mode 100644 index 0000000..71b21ca --- /dev/null +++ b/node_modules/postcss/lib/map-generator.js @@ -0,0 +1,368 @@ +'use strict' + +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') +let { dirname, relative, resolve, sep } = require('path') +let { pathToFileURL } = require('url') + +let Input = require('./input') + +let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) +let pathAvailable = Boolean(dirname && resolve && relative && sep) + +class MapGenerator { + constructor(stringify, root, opts, cssString) { + this.stringify = stringify + this.mapOpts = opts.map || {} + this.root = root + this.opts = opts + this.css = cssString + this.originalCSS = cssString + this.usesFileUrls = !this.mapOpts.from && this.mapOpts.absolute + + this.memoizedFileURLs = new Map() + this.memoizedPaths = new Map() + this.memoizedURLs = new Map() + } + + addAnnotation() { + let content + + if (this.isInline()) { + content = + 'data:application/json;base64,' + 
this.toBase64(this.map.toString()) + } else if (typeof this.mapOpts.annotation === 'string') { + content = this.mapOpts.annotation + } else if (typeof this.mapOpts.annotation === 'function') { + content = this.mapOpts.annotation(this.opts.to, this.root) + } else { + content = this.outputFile() + '.map' + } + let eol = '\n' + if (this.css.includes('\r\n')) eol = '\r\n' + + this.css += eol + '/*# sourceMappingURL=' + content + ' */' + } + + applyPrevMaps() { + for (let prev of this.previous()) { + let from = this.toUrl(this.path(prev.file)) + let root = prev.root || dirname(prev.file) + let map + + if (this.mapOpts.sourcesContent === false) { + map = new SourceMapConsumer(prev.text) + if (map.sourcesContent) { + map.sourcesContent = null + } + } else { + map = prev.consumer() + } + + this.map.applySourceMap(map, from, this.toUrl(this.path(root))) + } + } + + clearAnnotation() { + if (this.mapOpts.annotation === false) return + + if (this.root) { + let node + for (let i = this.root.nodes.length - 1; i >= 0; i--) { + node = this.root.nodes[i] + if (node.type !== 'comment') continue + if (node.text.indexOf('# sourceMappingURL=') === 0) { + this.root.removeChild(i) + } + } + } else if (this.css) { + this.css = this.css.replace(/\n*?\/\*#[\S\s]*?\*\/$/gm, '') + } + } + + generate() { + this.clearAnnotation() + if (pathAvailable && sourceMapAvailable && this.isMap()) { + return this.generateMap() + } else { + let result = '' + this.stringify(this.root, i => { + result += i + }) + return [result] + } + } + + generateMap() { + if (this.root) { + this.generateString() + } else if (this.previous().length === 1) { + let prev = this.previous()[0].consumer() + prev.file = this.outputFile() + this.map = SourceMapGenerator.fromSourceMap(prev, { + ignoreInvalidMapping: true + }) + } else { + this.map = new SourceMapGenerator({ + file: this.outputFile(), + ignoreInvalidMapping: true + }) + this.map.addMapping({ + generated: { column: 0, line: 1 }, + original: { column: 0, line: 1 }, + source: this.opts.from + ? 
this.toUrl(this.path(this.opts.from)) + : '' + }) + } + + if (this.isSourcesContent()) this.setSourcesContent() + if (this.root && this.previous().length > 0) this.applyPrevMaps() + if (this.isAnnotation()) this.addAnnotation() + + if (this.isInline()) { + return [this.css] + } else { + return [this.css, this.map] + } + } + + generateString() { + this.css = '' + this.map = new SourceMapGenerator({ + file: this.outputFile(), + ignoreInvalidMapping: true + }) + + let line = 1 + let column = 1 + + let noSource = '' + let mapping = { + generated: { column: 0, line: 0 }, + original: { column: 0, line: 0 }, + source: '' + } + + let lines, last + this.stringify(this.root, (str, node, type) => { + this.css += str + + if (node && type !== 'end') { + mapping.generated.line = line + mapping.generated.column = column - 1 + if (node.source && node.source.start) { + mapping.source = this.sourcePath(node) + mapping.original.line = node.source.start.line + mapping.original.column = node.source.start.column - 1 + this.map.addMapping(mapping) + } else { + mapping.source = noSource + mapping.original.line = 1 + mapping.original.column = 0 + this.map.addMapping(mapping) + } + } + + lines = str.match(/\n/g) + if (lines) { + line += lines.length + last = str.lastIndexOf('\n') + column = str.length - last + } else { + column += str.length + } + + if (node && type !== 'start') { + let p = node.parent || { raws: {} } + let childless = + node.type === 'decl' || (node.type === 'atrule' && !node.nodes) + if (!childless || node !== p.last || p.raws.semicolon) { + if (node.source && node.source.end) { + mapping.source = this.sourcePath(node) + mapping.original.line = node.source.end.line + mapping.original.column = node.source.end.column - 1 + mapping.generated.line = line + mapping.generated.column = column - 2 + this.map.addMapping(mapping) + } else { + mapping.source = noSource + mapping.original.line = 1 + mapping.original.column = 0 + mapping.generated.line = line + mapping.generated.column = column - 1 + this.map.addMapping(mapping) + } + } + } + }) + } + + isAnnotation() { + if (this.isInline()) { + return true + } + if (typeof this.mapOpts.annotation !== 'undefined') { + return this.mapOpts.annotation + } + if (this.previous().length) { + return this.previous().some(i => i.annotation) + } + return true + } + + isInline() { + if (typeof this.mapOpts.inline !== 'undefined') { + return this.mapOpts.inline + } + + let annotation = this.mapOpts.annotation + if (typeof annotation !== 'undefined' && annotation !== true) { + return false + } + + if (this.previous().length) { + return this.previous().some(i => i.inline) + } + return true + } + + isMap() { + if (typeof this.opts.map !== 'undefined') { + return !!this.opts.map + } + return this.previous().length > 0 + } + + isSourcesContent() { + if (typeof this.mapOpts.sourcesContent !== 'undefined') { + return this.mapOpts.sourcesContent + } + if (this.previous().length) { + return this.previous().some(i => i.withContent()) + } + return true + } + + outputFile() { + if (this.opts.to) { + return this.path(this.opts.to) + } else if (this.opts.from) { + return this.path(this.opts.from) + } else { + return 'to.css' + } + } + + path(file) { + if (this.mapOpts.absolute) return file + if (file.charCodeAt(0) === 60 /* `<` */) return file + if (/^\w+:\/\//.test(file)) return file + let cached = this.memoizedPaths.get(file) + if (cached) return cached + + let from = this.opts.to ? dirname(this.opts.to) : '.' 
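+    // Paths are made relative to the output file's directory ('.' when `to`
+    // is unset); a string `map.annotation` re-bases them onto the annotation
+    // file's directory before `relative()` is applied below.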
+ + if (typeof this.mapOpts.annotation === 'string') { + from = dirname(resolve(from, this.mapOpts.annotation)) + } + + let path = relative(from, file) + this.memoizedPaths.set(file, path) + + return path + } + + previous() { + if (!this.previousMaps) { + this.previousMaps = [] + if (this.root) { + this.root.walk(node => { + if (node.source && node.source.input.map) { + let map = node.source.input.map + if (!this.previousMaps.includes(map)) { + this.previousMaps.push(map) + } + } + }) + } else { + let input = new Input(this.originalCSS, this.opts) + if (input.map) this.previousMaps.push(input.map) + } + } + + return this.previousMaps + } + + setSourcesContent() { + let already = {} + if (this.root) { + this.root.walk(node => { + if (node.source) { + let from = node.source.input.from + if (from && !already[from]) { + already[from] = true + let fromUrl = this.usesFileUrls + ? this.toFileUrl(from) + : this.toUrl(this.path(from)) + this.map.setSourceContent(fromUrl, node.source.input.css) + } + } + }) + } else if (this.css) { + let from = this.opts.from + ? this.toUrl(this.path(this.opts.from)) + : '' + this.map.setSourceContent(from, this.css) + } + } + + sourcePath(node) { + if (this.mapOpts.from) { + return this.toUrl(this.mapOpts.from) + } else if (this.usesFileUrls) { + return this.toFileUrl(node.source.input.from) + } else { + return this.toUrl(this.path(node.source.input.from)) + } + } + + toBase64(str) { + if (Buffer) { + return Buffer.from(str).toString('base64') + } else { + return window.btoa(unescape(encodeURIComponent(str))) + } + } + + toFileUrl(path) { + let cached = this.memoizedFileURLs.get(path) + if (cached) return cached + + if (pathToFileURL) { + let fileURL = pathToFileURL(path).toString() + this.memoizedFileURLs.set(path, fileURL) + + return fileURL + } else { + throw new Error( + '`map.absolute` option is not available in this PostCSS build' + ) + } + } + + toUrl(path) { + let cached = this.memoizedURLs.get(path) + if (cached) return cached + + if (sep === '\\') { + path = path.replace(/\\/g, '/') + } + + let url = encodeURI(path).replace(/[#?]/g, encodeURIComponent) + this.memoizedURLs.set(path, url) + + return url + } +} + +module.exports = MapGenerator diff --git a/node_modules/postcss/lib/no-work-result.d.ts b/node_modules/postcss/lib/no-work-result.d.ts new file mode 100644 index 0000000..8039076 --- /dev/null +++ b/node_modules/postcss/lib/no-work-result.d.ts @@ -0,0 +1,46 @@ +import LazyResult from './lazy-result.js' +import { SourceMap } from './postcss.js' +import Processor from './processor.js' +import Result, { Message, ResultOptions } from './result.js' +import Root from './root.js' +import Warning from './warning.js' + +declare namespace NoWorkResult { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { NoWorkResult_ as default } +} + +/** + * A Promise proxy for the result of PostCSS transformations. + * This lazy result instance doesn't parse css unless `NoWorkResult#root` or `Result#root` + * are accessed. See the example below for details. + * A `NoWork` instance is returned by `Processor#process` ONLY when no plugins defined. + * + * ```js + * const noWorkResult = postcss().process(css) // No plugins are defined. 
+ * // CSS is not parsed + * let root = noWorkResult.root // now css is parsed because we accessed the root + * ``` + */ +declare class NoWorkResult_ implements LazyResult { + catch: Promise>['catch'] + finally: Promise>['finally'] + then: Promise>['then'] + constructor(processor: Processor, css: string, opts: ResultOptions) + async(): Promise> + sync(): Result + toString(): string + warnings(): Warning[] + get content(): string + get css(): string + get map(): SourceMap + get messages(): Message[] + get opts(): ResultOptions + get processor(): Processor + get root(): Root + get [Symbol.toStringTag](): string +} + +declare class NoWorkResult extends NoWorkResult_ {} + +export = NoWorkResult diff --git a/node_modules/postcss/lib/no-work-result.js b/node_modules/postcss/lib/no-work-result.js new file mode 100644 index 0000000..05821b7 --- /dev/null +++ b/node_modules/postcss/lib/no-work-result.js @@ -0,0 +1,138 @@ +'use strict' + +let MapGenerator = require('./map-generator') +let stringify = require('./stringify') +let warnOnce = require('./warn-once') +let parse = require('./parse') +const Result = require('./result') + +class NoWorkResult { + constructor(processor, css, opts) { + css = css.toString() + this.stringified = false + + this._processor = processor + this._css = css + this._opts = opts + this._map = undefined + let root + + let str = stringify + this.result = new Result(this._processor, root, this._opts) + this.result.css = css + + let self = this + Object.defineProperty(this.result, 'root', { + get() { + return self.root + } + }) + + let map = new MapGenerator(str, root, this._opts, css) + if (map.isMap()) { + let [generatedCSS, generatedMap] = map.generate() + if (generatedCSS) { + this.result.css = generatedCSS + } + if (generatedMap) { + this.result.map = generatedMap + } + } else { + map.clearAnnotation() + this.result.css = map.css + } + } + + async() { + if (this.error) return Promise.reject(this.error) + return Promise.resolve(this.result) + } + + catch(onRejected) { + return this.async().catch(onRejected) + } + + finally(onFinally) { + return this.async().then(onFinally, onFinally) + } + + sync() { + if (this.error) throw this.error + return this.result + } + + then(onFulfilled, onRejected) { + if (process.env.NODE_ENV !== 'production') { + if (!('from' in this._opts)) { + warnOnce( + 'Without `from` option PostCSS could generate wrong source map ' + + 'and will not find Browserslist config. Set it to CSS file path ' + + 'or to `undefined` to prevent this warning.' 
+ ) + } + } + + return this.async().then(onFulfilled, onRejected) + } + + toString() { + return this._css + } + + warnings() { + return [] + } + + get content() { + return this.result.css + } + + get css() { + return this.result.css + } + + get map() { + return this.result.map + } + + get messages() { + return [] + } + + get opts() { + return this.result.opts + } + + get processor() { + return this.result.processor + } + + get root() { + if (this._root) { + return this._root + } + + let root + let parser = parse + + try { + root = parser(this._css, this._opts) + } catch (error) { + this.error = error + } + + if (this.error) { + throw this.error + } else { + this._root = root + return root + } + } + + get [Symbol.toStringTag]() { + return 'NoWorkResult' + } +} + +module.exports = NoWorkResult +NoWorkResult.default = NoWorkResult diff --git a/node_modules/postcss/lib/node.d.ts b/node_modules/postcss/lib/node.d.ts new file mode 100644 index 0000000..5971656 --- /dev/null +++ b/node_modules/postcss/lib/node.d.ts @@ -0,0 +1,536 @@ +import AtRule = require('./at-rule.js') + +import { AtRuleProps } from './at-rule.js' +import Comment, { CommentProps } from './comment.js' +import Container from './container.js' +import CssSyntaxError from './css-syntax-error.js' +import Declaration, { DeclarationProps } from './declaration.js' +import Document from './document.js' +import Input from './input.js' +import { Stringifier, Syntax } from './postcss.js' +import Result from './result.js' +import Root from './root.js' +import Rule, { RuleProps } from './rule.js' +import Warning, { WarningOptions } from './warning.js' + +declare namespace Node { + export type ChildNode = AtRule.default | Comment | Declaration | Rule + + export type AnyNode = + | AtRule.default + | Comment + | Declaration + | Document + | Root + | Rule + + export type ChildProps = + | AtRuleProps + | CommentProps + | DeclarationProps + | RuleProps + + export interface Position { + /** + * Source line in file. In contrast to `offset` it starts from 1. + */ + column: number + + /** + * Source column in file. + */ + line: number + + /** + * Source offset in file. It starts from 0. + */ + offset: number + } + + export interface Range { + /** + * End position, exclusive. + */ + end: Position + + /** + * Start position, inclusive. + */ + start: Position + } + + /** + * Source represents an interface for the {@link Node.source} property. + */ + export interface Source { + /** + * The inclusive ending position for the source + * code of a node. + */ + end?: Position + + /** + * The source file from where a node has originated. + */ + input: Input + + /** + * The inclusive starting position for the source + * code of a node. + */ + start?: Position + } + + /** + * Interface represents an interface for an object received + * as parameter by Node class constructor. + */ + export interface NodeProps { + source?: Source + } + + export interface NodeErrorOptions { + /** + * An ending index inside a node's string that should be highlighted as + * source of error. + */ + endIndex?: number + /** + * An index inside a node's string that should be highlighted as source + * of error. + */ + index?: number + /** + * Plugin name that created this error. PostCSS will set it automatically. + */ + plugin?: string + /** + * A word inside a node's string, that should be highlighted as source + * of error. 
+ */ + word?: string + } + + // eslint-disable-next-line @typescript-eslint/no-shadow + class Node extends Node_ {} + export { Node as default } +} + +/** + * It represents an abstract class that handles common + * methods for other CSS abstract syntax tree nodes. + * + * Any node that represents CSS selector or value should + * not extend the `Node` class. + */ +declare abstract class Node_ { + /** + * It represents parent of the current node. + * + * ```js + * root.nodes[0].parent === root //=> true + * ``` + */ + parent: Container | Document | undefined + + /** + * It represents unnecessary whitespace and characters present + * in the css source code. + * + * Information to generate byte-to-byte equal node string as it was + * in the origin input. + * + * The properties of the raws object are decided by parser, + * the default parser uses the following properties: + * + * * `before`: the space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + * * `after`: the space symbols after the last child of the node + * to the end of the node. + * * `between`: the symbols between the property and value + * for declarations, selector and `{` for rules, or last parameter + * and `{` for at-rules. + * * `semicolon`: contains true if the last child has + * an (optional) semicolon. + * * `afterName`: the space between the at-rule name and its parameters. + * * `left`: the space symbols between `/*` and the comment’s text. + * * `right`: the space symbols between the comment’s text + * and */. + * - `important`: the content of the important statement, + * if it is not just `!important`. + * + * PostCSS filters out the comments inside selectors, declaration values + * and at-rule parameters but it stores the origin content in raws. + * + * ```js + * const root = postcss.parse('a {\n color:black\n}') + * root.first.first.raws //=> { before: '\n ', between: ':' } + * ``` + */ + raws: any + + /** + * It represents information related to origin of a node and is required + * for generating source maps. + * + * The nodes that are created manually using the public APIs + * provided by PostCSS will have `source` undefined and + * will be absent in the source map. + * + * For this reason, the plugin developer should consider + * duplicating nodes as the duplicate node will have the + * same source as the original node by default or assign + * source to a node created manually. + * + * ```js + * decl.source.input.from //=> '/home/ai/source.css' + * decl.source.start //=> { line: 10, column: 2 } + * decl.source.end //=> { line: 10, column: 12 } + * ``` + * + * ```js + * // Incorrect method, source not specified! + * const prefixed = postcss.decl({ + * prop: '-moz-' + decl.prop, + * value: decl.value + * }) + * + * // Correct method, source is inherited when duplicating. + * const prefixed = decl.clone({ + * prop: '-moz-' + decl.prop + * }) + * ``` + * + * ```js + * if (atrule.name === 'add-link') { + * const rule = postcss.rule({ + * selector: 'a', + * source: atrule.source + * }) + * + * atrule.parent.insertBefore(atrule, rule) + * } + * ``` + */ + source?: Node.Source + + /** + * It represents type of a node in + * an abstract syntax tree. + * + * A type of node helps in identification of a node + * and perform operation based on it's type. 
+ * + * ```js + * const declaration = new Declaration({ + * prop: 'color', + * value: 'black' + * }) + * + * declaration.type //=> 'decl' + * ``` + */ + type: string + + constructor(defaults?: object) + + /** + * Insert new node after current node to current node’s parent. + * + * Just alias for `node.parent.insertAfter(node, add)`. + * + * ```js + * decl.after('color: black') + * ``` + * + * @param newNode New node. + * @return This node for methods chain. + */ + after(newNode: Node | Node.ChildProps | Node[] | string | undefined): this + + /** + * It assigns properties to an existing node instance. + * + * ```js + * decl.assign({ prop: 'word-wrap', value: 'break-word' }) + * ``` + * + * @param overrides New properties to override the node. + * + * @return `this` for method chaining. + */ + assign(overrides: object): this + + /** + * Insert new node before current node to current node’s parent. + * + * Just alias for `node.parent.insertBefore(node, add)`. + * + * ```js + * decl.before('content: ""') + * ``` + * + * @param newNode New node. + * @return This node for methods chain. + */ + before(newNode: Node | Node.ChildProps | Node[] | string | undefined): this + + /** + * Clear the code style properties for the node and its children. + * + * ```js + * node.raws.before //=> ' ' + * node.cleanRaws() + * node.raws.before //=> undefined + * ``` + * + * @param keepBetween Keep the `raws.between` symbols. + */ + cleanRaws(keepBetween?: boolean): void + + /** + * It creates clone of an existing node, which includes all the properties + * and their values, that includes `raws` but not `type`. + * + * ```js + * decl.raws.before //=> "\n " + * const cloned = decl.clone({ prop: '-moz-' + decl.prop }) + * cloned.raws.before //=> "\n " + * cloned.toString() //=> -moz-transform: scale(0) + * ``` + * + * @param overrides New properties to override in the clone. + * + * @return Duplicate of the node instance. + */ + clone(overrides?: object): Node + + /** + * Shortcut to clone the node and insert the resulting cloned node + * after the current node. + * + * @param overrides New properties to override in the clone. + * @return New node. + */ + cloneAfter(overrides?: object): Node + + /** + * Shortcut to clone the node and insert the resulting cloned node + * before the current node. + * + * ```js + * decl.cloneBefore({ prop: '-moz-' + decl.prop }) + * ``` + * + * @param overrides Mew properties to override in the clone. + * + * @return New node + */ + cloneBefore(overrides?: object): Node + + /** + * It creates an instance of the class `CssSyntaxError` and parameters passed + * to this method are assigned to the error instance. + * + * The error instance will have description for the + * error, original position of the node in the + * source, showing line and column number. + * + * If any previous map is present, it would be used + * to get original position of the source. + * + * The Previous Map here is referred to the source map + * generated by previous compilation, example: Less, + * Stylus and Sass. + * + * This method returns the error instance instead of + * throwing it. + * + * ```js + * if (!variables[name]) { + * throw decl.error(`Unknown variable ${name}`, { word: name }) + * // CssSyntaxError: postcss-vars:a.sass:4:3: Unknown variable $black + * // color: $black + * // a + * // ^ + * // background: white + * } + * ``` + * + * @param message Description for the error instance. + * @param options Options for the error instance. + * + * @return Error instance is returned. 
+ */ + error(message: string, options?: Node.NodeErrorOptions): CssSyntaxError + + /** + * Returns the next child of the node’s parent. + * Returns `undefined` if the current node is the last child. + * + * ```js + * if (comment.text === 'delete next') { + * const next = comment.next() + * if (next) { + * next.remove() + * } + * } + * ``` + * + * @return Next node. + */ + next(): Node.ChildNode | undefined + + /** + * Get the position for a word or an index inside the node. + * + * @param opts Options. + * @return Position. + */ + positionBy(opts?: Pick): Node.Position + + /** + * Convert string index to line/column. + * + * @param index The symbol number in the node’s string. + * @return Symbol position in file. + */ + positionInside(index: number): Node.Position + + /** + * Returns the previous child of the node’s parent. + * Returns `undefined` if the current node is the first child. + * + * ```js + * const annotation = decl.prev() + * if (annotation.type === 'comment') { + * readAnnotation(annotation.text) + * } + * ``` + * + * @return Previous node. + */ + prev(): Node.ChildNode | undefined + + /** + * Get the range for a word or start and end index inside the node. + * The start index is inclusive; the end index is exclusive. + * + * @param opts Options. + * @return Range. + */ + rangeBy( + opts?: Pick + ): Node.Range + + /** + * Returns a `raws` value. If the node is missing + * the code style property (because the node was manually built or cloned), + * PostCSS will try to autodetect the code style property by looking + * at other nodes in the tree. + * + * ```js + * const root = postcss.parse('a { background: white }') + * root.nodes[0].append({ prop: 'color', value: 'black' }) + * root.nodes[0].nodes[1].raws.before //=> undefined + * root.nodes[0].nodes[1].raw('before') //=> ' ' + * ``` + * + * @param prop Name of code style property. + * @param defaultType Name of default value, it can be missed + * if the value is the same as prop. + * @return {string} Code style value. + */ + raw(prop: string, defaultType?: string): string + + /** + * It removes the node from its parent and deletes its parent property. + * + * ```js + * if (decl.prop.match(/^-webkit-/)) { + * decl.remove() + * } + * ``` + * + * @return `this` for method chaining. + */ + remove(): this + + /** + * Inserts node(s) before the current node and removes the current node. + * + * ```js + * AtRule: { + * mixin: atrule => { + * atrule.replaceWith(mixinRules[atrule.params]) + * } + * } + * ``` + * + * @param nodes Mode(s) to replace current one. + * @return Current node to methods chain. + */ + replaceWith( + ...nodes: ( + | Node.ChildNode + | Node.ChildNode[] + | Node.ChildProps + | Node.ChildProps[] + )[] + ): this + + /** + * Finds the Root instance of the node’s tree. + * + * ```js + * root.nodes[0].nodes[0].root() === root + * ``` + * + * @return Root parent. + */ + root(): Root + + /** + * Fix circular links on `JSON.stringify()`. + * + * @return Cleaned object. + */ + toJSON(): object + + /** + * It compiles the node to browser readable cascading style sheets string + * depending on it's type. + * + * ```js + * new Rule({ selector: 'a' }).toString() //=> "a {}" + * ``` + * + * @param stringifier A syntax to use in string generation. + * @return CSS string of this node. + */ + toString(stringifier?: Stringifier | Syntax): string + + /** + * It is a wrapper for {@link Result#warn}, providing convenient + * way of generating warnings. 
+ * + * ```js + * Declaration: { + * bad: (decl, { result }) => { + * decl.warn(result, 'Deprecated property: bad') + * } + * } + * ``` + * + * @param result The `Result` instance that will receive the warning. + * @param message Description for the warning. + * @param options Options for the warning. + * + * @return `Warning` instance is returned + */ + warn(result: Result, message: string, options?: WarningOptions): Warning +} + +declare class Node extends Node_ {} + +export = Node diff --git a/node_modules/postcss/lib/node.js b/node_modules/postcss/lib/node.js new file mode 100644 index 0000000..9e747ca --- /dev/null +++ b/node_modules/postcss/lib/node.js @@ -0,0 +1,381 @@ +'use strict' + +let { isClean, my } = require('./symbols') +let CssSyntaxError = require('./css-syntax-error') +let Stringifier = require('./stringifier') +let stringify = require('./stringify') + +function cloneNode(obj, parent) { + let cloned = new obj.constructor() + + for (let i in obj) { + if (!Object.prototype.hasOwnProperty.call(obj, i)) { + /* c8 ignore next 2 */ + continue + } + if (i === 'proxyCache') continue + let value = obj[i] + let type = typeof value + + if (i === 'parent' && type === 'object') { + if (parent) cloned[i] = parent + } else if (i === 'source') { + cloned[i] = value + } else if (Array.isArray(value)) { + cloned[i] = value.map(j => cloneNode(j, cloned)) + } else { + if (type === 'object' && value !== null) value = cloneNode(value) + cloned[i] = value + } + } + + return cloned +} + +class Node { + constructor(defaults = {}) { + this.raws = {} + this[isClean] = false + this[my] = true + + for (let name in defaults) { + if (name === 'nodes') { + this.nodes = [] + for (let node of defaults[name]) { + if (typeof node.clone === 'function') { + this.append(node.clone()) + } else { + this.append(node) + } + } + } else { + this[name] = defaults[name] + } + } + } + + addToError(error) { + error.postcssNode = this + if (error.stack && this.source && /\n\s{4}at /.test(error.stack)) { + let s = this.source + error.stack = error.stack.replace( + /\n\s{4}at /, + `$&${s.input.from}:${s.start.line}:${s.start.column}$&` + ) + } + return error + } + + after(add) { + this.parent.insertAfter(this, add) + return this + } + + assign(overrides = {}) { + for (let name in overrides) { + this[name] = overrides[name] + } + return this + } + + before(add) { + this.parent.insertBefore(this, add) + return this + } + + cleanRaws(keepBetween) { + delete this.raws.before + delete this.raws.after + if (!keepBetween) delete this.raws.between + } + + clone(overrides = {}) { + let cloned = cloneNode(this) + for (let name in overrides) { + cloned[name] = overrides[name] + } + return cloned + } + + cloneAfter(overrides = {}) { + let cloned = this.clone(overrides) + this.parent.insertAfter(this, cloned) + return cloned + } + + cloneBefore(overrides = {}) { + let cloned = this.clone(overrides) + this.parent.insertBefore(this, cloned) + return cloned + } + + error(message, opts = {}) { + if (this.source) { + let { end, start } = this.rangeBy(opts) + return this.source.input.error( + message, + { column: start.column, line: start.line }, + { column: end.column, line: end.line }, + opts + ) + } + return new CssSyntaxError(message) + } + + getProxyProcessor() { + return { + get(node, prop) { + if (prop === 'proxyOf') { + return node + } else if (prop === 'root') { + return () => node.root().toProxy() + } else { + return node[prop] + } + }, + + set(node, prop, value) { + if (node[prop] === value) return true + node[prop] = value + if ( 
+ prop === 'prop' || + prop === 'value' || + prop === 'name' || + prop === 'params' || + prop === 'important' || + /* c8 ignore next */ + prop === 'text' + ) { + node.markDirty() + } + return true + } + } + } + + markDirty() { + if (this[isClean]) { + this[isClean] = false + let next = this + while ((next = next.parent)) { + next[isClean] = false + } + } + } + + next() { + if (!this.parent) return undefined + let index = this.parent.index(this) + return this.parent.nodes[index + 1] + } + + positionBy(opts, stringRepresentation) { + let pos = this.source.start + if (opts.index) { + pos = this.positionInside(opts.index, stringRepresentation) + } else if (opts.word) { + stringRepresentation = this.toString() + let index = stringRepresentation.indexOf(opts.word) + if (index !== -1) pos = this.positionInside(index, stringRepresentation) + } + return pos + } + + positionInside(index, stringRepresentation) { + let string = stringRepresentation || this.toString() + let column = this.source.start.column + let line = this.source.start.line + + for (let i = 0; i < index; i++) { + if (string[i] === '\n') { + column = 1 + line += 1 + } else { + column += 1 + } + } + + return { column, line } + } + + prev() { + if (!this.parent) return undefined + let index = this.parent.index(this) + return this.parent.nodes[index - 1] + } + + rangeBy(opts) { + let start = { + column: this.source.start.column, + line: this.source.start.line + } + let end = this.source.end + ? { + column: this.source.end.column + 1, + line: this.source.end.line + } + : { + column: start.column + 1, + line: start.line + } + + if (opts.word) { + let stringRepresentation = this.toString() + let index = stringRepresentation.indexOf(opts.word) + if (index !== -1) { + start = this.positionInside(index, stringRepresentation) + end = this.positionInside(index + opts.word.length, stringRepresentation) + } + } else { + if (opts.start) { + start = { + column: opts.start.column, + line: opts.start.line + } + } else if (opts.index) { + start = this.positionInside(opts.index) + } + + if (opts.end) { + end = { + column: opts.end.column, + line: opts.end.line + } + } else if (typeof opts.endIndex === 'number') { + end = this.positionInside(opts.endIndex) + } else if (opts.index) { + end = this.positionInside(opts.index + 1) + } + } + + if ( + end.line < start.line || + (end.line === start.line && end.column <= start.column) + ) { + end = { column: start.column + 1, line: start.line } + } + + return { end, start } + } + + raw(prop, defaultType) { + let str = new Stringifier() + return str.raw(this, prop, defaultType) + } + + remove() { + if (this.parent) { + this.parent.removeChild(this) + } + this.parent = undefined + return this + } + + replaceWith(...nodes) { + if (this.parent) { + let bookmark = this + let foundSelf = false + for (let node of nodes) { + if (node === this) { + foundSelf = true + } else if (foundSelf) { + this.parent.insertAfter(bookmark, node) + bookmark = node + } else { + this.parent.insertBefore(bookmark, node) + } + } + + if (!foundSelf) { + this.remove() + } + } + + return this + } + + root() { + let result = this + while (result.parent && result.parent.type !== 'document') { + result = result.parent + } + return result + } + + toJSON(_, inputs) { + let fixed = {} + let emitInputs = inputs == null + inputs = inputs || new Map() + let inputsNextIndex = 0 + + for (let name in this) { + if (!Object.prototype.hasOwnProperty.call(this, name)) { + /* c8 ignore next 2 */ + continue + } + if (name === 'parent' || name === 'proxyCache') 
continue + let value = this[name] + + if (Array.isArray(value)) { + fixed[name] = value.map(i => { + if (typeof i === 'object' && i.toJSON) { + return i.toJSON(null, inputs) + } else { + return i + } + }) + } else if (typeof value === 'object' && value.toJSON) { + fixed[name] = value.toJSON(null, inputs) + } else if (name === 'source') { + let inputId = inputs.get(value.input) + if (inputId == null) { + inputId = inputsNextIndex + inputs.set(value.input, inputsNextIndex) + inputsNextIndex++ + } + fixed[name] = { + end: value.end, + inputId, + start: value.start + } + } else { + fixed[name] = value + } + } + + if (emitInputs) { + fixed.inputs = [...inputs.keys()].map(input => input.toJSON()) + } + + return fixed + } + + toProxy() { + if (!this.proxyCache) { + this.proxyCache = new Proxy(this, this.getProxyProcessor()) + } + return this.proxyCache + } + + toString(stringifier = stringify) { + if (stringifier.stringify) stringifier = stringifier.stringify + let result = '' + stringifier(this, i => { + result += i + }) + return result + } + + warn(result, text, opts) { + let data = { node: this } + for (let i in opts) data[i] = opts[i] + return result.warn(text, data) + } + + get proxyOf() { + return this + } +} + +module.exports = Node +Node.default = Node diff --git a/node_modules/postcss/lib/parse.d.ts b/node_modules/postcss/lib/parse.d.ts new file mode 100644 index 0000000..4c943a4 --- /dev/null +++ b/node_modules/postcss/lib/parse.d.ts @@ -0,0 +1,9 @@ +import { Parser } from './postcss.js' + +interface Parse extends Parser { + default: Parse +} + +declare const parse: Parse + +export = parse diff --git a/node_modules/postcss/lib/parse.js b/node_modules/postcss/lib/parse.js new file mode 100644 index 0000000..971431f --- /dev/null +++ b/node_modules/postcss/lib/parse.js @@ -0,0 +1,42 @@ +'use strict' + +let Container = require('./container') +let Parser = require('./parser') +let Input = require('./input') + +function parse(css, opts) { + let input = new Input(css, opts) + let parser = new Parser(input) + try { + parser.parse() + } catch (e) { + if (process.env.NODE_ENV !== 'production') { + if (e.name === 'CssSyntaxError' && opts && opts.from) { + if (/\.scss$/i.test(opts.from)) { + e.message += + '\nYou tried to parse SCSS with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-scss parser' + } else if (/\.sass/i.test(opts.from)) { + e.message += + '\nYou tried to parse Sass with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-sass parser' + } else if (/\.less$/i.test(opts.from)) { + e.message += + '\nYou tried to parse Less with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-less parser' + } + } + } + throw e + } + + return parser.root +} + +module.exports = parse +parse.default = parse + +Container.registerParse(parse) diff --git a/node_modules/postcss/lib/parser.js b/node_modules/postcss/lib/parser.js new file mode 100644 index 0000000..bc761de --- /dev/null +++ b/node_modules/postcss/lib/parser.js @@ -0,0 +1,609 @@ +'use strict' + +let Declaration = require('./declaration') +let tokenizer = require('./tokenize') +let Comment = require('./comment') +let AtRule = require('./at-rule') +let Root = require('./root') +let Rule = require('./rule') + +const SAFE_COMMENT_NEIGHBOR = { + empty: true, + space: true +} + +function findLastWithPosition(tokens) { + for (let i = tokens.length - 1; i >= 0; i--) { + let token = tokens[i] + let pos = token[3] || token[2] + if (pos) return pos + } +} + +class Parser { + constructor(input) { + 
this.input = input + + this.root = new Root() + this.current = this.root + this.spaces = '' + this.semicolon = false + + this.createTokenizer() + this.root.source = { input, start: { column: 1, line: 1, offset: 0 } } + } + + atrule(token) { + let node = new AtRule() + node.name = token[1].slice(1) + if (node.name === '') { + this.unnamedAtrule(node, token) + } + this.init(node, token[2]) + + let type + let prev + let shift + let last = false + let open = false + let params = [] + let brackets = [] + + while (!this.tokenizer.endOfFile()) { + token = this.tokenizer.nextToken() + type = token[0] + + if (type === '(' || type === '[') { + brackets.push(type === '(' ? ')' : ']') + } else if (type === '{' && brackets.length > 0) { + brackets.push('}') + } else if (type === brackets[brackets.length - 1]) { + brackets.pop() + } + + if (brackets.length === 0) { + if (type === ';') { + node.source.end = this.getPosition(token[2]) + node.source.end.offset++ + this.semicolon = true + break + } else if (type === '{') { + open = true + break + } else if (type === '}') { + if (params.length > 0) { + shift = params.length - 1 + prev = params[shift] + while (prev && prev[0] === 'space') { + prev = params[--shift] + } + if (prev) { + node.source.end = this.getPosition(prev[3] || prev[2]) + node.source.end.offset++ + } + } + this.end(token) + break + } else { + params.push(token) + } + } else { + params.push(token) + } + + if (this.tokenizer.endOfFile()) { + last = true + break + } + } + + node.raws.between = this.spacesAndCommentsFromEnd(params) + if (params.length) { + node.raws.afterName = this.spacesAndCommentsFromStart(params) + this.raw(node, 'params', params) + if (last) { + token = params[params.length - 1] + node.source.end = this.getPosition(token[3] || token[2]) + node.source.end.offset++ + this.spaces = node.raws.between + node.raws.between = '' + } + } else { + node.raws.afterName = '' + node.params = '' + } + + if (open) { + node.nodes = [] + this.current = node + } + } + + checkMissedSemicolon(tokens) { + let colon = this.colon(tokens) + if (colon === false) return + + let founded = 0 + let token + for (let j = colon - 1; j >= 0; j--) { + token = tokens[j] + if (token[0] !== 'space') { + founded += 1 + if (founded === 2) break + } + } + // If the token is a word, e.g. `!important`, `red` or any other valid property's value. + // Then we need to return the colon after that word token. [3] is the "end" colon of that word. + // And because we need it after that one we do +1 to get the next one. + throw this.input.error( + 'Missed semicolon', + token[0] === 'word' ? 
token[3] + 1 : token[2] + ) + } + + colon(tokens) { + let brackets = 0 + let token, type, prev + for (let [i, element] of tokens.entries()) { + token = element + type = token[0] + + if (type === '(') { + brackets += 1 + } + if (type === ')') { + brackets -= 1 + } + if (brackets === 0 && type === ':') { + if (!prev) { + this.doubleColon(token) + } else if (prev[0] === 'word' && prev[1] === 'progid') { + continue + } else { + return i + } + } + + prev = token + } + return false + } + + comment(token) { + let node = new Comment() + this.init(node, token[2]) + node.source.end = this.getPosition(token[3] || token[2]) + node.source.end.offset++ + + let text = token[1].slice(2, -2) + if (/^\s*$/.test(text)) { + node.text = '' + node.raws.left = text + node.raws.right = '' + } else { + let match = text.match(/^(\s*)([^]*\S)(\s*)$/) + node.text = match[2] + node.raws.left = match[1] + node.raws.right = match[3] + } + } + + createTokenizer() { + this.tokenizer = tokenizer(this.input) + } + + decl(tokens, customProperty) { + let node = new Declaration() + this.init(node, tokens[0][2]) + + let last = tokens[tokens.length - 1] + if (last[0] === ';') { + this.semicolon = true + tokens.pop() + } + + node.source.end = this.getPosition( + last[3] || last[2] || findLastWithPosition(tokens) + ) + node.source.end.offset++ + + while (tokens[0][0] !== 'word') { + if (tokens.length === 1) this.unknownWord(tokens) + node.raws.before += tokens.shift()[1] + } + node.source.start = this.getPosition(tokens[0][2]) + + node.prop = '' + while (tokens.length) { + let type = tokens[0][0] + if (type === ':' || type === 'space' || type === 'comment') { + break + } + node.prop += tokens.shift()[1] + } + + node.raws.between = '' + + let token + while (tokens.length) { + token = tokens.shift() + + if (token[0] === ':') { + node.raws.between += token[1] + break + } else { + if (token[0] === 'word' && /\w/.test(token[1])) { + this.unknownWord([token]) + } + node.raws.between += token[1] + } + } + + if (node.prop[0] === '_' || node.prop[0] === '*') { + node.raws.before += node.prop[0] + node.prop = node.prop.slice(1) + } + + let firstSpaces = [] + let next + while (tokens.length) { + next = tokens[0][0] + if (next !== 'space' && next !== 'comment') break + firstSpaces.push(tokens.shift()) + } + + this.precheckMissedSemicolon(tokens) + + for (let i = tokens.length - 1; i >= 0; i--) { + token = tokens[i] + if (token[1].toLowerCase() === '!important') { + node.important = true + let string = this.stringFrom(tokens, i) + string = this.spacesFromEnd(tokens) + string + if (string !== ' !important') node.raws.important = string + break + } else if (token[1].toLowerCase() === 'important') { + let cache = tokens.slice(0) + let str = '' + for (let j = i; j > 0; j--) { + let type = cache[j][0] + if (str.trim().indexOf('!') === 0 && type !== 'space') { + break + } + str = cache.pop()[1] + str + } + if (str.trim().indexOf('!') === 0) { + node.important = true + node.raws.important = str + tokens = cache + } + } + + if (token[0] !== 'space' && token[0] !== 'comment') { + break + } + } + + let hasWord = tokens.some(i => i[0] !== 'space' && i[0] !== 'comment') + + if (hasWord) { + node.raws.between += firstSpaces.map(i => i[1]).join('') + firstSpaces = [] + } + this.raw(node, 'value', firstSpaces.concat(tokens), customProperty) + + if (node.value.includes(':') && !customProperty) { + this.checkMissedSemicolon(tokens) + } + } + + doubleColon(token) { + throw this.input.error( + 'Double colon', + { offset: token[2] }, + { offset: token[2] + 
token[1].length } + ) + } + + emptyRule(token) { + let node = new Rule() + this.init(node, token[2]) + node.selector = '' + node.raws.between = '' + this.current = node + } + + end(token) { + if (this.current.nodes && this.current.nodes.length) { + this.current.raws.semicolon = this.semicolon + } + this.semicolon = false + + this.current.raws.after = (this.current.raws.after || '') + this.spaces + this.spaces = '' + + if (this.current.parent) { + this.current.source.end = this.getPosition(token[2]) + this.current.source.end.offset++ + this.current = this.current.parent + } else { + this.unexpectedClose(token) + } + } + + endFile() { + if (this.current.parent) this.unclosedBlock() + if (this.current.nodes && this.current.nodes.length) { + this.current.raws.semicolon = this.semicolon + } + this.current.raws.after = (this.current.raws.after || '') + this.spaces + this.root.source.end = this.getPosition(this.tokenizer.position()) + } + + freeSemicolon(token) { + this.spaces += token[1] + if (this.current.nodes) { + let prev = this.current.nodes[this.current.nodes.length - 1] + if (prev && prev.type === 'rule' && !prev.raws.ownSemicolon) { + prev.raws.ownSemicolon = this.spaces + this.spaces = '' + } + } + } + + // Helpers + + getPosition(offset) { + let pos = this.input.fromOffset(offset) + return { + column: pos.col, + line: pos.line, + offset + } + } + + init(node, offset) { + this.current.push(node) + node.source = { + input: this.input, + start: this.getPosition(offset) + } + node.raws.before = this.spaces + this.spaces = '' + if (node.type !== 'comment') this.semicolon = false + } + + other(start) { + let end = false + let type = null + let colon = false + let bracket = null + let brackets = [] + let customProperty = start[1].startsWith('--') + + let tokens = [] + let token = start + while (token) { + type = token[0] + tokens.push(token) + + if (type === '(' || type === '[') { + if (!bracket) bracket = token + brackets.push(type === '(' ? 
')' : ']') + } else if (customProperty && colon && type === '{') { + if (!bracket) bracket = token + brackets.push('}') + } else if (brackets.length === 0) { + if (type === ';') { + if (colon) { + this.decl(tokens, customProperty) + return + } else { + break + } + } else if (type === '{') { + this.rule(tokens) + return + } else if (type === '}') { + this.tokenizer.back(tokens.pop()) + end = true + break + } else if (type === ':') { + colon = true + } + } else if (type === brackets[brackets.length - 1]) { + brackets.pop() + if (brackets.length === 0) bracket = null + } + + token = this.tokenizer.nextToken() + } + + if (this.tokenizer.endOfFile()) end = true + if (brackets.length > 0) this.unclosedBracket(bracket) + + if (end && colon) { + if (!customProperty) { + while (tokens.length) { + token = tokens[tokens.length - 1][0] + if (token !== 'space' && token !== 'comment') break + this.tokenizer.back(tokens.pop()) + } + } + this.decl(tokens, customProperty) + } else { + this.unknownWord(tokens) + } + } + + parse() { + let token + while (!this.tokenizer.endOfFile()) { + token = this.tokenizer.nextToken() + + switch (token[0]) { + case 'space': + this.spaces += token[1] + break + + case ';': + this.freeSemicolon(token) + break + + case '}': + this.end(token) + break + + case 'comment': + this.comment(token) + break + + case 'at-word': + this.atrule(token) + break + + case '{': + this.emptyRule(token) + break + + default: + this.other(token) + break + } + } + this.endFile() + } + + precheckMissedSemicolon(/* tokens */) { + // Hook for Safe Parser + } + + raw(node, prop, tokens, customProperty) { + let token, type + let length = tokens.length + let value = '' + let clean = true + let next, prev + + for (let i = 0; i < length; i += 1) { + token = tokens[i] + type = token[0] + if (type === 'space' && i === length - 1 && !customProperty) { + clean = false + } else if (type === 'comment') { + prev = tokens[i - 1] ? tokens[i - 1][0] : 'empty' + next = tokens[i + 1] ? 
tokens[i + 1][0] : 'empty' + if (!SAFE_COMMENT_NEIGHBOR[prev] && !SAFE_COMMENT_NEIGHBOR[next]) { + if (value.slice(-1) === ',') { + clean = false + } else { + value += token[1] + } + } else { + clean = false + } + } else { + value += token[1] + } + } + if (!clean) { + let raw = tokens.reduce((all, i) => all + i[1], '') + node.raws[prop] = { raw, value } + } + node[prop] = value + } + + rule(tokens) { + tokens.pop() + + let node = new Rule() + this.init(node, tokens[0][2]) + + node.raws.between = this.spacesAndCommentsFromEnd(tokens) + this.raw(node, 'selector', tokens) + this.current = node + } + + spacesAndCommentsFromEnd(tokens) { + let lastTokenType + let spaces = '' + while (tokens.length) { + lastTokenType = tokens[tokens.length - 1][0] + if (lastTokenType !== 'space' && lastTokenType !== 'comment') break + spaces = tokens.pop()[1] + spaces + } + return spaces + } + + // Errors + + spacesAndCommentsFromStart(tokens) { + let next + let spaces = '' + while (tokens.length) { + next = tokens[0][0] + if (next !== 'space' && next !== 'comment') break + spaces += tokens.shift()[1] + } + return spaces + } + + spacesFromEnd(tokens) { + let lastTokenType + let spaces = '' + while (tokens.length) { + lastTokenType = tokens[tokens.length - 1][0] + if (lastTokenType !== 'space') break + spaces = tokens.pop()[1] + spaces + } + return spaces + } + + stringFrom(tokens, from) { + let result = '' + for (let i = from; i < tokens.length; i++) { + result += tokens[i][1] + } + tokens.splice(from, tokens.length - from) + return result + } + + unclosedBlock() { + let pos = this.current.source.start + throw this.input.error('Unclosed block', pos.line, pos.column) + } + + unclosedBracket(bracket) { + throw this.input.error( + 'Unclosed bracket', + { offset: bracket[2] }, + { offset: bracket[2] + 1 } + ) + } + + unexpectedClose(token) { + throw this.input.error( + 'Unexpected }', + { offset: token[2] }, + { offset: token[2] + 1 } + ) + } + + unknownWord(tokens) { + throw this.input.error( + 'Unknown word', + { offset: tokens[0][2] }, + { offset: tokens[0][2] + tokens[0][1].length } + ) + } + + unnamedAtrule(node, token) { + throw this.input.error( + 'At-rule without name', + { offset: token[2] }, + { offset: token[2] + token[1].length } + ) + } +} + +module.exports = Parser diff --git a/node_modules/postcss/lib/postcss.d.mts b/node_modules/postcss/lib/postcss.d.mts new file mode 100644 index 0000000..a8ca8c7 --- /dev/null +++ b/node_modules/postcss/lib/postcss.d.mts @@ -0,0 +1,72 @@ +export { + // postcss function / namespace + default, + + // Value exports from postcss.mjs + stringify, + fromJSON, + // @ts-expect-error This value exists, but it’s untyped. + plugin, + parse, + list, + + document, + comment, + atRule, + rule, + decl, + root, + + CssSyntaxError, + Declaration, + Container, + Processor, + Document, + Comment, + Warning, + AtRule, + Result, + Input, + Rule, + Root, + Node, + + // Type-only exports + AcceptedPlugin, + AnyNode, + AtRuleProps, + Builder, + ChildNode, + ChildProps, + CommentProps, + ContainerProps, + DeclarationProps, + DocumentProps, + FilePosition, + Helpers, + JSONHydrator, + Message, + NodeErrorOptions, + NodeProps, + OldPlugin, + Parser, + Plugin, + PluginCreator, + Position, + Postcss, + ProcessOptions, + RootProps, + RuleProps, + Source, + SourceMap, + SourceMapOptions, + Stringifier, + Syntax, + TransformCallback, + Transformer, + WarningOptions, + + // This is a class, but it’s not re-exported. That’s why it’s exported as type-only here. 
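+  // A type-only export is erased when these declarations are compiled, so the
+  // LazyResult class itself cannot be imported as a runtime value from this
+  // entry point; instances are only obtained via APIs such as Processor#process().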
+ type LazyResult, + +} from './postcss.js' diff --git a/node_modules/postcss/lib/postcss.d.ts b/node_modules/postcss/lib/postcss.d.ts new file mode 100644 index 0000000..49af61c --- /dev/null +++ b/node_modules/postcss/lib/postcss.d.ts @@ -0,0 +1,441 @@ +import { RawSourceMap, SourceMapGenerator } from 'source-map-js' + +import AtRule, { AtRuleProps } from './at-rule.js' +import Comment, { CommentProps } from './comment.js' +import Container, { ContainerProps } from './container.js' +import CssSyntaxError from './css-syntax-error.js' +import Declaration, { DeclarationProps } from './declaration.js' +import Document, { DocumentProps } from './document.js' +import Input, { FilePosition } from './input.js' +import LazyResult from './lazy-result.js' +import list from './list.js' +import Node, { + AnyNode, + ChildNode, + ChildProps, + NodeErrorOptions, + NodeProps, + Position, + Source +} from './node.js' +import Processor from './processor.js' +import Result, { Message } from './result.js' +import Root, { RootProps } from './root.js' +import Rule, { RuleProps } from './rule.js' +import Warning, { WarningOptions } from './warning.js' + +type DocumentProcessor = ( + document: Document, + helper: postcss.Helpers +) => Promise | void +type RootProcessor = (root: Root, helper: postcss.Helpers) => Promise | void +type DeclarationProcessor = ( + decl: Declaration, + helper: postcss.Helpers +) => Promise | void +type RuleProcessor = (rule: Rule, helper: postcss.Helpers) => Promise | void +type AtRuleProcessor = (atRule: AtRule, helper: postcss.Helpers) => Promise | void +type CommentProcessor = ( + comment: Comment, + helper: postcss.Helpers +) => Promise | void + +interface Processors { + /** + * Will be called on all`AtRule` nodes. + * + * Will be called again on node or children changes. + */ + AtRule?: { [name: string]: AtRuleProcessor } | AtRuleProcessor + + /** + * Will be called on all `AtRule` nodes, when all children will be processed. + * + * Will be called again on node or children changes. + */ + AtRuleExit?: { [name: string]: AtRuleProcessor } | AtRuleProcessor + + /** + * Will be called on all `Comment` nodes. + * + * Will be called again on node or children changes. + */ + Comment?: CommentProcessor + + /** + * Will be called on all `Comment` nodes after listeners + * for `Comment` event. + * + * Will be called again on node or children changes. + */ + CommentExit?: CommentProcessor + + /** + * Will be called on all `Declaration` nodes after listeners + * for `Declaration` event. + * + * Will be called again on node or children changes. + */ + Declaration?: { [prop: string]: DeclarationProcessor } | DeclarationProcessor + + /** + * Will be called on all `Declaration` nodes. + * + * Will be called again on node or children changes. + */ + DeclarationExit?: + | { [prop: string]: DeclarationProcessor } + | DeclarationProcessor + + /** + * Will be called on `Document` node. + * + * Will be called again on children changes. + */ + Document?: DocumentProcessor + + /** + * Will be called on `Document` node, when all children will be processed. + * + * Will be called again on children changes. + */ + DocumentExit?: DocumentProcessor + + /** + * Will be called on `Root` node once. + */ + Once?: RootProcessor + + /** + * Will be called on `Root` node once, when all children will be processed. + */ + OnceExit?: RootProcessor + + /** + * Will be called on `Root` node. + * + * Will be called again on children changes. 
+ */ + Root?: RootProcessor + + /** + * Will be called on `Root` node, when all children will be processed. + * + * Will be called again on children changes. + */ + RootExit?: RootProcessor + + /** + * Will be called on all `Rule` nodes. + * + * Will be called again on node or children changes. + */ + Rule?: RuleProcessor + + /** + * Will be called on all `Rule` nodes, when all children will be processed. + * + * Will be called again on node or children changes. + */ + RuleExit?: RuleProcessor +} + +declare namespace postcss { + export { + AnyNode, + AtRule, + AtRuleProps, + ChildNode, + ChildProps, + Comment, + CommentProps, + Container, + ContainerProps, + CssSyntaxError, + Declaration, + DeclarationProps, + Document, + DocumentProps, + FilePosition, + Input, + LazyResult, + list, + Message, + Node, + NodeErrorOptions, + NodeProps, + Position, + Processor, + Result, + Root, + RootProps, + Rule, + RuleProps, + Source, + Warning, + WarningOptions + } + + export type SourceMap = SourceMapGenerator & { + toJSON(): RawSourceMap + } + + export type Helpers = { postcss: Postcss; result: Result } & Postcss + + export interface Plugin extends Processors { + postcssPlugin: string + prepare?: (result: Result) => Processors + } + + export interface PluginCreator { + (opts?: PluginOptions): Plugin | Processor + postcss: true + } + + export interface Transformer extends TransformCallback { + postcssPlugin: string + postcssVersion: string + } + + export interface TransformCallback { + (root: Root, result: Result): Promise | void + } + + export interface OldPlugin extends Transformer { + (opts?: T): Transformer + postcss: Transformer + } + + export type AcceptedPlugin = + | { + postcss: Processor | TransformCallback + } + | OldPlugin + | Plugin + | PluginCreator + | Processor + | TransformCallback + + export interface Parser { + ( + css: { toString(): string } | string, + opts?: Pick + ): RootNode + } + + export interface Builder { + (part: string, node?: AnyNode, type?: 'end' | 'start'): void + } + + export interface Stringifier { + (node: AnyNode, builder: Builder): void + } + + export interface JSONHydrator { + (data: object): Node + (data: object[]): Node[] + } + + export interface Syntax { + /** + * Function to generate AST by string. + */ + parse?: Parser + + /** + * Class to generate string by AST. + */ + stringify?: Stringifier + } + + export interface SourceMapOptions { + /** + * Use absolute path in generated source map. + */ + absolute?: boolean + + /** + * Indicates that PostCSS should add annotation comments to the CSS. + * By default, PostCSS will always add a comment with a path + * to the source map. PostCSS will not add annotations to CSS files + * that do not contain any comments. + * + * By default, PostCSS presumes that you want to save the source map as + * `opts.to + '.map'` and will use this path in the annotation comment. + * A different path can be set by providing a string value for annotation. + * + * If you have set `inline: true`, annotation cannot be disabled. + */ + annotation?: ((file: string, root: Root) => string) | boolean | string + + /** + * Override `from` in map’s sources. + */ + from?: string + + /** + * Indicates that the source map should be embedded in the output CSS + * as a Base64-encoded comment. By default, it is `true`. + * But if all previous maps are external, not inline, PostCSS will not embed + * the map even if you do not set this option. 
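+     * When embedded, the map is appended to the end of the CSS as a
+     * `sourceMappingURL` comment containing a base64 `data:` URI.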
+ * + * If you have an inline source map, the result.map property will be empty, + * as the source map will be contained within the text of `result.css`. + */ + inline?: boolean + + /** + * Source map content from a previous processing step (e.g., Sass). + * + * PostCSS will try to read the previous source map + * automatically (based on comments within the source CSS), but you can use + * this option to identify it manually. + * + * If desired, you can omit the previous map with prev: `false`. + */ + prev?: ((file: string) => string) | boolean | object | string + + /** + * Indicates that PostCSS should set the origin content (e.g., Sass source) + * of the source map. By default, it is true. But if all previous maps do not + * contain sources content, PostCSS will also leave it out even if you + * do not set this option. + */ + sourcesContent?: boolean + } + + export interface ProcessOptions { + /** + * The path of the CSS source file. You should always set `from`, + * because it is used in source map generation and syntax error messages. + */ + from?: string | undefined + + /** + * Source map options + */ + map?: boolean | SourceMapOptions + + /** + * Function to generate AST by string. + */ + parser?: Parser | Syntax + + /** + * Class to generate string by AST. + */ + stringifier?: Stringifier | Syntax + + /** + * Object with parse and stringify. + */ + syntax?: Syntax + + /** + * The path where you'll put the output CSS file. You should always set `to` + * to generate correct source maps. + */ + to?: string + } + + export type Postcss = typeof postcss + + /** + * Default function to convert a node tree into a CSS string. + */ + export let stringify: Stringifier + + /** + * Parses source css and returns a new `Root` or `Document` node, + * which contains the source CSS nodes. + * + * ```js + * // Simple CSS concatenation with source map support + * const root1 = postcss.parse(css1, { from: file1 }) + * const root2 = postcss.parse(css2, { from: file2 }) + * root1.append(root2).toResult().css + * ``` + */ + export let parse: Parser + + /** + * Rehydrate a JSON AST (from `Node#toJSON`) back into the AST classes. + * + * ```js + * const json = root.toJSON() + * // save to file, send by network, etc + * const root2 = postcss.fromJSON(json) + * ``` + */ + export let fromJSON: JSONHydrator + + /** + * Creates a new `Comment` node. + * + * @param defaults Properties for the new node. + * @return New comment node + */ + export function comment(defaults?: CommentProps): Comment + + /** + * Creates a new `AtRule` node. + * + * @param defaults Properties for the new node. + * @return New at-rule node. + */ + export function atRule(defaults?: AtRuleProps): AtRule + + /** + * Creates a new `Declaration` node. + * + * @param defaults Properties for the new node. + * @return New declaration node. + */ + export function decl(defaults?: DeclarationProps): Declaration + + /** + * Creates a new `Rule` node. + * + * @param default Properties for the new node. + * @return New rule node. + */ + export function rule(defaults?: RuleProps): Rule + + /** + * Creates a new `Root` node. + * + * @param defaults Properties for the new node. + * @return New root node. + */ + export function root(defaults?: RootProps): Root + + /** + * Creates a new `Document` node. + * + * @param defaults Properties for the new node. + * @return New document node. 
+ */ + export function document(defaults?: DocumentProps): Document + + export { postcss as default } +} + +/** + * Create a new `Processor` instance that will apply `plugins` + * as CSS processors. + * + * ```js + * let postcss = require('postcss') + * + * postcss(plugins).process(css, { from, to }).then(result => { + * console.log(result.css) + * }) + * ``` + * + * @param plugins PostCSS plugins. + * @return Processor to process multiple CSS. + */ +declare function postcss(plugins?: postcss.AcceptedPlugin[]): Processor +declare function postcss(...plugins: postcss.AcceptedPlugin[]): Processor + +export = postcss diff --git a/node_modules/postcss/lib/postcss.js b/node_modules/postcss/lib/postcss.js new file mode 100644 index 0000000..080ee83 --- /dev/null +++ b/node_modules/postcss/lib/postcss.js @@ -0,0 +1,101 @@ +'use strict' + +let CssSyntaxError = require('./css-syntax-error') +let Declaration = require('./declaration') +let LazyResult = require('./lazy-result') +let Container = require('./container') +let Processor = require('./processor') +let stringify = require('./stringify') +let fromJSON = require('./fromJSON') +let Document = require('./document') +let Warning = require('./warning') +let Comment = require('./comment') +let AtRule = require('./at-rule') +let Result = require('./result.js') +let Input = require('./input') +let parse = require('./parse') +let list = require('./list') +let Rule = require('./rule') +let Root = require('./root') +let Node = require('./node') + +function postcss(...plugins) { + if (plugins.length === 1 && Array.isArray(plugins[0])) { + plugins = plugins[0] + } + return new Processor(plugins) +} + +postcss.plugin = function plugin(name, initializer) { + let warningPrinted = false + function creator(...args) { + // eslint-disable-next-line no-console + if (console && console.warn && !warningPrinted) { + warningPrinted = true + // eslint-disable-next-line no-console + console.warn( + name + + ': postcss.plugin was deprecated. Migration guide:\n' + + 'https://evilmartians.com/chronicles/postcss-8-plugin-migration' + ) + if (process.env.LANG && process.env.LANG.startsWith('cn')) { + /* c8 ignore next 7 */ + // eslint-disable-next-line no-console + console.warn( + name + + ': 里面 postcss.plugin 被弃用. 
迁移指南:\n' + + 'https://www.w3ctech.com/topic/2226' + ) + } + } + let transformer = initializer(...args) + transformer.postcssPlugin = name + transformer.postcssVersion = new Processor().version + return transformer + } + + let cache + Object.defineProperty(creator, 'postcss', { + get() { + if (!cache) cache = creator() + return cache + } + }) + + creator.process = function (css, processOpts, pluginOpts) { + return postcss([creator(pluginOpts)]).process(css, processOpts) + } + + return creator +} + +postcss.stringify = stringify +postcss.parse = parse +postcss.fromJSON = fromJSON +postcss.list = list + +postcss.comment = defaults => new Comment(defaults) +postcss.atRule = defaults => new AtRule(defaults) +postcss.decl = defaults => new Declaration(defaults) +postcss.rule = defaults => new Rule(defaults) +postcss.root = defaults => new Root(defaults) +postcss.document = defaults => new Document(defaults) + +postcss.CssSyntaxError = CssSyntaxError +postcss.Declaration = Declaration +postcss.Container = Container +postcss.Processor = Processor +postcss.Document = Document +postcss.Comment = Comment +postcss.Warning = Warning +postcss.AtRule = AtRule +postcss.Result = Result +postcss.Input = Input +postcss.Rule = Rule +postcss.Root = Root +postcss.Node = Node + +LazyResult.registerPostcss(postcss) + +module.exports = postcss +postcss.default = postcss diff --git a/node_modules/postcss/lib/postcss.mjs b/node_modules/postcss/lib/postcss.mjs new file mode 100644 index 0000000..3507598 --- /dev/null +++ b/node_modules/postcss/lib/postcss.mjs @@ -0,0 +1,30 @@ +import postcss from './postcss.js' + +export default postcss + +export const stringify = postcss.stringify +export const fromJSON = postcss.fromJSON +export const plugin = postcss.plugin +export const parse = postcss.parse +export const list = postcss.list + +export const document = postcss.document +export const comment = postcss.comment +export const atRule = postcss.atRule +export const rule = postcss.rule +export const decl = postcss.decl +export const root = postcss.root + +export const CssSyntaxError = postcss.CssSyntaxError +export const Declaration = postcss.Declaration +export const Container = postcss.Container +export const Processor = postcss.Processor +export const Document = postcss.Document +export const Comment = postcss.Comment +export const Warning = postcss.Warning +export const AtRule = postcss.AtRule +export const Result = postcss.Result +export const Input = postcss.Input +export const Rule = postcss.Rule +export const Root = postcss.Root +export const Node = postcss.Node diff --git a/node_modules/postcss/lib/previous-map.d.ts b/node_modules/postcss/lib/previous-map.d.ts new file mode 100644 index 0000000..23edeb5 --- /dev/null +++ b/node_modules/postcss/lib/previous-map.d.ts @@ -0,0 +1,81 @@ +import { SourceMapConsumer } from 'source-map-js' + +import { ProcessOptions } from './postcss.js' + +declare namespace PreviousMap { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { PreviousMap_ as default } +} + +/** + * Source map information from input CSS. + * For example, source map after Sass compiler. + * + * This class will automatically find source map in input CSS or in file system + * near input file (according `from` option). + * + * ```js + * const root = parse(css, { from: 'a.sass.css' }) + * root.input.map //=> PreviousMap + * ``` + */ +declare class PreviousMap_ { + /** + * `sourceMappingURL` content. + */ + annotation?: string + + /** + * The CSS source identifier. 
Contains `Input#file` if the user + * set the `from` option, or `Input#id` if they did not. + */ + file?: string + + /** + * Was source map inlined by data-uri to input CSS. + */ + inline: boolean + + /** + * Path to source map file. + */ + mapFile?: string + + /** + * The directory with source map file, if source map is in separated file. + */ + root?: string + + /** + * Source map file content. + */ + text?: string + + /** + * @param css Input CSS source. + * @param opts Process options. + */ + constructor(css: string, opts?: ProcessOptions) + + /** + * Create a instance of `SourceMapGenerator` class + * from the `source-map` library to work with source map information. + * + * It is lazy method, so it will create object only on first call + * and then it will use cache. + * + * @return Object with source map information. + */ + consumer(): SourceMapConsumer + + /** + * Does source map contains `sourcesContent` with input source text. + * + * @return Is `sourcesContent` present. + */ + withContent(): boolean +} + +declare class PreviousMap extends PreviousMap_ {} + +export = PreviousMap diff --git a/node_modules/postcss/lib/previous-map.js b/node_modules/postcss/lib/previous-map.js new file mode 100644 index 0000000..f3093df --- /dev/null +++ b/node_modules/postcss/lib/previous-map.js @@ -0,0 +1,142 @@ +'use strict' + +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') +let { existsSync, readFileSync } = require('fs') +let { dirname, join } = require('path') + +function fromBase64(str) { + if (Buffer) { + return Buffer.from(str, 'base64').toString() + } else { + /* c8 ignore next 2 */ + return window.atob(str) + } +} + +class PreviousMap { + constructor(css, opts) { + if (opts.map === false) return + this.loadAnnotation(css) + this.inline = this.startWith(this.annotation, 'data:') + + let prev = opts.map ? opts.map.prev : undefined + let text = this.loadMap(opts.from, prev) + if (!this.mapFile && opts.from) { + this.mapFile = opts.from + } + if (this.mapFile) this.root = dirname(this.mapFile) + if (text) this.text = text + } + + consumer() { + if (!this.consumerCache) { + this.consumerCache = new SourceMapConsumer(this.text) + } + return this.consumerCache + } + + decodeInline(text) { + let baseCharsetUri = /^data:application\/json;charset=utf-?8;base64,/ + let baseUri = /^data:application\/json;base64,/ + let charsetUri = /^data:application\/json;charset=utf-?8,/ + let uri = /^data:application\/json,/ + + if (charsetUri.test(text) || uri.test(text)) { + return decodeURIComponent(text.substr(RegExp.lastMatch.length)) + } + + if (baseCharsetUri.test(text) || baseUri.test(text)) { + return fromBase64(text.substr(RegExp.lastMatch.length)) + } + + let encoding = text.match(/data:application\/json;([^,]+),/)[1] + throw new Error('Unsupported source map encoding ' + encoding) + } + + getAnnotationURL(sourceMapString) { + return sourceMapString.replace(/^\/\*\s*# sourceMappingURL=/, '').trim() + } + + isMap(map) { + if (typeof map !== 'object') return false + return ( + typeof map.mappings === 'string' || + typeof map._mappings === 'string' || + Array.isArray(map.sections) + ) + } + + loadAnnotation(css) { + let comments = css.match(/\/\*\s*# sourceMappingURL=/gm) + if (!comments) return + + // sourceMappingURLs from comments, strings, etc. 
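+    // The last match is treated as the real annotation; the text between it and
+    // the following `*/` is passed to getAnnotationURL() to extract the URL.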
+ let start = css.lastIndexOf(comments.pop()) + let end = css.indexOf('*/', start) + + if (start > -1 && end > -1) { + // Locate the last sourceMappingURL to avoid pickin + this.annotation = this.getAnnotationURL(css.substring(start, end)) + } + } + + loadFile(path) { + this.root = dirname(path) + if (existsSync(path)) { + this.mapFile = path + return readFileSync(path, 'utf-8').toString().trim() + } + } + + loadMap(file, prev) { + if (prev === false) return false + + if (prev) { + if (typeof prev === 'string') { + return prev + } else if (typeof prev === 'function') { + let prevPath = prev(file) + if (prevPath) { + let map = this.loadFile(prevPath) + if (!map) { + throw new Error( + 'Unable to load previous source map: ' + prevPath.toString() + ) + } + return map + } + } else if (prev instanceof SourceMapConsumer) { + return SourceMapGenerator.fromSourceMap(prev).toString() + } else if (prev instanceof SourceMapGenerator) { + return prev.toString() + } else if (this.isMap(prev)) { + return JSON.stringify(prev) + } else { + throw new Error( + 'Unsupported previous source map format: ' + prev.toString() + ) + } + } else if (this.inline) { + return this.decodeInline(this.annotation) + } else if (this.annotation) { + let map = this.annotation + if (file) map = join(dirname(file), map) + return this.loadFile(map) + } + } + + startWith(string, start) { + if (!string) return false + return string.substr(0, start.length) === start + } + + withContent() { + return !!( + this.consumer().sourcesContent && + this.consumer().sourcesContent.length > 0 + ) + } +} + +module.exports = PreviousMap +PreviousMap.default = PreviousMap diff --git a/node_modules/postcss/lib/processor.d.ts b/node_modules/postcss/lib/processor.d.ts new file mode 100644 index 0000000..50c9a07 --- /dev/null +++ b/node_modules/postcss/lib/processor.d.ts @@ -0,0 +1,115 @@ +import Document from './document.js' +import LazyResult from './lazy-result.js' +import NoWorkResult from './no-work-result.js' +import { + AcceptedPlugin, + Plugin, + ProcessOptions, + TransformCallback, + Transformer +} from './postcss.js' +import Result from './result.js' +import Root from './root.js' + +declare namespace Processor { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Processor_ as default } +} + +/** + * Contains plugins to process CSS. Create one `Processor` instance, + * initialize its plugins, and then use that instance on numerous CSS files. + * + * ```js + * const processor = postcss([autoprefixer, postcssNested]) + * processor.process(css1).then(result => console.log(result.css)) + * processor.process(css2).then(result => console.log(result.css)) + * ``` + */ +declare class Processor_ { + /** + * Plugins added to this processor. + * + * ```js + * const processor = postcss([autoprefixer, postcssNested]) + * processor.plugins.length //=> 2 + * ``` + */ + plugins: (Plugin | TransformCallback | Transformer)[] + + /** + * Current PostCSS version. + * + * ```js + * if (result.processor.version.split('.')[0] !== '6') { + * throw new Error('This plugin works only with PostCSS 6') + * } + * ``` + */ + version: string + + /** + * @param plugins PostCSS plugins + */ + constructor(plugins?: AcceptedPlugin[]) + + /** + * Parses source CSS and returns a `LazyResult` Promise proxy. + * Because some plugins can be asynchronous it doesn’t make + * any transformations. Transformations will be applied + * in the `LazyResult` methods. 
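+   * When the processor has no plugins and no custom syntax is given,
+   * a lightweight `NoWorkResult` is returned instead (see the overloads below).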
+ * + * ```js + * processor.process(css, { from: 'a.css', to: 'a.out.css' }) + * .then(result => { + * console.log(result.css) + * }) + * ``` + * + * @param css String with input CSS or any object with a `toString()` method, + * like a Buffer. Optionally, send a `Result` instance + * and the processor will take the `Root` from it. + * @param opts Options. + * @return Promise proxy. + */ + process( + css: { toString(): string } | LazyResult | Result | Root | string + ): LazyResult | NoWorkResult + process( + css: { toString(): string } | LazyResult | Result | Root | string, + options: ProcessOptions + ): LazyResult + + /** + * Adds a plugin to be used as a CSS processor. + * + * PostCSS plugin can be in 4 formats: + * * A plugin in `Plugin` format. + * * A plugin creator function with `pluginCreator.postcss = true`. + * PostCSS will call this function without argument to get plugin. + * * A function. PostCSS will pass the function a {@link Root} + * as the first argument and current `Result` instance + * as the second. + * * Another `Processor` instance. PostCSS will copy plugins + * from that instance into this one. + * + * Plugins can also be added by passing them as arguments when creating + * a `postcss` instance (see [`postcss(plugins)`]). + * + * Asynchronous plugins should return a `Promise` instance. + * + * ```js + * const processor = postcss() + * .use(autoprefixer) + * .use(postcssNested) + * ``` + * + * @param plugin PostCSS plugin or `Processor` with plugins. + * @return Current processor to make methods chain. + */ + use(plugin: AcceptedPlugin): this +} + +declare class Processor extends Processor_ {} + +export = Processor diff --git a/node_modules/postcss/lib/processor.js b/node_modules/postcss/lib/processor.js new file mode 100644 index 0000000..d8a16f4 --- /dev/null +++ b/node_modules/postcss/lib/processor.js @@ -0,0 +1,67 @@ +'use strict' + +let NoWorkResult = require('./no-work-result') +let LazyResult = require('./lazy-result') +let Document = require('./document') +let Root = require('./root') + +class Processor { + constructor(plugins = []) { + this.version = '8.4.38' + this.plugins = this.normalize(plugins) + } + + normalize(plugins) { + let normalized = [] + for (let i of plugins) { + if (i.postcss === true) { + i = i() + } else if (i.postcss) { + i = i.postcss + } + + if (typeof i === 'object' && Array.isArray(i.plugins)) { + normalized = normalized.concat(i.plugins) + } else if (typeof i === 'object' && i.postcssPlugin) { + normalized.push(i) + } else if (typeof i === 'function') { + normalized.push(i) + } else if (typeof i === 'object' && (i.parse || i.stringify)) { + if (process.env.NODE_ENV !== 'production') { + throw new Error( + 'PostCSS syntaxes cannot be used as plugins. Instead, please use ' + + 'one of the syntax/parser/stringifier options as outlined ' + + 'in your PostCSS runner documentation.' 
+ ) + } + } else { + throw new Error(i + ' is not a PostCSS plugin') + } + } + return normalized + } + + process(css, opts = {}) { + if ( + !this.plugins.length && + !opts.parser && + !opts.stringifier && + !opts.syntax + ) { + return new NoWorkResult(this, css, opts) + } else { + return new LazyResult(this, css, opts) + } + } + + use(plugin) { + this.plugins = this.plugins.concat(this.normalize([plugin])) + return this + } +} + +module.exports = Processor +Processor.default = Processor + +Root.registerProcessor(Processor) +Document.registerProcessor(Processor) diff --git a/node_modules/postcss/lib/result.d.ts b/node_modules/postcss/lib/result.d.ts new file mode 100644 index 0000000..c3dcbda --- /dev/null +++ b/node_modules/postcss/lib/result.d.ts @@ -0,0 +1,206 @@ +import { + Document, + Node, + Plugin, + ProcessOptions, + Root, + SourceMap, + TransformCallback, + Warning, + WarningOptions +} from './postcss.js' +import Processor from './processor.js' + +declare namespace Result { + export interface Message { + [others: string]: any + + /** + * Source PostCSS plugin name. + */ + plugin?: string + + /** + * Message type. + */ + type: string + } + + export interface ResultOptions extends ProcessOptions { + /** + * The CSS node that was the source of the warning. + */ + node?: Node + + /** + * Name of plugin that created this warning. `Result#warn` will fill it + * automatically with `Plugin#postcssPlugin` value. + */ + plugin?: string + } + + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Result_ as default } +} + +/** + * Provides the result of the PostCSS transformations. + * + * A Result instance is returned by `LazyResult#then` + * or `Root#toResult` methods. + * + * ```js + * postcss([autoprefixer]).process(css).then(result => { + * console.log(result.css) + * }) + * ``` + * + * ```js + * const result2 = postcss.parse(css).toResult() + * ``` + */ +declare class Result_ { + /** + * A CSS string representing of `Result#root`. + * + * ```js + * postcss.parse('a{}').toResult().css //=> "a{}" + * ``` + */ + css: string + + /** + * Last runned PostCSS plugin. + */ + lastPlugin: Plugin | TransformCallback + + /** + * An instance of `SourceMapGenerator` class from the `source-map` library, + * representing changes to the `Result#root` instance. + * + * ```js + * result.map.toJSON() //=> { version: 3, file: 'a.css', … } + * ``` + * + * ```js + * if (result.map) { + * fs.writeFileSync(result.opts.to + '.map', result.map.toString()) + * } + * ``` + */ + map: SourceMap + + /** + * Contains messages from plugins (e.g., warnings or custom messages). + * Each message should have type and plugin properties. + * + * ```js + * AtRule: { + * import: (atRule, { result }) { + * const importedFile = parseImport(atRule) + * result.messages.push({ + * type: 'dependency', + * plugin: 'postcss-import', + * file: importedFile, + * parent: result.opts.from + * }) + * } + * } + * ``` + */ + messages: Result.Message[] + + /** + * Options from the `Processor#process` or `Root#toResult` call + * that produced this Result instance.] + * + * ```js + * root.toResult(opts).opts === opts + * ``` + */ + opts: Result.ResultOptions + + /** + * The Processor instance used for this transformation. + * + * ```js + * for (const plugin of result.processor.plugins) { + * if (plugin.postcssPlugin === 'postcss-bad') { + * throw 'postcss-good is incompatible with postcss-bad' + * } + * }) + * ``` + */ + processor: Processor + + /** + * Root node after all transformations. 
+ * + * ```js + * root.toResult().root === root + * ``` + */ + root: RootNode + + /** + * @param processor Processor used for this transformation. + * @param root Root node after all transformations. + * @param opts Options from the `Processor#process` or `Root#toResult`. + */ + constructor(processor: Processor, root: RootNode, opts: Result.ResultOptions) + + /** + * Returns for `Result#css` content. + * + * ```js + * result + '' === result.css + * ``` + * + * @return String representing of `Result#root`. + */ + toString(): string + + /** + * Creates an instance of `Warning` and adds it to `Result#messages`. + * + * ```js + * if (decl.important) { + * result.warn('Avoid !important', { node: decl, word: '!important' }) + * } + * ``` + * + * @param text Warning message. + * @param opts Warning options. + * @return Created warning. + */ + warn(message: string, options?: WarningOptions): Warning + + /** + * Returns warnings from plugins. Filters `Warning` instances + * from `Result#messages`. + * + * ```js + * result.warnings().forEach(warn => { + * console.warn(warn.toString()) + * }) + * ``` + * + * @return Warnings from plugins. + */ + warnings(): Warning[] + + /** + * An alias for the `Result#css` property. + * Use it with syntaxes that generate non-CSS output. + * + * ```js + * result.css === result.content + * ``` + */ + get content(): string +} + +declare class Result extends Result_ {} + +export = Result diff --git a/node_modules/postcss/lib/result.js b/node_modules/postcss/lib/result.js new file mode 100644 index 0000000..a39751d --- /dev/null +++ b/node_modules/postcss/lib/result.js @@ -0,0 +1,42 @@ +'use strict' + +let Warning = require('./warning') + +class Result { + constructor(processor, root, opts) { + this.processor = processor + this.messages = [] + this.root = root + this.opts = opts + this.css = undefined + this.map = undefined + } + + toString() { + return this.css + } + + warn(text, opts = {}) { + if (!opts.plugin) { + if (this.lastPlugin && this.lastPlugin.postcssPlugin) { + opts.plugin = this.lastPlugin.postcssPlugin + } + } + + let warning = new Warning(text, opts) + this.messages.push(warning) + + return warning + } + + warnings() { + return this.messages.filter(i => i.type === 'warning') + } + + get content() { + return this.css + } +} + +module.exports = Result +Result.default = Result diff --git a/node_modules/postcss/lib/root.d.ts b/node_modules/postcss/lib/root.d.ts new file mode 100644 index 0000000..9046aac --- /dev/null +++ b/node_modules/postcss/lib/root.d.ts @@ -0,0 +1,87 @@ +import Container, { ContainerProps } from './container.js' +import Document from './document.js' +import { ProcessOptions } from './postcss.js' +import Result from './result.js' + +declare namespace Root { + export interface RootRaws extends Record { + /** + * The space symbols after the last child to the end of file. + */ + after?: string + + /** + * Non-CSS code after `Root`, when `Root` is inside `Document`. + * + * **Experimental:** some aspects of this node could change within minor + * or patch version releases. + */ + codeAfter?: string + + /** + * Non-CSS code before `Root`, when `Root` is inside `Document`. + * + * **Experimental:** some aspects of this node could change within minor + * or patch version releases. + */ + codeBefore?: string + + /** + * Is the last child has an (optional) semicolon. 
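+     * (`true` when the last child was followed by a `;` in the original source.)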
+ */ + semicolon?: boolean + } + + export interface RootProps extends ContainerProps { + /** + * Information used to generate byte-to-byte equal node string + * as it was in the origin input. + * */ + raws?: RootRaws + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Root_ as default } +} + +/** + * Represents a CSS file and contains all its parsed nodes. + * + * ```js + * const root = postcss.parse('a{color:black} b{z-index:2}') + * root.type //=> 'root' + * root.nodes.length //=> 2 + * ``` + */ +declare class Root_ extends Container { + nodes: NonNullable + parent: Document | undefined + raws: Root.RootRaws + type: 'root' + + constructor(defaults?: Root.RootProps) + + assign(overrides: object | Root.RootProps): this + clone(overrides?: Partial): Root + cloneAfter(overrides?: Partial): Root + cloneBefore(overrides?: Partial): Root + + /** + * Returns a `Result` instance representing the root’s CSS. + * + * ```js + * const root1 = postcss.parse(css1, { from: 'a.css' }) + * const root2 = postcss.parse(css2, { from: 'b.css' }) + * root1.append(root2) + * const result = root1.toResult({ to: 'all.css', map: true }) + * ``` + * + * @param opts Options. + * @return Result with current root’s CSS. + */ + toResult(options?: ProcessOptions): Result +} + +declare class Root extends Root_ {} + +export = Root diff --git a/node_modules/postcss/lib/root.js b/node_modules/postcss/lib/root.js new file mode 100644 index 0000000..ea574ed --- /dev/null +++ b/node_modules/postcss/lib/root.js @@ -0,0 +1,61 @@ +'use strict' + +let Container = require('./container') + +let LazyResult, Processor + +class Root extends Container { + constructor(defaults) { + super(defaults) + this.type = 'root' + if (!this.nodes) this.nodes = [] + } + + normalize(child, sample, type) { + let nodes = super.normalize(child) + + if (sample) { + if (type === 'prepend') { + if (this.nodes.length > 1) { + sample.raws.before = this.nodes[1].raws.before + } else { + delete sample.raws.before + } + } else if (this.first !== sample) { + for (let node of nodes) { + node.raws.before = sample.raws.before + } + } + } + + return nodes + } + + removeChild(child, ignore) { + let index = this.index(child) + + if (!ignore && index === 0 && this.nodes.length > 1) { + this.nodes[1].raws.before = this.nodes[index].raws.before + } + + return super.removeChild(child) + } + + toResult(opts = {}) { + let lazy = new LazyResult(new Processor(), this, opts) + return lazy.stringify() + } +} + +Root.registerLazyResult = dependant => { + LazyResult = dependant +} + +Root.registerProcessor = dependant => { + Processor = dependant +} + +module.exports = Root +Root.default = Root + +Container.registerRoot(Root) diff --git a/node_modules/postcss/lib/rule.d.ts b/node_modules/postcss/lib/rule.d.ts new file mode 100644 index 0000000..fc5dd72 --- /dev/null +++ b/node_modules/postcss/lib/rule.d.ts @@ -0,0 +1,117 @@ +import Container, { + ContainerProps, + ContainerWithChildren +} from './container.js' + +declare namespace Rule { + export interface RuleRaws extends Record { + /** + * The space symbols after the last child of the node to the end of the node. + */ + after?: string + + /** + * The space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + */ + before?: string + + /** + * The symbols between the selector and `{` for rules. + */ + between?: string + + /** + * Contains `true` if there is semicolon after rule. 
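+     * (The raw text of that trailing semicolon is stored here, hence the
+     * `string` type.)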
+ */ + ownSemicolon?: string + + /** + * The rule’s selector with comments. + */ + selector?: { + raw: string + value: string + } + + /** + * Contains `true` if the last child has an (optional) semicolon. + */ + semicolon?: boolean + } + + export interface RuleProps extends ContainerProps { + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: RuleRaws + /** Selector or selectors of the rule. */ + selector?: string + /** Selectors of the rule represented as an array of strings. */ + selectors?: string[] + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Rule_ as default } +} + +/** + * Represents a CSS rule: a selector followed by a declaration block. + * + * ```js + * Once (root, { Rule }) { + * let a = new Rule({ selector: 'a' }) + * a.append(…) + * root.append(a) + * } + * ``` + * + * ```js + * const root = postcss.parse('a{}') + * const rule = root.first + * rule.type //=> 'rule' + * rule.toString() //=> 'a{}' + * ``` + */ +declare class Rule_ extends Container { + nodes: NonNullable + parent: ContainerWithChildren | undefined + raws: Rule.RuleRaws + /** + * The rule’s full selector represented as a string. + * + * ```js + * const root = postcss.parse('a, b { }') + * const rule = root.first + * rule.selector //=> 'a, b' + * ``` + */ + selector: string + + /** + * An array containing the rule’s individual selectors. + * Groups of selectors are split at commas. + * + * ```js + * const root = postcss.parse('a, b { }') + * const rule = root.first + * + * rule.selector //=> 'a, b' + * rule.selectors //=> ['a', 'b'] + * + * rule.selectors = ['a', 'strong'] + * rule.selector //=> 'a, strong' + * ``` + */ + selectors: string[] + + type: 'rule' + + constructor(defaults?: Rule.RuleProps) + assign(overrides: object | Rule.RuleProps): this + clone(overrides?: Partial): Rule + cloneAfter(overrides?: Partial): Rule + cloneBefore(overrides?: Partial): Rule +} + +declare class Rule extends Rule_ {} + +export = Rule diff --git a/node_modules/postcss/lib/rule.js b/node_modules/postcss/lib/rule.js new file mode 100644 index 0000000..a93ab25 --- /dev/null +++ b/node_modules/postcss/lib/rule.js @@ -0,0 +1,27 @@ +'use strict' + +let Container = require('./container') +let list = require('./list') + +class Rule extends Container { + constructor(defaults) { + super(defaults) + this.type = 'rule' + if (!this.nodes) this.nodes = [] + } + + get selectors() { + return list.comma(this.selector) + } + + set selectors(values) { + let match = this.selector ? this.selector.match(/,\s*/) : null + let sep = match ? 
match[0] : ',' + this.raw('between', 'beforeOpen') + this.selector = values.join(sep) + } +} + +module.exports = Rule +Rule.default = Rule + +Container.registerRule(Rule) diff --git a/node_modules/postcss/lib/stringifier.d.ts b/node_modules/postcss/lib/stringifier.d.ts new file mode 100644 index 0000000..f707a6a --- /dev/null +++ b/node_modules/postcss/lib/stringifier.d.ts @@ -0,0 +1,46 @@ +import { + AnyNode, + AtRule, + Builder, + Comment, + Container, + Declaration, + Document, + Root, + Rule +} from './postcss.js' + +declare namespace Stringifier { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Stringifier_ as default } +} + +declare class Stringifier_ { + builder: Builder + constructor(builder: Builder) + atrule(node: AtRule, semicolon?: boolean): void + beforeAfter(node: AnyNode, detect: 'after' | 'before'): string + block(node: AnyNode, start: string): void + body(node: Container): void + comment(node: Comment): void + decl(node: Declaration, semicolon?: boolean): void + document(node: Document): void + raw(node: AnyNode, own: null | string, detect?: string): string + rawBeforeClose(root: Root): string | undefined + rawBeforeComment(root: Root, node: Comment): string | undefined + rawBeforeDecl(root: Root, node: Declaration): string | undefined + rawBeforeOpen(root: Root): string | undefined + rawBeforeRule(root: Root): string | undefined + rawColon(root: Root): string | undefined + rawEmptyBody(root: Root): string | undefined + rawIndent(root: Root): string | undefined + rawSemicolon(root: Root): boolean | undefined + rawValue(node: AnyNode, prop: string): string + root(node: Root): void + rule(node: Rule): void + stringify(node: AnyNode, semicolon?: boolean): void +} + +declare class Stringifier extends Stringifier_ {} + +export = Stringifier diff --git a/node_modules/postcss/lib/stringifier.js b/node_modules/postcss/lib/stringifier.js new file mode 100644 index 0000000..e07ad12 --- /dev/null +++ b/node_modules/postcss/lib/stringifier.js @@ -0,0 +1,353 @@ +'use strict' + +const DEFAULT_RAW = { + after: '\n', + beforeClose: '\n', + beforeComment: '\n', + beforeDecl: '\n', + beforeOpen: ' ', + beforeRule: '\n', + colon: ': ', + commentLeft: ' ', + commentRight: ' ', + emptyBody: '', + indent: ' ', + semicolon: false +} + +function capitalize(str) { + return str[0].toUpperCase() + str.slice(1) +} + +class Stringifier { + constructor(builder) { + this.builder = builder + } + + atrule(node, semicolon) { + let name = '@' + node.name + let params = node.params ? this.rawValue(node, 'params') : '' + + if (typeof node.raws.afterName !== 'undefined') { + name += node.raws.afterName + } else if (params) { + name += ' ' + } + + if (node.nodes) { + this.block(node, name + params) + } else { + let end = (node.raws.between || '') + (semicolon ? 
';' : '') + this.builder(name + params + end, node) + } + } + + beforeAfter(node, detect) { + let value + if (node.type === 'decl') { + value = this.raw(node, null, 'beforeDecl') + } else if (node.type === 'comment') { + value = this.raw(node, null, 'beforeComment') + } else if (detect === 'before') { + value = this.raw(node, null, 'beforeRule') + } else { + value = this.raw(node, null, 'beforeClose') + } + + let buf = node.parent + let depth = 0 + while (buf && buf.type !== 'root') { + depth += 1 + buf = buf.parent + } + + if (value.includes('\n')) { + let indent = this.raw(node, null, 'indent') + if (indent.length) { + for (let step = 0; step < depth; step++) value += indent + } + } + + return value + } + + block(node, start) { + let between = this.raw(node, 'between', 'beforeOpen') + this.builder(start + between + '{', node, 'start') + + let after + if (node.nodes && node.nodes.length) { + this.body(node) + after = this.raw(node, 'after') + } else { + after = this.raw(node, 'after', 'emptyBody') + } + + if (after) this.builder(after) + this.builder('}', node, 'end') + } + + body(node) { + let last = node.nodes.length - 1 + while (last > 0) { + if (node.nodes[last].type !== 'comment') break + last -= 1 + } + + let semicolon = this.raw(node, 'semicolon') + for (let i = 0; i < node.nodes.length; i++) { + let child = node.nodes[i] + let before = this.raw(child, 'before') + if (before) this.builder(before) + this.stringify(child, last !== i || semicolon) + } + } + + comment(node) { + let left = this.raw(node, 'left', 'commentLeft') + let right = this.raw(node, 'right', 'commentRight') + this.builder('/*' + left + node.text + right + '*/', node) + } + + decl(node, semicolon) { + let between = this.raw(node, 'between', 'colon') + let string = node.prop + between + this.rawValue(node, 'value') + + if (node.important) { + string += node.raws.important || ' !important' + } + + if (semicolon) string += ';' + this.builder(string, node) + } + + document(node) { + this.body(node) + } + + raw(node, own, detect) { + let value + if (!detect) detect = own + + // Already had + if (own) { + value = node.raws[own] + if (typeof value !== 'undefined') return value + } + + let parent = node.parent + + if (detect === 'before') { + // Hack for first rule in CSS + if (!parent || (parent.type === 'root' && parent.first === node)) { + return '' + } + + // `root` nodes in `document` should use only their own raws + if (parent && parent.type === 'document') { + return '' + } + } + + // Floating child without parent + if (!parent) return DEFAULT_RAW[detect] + + // Detect style by other nodes + let root = node.root() + if (!root.rawCache) root.rawCache = {} + if (typeof root.rawCache[detect] !== 'undefined') { + return root.rawCache[detect] + } + + if (detect === 'before' || detect === 'after') { + return this.beforeAfter(node, detect) + } else { + let method = 'raw' + capitalize(detect) + if (this[method]) { + value = this[method](root, node) + } else { + root.walk(i => { + value = i.raws[own] + if (typeof value !== 'undefined') return false + }) + } + } + + if (typeof value === 'undefined') value = DEFAULT_RAW[detect] + + root.rawCache[detect] = value + return value + } + + rawBeforeClose(root) { + let value + root.walk(i => { + if (i.nodes && i.nodes.length > 0) { + if (typeof i.raws.after !== 'undefined') { + value = i.raws.after + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + } + }) + if (value) value = value.replace(/\S/g, '') + return value + } + + 
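+  // Infers the line break placed before comment nodes from an existing comment's
+  // raws; indentation is stripped here and re-added per depth in beforeAfter().
+  // Falls back to the `beforeDecl` style when no comment provides one.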
rawBeforeComment(root, node) { + let value + root.walkComments(i => { + if (typeof i.raws.before !== 'undefined') { + value = i.raws.before + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + }) + if (typeof value === 'undefined') { + value = this.raw(node, null, 'beforeDecl') + } else if (value) { + value = value.replace(/\S/g, '') + } + return value + } + + rawBeforeDecl(root, node) { + let value + root.walkDecls(i => { + if (typeof i.raws.before !== 'undefined') { + value = i.raws.before + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + }) + if (typeof value === 'undefined') { + value = this.raw(node, null, 'beforeRule') + } else if (value) { + value = value.replace(/\S/g, '') + } + return value + } + + rawBeforeOpen(root) { + let value + root.walk(i => { + if (i.type !== 'decl') { + value = i.raws.between + if (typeof value !== 'undefined') return false + } + }) + return value + } + + rawBeforeRule(root) { + let value + root.walk(i => { + if (i.nodes && (i.parent !== root || root.first !== i)) { + if (typeof i.raws.before !== 'undefined') { + value = i.raws.before + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + } + }) + if (value) value = value.replace(/\S/g, '') + return value + } + + rawColon(root) { + let value + root.walkDecls(i => { + if (typeof i.raws.between !== 'undefined') { + value = i.raws.between.replace(/[^\s:]/g, '') + return false + } + }) + return value + } + + rawEmptyBody(root) { + let value + root.walk(i => { + if (i.nodes && i.nodes.length === 0) { + value = i.raws.after + if (typeof value !== 'undefined') return false + } + }) + return value + } + + rawIndent(root) { + if (root.raws.indent) return root.raws.indent + let value + root.walk(i => { + let p = i.parent + if (p && p !== root && p.parent && p.parent === root) { + if (typeof i.raws.before !== 'undefined') { + let parts = i.raws.before.split('\n') + value = parts[parts.length - 1] + value = value.replace(/\S/g, '') + return false + } + } + }) + return value + } + + rawSemicolon(root) { + let value + root.walk(i => { + if (i.nodes && i.nodes.length && i.last.type === 'decl') { + value = i.raws.semicolon + if (typeof value !== 'undefined') return false + } + }) + return value + } + + rawValue(node, prop) { + let value = node[prop] + let raw = node.raws[prop] + if (raw && raw.value === value) { + return raw.raw + } + + return value + } + + root(node) { + this.body(node) + if (node.raws.after) this.builder(node.raws.after) + } + + rule(node) { + this.block(node, this.rawValue(node, 'selector')) + if (node.raws.ownSemicolon) { + this.builder(node.raws.ownSemicolon, node, 'end') + } + } + + stringify(node, semicolon) { + /* c8 ignore start */ + if (!this[node.type]) { + throw new Error( + 'Unknown AST node type ' + + node.type + + '. ' + + 'Maybe you need to change PostCSS stringifier.' 
+ ) + } + /* c8 ignore stop */ + this[node.type](node, semicolon) + } +} + +module.exports = Stringifier +Stringifier.default = Stringifier diff --git a/node_modules/postcss/lib/stringify.d.ts b/node_modules/postcss/lib/stringify.d.ts new file mode 100644 index 0000000..06ad0b4 --- /dev/null +++ b/node_modules/postcss/lib/stringify.d.ts @@ -0,0 +1,9 @@ +import { Stringifier } from './postcss.js' + +interface Stringify extends Stringifier { + default: Stringify +} + +declare const stringify: Stringify + +export = stringify diff --git a/node_modules/postcss/lib/stringify.js b/node_modules/postcss/lib/stringify.js new file mode 100644 index 0000000..77bd017 --- /dev/null +++ b/node_modules/postcss/lib/stringify.js @@ -0,0 +1,11 @@ +'use strict' + +let Stringifier = require('./stringifier') + +function stringify(node, builder) { + let str = new Stringifier(builder) + str.stringify(node) +} + +module.exports = stringify +stringify.default = stringify diff --git a/node_modules/postcss/lib/symbols.js b/node_modules/postcss/lib/symbols.js new file mode 100644 index 0000000..a142c26 --- /dev/null +++ b/node_modules/postcss/lib/symbols.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports.isClean = Symbol('isClean') + +module.exports.my = Symbol('my') diff --git a/node_modules/postcss/lib/terminal-highlight.js b/node_modules/postcss/lib/terminal-highlight.js new file mode 100644 index 0000000..6196c9d --- /dev/null +++ b/node_modules/postcss/lib/terminal-highlight.js @@ -0,0 +1,70 @@ +'use strict' + +let pico = require('picocolors') + +let tokenizer = require('./tokenize') + +let Input + +function registerInput(dependant) { + Input = dependant +} + +const HIGHLIGHT_THEME = { + ';': pico.yellow, + ':': pico.yellow, + '(': pico.cyan, + ')': pico.cyan, + '[': pico.yellow, + ']': pico.yellow, + '{': pico.yellow, + '}': pico.yellow, + 'at-word': pico.cyan, + 'brackets': pico.cyan, + 'call': pico.cyan, + 'class': pico.yellow, + 'comment': pico.gray, + 'hash': pico.magenta, + 'string': pico.green +} + +function getTokenType([type, value], processor) { + if (type === 'word') { + if (value[0] === '.') { + return 'class' + } + if (value[0] === '#') { + return 'hash' + } + } + + if (!processor.endOfFile()) { + let next = processor.nextToken() + processor.back(next) + if (next[0] === 'brackets' || next[0] === '(') return 'call' + } + + return type +} + +function terminalHighlight(css) { + let processor = tokenizer(new Input(css), { ignoreErrors: true }) + let result = '' + while (!processor.endOfFile()) { + let token = processor.nextToken() + let color = HIGHLIGHT_THEME[getTokenType(token, processor)] + if (color) { + result += token[1] + .split(/\r?\n/) + .map(i => color(i)) + .join('\n') + } else { + result += token[1] + } + } + return result +} + +terminalHighlight.registerInput = registerInput + +module.exports = terminalHighlight diff --git a/node_modules/postcss/lib/tokenize.js b/node_modules/postcss/lib/tokenize.js new file mode 100644 index 0000000..39a20a3 --- /dev/null +++ b/node_modules/postcss/lib/tokenize.js @@ -0,0 +1,266 @@ +'use strict' + +const SINGLE_QUOTE = "'".charCodeAt(0) +const DOUBLE_QUOTE = '"'.charCodeAt(0) +const BACKSLASH = '\\'.charCodeAt(0) +const SLASH = '/'.charCodeAt(0) +const NEWLINE = '\n'.charCodeAt(0) +const SPACE = ' '.charCodeAt(0) +const FEED = '\f'.charCodeAt(0) +const TAB = '\t'.charCodeAt(0) +const CR = '\r'.charCodeAt(0) +const OPEN_SQUARE = '['.charCodeAt(0) +const CLOSE_SQUARE = ']'.charCodeAt(0) +const OPEN_PARENTHESES = '('.charCodeAt(0) +const CLOSE_PARENTHESES = 
')'.charCodeAt(0) +const OPEN_CURLY = '{'.charCodeAt(0) +const CLOSE_CURLY = '}'.charCodeAt(0) +const SEMICOLON = ';'.charCodeAt(0) +const ASTERISK = '*'.charCodeAt(0) +const COLON = ':'.charCodeAt(0) +const AT = '@'.charCodeAt(0) + +const RE_AT_END = /[\t\n\f\r "#'()/;[\\\]{}]/g +const RE_WORD_END = /[\t\n\f\r !"#'():;@[\\\]{}]|\/(?=\*)/g +const RE_BAD_BRACKET = /.[\r\n"'(/\\]/ +const RE_HEX_ESCAPE = /[\da-f]/i + +module.exports = function tokenizer(input, options = {}) { + let css = input.css.valueOf() + let ignore = options.ignoreErrors + + let code, next, quote, content, escape + let escaped, escapePos, prev, n, currentToken + + let length = css.length + let pos = 0 + let buffer = [] + let returned = [] + + function position() { + return pos + } + + function unclosed(what) { + throw input.error('Unclosed ' + what, pos) + } + + function endOfFile() { + return returned.length === 0 && pos >= length + } + + function nextToken(opts) { + if (returned.length) return returned.pop() + if (pos >= length) return + + let ignoreUnclosed = opts ? opts.ignoreUnclosed : false + + code = css.charCodeAt(pos) + + switch (code) { + case NEWLINE: + case SPACE: + case TAB: + case CR: + case FEED: { + next = pos + do { + next += 1 + code = css.charCodeAt(next) + } while ( + code === SPACE || + code === NEWLINE || + code === TAB || + code === CR || + code === FEED + ) + + currentToken = ['space', css.slice(pos, next)] + pos = next - 1 + break + } + + case OPEN_SQUARE: + case CLOSE_SQUARE: + case OPEN_CURLY: + case CLOSE_CURLY: + case COLON: + case SEMICOLON: + case CLOSE_PARENTHESES: { + let controlChar = String.fromCharCode(code) + currentToken = [controlChar, controlChar, pos] + break + } + + case OPEN_PARENTHESES: { + prev = buffer.length ? buffer.pop()[1] : '' + n = css.charCodeAt(pos + 1) + if ( + prev === 'url' && + n !== SINGLE_QUOTE && + n !== DOUBLE_QUOTE && + n !== SPACE && + n !== NEWLINE && + n !== TAB && + n !== FEED && + n !== CR + ) { + next = pos + do { + escaped = false + next = css.indexOf(')', next + 1) + if (next === -1) { + if (ignore || ignoreUnclosed) { + next = pos + break + } else { + unclosed('bracket') + } + } + escapePos = next + while (css.charCodeAt(escapePos - 1) === BACKSLASH) { + escapePos -= 1 + escaped = !escaped + } + } while (escaped) + + currentToken = ['brackets', css.slice(pos, next + 1), pos, next] + + pos = next + } else { + next = css.indexOf(')', pos + 1) + content = css.slice(pos, next + 1) + + if (next === -1 || RE_BAD_BRACKET.test(content)) { + currentToken = ['(', '(', pos] + } else { + currentToken = ['brackets', content, pos, next] + pos = next + } + } + + break + } + + case SINGLE_QUOTE: + case DOUBLE_QUOTE: { + quote = code === SINGLE_QUOTE ? 
"'" : '"' + next = pos + do { + escaped = false + next = css.indexOf(quote, next + 1) + if (next === -1) { + if (ignore || ignoreUnclosed) { + next = pos + 1 + break + } else { + unclosed('string') + } + } + escapePos = next + while (css.charCodeAt(escapePos - 1) === BACKSLASH) { + escapePos -= 1 + escaped = !escaped + } + } while (escaped) + + currentToken = ['string', css.slice(pos, next + 1), pos, next] + pos = next + break + } + + case AT: { + RE_AT_END.lastIndex = pos + 1 + RE_AT_END.test(css) + if (RE_AT_END.lastIndex === 0) { + next = css.length - 1 + } else { + next = RE_AT_END.lastIndex - 2 + } + + currentToken = ['at-word', css.slice(pos, next + 1), pos, next] + + pos = next + break + } + + case BACKSLASH: { + next = pos + escape = true + while (css.charCodeAt(next + 1) === BACKSLASH) { + next += 1 + escape = !escape + } + code = css.charCodeAt(next + 1) + if ( + escape && + code !== SLASH && + code !== SPACE && + code !== NEWLINE && + code !== TAB && + code !== CR && + code !== FEED + ) { + next += 1 + if (RE_HEX_ESCAPE.test(css.charAt(next))) { + while (RE_HEX_ESCAPE.test(css.charAt(next + 1))) { + next += 1 + } + if (css.charCodeAt(next + 1) === SPACE) { + next += 1 + } + } + } + + currentToken = ['word', css.slice(pos, next + 1), pos, next] + + pos = next + break + } + + default: { + if (code === SLASH && css.charCodeAt(pos + 1) === ASTERISK) { + next = css.indexOf('*/', pos + 2) + 1 + if (next === 0) { + if (ignore || ignoreUnclosed) { + next = css.length + } else { + unclosed('comment') + } + } + + currentToken = ['comment', css.slice(pos, next + 1), pos, next] + pos = next + } else { + RE_WORD_END.lastIndex = pos + 1 + RE_WORD_END.test(css) + if (RE_WORD_END.lastIndex === 0) { + next = css.length - 1 + } else { + next = RE_WORD_END.lastIndex - 2 + } + + currentToken = ['word', css.slice(pos, next + 1), pos, next] + buffer.push(currentToken) + pos = next + } + + break + } + } + + pos++ + return currentToken + } + + function back(token) { + returned.push(token) + } + + return { + back, + endOfFile, + nextToken, + position + } +} diff --git a/node_modules/postcss/lib/warn-once.js b/node_modules/postcss/lib/warn-once.js new file mode 100644 index 0000000..316e1cf --- /dev/null +++ b/node_modules/postcss/lib/warn-once.js @@ -0,0 +1,13 @@ +/* eslint-disable no-console */ +'use strict' + +let printed = {} + +module.exports = function warnOnce(message) { + if (printed[message]) return + printed[message] = true + + if (typeof console !== 'undefined' && console.warn) { + console.warn(message) + } +} diff --git a/node_modules/postcss/lib/warning.d.ts b/node_modules/postcss/lib/warning.d.ts new file mode 100644 index 0000000..b25bba8 --- /dev/null +++ b/node_modules/postcss/lib/warning.d.ts @@ -0,0 +1,147 @@ +import { RangePosition } from './css-syntax-error.js' +import Node from './node.js' + +declare namespace Warning { + export interface WarningOptions { + /** + * End position, exclusive, in CSS node string that caused the warning. + */ + end?: RangePosition + + /** + * End index, exclusive, in CSS node string that caused the warning. + */ + endIndex?: number + + /** + * Start index, inclusive, in CSS node string that caused the warning. + */ + index?: number + + /** + * CSS node that caused the warning. + */ + node?: Node + + /** + * Name of the plugin that created this warning. `Result#warn` fills + * this property automatically. + */ + plugin?: string + + /** + * Start position, inclusive, in CSS node string that caused the warning. 
+ */ + start?: RangePosition + + /** + * Word in CSS source that caused the warning. + */ + word?: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Warning_ as default } +} + +/** + * Represents a plugin’s warning. It can be created using `Node#warn`. + * + * ```js + * if (decl.important) { + * decl.warn(result, 'Avoid !important', { word: '!important' }) + * } + * ``` + */ +declare class Warning_ { + /** + * Column for inclusive start position in the input file with this warning’s source. + * + * ```js + * warning.column //=> 6 + * ``` + */ + column: number + + /** + * Column for exclusive end position in the input file with this warning’s source. + * + * ```js + * warning.endColumn //=> 4 + * ``` + */ + endColumn?: number + + /** + * Line for exclusive end position in the input file with this warning’s source. + * + * ```js + * warning.endLine //=> 6 + * ``` + */ + endLine?: number + + /** + * Line for inclusive start position in the input file with this warning’s source. + * + * ```js + * warning.line //=> 5 + * ``` + */ + line: number + + /** + * Contains the CSS node that caused the warning. + * + * ```js + * warning.node.toString() //=> 'color: white !important' + * ``` + */ + node: Node + + /** + * The name of the plugin that created this warning. + * When you call `Node#warn` it will fill this property automatically. + * + * ```js + * warning.plugin //=> 'postcss-important' + * ``` + */ + plugin: string + + /** + * The warning message. + * + * ```js + * warning.text //=> 'Try to avoid !important' + * ``` + */ + text: string + + /** + * Type to filter warnings from `Result#messages`. + * Always equal to `"warning"`. + */ + type: 'warning' + + /** + * @param text Warning message. + * @param opts Warning options. + */ + constructor(text: string, opts?: Warning.WarningOptions) + + /** + * Returns a warning position and message. + * + * ```js + * warning.toString() //=> 'postcss-lint:a.css:10:14: Avoid !important' + * ``` + * + * @return Warning position and message. 
+ */ + toString(): string +} + +declare class Warning extends Warning_ {} + +export = Warning diff --git a/node_modules/postcss/lib/warning.js b/node_modules/postcss/lib/warning.js new file mode 100644 index 0000000..3a3d79c --- /dev/null +++ b/node_modules/postcss/lib/warning.js @@ -0,0 +1,37 @@ +'use strict' + +class Warning { + constructor(text, opts = {}) { + this.type = 'warning' + this.text = text + + if (opts.node && opts.node.source) { + let range = opts.node.rangeBy(opts) + this.line = range.start.line + this.column = range.start.column + this.endLine = range.end.line + this.endColumn = range.end.column + } + + for (let opt in opts) this[opt] = opts[opt] + } + + toString() { + if (this.node) { + return this.node.error(this.text, { + index: this.index, + plugin: this.plugin, + word: this.word + }).message + } + + if (this.plugin) { + return this.plugin + ': ' + this.text + } + + return this.text + } +} + +module.exports = Warning +Warning.default = Warning diff --git a/node_modules/postcss/package.json b/node_modules/postcss/package.json new file mode 100755 index 0000000..2cfe97f --- /dev/null +++ b/node_modules/postcss/package.json @@ -0,0 +1,88 @@ +{ + "name": "postcss", + "version": "8.4.38", + "description": "Tool for transforming styles with JS plugins", + "engines": { + "node": "^10 || ^12 || >=14" + }, + "exports": { + ".": { + "require": "./lib/postcss.js", + "import": "./lib/postcss.mjs" + }, + "./lib/at-rule": "./lib/at-rule.js", + "./lib/comment": "./lib/comment.js", + "./lib/container": "./lib/container.js", + "./lib/css-syntax-error": "./lib/css-syntax-error.js", + "./lib/declaration": "./lib/declaration.js", + "./lib/fromJSON": "./lib/fromJSON.js", + "./lib/input": "./lib/input.js", + "./lib/lazy-result": "./lib/lazy-result.js", + "./lib/no-work-result": "./lib/no-work-result.js", + "./lib/list": "./lib/list.js", + "./lib/map-generator": "./lib/map-generator.js", + "./lib/node": "./lib/node.js", + "./lib/parse": "./lib/parse.js", + "./lib/parser": "./lib/parser.js", + "./lib/postcss": "./lib/postcss.js", + "./lib/previous-map": "./lib/previous-map.js", + "./lib/processor": "./lib/processor.js", + "./lib/result": "./lib/result.js", + "./lib/root": "./lib/root.js", + "./lib/rule": "./lib/rule.js", + "./lib/stringifier": "./lib/stringifier.js", + "./lib/stringify": "./lib/stringify.js", + "./lib/symbols": "./lib/symbols.js", + "./lib/terminal-highlight": "./lib/terminal-highlight.js", + "./lib/tokenize": "./lib/tokenize.js", + "./lib/warn-once": "./lib/warn-once.js", + "./lib/warning": "./lib/warning.js", + "./package.json": "./package.json" + }, + "main": "./lib/postcss.js", + "types": "./lib/postcss.d.ts", + "keywords": [ + "css", + "postcss", + "rework", + "preprocessor", + "parser", + "source map", + "transform", + "manipulation", + "transpiler" + ], + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "author": "Andrey Sitnik ", + "license": "MIT", + "homepage": "https://postcss.org/", + "repository": "postcss/postcss", + "bugs": { + "url": "https://github.com/postcss/postcss/issues" + }, + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.0", + "source-map-js": "^1.2.0" + }, + "browser": { + "./lib/terminal-highlight": false, + "source-map-js": false, + "path": false, + "url": false, + "fs": false + } +} diff --git 
a/node_modules/pretty-hrtime/.jshintignore b/node_modules/pretty-hrtime/.jshintignore new file mode 100644 index 0000000..cb28eb3 --- /dev/null +++ b/node_modules/pretty-hrtime/.jshintignore @@ -0,0 +1 @@ +node_modules/** diff --git a/node_modules/pretty-hrtime/.npmignore b/node_modules/pretty-hrtime/.npmignore new file mode 100644 index 0000000..094a5f3 --- /dev/null +++ b/node_modules/pretty-hrtime/.npmignore @@ -0,0 +1,10 @@ +.DS_Store +*.log +node_modules +build +*.node +components +*.orig +.idea +test +.travis.yml diff --git a/node_modules/pretty-hrtime/LICENSE b/node_modules/pretty-hrtime/LICENSE new file mode 100644 index 0000000..b7346ab --- /dev/null +++ b/node_modules/pretty-hrtime/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013 [Richardson & Sons, LLC](http://richardsonandsons.com/) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pretty-hrtime/README.md b/node_modules/pretty-hrtime/README.md new file mode 100644 index 0000000..f4be28d --- /dev/null +++ b/node_modules/pretty-hrtime/README.md @@ -0,0 +1,57 @@ +[![Build Status](https://secure.travis-ci.org/robrich/pretty-hrtime.png?branch=master)](https://travis-ci.org/robrich/pretty-hrtime) +[![Dependency Status](https://david-dm.org/robrich/pretty-hrtime.png)](https://david-dm.org/robrich/pretty-hrtime) + +pretty-hrtime +============ + +[process.hrtime()](http://nodejs.org/api/process.html#process_process_hrtime) to words + +Usage +----- + +```javascript +var prettyHrtime = require('pretty-hrtime'); + +var start = process.hrtime(); +// do stuff +var end = process.hrtime(start); + +var words = prettyHrtime(end); +console.log(words); // '1.2 ms' + +words = prettyHrtime(end, {verbose:true}); +console.log(words); // '1 millisecond 209 microseconds' + +words = prettyHrtime(end, {precise:true}); +console.log(words); // '1.20958 ms' +``` + +Note: process.hrtime() has been available since 0.7.6. +See [http://nodejs.org/changelog.html](http://nodejs.org/changelog.html) +and [https://github.com/joyent/node/commit/f06abd](https://github.com/joyent/node/commit/f06abd). 
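To make the usage notes above concrete, here is a small timing helper built on `process.hrtime()` and `prettyHrtime`; the `timed` wrapper and its `label` argument are illustrative additions, not part of the pretty-hrtime API.

```javascript
var prettyHrtime = require('pretty-hrtime');

// Illustrative helper (not part of pretty-hrtime): time a synchronous
// function and print a human-readable duration.
function timed(label, fn) {
  var start = process.hrtime();
  var result = fn();
  var diff = process.hrtime(start); // [seconds, nanoseconds] since `start`
  console.log(label + ': ' + prettyHrtime(diff, { verbose: true }));
  return result;
}

timed('parse', function () {
  return JSON.parse('{"a": 1}');
});
// e.g. "parse: 42 microseconds" (actual numbers depend on the machine)
```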
+ +LICENSE +------- + +(MIT License) + +Copyright (c) 2013 [Richardson & Sons, LLC](http://richardsonandsons.com/) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pretty-hrtime/index.js b/node_modules/pretty-hrtime/index.js new file mode 100644 index 0000000..bed3f89 --- /dev/null +++ b/node_modules/pretty-hrtime/index.js @@ -0,0 +1,80 @@ +/*jshint node:true */ + +"use strict"; + +var minimalDesc = ['h', 'min', 's', 'ms', 'μs', 'ns']; +var verboseDesc = ['hour', 'minute', 'second', 'millisecond', 'microsecond', 'nanosecond']; +var convert = [60*60, 60, 1, 1e6, 1e3, 1]; + +module.exports = function (source, opts) { + var verbose, precise, i, spot, sourceAtStep, valAtStep, decimals, strAtStep, results, totalSeconds; + + verbose = false; + precise = false; + if (opts) { + verbose = opts.verbose || false; + precise = opts.precise || false; + } + + if (!Array.isArray(source) || source.length !== 2) { + return ''; + } + if (typeof source[0] !== 'number' || typeof source[1] !== 'number') { + return ''; + } + + // normalize source array due to changes in node v5.4+ + if (source[1] < 0) { + totalSeconds = source[0] + source[1] / 1e9; + source[0] = parseInt(totalSeconds); + source[1] = parseFloat((totalSeconds % 1).toPrecision(9)) * 1e9; + } + + results = ''; + + // foreach unit + for (i = 0; i < 6; i++) { + spot = i < 3 ? 0 : 1; // grabbing first or second spot in source array + sourceAtStep = source[spot]; + if (i !== 3 && i !== 0) { + sourceAtStep = sourceAtStep % convert[i-1]; // trim off previous portions + } + if (i === 2) { + sourceAtStep += source[1]/1e9; // get partial seconds from other portion of the array + } + valAtStep = sourceAtStep / convert[i]; // val at this unit + if (valAtStep >= 1) { + if (verbose) { + valAtStep = Math.floor(valAtStep); // deal in whole units, subsequent laps will get the decimal portion + } + if (!precise) { + // don't fling too many decimals + decimals = valAtStep >= 10 ? 
0 : 2; + strAtStep = valAtStep.toFixed(decimals); + } else { + strAtStep = valAtStep.toString(); + } + if (strAtStep.indexOf('.') > -1 && strAtStep[strAtStep.length-1] === '0') { + strAtStep = strAtStep.replace(/\.?0+$/,''); // remove trailing zeros + } + if (results) { + results += ' '; // append space if we have a previous value + } + results += strAtStep; // append the value + // append units + if (verbose) { + results += ' '+verboseDesc[i]; + if (strAtStep !== '1') { + results += 's'; + } + } else { + results += ' '+minimalDesc[i]; + } + if (!verbose) { + break; // verbose gets as many groups as necessary, the rest get only one + } + } + } + + return results; +}; diff --git a/node_modules/pretty-hrtime/package.json b/node_modules/pretty-hrtime/package.json new file mode 100644 index 0000000..e4a7985 --- /dev/null +++ b/node_modules/pretty-hrtime/package.json @@ -0,0 +1,25 @@ +{ + "name": "pretty-hrtime", + "description": "process.hrtime() to words", + "version": "1.0.3", + "homepage": "https://github.com/robrich/pretty-hrtime", + "repository": "git://github.com/robrich/pretty-hrtime.git", + "author": "Rob Richardson (http://robrich.org/)", + "main": "./index.js", + "keywords": [ + "hrtime", + "benchmark" + ], + "devDependencies": { + "jshint": "^2.9.4", + "mocha": "^3.1.2", + "should": "^11.1.1" + }, + "scripts": { + "test": "mocha && jshint ." + }, + "engines": { + "node": ">= 0.8" + }, + "license": "MIT" +} diff --git a/node_modules/process-nextick-args/index.js b/node_modules/process-nextick-args/index.js new file mode 100644 index 0000000..3eecf11 --- /dev/null +++ b/node_modules/process-nextick-args/index.js @@ -0,0 +1,45 @@ +'use strict'; + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + diff --git a/node_modules/process-nextick-args/license.md b/node_modules/process-nextick-args/license.md new file mode 100644 index 0000000..c67e353 --- /dev/null +++ b/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/process-nextick-args/package.json b/node_modules/process-nextick-args/package.json new file mode 100644 index 0000000..6070b72 --- /dev/null +++ b/node_modules/process-nextick-args/package.json @@ -0,0 +1,25 @@ +{ + "name": "process-nextick-args", + "version": "2.0.1", + "description": "process.nextTick but always with args", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "author": "", + "license": "MIT", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "devDependencies": { + "tap": "~0.2.6" + } +} diff --git a/node_modules/process-nextick-args/readme.md b/node_modules/process-nextick-args/readme.md new file mode 100644 index 0000000..ecb432c --- /dev/null +++ b/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var pna = require('process-nextick-args'); + +pna.nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/node_modules/queue-microtask/LICENSE b/node_modules/queue-microtask/LICENSE new file mode 100755 index 0000000..c7e6852 --- /dev/null +++ b/node_modules/queue-microtask/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/queue-microtask/README.md b/node_modules/queue-microtask/README.md new file mode 100644 index 0000000..0be05a6 --- /dev/null +++ b/node_modules/queue-microtask/README.md @@ -0,0 +1,90 @@ +# queue-microtask [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[ci-image]: https://img.shields.io/github/workflow/status/feross/queue-microtask/ci/master +[ci-url]: https://github.com/feross/queue-microtask/actions +[npm-image]: https://img.shields.io/npm/v/queue-microtask.svg +[npm-url]: https://npmjs.org/package/queue-microtask +[downloads-image]: https://img.shields.io/npm/dm/queue-microtask.svg +[downloads-url]: https://npmjs.org/package/queue-microtask +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### fast, tiny [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) shim for modern engines + +- Use [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) in all modern JS engines. +- No dependencies. Less than 10 lines. No shims or complicated fallbacks. +- Optimal performance in all modern environments + - Uses `queueMicrotask` in modern environments + - Fallback to `Promise.resolve().then(fn)` in Node.js 10 and earlier, and old browsers (same performance as `queueMicrotask`) + +## install + +``` +npm install queue-microtask +``` + +## usage + +```js +const queueMicrotask = require('queue-microtask') + +queueMicrotask(() => { /* this will run soon */ }) +``` + +## What is `queueMicrotask` and why would one use it? + +The `queueMicrotask` function is a WHATWG standard. It queues a microtask to be executed prior to control returning to the event loop. + +A microtask is a short function which will run after the current task has completed its work and when there is no other code waiting to be run before control of the execution context is returned to the event loop. + +The code `queueMicrotask(fn)` is equivalent to the code `Promise.resolve().then(fn)`. It is also very similar to [`process.nextTick(fn)`](https://nodejs.org/api/process.html#process_process_nexttick_callback_args) in Node. + +Using microtasks lets code run without interfering with any other, potentially higher priority, code that is pending, but before the JS engine regains control over the execution context. + +See the [spec](https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#microtask-queuing) or [Node documentation](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback) for more information. + +## Who is this package for? + +This package allows you to use `queueMicrotask` safely in all modern JS engines. Use it if you prioritize small JS bundle size over support for old browsers. + +If you just need to support Node 12 and later, use `queueMicrotask` directly. If you need to support all versions of Node, use this package. + +## Why not use `process.nextTick`? + +In Node, `queueMicrotask` and `process.nextTick` are [essentially equivalent](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback), though there are [subtle differences](https://github.com/YuzuJS/setImmediate#macrotasks-and-microtasks) that don't matter in most situations. + +You can think of `queueMicrotask` as a standardized version of `process.nextTick` that works in the browser. 
No need to rely on your browser bundler to shim `process` for the browser environment. + +## Why not use `setTimeout(fn, 0)`? + +This approach is the most compatible, but it has problems. Modern browsers throttle timers severely, so `setTimeout(…, 0)` usually takes at least 4ms to run. Furthermore, the throttling gets even worse if the page is backgrounded. If you have many `setTimeout` calls, then this can severely limit the performance of your program. + +## Why not use a microtask library like [`immediate`](https://www.npmjs.com/package/immediate) or [`asap`](https://www.npmjs.com/package/asap)? + +These packages are great! However, if you prioritize small JS bundle size over optimal performance in old browsers then you may want to consider this package. + +This package (`queue-microtask`) is four times smaller than `immediate`, twice as small as `asap`, and twice as small as using `process.nextTick` and letting the browser bundler shim it automatically. + +Note: This package throws an exception in JS environments which lack `Promise` support -- which are usually very old browsers and Node.js versions. + +Since the `queueMicrotask` API is supported in Node.js, Chrome, Firefox, Safari, Opera, and Edge, **the vast majority of users will get optimal performance**. Any JS environment with `Promise`, which is almost all of them, also get optimal performance. If you need support for JS environments which lack `Promise` support, use one of the alternative packages. + +## What is a shim? + +> In computer programming, a shim is a library that transparently intercepts API calls and changes the arguments passed, handles the operation itself or redirects the operation elsewhere. – [Wikipedia](https://en.wikipedia.org/wiki/Shim_(computing)) + +This package could also be described as a "ponyfill". + +> A ponyfill is almost the same as a polyfill, but not quite. Instead of patching functionality for older browsers, a ponyfill provides that functionality as a standalone module you can use. – [PonyFoo](https://ponyfoo.com/articles/polyfills-or-ponyfills) + +## API + +### `queueMicrotask(fn)` + +The `queueMicrotask()` method queues a microtask. + +The `fn` argument is a function to be executed after all pending tasks have completed but before yielding control to the browser's event loop. + +## license + +MIT. Copyright (c) [Feross Aboukhadijeh](https://feross.org). diff --git a/node_modules/queue-microtask/index.d.ts b/node_modules/queue-microtask/index.d.ts new file mode 100644 index 0000000..b6a8646 --- /dev/null +++ b/node_modules/queue-microtask/index.d.ts @@ -0,0 +1,2 @@ +declare const queueMicrotask: (cb: () => void) => void +export = queueMicrotask diff --git a/node_modules/queue-microtask/index.js b/node_modules/queue-microtask/index.js new file mode 100644 index 0000000..5560534 --- /dev/null +++ b/node_modules/queue-microtask/index.js @@ -0,0 +1,9 @@ +/*! queue-microtask. MIT License. Feross Aboukhadijeh */ +let promise + +module.exports = typeof queueMicrotask === 'function' + ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) + // reuse resolved promise, and allocate it lazily + : cb => (promise || (promise = Promise.resolve())) + .then(cb) + .catch(err => setTimeout(() => { throw err }, 0)) diff --git a/node_modules/queue-microtask/package.json b/node_modules/queue-microtask/package.json new file mode 100644 index 0000000..d29a401 --- /dev/null +++ b/node_modules/queue-microtask/package.json @@ -0,0 +1,55 @@ +{ + "name": "queue-microtask", + "description": "fast, tiny `queueMicrotask` shim for modern engines", + "version": "1.2.3", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/queue-microtask/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.2.2" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/feross/queue-microtask", + "keywords": [ + "asap", + "immediate", + "micro task", + "microtask", + "nextTick", + "process.nextTick", + "queue micro task", + "queue microtask", + "queue-microtask", + "queueMicrotask", + "setImmediate", + "task" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/queue-microtask.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/quick-lru/index.d.ts b/node_modules/quick-lru/index.d.ts new file mode 100644 index 0000000..fa58889 --- /dev/null +++ b/node_modules/quick-lru/index.d.ts @@ -0,0 +1,97 @@ +declare namespace QuickLRU { + interface Options { + /** + The maximum number of items before evicting the least recently used items. + */ + readonly maxSize: number; + + /** + Called right before an item is evicted from the cache. + + Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`). + */ + onEviction?: (key: KeyType, value: ValueType) => void; + } +} + +declare class QuickLRU + implements Iterable<[KeyType, ValueType]> { + /** + The stored item count. + */ + readonly size: number; + + /** + Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29). + + The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop. + + @example + ``` + import QuickLRU = require('quick-lru'); + + const lru = new QuickLRU({maxSize: 1000}); + + lru.set('🦄', '🌈'); + + lru.has('🦄'); + //=> true + + lru.get('🦄'); + //=> '🌈' + ``` + */ + constructor(options: QuickLRU.Options); + + [Symbol.iterator](): IterableIterator<[KeyType, ValueType]>; + + /** + Set an item. + + @returns The list instance. + */ + set(key: KeyType, value: ValueType): this; + + /** + Get an item. + + @returns The stored item or `undefined`. + */ + get(key: KeyType): ValueType | undefined; + + /** + Check if an item exists. + */ + has(key: KeyType): boolean; + + /** + Get an item without marking it as recently used. + + @returns The stored item or `undefined`. + */ + peek(key: KeyType): ValueType | undefined; + + /** + Delete an item. + + @returns `true` if the item is removed or `false` if the item doesn't exist. 
+ */ + delete(key: KeyType): boolean; + + /** + Delete all items. + */ + clear(): void; + + /** + Iterable for all the keys. + */ + keys(): IterableIterator; + + /** + Iterable for all the values. + */ + values(): IterableIterator; +} + +export = QuickLRU; diff --git a/node_modules/quick-lru/index.js b/node_modules/quick-lru/index.js new file mode 100644 index 0000000..7d7032e --- /dev/null +++ b/node_modules/quick-lru/index.js @@ -0,0 +1,123 @@ +'use strict'; + +class QuickLRU { + constructor(options = {}) { + if (!(options.maxSize && options.maxSize > 0)) { + throw new TypeError('`maxSize` must be a number greater than 0'); + } + + this.maxSize = options.maxSize; + this.onEviction = options.onEviction; + this.cache = new Map(); + this.oldCache = new Map(); + this._size = 0; + } + + _set(key, value) { + this.cache.set(key, value); + this._size++; + + if (this._size >= this.maxSize) { + this._size = 0; + + if (typeof this.onEviction === 'function') { + for (const [key, value] of this.oldCache.entries()) { + this.onEviction(key, value); + } + } + + this.oldCache = this.cache; + this.cache = new Map(); + } + } + + get(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + const value = this.oldCache.get(key); + this.oldCache.delete(key); + this._set(key, value); + return value; + } + } + + set(key, value) { + if (this.cache.has(key)) { + this.cache.set(key, value); + } else { + this._set(key, value); + } + + return this; + } + + has(key) { + return this.cache.has(key) || this.oldCache.has(key); + } + + peek(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + return this.oldCache.get(key); + } + } + + delete(key) { + const deleted = this.cache.delete(key); + if (deleted) { + this._size--; + } + + return this.oldCache.delete(key) || deleted; + } + + clear() { + this.cache.clear(); + this.oldCache.clear(); + this._size = 0; + } + + * keys() { + for (const [key] of this) { + yield key; + } + } + + * values() { + for (const [, value] of this) { + yield value; + } + } + + * [Symbol.iterator]() { + for (const item of this.cache) { + yield item; + } + + for (const item of this.oldCache) { + const [key] = item; + if (!this.cache.has(key)) { + yield item; + } + } + } + + get size() { + let oldCacheSize = 0; + for (const key of this.oldCache.keys()) { + if (!this.cache.has(key)) { + oldCacheSize++; + } + } + + return Math.min(this._size + oldCacheSize, this.maxSize); + } +} + +module.exports = QuickLRU; diff --git a/node_modules/quick-lru/license b/node_modules/quick-lru/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/quick-lru/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/quick-lru/package.json b/node_modules/quick-lru/package.json new file mode 100644 index 0000000..ff0dd9a --- /dev/null +++ b/node_modules/quick-lru/package.json @@ -0,0 +1,43 @@ +{ + "name": "quick-lru", + "version": "5.1.1", + "description": "Simple “Least Recently Used” (LRU) cache", + "license": "MIT", + "repository": "sindresorhus/quick-lru", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "lru", + "quick", + "cache", + "caching", + "least", + "recently", + "used", + "fast", + "map", + "hash", + "buffer" + ], + "devDependencies": { + "ava": "^2.0.0", + "coveralls": "^3.0.3", + "nyc": "^15.0.0", + "tsd": "^0.11.0", + "xo": "^0.26.0" + } +} diff --git a/node_modules/quick-lru/readme.md b/node_modules/quick-lru/readme.md new file mode 100644 index 0000000..234294a --- /dev/null +++ b/node_modules/quick-lru/readme.md @@ -0,0 +1,111 @@ +# quick-lru [![Build Status](https://travis-ci.org/sindresorhus/quick-lru.svg?branch=master)](https://travis-ci.org/sindresorhus/quick-lru) [![Coverage Status](https://coveralls.io/repos/github/sindresorhus/quick-lru/badge.svg?branch=master)](https://coveralls.io/github/sindresorhus/quick-lru?branch=master) + +> Simple [“Least Recently Used” (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29) + +Useful when you need to cache something and limit memory usage. + +Inspired by the [`hashlru` algorithm](https://github.com/dominictarr/hashlru#algorithm), but instead uses [`Map`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map) to support keys of any type, not just strings, and values can be `undefined`. + +## Install + +``` +$ npm install quick-lru +``` + +## Usage + +```js +const QuickLRU = require('quick-lru'); + +const lru = new QuickLRU({maxSize: 1000}); + +lru.set('🦄', '🌈'); + +lru.has('🦄'); +//=> true + +lru.get('🦄'); +//=> '🌈' +``` + +## API + +### new QuickLRU(options?) + +Returns a new instance. + +### options + +Type: `object` + +#### maxSize + +*Required*\ +Type: `number` + +The maximum number of items before evicting the least recently used items. + +#### onEviction + +*Optional*\ +Type: `(key, value) => void` + +Called right before an item is evicted from the cache. + +Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`). + +### Instance + +The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop. + +Both `key` and `value` can be of any type. + +#### .set(key, value) + +Set an item. Returns the instance. + +#### .get(key) + +Get an item. 
+ +#### .has(key) + +Check if an item exists. + +#### .peek(key) + +Get an item without marking it as recently used. + +#### .delete(key) + +Delete an item. + +Returns `true` if the item is removed or `false` if the item doesn't exist. + +#### .clear() + +Delete all items. + +#### .keys() + +Iterable for all the keys. + +#### .values() + +Iterable for all the values. + +#### .size + +The stored item count. + +--- + +
+Get professional support for this package with a Tidelift subscription. +Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
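As a sketch of the `onEviction` option documented above (the keys, values, and `maxSize` of 2 are illustrative): because the implementation follows the hashlru-style two-generation scheme, the hook fires for a whole old generation at once rather than on every individual `set`.

```js
const QuickLRU = require('quick-lru');

const lru = new QuickLRU({
	maxSize: 2,
	onEviction: (key, value) => {
		// Invoked right before an entry is dropped from the cache.
		console.log('evicted', key, value);
	}
});

lru.set('a', 1);
lru.set('b', 2);
lru.set('c', 3);
lru.set('d', 4);
// With maxSize 2, the oldest generation ('a' and 'b') is handed to
// onEviction once the two newer entries have been written.

console.log(lru.has('a')); //=> false
console.log(lru.get('c')); //=> 3
```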
diff --git a/node_modules/read-cache/LICENSE b/node_modules/read-cache/LICENSE new file mode 100644 index 0000000..4b98a41 --- /dev/null +++ b/node_modules/read-cache/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright 2016 Bogdan Chadkin + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/read-cache/README.md b/node_modules/read-cache/README.md new file mode 100644 index 0000000..16a5c36 --- /dev/null +++ b/node_modules/read-cache/README.md @@ -0,0 +1,46 @@ +# read-cache [![Build Status](https://travis-ci.org/TrySound/read-cache.svg?branch=master)](https://travis-ci.org/TrySound/read-cache) + +Reads and caches the entire contents of a file until it is modified. + + +## Install + +``` +$ npm i read-cache +``` + + +## Usage + +```js +// foo.js +var readCache = require('read-cache'); + +readCache('foo.js').then(function (contents) { + console.log(contents); +}); +``` + + +## API + +### readCache(path[, encoding]) + +Returns a promise that resolves with the file's contents. + +### readCache.sync(path[, encoding]) + +Returns the content of the file. + +### readCache.get(path[, encoding]) + +Returns the content of cached file or null. + +### readCache.clear() + +Clears the contents of the cache. 
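A brief sketch combining the four calls documented above; reading `package.json` with `utf8` encoding is just an illustrative choice.

```js
var readCache = require('read-cache');

// Async read: resolves with the file contents, served from the in-memory
// cache as long as the file's mtime is unchanged.
readCache('package.json', 'utf8').then(function (contents) {
	console.log(contents);
});

// Synchronous read, direct cache lookup, and cache reset.
var text = readCache.sync('package.json', 'utf8');
var cached = readCache.get('package.json', 'utf8'); // cached contents, or null if never read
readCache.clear();
```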
+ + +## License + +MIT © [Bogdan Chadkin](mailto:trysound@yandex.ru) diff --git a/node_modules/read-cache/index.js b/node_modules/read-cache/index.js new file mode 100644 index 0000000..b5263e6 --- /dev/null +++ b/node_modules/read-cache/index.js @@ -0,0 +1,78 @@ +var fs = require('fs'); +var path = require('path'); +var pify = require('pify'); + +var stat = pify(fs.stat); +var readFile = pify(fs.readFile); +var resolve = path.resolve; + +var cache = Object.create(null); + +function convert(content, encoding) { + if (Buffer.isEncoding(encoding)) { + return content.toString(encoding); + } + return content; +} + +module.exports = function (path, encoding) { + path = resolve(path); + + return stat(path).then(function (stats) { + var item = cache[path]; + + if (item && item.mtime.getTime() === stats.mtime.getTime()) { + return convert(item.content, encoding); + } + + return readFile(path).then(function (data) { + cache[path] = { + mtime: stats.mtime, + content: data + }; + + return convert(data, encoding); + }); + }).catch(function (err) { + cache[path] = null; + return Promise.reject(err); + }); +}; + +module.exports.sync = function (path, encoding) { + path = resolve(path); + + try { + var stats = fs.statSync(path); + var item = cache[path]; + + if (item && item.mtime.getTime() === stats.mtime.getTime()) { + return convert(item.content, encoding); + } + + var data = fs.readFileSync(path); + + cache[path] = { + mtime: stats.mtime, + content: data + }; + + return convert(data, encoding); + } catch (err) { + cache[path] = null; + throw err; + } + +}; + +module.exports.get = function (path, encoding) { + path = resolve(path); + if (cache[path]) { + return convert(cache[path].content, encoding); + } + return null; +}; + +module.exports.clear = function () { + cache = Object.create(null); +}; diff --git a/node_modules/read-cache/package.json b/node_modules/read-cache/package.json new file mode 100644 index 0000000..87199b0 --- /dev/null +++ b/node_modules/read-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "read-cache", + "version": "1.0.0", + "description": "Reads and caches the entire contents of a file until it is modified", + "files": [ + "index.js" + ], + "main": "index.js", + "scripts": { + "test": "ava" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/TrySound/read-cache.git" + }, + "keywords": [ + "fs", + "read", + "cache" + ], + "author": "Bogdan Chadkin ", + "license": "MIT", + "bugs": { + "url": "https://github.com/TrySound/read-cache/issues" + }, + "homepage": "https://github.com/TrySound/read-cache#readme", + "devDependencies": { + "ava": "^0.9.1", + "del": "^2.2.0" + }, + "dependencies": { + "pify": "^2.3.0" + } +} diff --git a/node_modules/read-pkg-up/index.d.ts b/node_modules/read-pkg-up/index.d.ts new file mode 100644 index 0000000..7829b7a --- /dev/null +++ b/node_modules/read-pkg-up/index.d.ts @@ -0,0 +1,77 @@ +import {Except} from 'type-fest'; +import {readPackage, readPackageSync, Options as ReadPackageOptions, NormalizeOptions as ReadPackageNormalizeOptions, PackageJson, NormalizedPackageJson} from 'read-pkg'; + +export type Options = { + /** + The directory to start looking for a package.json file. + + @default process.cwd() + */ + cwd?: URL | string; +} & Except; + +export type NormalizeOptions = { + /** + The directory to start looking for a package.json file. 
+ + @default process.cwd() + */ + cwd?: URL | string; +} & Except; + +export interface ReadResult { + packageJson: PackageJson; + path: string; +} + +export interface NormalizedReadResult { + packageJson: NormalizedPackageJson; + path: string; +} + +export { + PackageJson, + NormalizedPackageJson, +}; + +/** +Read the closest `package.json` file. + +@example +``` +import {readPackageUp} from 'read-pkg-up'; + +console.log(await readPackageUp()); +// { +// packageJson: { +// name: 'awesome-package', +// version: '1.0.0', +// … +// }, +// path: '/Users/sindresorhus/dev/awesome-package/package.json' +// } +``` +*/ +export function readPackageUp(options?: NormalizeOptions): Promise; +export function readPackageUp(options: Options): Promise; + +/** +Synchronously read the closest `package.json` file. + +@example +``` +import {readPackageUpSync} from 'read-pkg-up'; + +console.log(readPackageUpSync()); +// { +// packageJson: { +// name: 'awesome-package', +// version: '1.0.0', +// … +// }, +// path: '/Users/sindresorhus/dev/awesome-package/package.json' +// } +``` +*/ +export function readPackageUpSync(options?: NormalizeOptions): NormalizedReadResult | undefined; +export function readPackageUpSync(options: Options): ReadResult | undefined; diff --git a/node_modules/read-pkg-up/index.js b/node_modules/read-pkg-up/index.js new file mode 100644 index 0000000..4e3a231 --- /dev/null +++ b/node_modules/read-pkg-up/index.js @@ -0,0 +1,27 @@ +import path from 'node:path'; +import {findUp, findUpSync} from 'find-up'; +import {readPackage, readPackageSync} from 'read-pkg'; + +export async function readPackageUp(options) { + const filePath = await findUp('package.json', options); + if (!filePath) { + return; + } + + return { + packageJson: await readPackage({...options, cwd: path.dirname(filePath)}), + path: filePath, + }; +} + +export function readPackageUpSync(options) { + const filePath = findUpSync('package.json', options); + if (!filePath) { + return; + } + + return { + packageJson: readPackageSync({...options, cwd: path.dirname(filePath)}), + path: filePath, + }; +} diff --git a/node_modules/read-pkg-up/license b/node_modules/read-pkg-up/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/read-pkg-up/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
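Given the `index.js` above, `readPackageUp` resolves to `undefined` when no `package.json` exists in or above `cwd`; this sketch (the directory path is illustrative) shows handling that case from an ES module.

```js
import {readPackageUp} from 'read-pkg-up';

const result = await readPackageUp({cwd: '/some/deeply/nested/directory'});

if (result) {
	console.log(result.packageJson.name, '->', result.path);
} else {
	console.log('No package.json found above that directory');
}
```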
diff --git a/node_modules/read-pkg-up/package.json b/node_modules/read-pkg-up/package.json new file mode 100644 index 0000000..bd7d1d2 --- /dev/null +++ b/node_modules/read-pkg-up/package.json @@ -0,0 +1,61 @@ +{ + "name": "read-pkg-up", + "version": "9.1.0", + "description": "Read the closest package.json file", + "license": "MIT", + "repository": "sindresorhus/read-pkg-up", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "json", + "read", + "parse", + "file", + "fs", + "graceful", + "load", + "package", + "find", + "up", + "find-up", + "findup", + "look-up", + "look", + "search", + "match", + "resolve", + "parent", + "parents", + "folder", + "directory", + "walk", + "walking", + "path" + ], + "dependencies": { + "find-up": "^6.3.0", + "read-pkg": "^7.1.0", + "type-fest": "^2.5.0" + }, + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.18.0", + "xo": "^0.45.0" + } +} diff --git a/node_modules/read-pkg-up/readme.md b/node_modules/read-pkg-up/readme.md new file mode 100644 index 0000000..ca0e55a --- /dev/null +++ b/node_modules/read-pkg-up/readme.md @@ -0,0 +1,74 @@ +# read-pkg-up + +> Read the closest package.json file + +## Why + +- [Finds the closest package.json](https://github.com/sindresorhus/find-up) +- [Throws more helpful JSON errors](https://github.com/sindresorhus/parse-json) +- [Normalizes the data](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) + +## Install + +```sh +npm install read-pkg-up +``` + +## Usage + +```js +import {readPackageUp} from 'read-pkg-up'; + +console.log(await readPackageUp()); +/* +{ + packageJson: { + name: 'awesome-package', + version: '1.0.0', + … + }, + path: '/Users/sindresorhus/dev/awesome-package/package.json' +} +*/ +``` + +## API + +### readPackageUp(options?) + +Returns a `Promise` or `Promise` if no `package.json` was found. + +### readPackageUpSync(options?) + +Returns the result object or `undefined` if no `package.json` was found. + +#### options + +Type: `object` + +##### cwd + +Type: `URL | string`\ +Default: `process.cwd()` + +The directory to start looking for a package.json file. + +##### normalize + +Type: `boolean`\ +Default: `true` + +[Normalize](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) the package data. + +## read-pkg-up for enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of read-pkg-up and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-read-pkg-up?utm_source=npm-read-pkg-up&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + +## Related + +- [read-pkg](https://github.com/sindresorhus/read-pkg) - Read a package.json file +- [pkg-up](https://github.com/sindresorhus/pkg-up) - Find the closest package.json file +- [find-up](https://github.com/sindresorhus/find-up) - Find a file by walking up parent directories +- [pkg-conf](https://github.com/sindresorhus/pkg-conf) - Get namespaced config from the closest package.json diff --git a/node_modules/read-pkg/index.d.ts b/node_modules/read-pkg/index.d.ts new file mode 100644 index 0000000..d9ab992 --- /dev/null +++ b/node_modules/read-pkg/index.d.ts @@ -0,0 +1,59 @@ +import * as typeFest from 'type-fest'; +import * as normalize from 'normalize-package-data'; + +export interface Options { + /** + Current working directory. + + @default process.cwd() + */ + readonly cwd?: URL | string; + + /** + [Normalize](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) the package data. + + @default true + */ + readonly normalize?: boolean; +} + +export interface NormalizeOptions extends Options { + readonly normalize?: true; +} + +export type NormalizedPackageJson = PackageJson & normalize.Package; +export type PackageJson = typeFest.PackageJson; + +/** +@returns The parsed JSON. + +@example +``` +import {readPackage} from 'read-pkg'; + +console.log(await readPackage()); +//=> {name: 'read-pkg', …} + +console.log(await readPackage({cwd: 'some-other-directory'}); +//=> {name: 'unicorn', …} +``` +*/ +export function readPackage(options?: NormalizeOptions): Promise; +export function readPackage(options: Options): Promise; + +/** +@returns The parsed JSON. + +@example +``` +import {readPackageSync} from 'read-pkg'; + +console.log(readPackageSync()); +//=> {name: 'read-pkg', …} + +console.log(readPackageSync({cwd: 'some-other-directory'}); +//=> {name: 'unicorn', …} +``` +*/ +export function readPackageSync(options?: NormalizeOptions): NormalizedPackageJson; +export function readPackageSync(options: Options): PackageJson; diff --git a/node_modules/read-pkg/index.js b/node_modules/read-pkg/index.js new file mode 100644 index 0000000..36ee75b --- /dev/null +++ b/node_modules/read-pkg/index.js @@ -0,0 +1,32 @@ +import process from 'node:process'; +import fs, {promises as fsPromises} from 'node:fs'; +import path from 'node:path'; +import {fileURLToPath} from 'node:url'; +import parseJson from 'parse-json'; +import normalizePackageData from 'normalize-package-data'; + +const toPath = urlOrPath => urlOrPath instanceof URL ? 
fileURLToPath(urlOrPath) : urlOrPath; + +export async function readPackage({cwd, normalize = true} = {}) { + cwd = toPath(cwd) || process.cwd(); + const filePath = path.resolve(cwd, 'package.json'); + const json = parseJson(await fsPromises.readFile(filePath, 'utf8')); + + if (normalize) { + normalizePackageData(json); + } + + return json; +} + +export function readPackageSync({cwd, normalize = true} = {}) { + cwd = toPath(cwd) || process.cwd(); + const filePath = path.resolve(cwd, 'package.json'); + const json = parseJson(fs.readFileSync(filePath, 'utf8')); + + if (normalize) { + normalizePackageData(json); + } + + return json; +} diff --git a/node_modules/read-pkg/license b/node_modules/read-pkg/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/read-pkg/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/read-pkg/package.json b/node_modules/read-pkg/package.json new file mode 100644 index 0000000..6e1efd6 --- /dev/null +++ b/node_modules/read-pkg/package.json @@ -0,0 +1,52 @@ +{ + "name": "read-pkg", + "version": "7.1.0", + "description": "Read a package.json file", + "license": "MIT", + "repository": "sindresorhus/read-pkg", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=12.20" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "json", + "read", + "parse", + "file", + "fs", + "graceful", + "load", + "package", + "normalize" + ], + "dependencies": { + "@types/normalize-package-data": "^2.4.1", + "normalize-package-data": "^3.0.2", + "parse-json": "^5.2.0", + "type-fest": "^2.0.0" + }, + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.17.0", + "xo": "^0.44.0" + }, + "xo": { + "ignores": [ + "test/test.js" + ] + } +} diff --git a/node_modules/read-pkg/readme.md b/node_modules/read-pkg/readme.md new file mode 100644 index 0000000..c15dc92 --- /dev/null +++ b/node_modules/read-pkg/readme.md @@ -0,0 +1,72 @@ +# read-pkg + +> Read a package.json file + +## Why + +- [Throws more helpful JSON errors](https://github.com/sindresorhus/parse-json) +- [Normalizes the data](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) + +## Install + +``` +$ npm install read-pkg +``` + +## Usage + +```js +import {readPackage} from 'read-pkg'; + +console.log(await readPackage()); +//=> {name: 'read-pkg', …} + +console.log(await readPackage({cwd: 'some-other-directory'})); +//=> {name: 'unicorn', …} +``` + +## API + +### readPackage(options?) + +Returns a `Promise` with the parsed JSON. + +### readPackageSync(options?) + +Returns the parsed JSON. + +#### options + +Type: `object` + +##### cwd + +Type: `URL | string`\ +Default: `process.cwd()` + +Current working directory. + +##### normalize + +Type: `boolean`\ +Default: `true` + +[Normalize](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) the package data. + +## Related + +- [read-pkg-up](https://github.com/sindresorhus/read-pkg-up) - Read the closest package.json file +- [write-pkg](https://github.com/sindresorhus/write-pkg) - Write a `package.json` file +- [load-json-file](https://github.com/sindresorhus/load-json-file) - Read and parse a JSON file + +--- + +
+Get professional support for this package with a Tidelift subscription
+
+Tidelift helps make open source sustainable for maintainers while giving companies
+assurances about security, maintenance, and licensing for their dependencies.
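Both packages vendored above expose a small promise-based API: `readPackage()`/`readPackageSync()` read a `package.json` from a given `cwd`, and `readPackageUp()`/`readPackageUpSync()` walk up parent directories to find the closest one. As a rough sketch based only on the options documented in the readmes above (`cwd` and `normalize`; the nested path below is a placeholder), typical usage might look like this:

```js
import {readPackage} from 'read-pkg';
import {readPackageUp} from 'read-pkg-up';

// Read ./package.json as-is, skipping normalize-package-data processing
// (`normalize` defaults to true per the readme above).
const raw = await readPackage({normalize: false});
console.log(raw.name, raw.version);

// Walk up from a nested directory (placeholder path) to the closest
// package.json; resolves to undefined when none is found.
const found = await readPackageUp({cwd: 'src/nested/dir'});
if (found) {
  console.log(found.path, found.packageJson.version);
}
```

Both modules are ESM-only (`"type": "module"` in their package.json files), so this snippet assumes it runs inside an ES module where top-level `await` is available.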
diff --git a/node_modules/readable-stream/.travis.yml b/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000..f62cdac --- /dev/null +++ b/node_modules/readable-stream/.travis.yml @@ -0,0 +1,34 @@ +sudo: false +language: node_js +before_install: + - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: NPM_LEGACY=true + - node_js: '0.10' + env: NPM_LEGACY=true + - node_js: '0.11' + env: NPM_LEGACY=true + - node_js: '0.12' + env: NPM_LEGACY=true + - node_js: 1 + env: NPM_LEGACY=true + - node_js: 2 + env: NPM_LEGACY=true + - node_js: 3 + env: NPM_LEGACY=true + - node_js: 4 + - node_js: 5 + - node_js: 6 + - node_js: 7 + - node_js: 8 + - node_js: 9 +script: "npm run test" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/readable-stream/GOVERNANCE.md b/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. 
+ +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. 
If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 0000000..f1c5a93 --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.17.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. 
+* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000..83275f1 --- /dev/null +++ b/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/readable-stream/duplex-browser.js b/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000..f8b2db8 --- /dev/null +++ b/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/readable-stream/duplex.js b/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..46924cb --- /dev/null +++ b/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..57003c3 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..612edb4 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..3af95cb --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. 
+ this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { hasUnpiped: false }); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..fcfc105 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
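As a usage sketch of the contract described in the comments above (not part of the vendored file; the `UpperCaseTransform` name is invented for illustration), a minimal Transform built on this module might look like this:

```js
// Minimal sketch: a Transform that upper-cases incoming text.
// Assumes the readable-stream package vendored in this diff.
var Transform = require('readable-stream').Transform;
var util = require('util');

function UpperCaseTransform(options) {
  if (!(this instanceof UpperCaseTransform)) return new UpperCaseTransform(options);
  Transform.call(this, options);
}
util.inherits(UpperCaseTransform, Transform);

UpperCaseTransform.prototype._transform = function (chunk, encoding, cb) {
  // push() zero or more output chunks for the readable side,
  // then call cb() exactly once to ask for the next written chunk.
  this.push(chunk.toString().toUpperCase());
  cb();
};

// Usage: pipe stdin through the transform to stdout.
process.stdin.pipe(new UpperCaseTransform()).pipe(process.stdout);
```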
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..e1e897f --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,685 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
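As a usage sketch of the `_write()` contract described in the header comment above (not part of the vendored file; `ByteCountWritable` is an invented name), a minimal Writable built on this module might look like this:

```js
// Minimal sketch: a Writable that counts the bytes written to it.
// Assumes the readable-stream package vendored in this diff.
var Writable = require('readable-stream').Writable;
var util = require('util');

function ByteCountWritable(options) {
  if (!(this instanceof ByteCountWritable)) return new ByteCountWritable(options);
  Writable.call(this, options);
  this.bytes = 0;
}
util.inherits(ByteCountWritable, Writable);

ByteCountWritable.prototype._write = function (chunk, encoding, cb) {
  // Buffering, back-pressure, and 'drain' emission are handled by Writable;
  // the subclass only consumes the chunk and calls cb() when done.
  this.bytes += chunk.length;
  cb();
};

// Usage: count the bytes of stdin, then report on 'finish'.
var counter = new ByteCountWritable();
counter.on('finish', function () {
  console.log('received %d bytes', counter.bytes);
});
process.stdin.pipe(counter);
```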
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. 
+ // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. 
+ + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000..5e08097 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,78 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return 
ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..85a8214 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,84 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + pna.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, this, err); + } + } + + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + pna.nextTick(emitErrorNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, _this, err); + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ 
+module.exports = require('events').EventEmitter; diff --git a/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/readable-stream/node_modules/safe-buffer/LICENSE b/node_modules/readable-stream/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/readable-stream/node_modules/safe-buffer/README.md b/node_modules/readable-stream/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. 
You can use it by adding one `require` line to
+the top of your node.js modules:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+// Existing buffer code will continue to work without issues:
+
+new Buffer('hey', 'utf8')
+new Buffer([1, 2, 3], 'utf8')
+new Buffer(obj)
+new Buffer(16) // create an uninitialized buffer (potentially unsafe)
+
+// But you can use these new explicit APIs to make clear what you want:
+
+Buffer.from('hey', 'utf8') // convert from many types to a Buffer
+Buffer.alloc(16) // create a zero-filled buffer (safe)
+Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
+```
+
+## api
+
+### Class Method: Buffer.from(array)
+
+
+* `array` {Array}
+
+Allocates a new `Buffer` using an `array` of octets.
+
+```js
+const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
+  // creates a new Buffer containing ASCII bytes
+  // ['b','u','f','f','e','r']
+```
+
+A `TypeError` will be thrown if `array` is not an `Array`.
+
+### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
+
+
+* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
+  a `new ArrayBuffer()`
+* `byteOffset` {Number} Default: `0`
+* `length` {Number} Default: `arrayBuffer.length - byteOffset`
+
+When passed a reference to the `.buffer` property of a `TypedArray` instance,
+the newly created `Buffer` will share the same allocated memory as the
+TypedArray.
+
+```js
+const arr = new Uint16Array(2);
+arr[0] = 5000;
+arr[1] = 4000;
+
+const buf = Buffer.from(arr.buffer); // shares the memory with arr;
+
+console.log(buf);
+  // Prints: <Buffer 88 13 a0 0f>
+
+// changing the TypedArray changes the Buffer also
+arr[1] = 6000;
+
+console.log(buf);
+  // Prints: <Buffer 88 13 70 17>
+```
+
+The optional `byteOffset` and `length` arguments specify a memory range within
+the `arrayBuffer` that will be shared by the `Buffer`.
+
+```js
+const ab = new ArrayBuffer(10);
+const buf = Buffer.from(ab, 0, 2);
+console.log(buf.length);
+  // Prints: 2
+```
+
+A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
+
+### Class Method: Buffer.from(buffer)
+
+
+* `buffer` {Buffer}
+
+Copies the passed `buffer` data onto a new `Buffer` instance.
+
+```js
+const buf1 = Buffer.from('buffer');
+const buf2 = Buffer.from(buf1);
+
+buf1[0] = 0x61;
+console.log(buf1.toString());
+  // 'auffer'
+console.log(buf2.toString());
+  // 'buffer' (copy is not changed)
+```
+
+A `TypeError` will be thrown if `buffer` is not a `Buffer`.
+
+### Class Method: Buffer.from(str[, encoding])
+
+
+* `str` {String} String to encode.
+* `encoding` {String} Encoding to use, Default: `'utf8'`
+
+Creates a new `Buffer` containing the given JavaScript string `str`. If
+provided, the `encoding` parameter identifies the character encoding.
+If not provided, `encoding` defaults to `'utf8'`.
+
+```js
+const buf1 = Buffer.from('this is a tést');
+console.log(buf1.toString());
+  // prints: this is a tést
+console.log(buf1.toString('ascii'));
+  // prints: this is a tC)st
+
+const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
+console.log(buf2.toString());
+  // prints: this is a tést
+```
+
+A `TypeError` will be thrown if `str` is not a string.
+
+### Class Method: Buffer.alloc(size[, fill[, encoding]])
+
+
+* `size` {Number}
+* `fill` {Value} Default: `undefined`
+* `encoding` {String} Default: `utf8`
+
+Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
+`Buffer` will be *zero-filled*.
+
+```js
+const buf = Buffer.alloc(5);
+console.log(buf);
+  // <Buffer 00 00 00 00 00>
+```
+
+The `size` must be less than or equal to the value of
+`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
+`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
+be created if a `size` less than or equal to 0 is specified.
+
+If `fill` is specified, the allocated `Buffer` will be initialized by calling
+`buf.fill(fill)`. See [`buf.fill()`][] for more information.
+
+```js
+const buf = Buffer.alloc(5, 'a');
+console.log(buf);
+  // <Buffer 61 61 61 61 61>
+```
+
+If both `fill` and `encoding` are specified, the allocated `Buffer` will be
+initialized by calling `buf.fill(fill, encoding)`. For example:
+
+```js
+const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
+console.log(buf);
+  // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
+```
+
+Calling `Buffer.alloc(size)` can be significantly slower than the alternative
+`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
+contents will *never contain sensitive data*.
+
+A `TypeError` will be thrown if `size` is not a number.
+
+### Class Method: Buffer.allocUnsafe(size)
+
+
+* `size` {Number}
+
+Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
+be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
+architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
+thrown. A zero-length Buffer will be created if a `size` less than or equal to
+0 is specified.
+
+The underlying memory for `Buffer` instances created in this way is *not
+initialized*. The contents of the newly created `Buffer` are unknown and
+*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
+`Buffer` instances to zeroes.
+
+```js
+const buf = Buffer.allocUnsafe(5);
+console.log(buf);
+  // <Buffer 78 e0 82 02 01>
+  // (octets will be different, every time)
+buf.fill(0);
+console.log(buf);
+  // <Buffer 00 00 00 00 00>
+```
+
+A `TypeError` will be thrown if `size` is not a number.
+
+Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
+size `Buffer.poolSize` that is used as a pool for the fast allocation of new
+`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
+`new Buffer(size)` constructor) only when `size` is less than or equal to
+`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
+value of `Buffer.poolSize` is `8192` but can be modified.
+
+Use of this pre-allocated internal memory pool is a key difference between
+calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
+Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
+pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
+Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
+difference is subtle but can be important when an application requires the
+additional performance that `Buffer.allocUnsafe(size)` provides.
+
+### Class Method: Buffer.allocUnsafeSlow(size)
+
+
+* `size` {Number}
+
+Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
+`size` must be less than or equal to the value of
+`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
+`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
+be created if a `size` less than or equal to 0 is specified.
+
+The underlying memory for `Buffer` instances created in this way is *not
+initialized*. The contents of the newly created `Buffer` are unknown and
+*may contain sensitive data*.
Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) 
+ +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. 
Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. 
+ +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts b/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: 
boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. 
+ * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. 
+ * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/readable-stream/node_modules/safe-buffer/index.js b/node_modules/readable-stream/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..22438da --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/readable-stream/node_modules/safe-buffer/package.json b/node_modules/readable-stream/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..623fbc3 --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 0000000..514c178 --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,52 @@ +{ + "name": "readable-stream", + "version": "2.3.8", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + 
"string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/readable-stream/passthrough.js b/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..ffd791d --- /dev/null +++ b/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..e503725 --- /dev/null +++ b/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..ec89ec5 --- /dev/null +++ b/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/readable-stream/transform.js b/node_modules/readable-stream/transform.js new file mode 100644 index 0000000..b1baba2 --- /dev/null +++ b/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/readable-stream/writable-browser.js b/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000..ebdde6a --- /dev/null +++ b/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/readable-stream/writable.js b/node_modules/readable-stream/writable.js new file mode 100644 index 
0000000..3211a6f --- /dev/null +++ b/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/readdirp/LICENSE b/node_modules/readdirp/LICENSE new file mode 100644 index 0000000..037cbb4 --- /dev/null +++ b/node_modules/readdirp/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/readdirp/README.md b/node_modules/readdirp/README.md new file mode 100644 index 0000000..465593c --- /dev/null +++ b/node_modules/readdirp/README.md @@ -0,0 +1,122 @@ +# readdirp [![Weekly downloads](https://img.shields.io/npm/dw/readdirp.svg)](https://github.com/paulmillr/readdirp) + +Recursive version of [fs.readdir](https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback). Exposes a **stream API** and a **promise API**. + + +```sh +npm install readdirp +``` + +```javascript +const readdirp = require('readdirp'); + +// Use streams to achieve small RAM & CPU footprint. +// 1) Streams example with for-await. +for await (const entry of readdirp('.')) { + const {path} = entry; + console.log(`${JSON.stringify({path})}`); +} + +// 2) Streams example, non for-await. +// Print out all JS files along with their size within the current folder & subfolders. +readdirp('.', {fileFilter: '*.js', alwaysStat: true}) + .on('data', (entry) => { + const {path, stats: {size}} = entry; + console.log(`${JSON.stringify({path, size})}`); + }) + // Optionally call stream.destroy() in `warn()` in order to abort and cause 'close' to be emitted + .on('warn', error => console.error('non-fatal error', error)) + .on('error', error => console.error('fatal error', error)) + .on('end', () => console.log('done')); + +// 3) Promise example. More RAM and CPU than streams / for-await. +const files = await readdirp.promise('.'); +console.log(files.map(file => file.path)); + +// Other options. +readdirp('test', { + fileFilter: '*.js', + directoryFilter: ['!.git', '!*modules'] + // directoryFilter: (di) => di.basename.length === 9 + type: 'files_directories', + depth: 1 +}); +``` + +For more examples, check out `examples` directory. 
+ +## API + +`const stream = readdirp(root[, options])` — **Stream API** + +- Reads given root recursively and returns a `stream` of [entry infos](#entryinfo) +- Optionally can be used like `for await (const entry of stream)` with node.js 10+ (`asyncIterator`). +- `on('data', (entry) => {})` [entry info](#entryinfo) for every file / dir. +- `on('warn', (error) => {})` non-fatal `Error` that prevents a file / dir from being processed. Example: inaccessible to the user. +- `on('error', (error) => {})` fatal `Error` which also ends the stream. Example: illegal options where passed. +- `on('end')` — we are done. Called when all entries were found and no more will be emitted. +- `on('close')` — stream is destroyed via `stream.destroy()`. + Could be useful if you want to manually abort even on a non fatal error. + At that point the stream is no longer `readable` and no more entries, warning or errors are emitted +- To learn more about streams, consult the very detailed [nodejs streams documentation](https://nodejs.org/api/stream.html) + or the [stream-handbook](https://github.com/substack/stream-handbook) + +`const entries = await readdirp.promise(root[, options])` — **Promise API**. Returns a list of [entry infos](#entryinfo). + +First argument is awalys `root`, path in which to start reading and recursing into subdirectories. + +### options + +- `fileFilter: ["*.js"]`: filter to include or exclude files. A `Function`, Glob string or Array of glob strings. + - **Function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry + - **Glob string**: a string (e.g., `*.js`) which is matched using [picomatch](https://github.com/micromatch/picomatch), so go there for more + information. Globstars (`**`) are not supported since specifying a recursive pattern for an already recursive function doesn't make sense. Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files. + - **Array of glob strings**: either need to be all inclusive or all exclusive (negated) patterns otherwise an error is thrown. + `['*.json', '*.js']` includes all JavaScript and Json files. + `['!.git', '!node_modules']` includes all directories except the '.git' and 'node_modules'. + - Directories that do not pass a filter will not be recursed into. +- `directoryFilter: ['!.git']`: filter to include/exclude directories found and to recurse into. Directories that do not pass a filter will not be recursed into. +- `depth: 5`: depth at which to stop recursing even if more subdirectories are found +- `type: 'files'`: determines if data events on the stream should be emitted for `'files'` (default), `'directories'`, `'files_directories'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes. +- `alwaysStat: false`: always return `stats` property for every file. Default is `false`, readdirp will return `Dirent` entries. Setting it to `true` can double readdir execution time - use it only when you need file `size`, `mtime` etc. Cannot be enabled on node <10.10.0. +- `lstat: false`: include symlink entries in the stream along with files. 
When `true`, `fs.lstat` would be used instead of `fs.stat` + +### `EntryInfo` + +Has the following properties: + +- `path: 'assets/javascripts/react.js'`: path to the file/directory (relative to given root) +- `fullPath: '/Users/dev/projects/app/assets/javascripts/react.js'`: full path to the file/directory found +- `basename: 'react.js'`: name of the file/directory +- `dirent: fs.Dirent`: built-in [dir entry object](https://nodejs.org/api/fs.html#fs_class_fs_dirent) - only with `alwaysStat: false` +- `stats: fs.Stats`: built in [stat object](https://nodejs.org/api/fs.html#fs_class_fs_stats) - only with `alwaysStat: true` + +## Changelog + +- 3.5 (Oct 13, 2020) disallows recursive directory-based symlinks. + Before, it could have entered infinite loop. +- 3.4 (Mar 19, 2020) adds support for directory-based symlinks. +- 3.3 (Dec 6, 2019) stabilizes RAM consumption and enables perf management with `highWaterMark` option. Fixes race conditions related to `for-await` looping. +- 3.2 (Oct 14, 2019) improves performance by 250% and makes streams implementation more idiomatic. +- 3.1 (Jul 7, 2019) brings `bigint` support to `stat` output on Windows. This is backwards-incompatible for some cases. Be careful. It you use it incorrectly, you'll see "TypeError: Cannot mix BigInt and other types, use explicit conversions". +- 3.0 brings huge performance improvements and stream backpressure support. +- Upgrading 2.x to 3.x: + - Signature changed from `readdirp(options)` to `readdirp(root, options)` + - Replaced callback API with promise API. + - Renamed `entryType` option to `type` + - Renamed `entryType: 'both'` to `'files_directories'` + - `EntryInfo` + - Renamed `stat` to `stats` + - Emitted only when `alwaysStat: true` + - `dirent` is emitted instead of `stats` by default with `alwaysStat: false` + - Renamed `name` to `basename` + - Removed `parentDir` and `fullParentDir` properties +- Supported node.js versions: + - 3.x: node 8+ + - 2.x: node 0.6+ + +## License + +Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller () + +MIT License, see [LICENSE](LICENSE) file. 
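
As a small sketch pulling together the options and `EntryInfo` fields documented above (the filter, depth, and extension choices here are arbitrary, not part of the package's docs):

```js
const readdirp = require('readdirp');

// Collect every .md file up to two levels deep, skipping dot/module directories,
// and report its size (alwaysStat is required for the `stats` property).
(async () => {
  const entries = await readdirp.promise('.', {
    fileFilter: (entry) => entry.basename.endsWith('.md'), // function filter on EntryInfo
    directoryFilter: ['!.git', '!node_modules'],           // all-negated glob array
    type: 'files',
    depth: 2,
    alwaysStat: true
  });

  for (const { path, stats } of entries) {
    console.log(path, stats.size); // `path` is relative to the given root
  }
})();
```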
diff --git a/node_modules/readdirp/index.d.ts b/node_modules/readdirp/index.d.ts new file mode 100644 index 0000000..cbbd76c --- /dev/null +++ b/node_modules/readdirp/index.d.ts @@ -0,0 +1,43 @@ +// TypeScript Version: 3.2 + +/// + +import * as fs from 'fs'; +import { Readable } from 'stream'; + +declare namespace readdir { + interface EntryInfo { + path: string; + fullPath: string; + basename: string; + stats?: fs.Stats; + dirent?: fs.Dirent; + } + + interface ReaddirpOptions { + root?: string; + fileFilter?: string | string[] | ((entry: EntryInfo) => boolean); + directoryFilter?: string | string[] | ((entry: EntryInfo) => boolean); + type?: 'files' | 'directories' | 'files_directories' | 'all'; + lstat?: boolean; + depth?: number; + alwaysStat?: boolean; + } + + interface ReaddirpStream extends Readable, AsyncIterable { + read(): EntryInfo; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + function promise( + root: string, + options?: ReaddirpOptions + ): Promise; +} + +declare function readdir( + root: string, + options?: readdir.ReaddirpOptions +): readdir.ReaddirpStream; + +export = readdir; diff --git a/node_modules/readdirp/index.js b/node_modules/readdirp/index.js new file mode 100644 index 0000000..cf739b2 --- /dev/null +++ b/node_modules/readdirp/index.js @@ -0,0 +1,287 @@ +'use strict'; + +const fs = require('fs'); +const { Readable } = require('stream'); +const sysPath = require('path'); +const { promisify } = require('util'); +const picomatch = require('picomatch'); + +const readdir = promisify(fs.readdir); +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const realpath = promisify(fs.realpath); + +/** + * @typedef {Object} EntryInfo + * @property {String} path + * @property {String} fullPath + * @property {fs.Stats=} stats + * @property {fs.Dirent=} dirent + * @property {String} basename + */ + +const BANG = '!'; +const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR'; +const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]); +const FILE_TYPE = 'files'; +const DIR_TYPE = 'directories'; +const FILE_DIR_TYPE = 'files_directories'; +const EVERYTHING_TYPE = 'all'; +const ALL_TYPES = [FILE_TYPE, DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE]; + +const isNormalFlowError = error => NORMAL_FLOW_ERRORS.has(error.code); +const [maj, min] = process.versions.node.split('.').slice(0, 2).map(n => Number.parseInt(n, 10)); +const wantBigintFsStats = process.platform === 'win32' && (maj > 10 || (maj === 10 && min >= 5)); + +const normalizeFilter = filter => { + if (filter === undefined) return; + if (typeof filter === 'function') return filter; + + if (typeof filter === 'string') { + const glob = picomatch(filter.trim()); + return entry => glob(entry.basename); + } + + if (Array.isArray(filter)) { + const positive = []; + const negative = []; + for (const item of filter) { + const trimmed = item.trim(); + if (trimmed.charAt(0) === BANG) { + negative.push(picomatch(trimmed.slice(1))); + } else { + positive.push(picomatch(trimmed)); + } + } + + if (negative.length > 0) { + if (positive.length > 0) { + return entry => + positive.some(f => f(entry.basename)) && !negative.some(f => f(entry.basename)); + } + return entry => !negative.some(f => f(entry.basename)); + } + return entry => positive.some(f => f(entry.basename)); + } +}; + +class ReaddirpStream extends Readable { + static get defaultOptions() { + return { + root: '.', + /* eslint-disable no-unused-vars */ + fileFilter: (path) => true, + directoryFilter: (path) => true, + 
/* eslint-enable no-unused-vars */ + type: FILE_TYPE, + lstat: false, + depth: 2147483648, + alwaysStat: false + }; + } + + constructor(options = {}) { + super({ + objectMode: true, + autoDestroy: true, + highWaterMark: options.highWaterMark || 4096 + }); + const opts = { ...ReaddirpStream.defaultOptions, ...options }; + const { root, type } = opts; + + this._fileFilter = normalizeFilter(opts.fileFilter); + this._directoryFilter = normalizeFilter(opts.directoryFilter); + + const statMethod = opts.lstat ? lstat : stat; + // Use bigint stats if it's windows and stat() supports options (node 10+). + if (wantBigintFsStats) { + this._stat = path => statMethod(path, { bigint: true }); + } else { + this._stat = statMethod; + } + + this._maxDepth = opts.depth; + this._wantsDir = [DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type); + this._wantsFile = [FILE_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type); + this._wantsEverything = type === EVERYTHING_TYPE; + this._root = sysPath.resolve(root); + this._isDirent = ('Dirent' in fs) && !opts.alwaysStat; + this._statsProp = this._isDirent ? 'dirent' : 'stats'; + this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent }; + + // Launch stream with one parent, the root dir. + this.parents = [this._exploreDir(root, 1)]; + this.reading = false; + this.parent = undefined; + } + + async _read(batch) { + if (this.reading) return; + this.reading = true; + + try { + while (!this.destroyed && batch > 0) { + const { path, depth, files = [] } = this.parent || {}; + + if (files.length > 0) { + const slice = files.splice(0, batch).map(dirent => this._formatEntry(dirent, path)); + for (const entry of await Promise.all(slice)) { + if (this.destroyed) return; + + const entryType = await this._getEntryType(entry); + if (entryType === 'directory' && this._directoryFilter(entry)) { + if (depth <= this._maxDepth) { + this.parents.push(this._exploreDir(entry.fullPath, depth + 1)); + } + + if (this._wantsDir) { + this.push(entry); + batch--; + } + } else if ((entryType === 'file' || this._includeAsFile(entry)) && this._fileFilter(entry)) { + if (this._wantsFile) { + this.push(entry); + batch--; + } + } + } + } else { + const parent = this.parents.pop(); + if (!parent) { + this.push(null); + break; + } + this.parent = await parent; + if (this.destroyed) return; + } + } + } catch (error) { + this.destroy(error); + } finally { + this.reading = false; + } + } + + async _exploreDir(path, depth) { + let files; + try { + files = await readdir(path, this._rdOptions); + } catch (error) { + this._onError(error); + } + return { files, depth, path }; + } + + async _formatEntry(dirent, path) { + let entry; + try { + const basename = this._isDirent ? dirent.name : dirent; + const fullPath = sysPath.resolve(sysPath.join(path, basename)); + entry = { path: sysPath.relative(this._root, fullPath), fullPath, basename }; + entry[this._statsProp] = this._isDirent ? 
dirent : await this._stat(fullPath); + } catch (err) { + this._onError(err); + } + return entry; + } + + _onError(err) { + if (isNormalFlowError(err) && !this.destroyed) { + this.emit('warn', err); + } else { + this.destroy(err); + } + } + + async _getEntryType(entry) { + // entry may be undefined, because a warning or an error were emitted + // and the statsProp is undefined + const stats = entry && entry[this._statsProp]; + if (!stats) { + return; + } + if (stats.isFile()) { + return 'file'; + } + if (stats.isDirectory()) { + return 'directory'; + } + if (stats && stats.isSymbolicLink()) { + const full = entry.fullPath; + try { + const entryRealPath = await realpath(full); + const entryRealPathStats = await lstat(entryRealPath); + if (entryRealPathStats.isFile()) { + return 'file'; + } + if (entryRealPathStats.isDirectory()) { + const len = entryRealPath.length; + if (full.startsWith(entryRealPath) && full.substr(len, 1) === sysPath.sep) { + const recursiveError = new Error( + `Circular symlink detected: "${full}" points to "${entryRealPath}"` + ); + recursiveError.code = RECURSIVE_ERROR_CODE; + return this._onError(recursiveError); + } + return 'directory'; + } + } catch (error) { + this._onError(error); + } + } + } + + _includeAsFile(entry) { + const stats = entry && entry[this._statsProp]; + + return stats && this._wantsEverything && !stats.isDirectory(); + } +} + +/** + * @typedef {Object} ReaddirpArguments + * @property {Function=} fileFilter + * @property {Function=} directoryFilter + * @property {String=} type + * @property {Number=} depth + * @property {String=} root + * @property {Boolean=} lstat + * @property {Boolean=} bigint + */ + +/** + * Main function which ends up calling readdirRec and reads all files and directories in given root recursively. + * @param {String} root Root directory + * @param {ReaddirpArguments=} options Options to specify root (start directory), filters and recursion depth + */ +const readdirp = (root, options = {}) => { + let type = options.entryType || options.type; + if (type === 'both') type = FILE_DIR_TYPE; // backwards-compatibility + if (type) options.type = type; + if (!root) { + throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)'); + } else if (typeof root !== 'string') { + throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)'); + } else if (type && !ALL_TYPES.includes(type)) { + throw new Error(`readdirp: Invalid type passed. 
Use one of ${ALL_TYPES.join(', ')}`); + } + + options.root = root; + return new ReaddirpStream(options); +}; + +const readdirpPromise = (root, options = {}) => { + return new Promise((resolve, reject) => { + const files = []; + readdirp(root, options) + .on('data', entry => files.push(entry)) + .on('end', () => resolve(files)) + .on('error', error => reject(error)); + }); +}; + +readdirp.promise = readdirpPromise; +readdirp.ReaddirpStream = ReaddirpStream; +readdirp.default = readdirp; + +module.exports = readdirp; diff --git a/node_modules/readdirp/package.json b/node_modules/readdirp/package.json new file mode 100644 index 0000000..dba5388 --- /dev/null +++ b/node_modules/readdirp/package.json @@ -0,0 +1,122 @@ +{ + "name": "readdirp", + "description": "Recursive version of fs.readdir with streaming API.", + "version": "3.6.0", + "homepage": "https://github.com/paulmillr/readdirp", + "repository": { + "type": "git", + "url": "git://github.com/paulmillr/readdirp.git" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/paulmillr/readdirp/issues" + }, + "author": "Thorsten Lorenz (thlorenz.com)", + "contributors": [ + "Thorsten Lorenz (thlorenz.com)", + "Paul Miller (https://paulmillr.com)" + ], + "main": "index.js", + "engines": { + "node": ">=8.10.0" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "recursive", + "fs", + "stream", + "streams", + "readdir", + "filesystem", + "find", + "filter" + ], + "scripts": { + "dtslint": "dtslint", + "nyc": "nyc", + "mocha": "mocha --exit", + "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .", + "test": "npm run lint && nyc npm run mocha" + }, + "dependencies": { + "picomatch": "^2.2.1" + }, + "devDependencies": { + "@types/node": "^14", + "chai": "^4.2", + "chai-subset": "^1.6", + "dtslint": "^3.3.0", + "eslint": "^7.0.0", + "mocha": "^7.1.1", + "nyc": "^15.0.0", + "rimraf": "^3.0.0", + "typescript": "^4.0.3" + }, + "nyc": { + "reporter": [ + "html", + "text" + ] + }, + "eslintConfig": { + "root": true, + "extends": "eslint:recommended", + "parserOptions": { + "ecmaVersion": 9, + "sourceType": "script" + }, + "env": { + "node": true, + "es6": true + }, + "rules": { + "array-callback-return": "error", + "no-empty": [ + "error", + { + "allowEmptyCatch": true + } + ], + "no-else-return": [ + "error", + { + "allowElseIf": false + } + ], + "no-lonely-if": "error", + "no-var": "error", + "object-shorthand": "error", + "prefer-arrow-callback": [ + "error", + { + "allowNamedFunctions": true + } + ], + "prefer-const": [ + "error", + { + "ignoreReadBeforeAssign": true + } + ], + "prefer-destructuring": [ + "error", + { + "object": true, + "array": false + } + ], + "prefer-spread": "error", + "prefer-template": "error", + "radix": "error", + "semi": "error", + "strict": "error", + "quotes": [ + "error", + "single" + ] + } + } +} diff --git a/node_modules/require-directory/.jshintrc b/node_modules/require-directory/.jshintrc new file mode 100644 index 0000000..e14e4dc --- /dev/null +++ b/node_modules/require-directory/.jshintrc @@ -0,0 +1,67 @@ +{ + "maxerr" : 50, + "bitwise" : true, + "camelcase" : true, + "curly" : true, + "eqeqeq" : true, + "forin" : true, + "immed" : true, + "indent" : 2, + "latedef" : true, + "newcap" : true, + "noarg" : true, + "noempty" : true, + "nonew" : true, + "plusplus" : true, + "quotmark" : true, + "undef" : true, + "unused" : true, + "strict" : true, + "trailing" : true, + "maxparams" : false, + "maxdepth" : false, + "maxstatements" : false, + "maxcomplexity" : false, 
+ "maxlen" : false, + "asi" : false, + "boss" : false, + "debug" : false, + "eqnull" : true, + "es5" : false, + "esnext" : false, + "moz" : false, + "evil" : false, + "expr" : true, + "funcscope" : true, + "globalstrict" : true, + "iterator" : true, + "lastsemic" : false, + "laxbreak" : false, + "laxcomma" : false, + "loopfunc" : false, + "multistr" : false, + "proto" : false, + "scripturl" : false, + "smarttabs" : false, + "shadow" : false, + "sub" : false, + "supernew" : false, + "validthis" : false, + "browser" : true, + "couch" : false, + "devel" : true, + "dojo" : false, + "jquery" : false, + "mootools" : false, + "node" : true, + "nonstandard" : false, + "prototypejs" : false, + "rhino" : false, + "worker" : false, + "wsh" : false, + "yui" : false, + "nomen" : true, + "onevar" : true, + "passfail" : false, + "white" : true +} diff --git a/node_modules/require-directory/.npmignore b/node_modules/require-directory/.npmignore new file mode 100644 index 0000000..47cf365 --- /dev/null +++ b/node_modules/require-directory/.npmignore @@ -0,0 +1 @@ +test/** diff --git a/node_modules/require-directory/.travis.yml b/node_modules/require-directory/.travis.yml new file mode 100644 index 0000000..20fd86b --- /dev/null +++ b/node_modules/require-directory/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - 0.10 diff --git a/node_modules/require-directory/LICENSE b/node_modules/require-directory/LICENSE new file mode 100644 index 0000000..a70f253 --- /dev/null +++ b/node_modules/require-directory/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2011 Troy Goode + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/require-directory/README.markdown b/node_modules/require-directory/README.markdown new file mode 100644 index 0000000..926a063 --- /dev/null +++ b/node_modules/require-directory/README.markdown @@ -0,0 +1,184 @@ +# require-directory + +Recursively iterates over specified directory, `require()`'ing each file, and returning a nested hash structure containing those modules. 
+ +**[Follow me (@troygoode) on Twitter!](https://twitter.com/intent/user?screen_name=troygoode)** + +[![NPM](https://nodei.co/npm/require-directory.png?downloads=true&stars=true)](https://nodei.co/npm/require-directory/) + +[![build status](https://secure.travis-ci.org/troygoode/node-require-directory.png)](http://travis-ci.org/troygoode/node-require-directory) + +## How To Use + +### Installation (via [npm](https://npmjs.org/package/require-directory)) + +```bash +$ npm install require-directory +``` + +### Usage + +A common pattern in node.js is to include an index file which creates a hash of the files in its current directory. Given a directory structure like so: + +* app.js +* routes/ + * index.js + * home.js + * auth/ + * login.js + * logout.js + * register.js + +`routes/index.js` uses `require-directory` to build the hash (rather than doing so manually) like so: + +```javascript +var requireDirectory = require('require-directory'); +module.exports = requireDirectory(module); +``` + +`app.js` references `routes/index.js` like any other module, but it now has a hash/tree of the exports from the `./routes/` directory: + +```javascript +var routes = require('./routes'); + +// snip + +app.get('/', routes.home); +app.get('/register', routes.auth.register); +app.get('/login', routes.auth.login); +app.get('/logout', routes.auth.logout); +``` + +The `routes` variable above is the equivalent of this: + +```javascript +var routes = { + home: require('routes/home.js'), + auth: { + login: require('routes/auth/login.js'), + logout: require('routes/auth/logout.js'), + register: require('routes/auth/register.js') + } +}; +``` + +*Note that `routes.index` will be `undefined` as you would hope.* + +### Specifying Another Directory + +You can specify which directory you want to build a tree of (if it isn't the current directory for whatever reason) by passing it as the second parameter. Not specifying the path (`requireDirectory(module)`) is the equivelant of `requireDirectory(module, __dirname)`: + +```javascript +var requireDirectory = require('require-directory'); +module.exports = requireDirectory(module, './some/subdirectory'); +``` + +For example, in the [example in the Usage section](#usage) we could have avoided creating `routes/index.js` and instead changed the first lines of `app.js` to: + +```javascript +var requireDirectory = require('require-directory'); +var routes = requireDirectory(module, './routes'); +``` + +## Options + +You can pass an options hash to `require-directory` as the 2nd parameter (or 3rd if you're passing the path to another directory as the 2nd parameter already). Here are the available options: + +### Whitelisting + +Whitelisting (either via RegExp or function) allows you to specify that only certain files be loaded. + +```javascript +var requireDirectory = require('require-directory'), + whitelist = /onlyinclude.js$/, + hash = requireDirectory(module, {include: whitelist}); +``` + +```javascript +var requireDirectory = require('require-directory'), + check = function(path){ + if(/onlyinclude.js$/.test(path)){ + return true; // don't include + }else{ + return false; // go ahead and include + } + }, + hash = requireDirectory(module, {include: check}); +``` + +### Blacklisting + +Blacklisting (either via RegExp or function) allows you to specify that all but certain files should be loaded. 
+ +```javascript +var requireDirectory = require('require-directory'), + blacklist = /dontinclude\.js$/, + hash = requireDirectory(module, {exclude: blacklist}); +``` + +```javascript +var requireDirectory = require('require-directory'), + check = function(path){ + if(/dontinclude\.js$/.test(path)){ + return false; // don't include + }else{ + return true; // go ahead and include + } + }, + hash = requireDirectory(module, {exclude: check}); +``` + +### Visiting Objects As They're Loaded + +`require-directory` takes a function as the `visit` option that will be called for each module that is added to module.exports. + +```javascript +var requireDirectory = require('require-directory'), + visitor = function(obj) { + console.log(obj); // will be called for every module that is loaded + }, + hash = requireDirectory(module, {visit: visitor}); +``` + +The visitor can also transform the objects by returning a value: + +```javascript +var requireDirectory = require('require-directory'), + visitor = function(obj) { + return obj(new Date()); + }, + hash = requireDirectory(module, {visit: visitor}); +``` + +### Renaming Keys + +```javascript +var requireDirectory = require('require-directory'), + renamer = function(name) { + return name.toUpperCase(); + }, + hash = requireDirectory(module, {rename: renamer}); +``` + +### No Recursion + +```javascript +var requireDirectory = require('require-directory'), + hash = requireDirectory(module, {recurse: false}); +``` + +## Run Unit Tests + +```bash +$ npm run lint +$ npm test +``` + +## License + +[MIT License](http://www.opensource.org/licenses/mit-license.php) + +## Author + +[Troy Goode](https://github.com/TroyGoode) ([troygoode@gmail.com](mailto:troygoode@gmail.com)) + diff --git a/node_modules/require-directory/index.js b/node_modules/require-directory/index.js new file mode 100644 index 0000000..cd37da7 --- /dev/null +++ b/node_modules/require-directory/index.js @@ -0,0 +1,86 @@ +'use strict'; + +var fs = require('fs'), + join = require('path').join, + resolve = require('path').resolve, + dirname = require('path').dirname, + defaultOptions = { + extensions: ['js', 'json', 'coffee'], + recurse: true, + rename: function (name) { + return name; + }, + visit: function (obj) { + return obj; + } + }; + +function checkFileInclusion(path, filename, options) { + return ( + // verify file has valid extension + (new RegExp('\\.(' + options.extensions.join('|') + ')$', 'i').test(filename)) && + + // if options.include is a RegExp, evaluate it and make sure the path passes + !(options.include && options.include instanceof RegExp && !options.include.test(path)) && + + // if options.include is a function, evaluate it and make sure the path passes + !(options.include && typeof options.include === 'function' && !options.include(path, filename)) && + + // if options.exclude is a RegExp, evaluate it and make sure the path doesn't pass + !(options.exclude && options.exclude instanceof RegExp && options.exclude.test(path)) && + + // if options.exclude is a function, evaluate it and make sure the path doesn't pass + !(options.exclude && typeof options.exclude === 'function' && options.exclude(path, filename)) + ); +} + +function requireDirectory(m, path, options) { + var retval = {}; + + // path is optional + if (path && !options && typeof path !== 'string') { + options = path; + path = null; + } + + // default options + options = options || {}; + for (var prop in defaultOptions) { + if (typeof options[prop] === 'undefined') { + options[prop] = defaultOptions[prop]; + } + } 
+ + // if no path was passed in, assume the equivelant of __dirname from caller + // otherwise, resolve path relative to the equivalent of __dirname + path = !path ? dirname(m.filename) : resolve(dirname(m.filename), path); + + // get the path of each file in specified directory, append to current tree node, recurse + fs.readdirSync(path).forEach(function (filename) { + var joined = join(path, filename), + files, + key, + obj; + + if (fs.statSync(joined).isDirectory() && options.recurse) { + // this node is a directory; recurse + files = requireDirectory(m, joined, options); + // exclude empty directories + if (Object.keys(files).length) { + retval[options.rename(filename, joined, filename)] = files; + } + } else { + if (joined !== m.filename && checkFileInclusion(joined, filename, options)) { + // hash node key shouldn't include file extension + key = filename.substring(0, filename.lastIndexOf('.')); + obj = m.require(joined); + retval[options.rename(key, joined, filename)] = options.visit(obj, joined, filename) || obj; + } + } + }); + + return retval; +} + +module.exports = requireDirectory; +module.exports.defaults = defaultOptions; diff --git a/node_modules/require-directory/package.json b/node_modules/require-directory/package.json new file mode 100644 index 0000000..25ece4b --- /dev/null +++ b/node_modules/require-directory/package.json @@ -0,0 +1,40 @@ +{ + "author": "Troy Goode (http://github.com/troygoode/)", + "name": "require-directory", + "version": "2.1.1", + "description": "Recursively iterates over specified directory, require()'ing each file, and returning a nested hash structure containing those modules.", + "keywords": [ + "require", + "directory", + "library", + "recursive" + ], + "homepage": "https://github.com/troygoode/node-require-directory/", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/troygoode/node-require-directory.git" + }, + "contributors": [ + { + "name": "Troy Goode", + "email": "troygoode@gmail.com", + "web": "http://github.com/troygoode/" + } + ], + "license": "MIT", + "bugs": { + "url": "http://github.com/troygoode/node-require-directory/issues/" + }, + "engines": { + "node": ">=0.10.0" + }, + "devDependencies": { + "jshint": "^2.6.0", + "mocha": "^2.1.0" + }, + "scripts": { + "test": "mocha", + "lint": "jshint index.js test/test.js" + } +} diff --git a/node_modules/resolve-alpn/LICENSE b/node_modules/resolve-alpn/LICENSE new file mode 100644 index 0000000..f4fe9c4 --- /dev/null +++ b/node_modules/resolve-alpn/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2018 Szymon Marczak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/resolve-alpn/README.md b/node_modules/resolve-alpn/README.md new file mode 100644 index 0000000..a49950c --- /dev/null +++ b/node_modules/resolve-alpn/README.md @@ -0,0 +1,60 @@ +# `resolve-alpn` + +[![Node CI](https://github.com/szmarczak/resolve-alpn/workflows/Node%20CI/badge.svg)](https://github.com/szmarczak/resolve-alpn/actions) +[![codecov](https://codecov.io/gh/szmarczak/resolve-alpn/branch/master/graph/badge.svg)](https://codecov.io/gh/szmarczak/resolve-alpn) + +## API + +### resolveALPN(options, connect = tls.connect) + +Returns an object with an `alpnProtocol` property. The `socket` property may be also present. + +```js +const result = await resolveALPN({ + host: 'nghttp2.org', + port: 443, + ALPNProtocols: ['h2', 'http/1.1'], + servername: 'nghttp2.org' +}); + +console.log(result); // {alpnProtocol: 'h2'} +``` + +**Note:** While the `servername` option is not required in this case, many other servers do. It's best practice to set it anyway. + +**Note:** If the socket times out, the promise will resolve and `result.timeout` will be set to `true`. + +#### options + +Same as [TLS options](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback). + +##### options.resolveSocket + +By default, the socket gets destroyed and the promise resolves.
+If you set this to true, it will return the socket in a `socket` property. + +```js +const result = await resolveALPN({ + host: 'nghttp2.org', + port: 443, + ALPNProtocols: ['h2', 'http/1.1'], + servername: 'nghttp2.org', + resolveSocket: true +}); + +console.log(result); // {alpnProtocol: 'h2', socket: tls.TLSSocket} + +// Remember to destroy the socket if you don't use it! +result.socket.destroy(); +``` + +#### connect + +Type: `Function | AsyncFunction`\ +Default: [`tls.connect`](https://nodejs.org/dist/latest-v16.x/docs/api/tls.html#tls_tls_connect_options_callback) + +**Note:** No matter which function is used (synchronous or asynchronous), it **must** accept a `callback` function as a second argument. The `callback` function gets executed when the socket has successfully connected. + +## License + +MIT diff --git a/node_modules/resolve-alpn/index.js b/node_modules/resolve-alpn/index.js new file mode 100644 index 0000000..2d6c043 --- /dev/null +++ b/node_modules/resolve-alpn/index.js @@ -0,0 +1,43 @@ +'use strict'; +const tls = require('tls'); + +module.exports = (options = {}, connect = tls.connect) => new Promise((resolve, reject) => { + let timeout = false; + + let socket; + + const callback = async () => { + await socketPromise; + + socket.off('timeout', onTimeout); + socket.off('error', reject); + + if (options.resolveSocket) { + resolve({alpnProtocol: socket.alpnProtocol, socket, timeout}); + + if (timeout) { + await Promise.resolve(); + socket.emit('timeout'); + } + } else { + socket.destroy(); + resolve({alpnProtocol: socket.alpnProtocol, timeout}); + } + }; + + const onTimeout = async () => { + timeout = true; + callback(); + }; + + const socketPromise = (async () => { + try { + socket = await connect(options, callback); + + socket.on('error', reject); + socket.once('timeout', onTimeout); + } catch (error) { + reject(error); + } + })(); +}); diff --git a/node_modules/resolve-alpn/package.json b/node_modules/resolve-alpn/package.json new file mode 100644 index 0000000..73a7756 --- /dev/null +++ b/node_modules/resolve-alpn/package.json @@ -0,0 +1,34 @@ +{ + "name": "resolve-alpn", + "version": "1.2.1", + "description": "Detects the ALPN protocol", + "main": "index.js", + "scripts": { + "test": "xo && nyc --reporter=lcovonly --reporter=text --reporter=html ava" + }, + "files": [ + "index.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/szmarczak/resolve-alpn.git" + }, + "keywords": [ + "alpn", + "tls", + "socket", + "http2" + ], + "author": "Szymon Marczak", + "license": "MIT", + "bugs": { + "url": "https://github.com/szmarczak/resolve-alpn/issues" + }, + "homepage": "https://github.com/szmarczak/resolve-alpn#readme", + "devDependencies": { + "ava": "^3.15.0", + "nyc": "^15.1.0", + "pem": "1.14.3", + "xo": "^0.38.2" + } +} diff --git a/node_modules/responselike/index.d.ts b/node_modules/responselike/index.d.ts new file mode 100644 index 0000000..c99c4db --- /dev/null +++ b/node_modules/responselike/index.d.ts @@ -0,0 +1,86 @@ +import {Buffer} from 'node:buffer'; +import {Readable as ReadableStream} from 'node:stream'; + +export type Options = { + /** + The HTTP response status code. + */ + readonly statusCode: number; + + /** + The HTTP headers object. + + Keys are in lowercase. + */ + readonly headers: Record; + + /** + The response body. + + The contents will be streamable but is also exposed directly as `response.body`. + */ + readonly body: Buffer; + + /** + The request URL string. 
+ */ + readonly url: string; +}; + +/** +Returns a streamable response object similar to a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage). + +@example +``` +import Response from 'responselike'; + +const response = new Response({ + statusCode: 200, + headers: { + foo: 'bar' + }, + body: Buffer.from('Hi!'), + url: 'https://example.com' +}); + +response.statusCode; +// 200 + +response.headers; +// {foo: 'bar'} + +response.body; +// + +response.url; +// 'https://example.com' + +response.pipe(process.stdout); +// 'Hi!' +``` +*/ +export default class Response extends ReadableStream { + /** + The HTTP response status code. + */ + readonly statusCode: number; + + /** + The HTTP headers. + + Keys will be automatically lowercased. + */ + readonly headers: Record; + + /** + The response body. + */ + readonly body: Buffer; + + /** + The request URL string. + */ + readonly url: string; + + constructor(options?: Options); +} diff --git a/node_modules/responselike/index.js b/node_modules/responselike/index.js new file mode 100644 index 0000000..6e2d40b --- /dev/null +++ b/node_modules/responselike/index.js @@ -0,0 +1,39 @@ +import {Readable as ReadableStream} from 'node:stream'; +import lowercaseKeys from 'lowercase-keys'; + +export default class Response extends ReadableStream { + statusCode; + headers; + body; + url; + + constructor({statusCode, headers, body, url}) { + if (typeof statusCode !== 'number') { + throw new TypeError('Argument `statusCode` should be a number'); + } + + if (typeof headers !== 'object') { + throw new TypeError('Argument `headers` should be an object'); + } + + if (!(body instanceof Uint8Array)) { + throw new TypeError('Argument `body` should be a buffer'); + } + + if (typeof url !== 'string') { + throw new TypeError('Argument `url` should be a string'); + } + + super({ + read() { + this.push(body); + this.push(null); + }, + }); + + this.statusCode = statusCode; + this.headers = lowercaseKeys(headers); + this.body = body; + this.url = url; + } +} diff --git a/node_modules/responselike/license b/node_modules/responselike/license new file mode 100644 index 0000000..feeb70f --- /dev/null +++ b/node_modules/responselike/license @@ -0,0 +1,10 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) +Copyright (c) Luke Childs (https://lukechilds.co.uk) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/responselike/package.json b/node_modules/responselike/package.json new file mode 100644 index 0000000..c9d873a --- /dev/null +++ b/node_modules/responselike/package.json @@ -0,0 +1,39 @@ +{ + "name": "responselike", + "version": "3.0.0", + "description": "A response-like object for mocking a Node.js HTTP response stream", + "license": "MIT", + "repository": "sindresorhus/responselike", + "funding": "https://github.com/sponsors/sindresorhus", + "author": "Luke Childs (https://lukechilds.co.uk)", + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=14.16" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "http", + "https", + "response", + "mock", + "test", + "request", + "responselike" + ], + "dependencies": { + "lowercase-keys": "^3.0.0" + }, + "devDependencies": { + "ava": "^4.3.1", + "get-stream": "^6.0.1", + "tsd": "^0.22.0", + "xo": "^0.50.0" + } +} diff --git a/node_modules/responselike/readme.md b/node_modules/responselike/readme.md new file mode 100644 index 0000000..51b5ecd --- /dev/null +++ b/node_modules/responselike/readme.md @@ -0,0 +1,75 @@ +# responselike + +> A response-like object for mocking a Node.js HTTP response stream + +Returns a streamable response object similar to a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage). Useful for formatting cached responses so they can be consumed by code expecting a real response. + +## Install + +```sh +npm install responselike +``` + +## Usage + +```js +import Response from 'responselike'; + +const response = new Response({ + statusCode: 200, + headers: { + foo: 'bar' + }, + body: Buffer.from('Hi!'), + url: 'https://example.com' +}); + +response.statusCode; +// 200 + +response.headers; +// {foo: 'bar'} + +response.body; +// + +response.url; +// 'https://example.com' + +response.pipe(process.stdout); +// 'Hi!' +``` + +## API + +### new Response(options?) + +Returns a streamable response object similar to a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage). + +#### options + +Type: `object` + +##### statusCode + +Type: `number` + +The HTTP response status code. + +##### headers + +Type: `object` + +The HTTP headers. Keys will be automatically lowercased. + +##### body + +Type: `Buffer` + +The response body. The Buffer contents will be streamable but is also exposed directly as `response.body`. + +##### url + +Type: `string` + +The request URL string. 
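
Beyond piping to stdout, the point of such a mock is that code written against a real response stream keeps working. A minimal sketch of that (the `readBody` consumer below is our own illustration, not part of the package):

```js
import Response from 'responselike';

// A consumer that only knows about "response-like" streams: it checks the
// status code and buffers the streamed body.
async function readBody(response) {
  if (response.statusCode !== 200) {
    throw new Error(`Unexpected status ${response.statusCode}`);
  }
  const chunks = [];
  for await (const chunk of response) chunks.push(chunk);
  return Buffer.concat(chunks).toString();
}

const cached = new Response({
  statusCode: 200,
  headers: {'content-type': 'text/plain'},
  body: Buffer.from('Hi!'),
  url: 'https://example.com'
});

console.log(await readBody(cached)); // 'Hi!'
```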
diff --git a/node_modules/reusify/.coveralls.yml b/node_modules/reusify/.coveralls.yml new file mode 100644 index 0000000..359f683 --- /dev/null +++ b/node_modules/reusify/.coveralls.yml @@ -0,0 +1 @@ +repo_token: yIxhFqtaaz5iGVYfie9mODehFYogm8S8L diff --git a/node_modules/reusify/.travis.yml b/node_modules/reusify/.travis.yml new file mode 100644 index 0000000..1970476 --- /dev/null +++ b/node_modules/reusify/.travis.yml @@ -0,0 +1,28 @@ +language: node_js +sudo: false + +node_js: + - 9 + - 8 + - 7 + - 6 + - 5 + - 4 + - 4.0 + - iojs-v3 + - iojs-v2 + - iojs-v1 + - 0.12 + - 0.10 + +cache: + directories: + - node_modules + +after_script: +- npm run coverage + +notifications: + email: + on_success: never + on_failure: always diff --git a/node_modules/reusify/LICENSE b/node_modules/reusify/LICENSE new file mode 100644 index 0000000..fbf3a01 --- /dev/null +++ b/node_modules/reusify/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/reusify/README.md b/node_modules/reusify/README.md new file mode 100644 index 0000000..badcb7c --- /dev/null +++ b/node_modules/reusify/README.md @@ -0,0 +1,145 @@ +# reusify + +[![npm version][npm-badge]][npm-url] +[![Build Status][travis-badge]][travis-url] +[![Coverage Status][coveralls-badge]][coveralls-url] + +Reuse your objects and functions for maximum speed. This technique will +make any function run ~10% faster. You call your functions a +lot, and it adds up quickly in hot code paths. + +``` +$ node benchmarks/createNoCodeFunction.js +Total time 53133 +Total iterations 100000000 +Iteration/s 1882069.5236482036 + +$ node benchmarks/reuseNoCodeFunction.js +Total time 50617 +Total iterations 100000000 +Iteration/s 1975620.838848608 +``` + +The above benchmark uses fibonacci to simulate a real high-cpu load. +The actual numbers might differ for your use case, but the difference +should not. + +The benchmark was taken using Node v6.10.0. + +This library was extracted from +[fastparallel](http://npm.im/fastparallel). + +## Example + +```js +var reusify = require('reusify') +var fib = require('reusify/benchmarks/fib') +var instance = reusify(MyObject) + +// get an object from the cache, +// or creates a new one when cache is empty +var obj = instance.get() + +// set the state +obj.num = 100 +obj.func() + +// reset the state. 
+// if the state contains any external object +// do not use delete operator (it is slow) +// prefer set them to null +obj.num = 0 + +// store an object in the cache +instance.release(obj) + +function MyObject () { + // you need to define this property + // so V8 can compile MyObject into an + // hidden class + this.next = null + this.num = 0 + + var that = this + + // this function is never reallocated, + // so it can be optimized by V8 + this.func = function () { + if (null) { + // do nothing + } else { + // calculates fibonacci + fib(that.num) + } + } +} +``` + +The above example was intended for synchronous code, let's see async: +```js +var reusify = require('reusify') +var instance = reusify(MyObject) + +for (var i = 0; i < 100; i++) { + getData(i, console.log) +} + +function getData (value, cb) { + var obj = instance.get() + + obj.value = value + obj.cb = cb + obj.run() +} + +function MyObject () { + this.next = null + this.value = null + + var that = this + + this.run = function () { + asyncOperation(that.value, that.handle) + } + + this.handle = function (err, result) { + that.cb(err, result) + that.value = null + that.cb = null + instance.release(that) + } +} +``` + +Also note how in the above examples, the code, that consumes an istance of `MyObject`, +reset the state to initial condition, just before storing it in the cache. +That's needed so that every subsequent request for an instance from the cache, +could get a clean instance. + +## Why + +It is faster because V8 doesn't have to collect all the functions you +create. On a short-lived benchmark, it is as fast as creating the +nested function, but on a longer time frame it creates less +pressure on the garbage collector. + +## Other examples +If you want to see some complex example, checkout [middie](https://github.com/fastify/middie) and [steed](https://github.com/mcollina/steed). + +## Acknowledgements + +Thanks to [Trevor Norris](https://github.com/trevnorris) for +getting me down the rabbit hole of performance, and thanks to [Mathias +Buss](http://github.com/mafintosh) for suggesting me to share this +trick. 
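To summarize the pattern: take an instance with `get()`, reset its state when the work is done, and hand it back with `release()`. A minimal sketch (the `Job` constructor and its `id` field are made up for illustration):

```js
var reusify = require('reusify')

function Job () {
  // required by reusify: released instances are chained through `next`
  this.next = null
  this.id = 0
}

var pool = reusify(Job)

var job = pool.get()   // reuses a cached instance, or allocates a new one
job.id = 42
// ... do the work ...
job.id = 0             // reset the state before returning it to the cache
pool.release(job)
```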
+ +## License + +MIT + +[npm-badge]: https://badge.fury.io/js/reusify.svg +[npm-url]: https://badge.fury.io/js/reusify +[travis-badge]: https://api.travis-ci.org/mcollina/reusify.svg +[travis-url]: https://travis-ci.org/mcollina/reusify +[coveralls-badge]: https://coveralls.io/repos/mcollina/reusify/badge.svg?branch=master&service=github +[coveralls-url]: https://coveralls.io/github/mcollina/reusify?branch=master diff --git a/node_modules/reusify/benchmarks/createNoCodeFunction.js b/node_modules/reusify/benchmarks/createNoCodeFunction.js new file mode 100644 index 0000000..ce1aac7 --- /dev/null +++ b/node_modules/reusify/benchmarks/createNoCodeFunction.js @@ -0,0 +1,30 @@ +'use strict' + +var fib = require('./fib') +var max = 100000000 +var start = Date.now() + +// create a funcion with the typical error +// pattern, that delegates the heavy load +// to something else +function createNoCodeFunction () { + /* eslint no-constant-condition: "off" */ + var num = 100 + + ;(function () { + if (null) { + // do nothing + } else { + fib(num) + } + })() +} + +for (var i = 0; i < max; i++) { + createNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/benchmarks/fib.js b/node_modules/reusify/benchmarks/fib.js new file mode 100644 index 0000000..e22cc48 --- /dev/null +++ b/node_modules/reusify/benchmarks/fib.js @@ -0,0 +1,13 @@ +'use strict' + +function fib (num) { + var fib = [] + + fib[0] = 0 + fib[1] = 1 + for (var i = 2; i <= num; i++) { + fib[i] = fib[i - 2] + fib[i - 1] + } +} + +module.exports = fib diff --git a/node_modules/reusify/benchmarks/reuseNoCodeFunction.js b/node_modules/reusify/benchmarks/reuseNoCodeFunction.js new file mode 100644 index 0000000..3358d6e --- /dev/null +++ b/node_modules/reusify/benchmarks/reuseNoCodeFunction.js @@ -0,0 +1,38 @@ +'use strict' + +var reusify = require('../') +var fib = require('./fib') +var instance = reusify(MyObject) +var max = 100000000 +var start = Date.now() + +function reuseNoCodeFunction () { + var obj = instance.get() + obj.num = 100 + obj.func() + obj.num = 0 + instance.release(obj) +} + +function MyObject () { + this.next = null + var that = this + this.num = 0 + this.func = function () { + /* eslint no-constant-condition: "off" */ + if (null) { + // do nothing + } else { + fib(that.num) + } + } +} + +for (var i = 0; i < max; i++) { + reuseNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/package.json b/node_modules/reusify/package.json new file mode 100644 index 0000000..ee66aee --- /dev/null +++ b/node_modules/reusify/package.json @@ -0,0 +1,45 @@ +{ + "name": "reusify", + "version": "1.0.4", + "description": "Reuse objects and functions with style", + "main": "reusify.js", + "scripts": { + "lint": "standard", + "test": "tape test.js | faucet", + "istanbul": "istanbul cover tape test.js", + "coverage": "npm run istanbul; cat coverage/lcov.info | coveralls" + }, + "pre-commit": [ + "lint", + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/reusify.git" + }, + "keywords": [ + "reuse", + "object", + "performance", + "function", + "fast" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/mcollina/reusify/issues" + }, + "homepage": 
"https://github.com/mcollina/reusify#readme", + "engines": { + "node": ">=0.10.0", + "iojs": ">=1.0.0" + }, + "devDependencies": { + "coveralls": "^2.13.3", + "faucet": "0.0.1", + "istanbul": "^0.4.5", + "pre-commit": "^1.2.2", + "standard": "^10.0.3", + "tape": "^4.8.0" + } +} diff --git a/node_modules/reusify/reusify.js b/node_modules/reusify/reusify.js new file mode 100644 index 0000000..e6f36f3 --- /dev/null +++ b/node_modules/reusify/reusify.js @@ -0,0 +1,33 @@ +'use strict' + +function reusify (Constructor) { + var head = new Constructor() + var tail = head + + function get () { + var current = head + + if (current.next) { + head = current.next + } else { + head = new Constructor() + tail = head + } + + current.next = null + + return current + } + + function release (obj) { + tail.next = obj + tail = obj + } + + return { + get: get, + release: release + } +} + +module.exports = reusify diff --git a/node_modules/reusify/test.js b/node_modules/reusify/test.js new file mode 100644 index 0000000..929cfd7 --- /dev/null +++ b/node_modules/reusify/test.js @@ -0,0 +1,66 @@ +'use strict' + +var test = require('tape') +var reusify = require('./') + +test('reuse objects', function (t) { + t.plan(6) + + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + + t.notEqual(obj, instance.get(), 'two instance created') + t.notOk(obj.next, 'next must be null') + + instance.release(obj) + + // the internals keeps a hot copy ready for reuse + // putting this one back in the queue + instance.release(instance.get()) + + // comparing the old one with the one we got + // never do this in real code, after release you + // should never reuse that instance + t.equal(obj, instance.get(), 'instance must be reused') +}) + +test('reuse more than 2 objects', function (t) { + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + var obj2 = instance.get() + var obj3 = instance.get() + + t.notOk(obj.next, 'next must be null') + t.notOk(obj2.next, 'next must be null') + t.notOk(obj3.next, 'next must be null') + + t.notEqual(obj, obj2) + t.notEqual(obj, obj3) + t.notEqual(obj3, obj2) + + instance.release(obj) + instance.release(obj2) + instance.release(obj3) + + // skip one + instance.get() + + var obj4 = instance.get() + var obj5 = instance.get() + var obj6 = instance.get() + + t.equal(obj4, obj) + t.equal(obj5, obj2) + t.equal(obj6, obj3) + t.end() +}) diff --git a/node_modules/run-parallel/LICENSE b/node_modules/run-parallel/LICENSE new file mode 100644 index 0000000..c7e6852 --- /dev/null +++ b/node_modules/run-parallel/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/run-parallel/README.md b/node_modules/run-parallel/README.md new file mode 100644 index 0000000..edc3da4 --- /dev/null +++ b/node_modules/run-parallel/README.md @@ -0,0 +1,85 @@ +# run-parallel [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/run-parallel/master.svg +[travis-url]: https://travis-ci.org/feross/run-parallel +[npm-image]: https://img.shields.io/npm/v/run-parallel.svg +[npm-url]: https://npmjs.org/package/run-parallel +[downloads-image]: https://img.shields.io/npm/dm/run-parallel.svg +[downloads-url]: https://npmjs.org/package/run-parallel +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### Run an array of functions in parallel + +![parallel](https://raw.githubusercontent.com/feross/run-parallel/master/img.png) [![Sauce Test Status](https://saucelabs.com/browser-matrix/run-parallel.svg)](https://saucelabs.com/u/run-parallel) + +### install + +``` +npm install run-parallel +``` + +### usage + +#### parallel(tasks, [callback]) + +Run the `tasks` array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its callback, the main +`callback` is immediately called with the value of the error. Once the `tasks` have +completed, the results are passed to the final `callback` as an array. + +It is also possible to use an object instead of an array. Each property will be run as a +function and the results will be passed to the final `callback` as an object instead of +an array. This can be a more readable way of handling the results. + +##### arguments + +- `tasks` - An array or object containing functions to run. Each function is passed a +`callback(err, result)` which it must call on completion with an error `err` (which can +be `null`) and an optional `result` value. +- `callback(err, results)` - An optional callback to run once all the functions have +completed. This function gets a results array (or object) containing all the result +arguments passed to the task callbacks. + +##### example + +```js +var parallel = require('run-parallel') + +parallel([ + function (callback) { + setTimeout(function () { + callback(null, 'one') + }, 200) + }, + function (callback) { + setTimeout(function () { + callback(null, 'two') + }, 100) + } +], +// optional callback +function (err, results) { + // the results array will equal ['one','two'] even though + // the second function had a shorter timeout. +}) +``` + +This module is basically equavalent to +[`async.parallel`](https://github.com/caolan/async#paralleltasks-callback), but it's +handy to just have the one function you need instead of the kitchen sink. Modularity! +Especially handy if you're serving to the browser and need to reduce your javascript +bundle size. 
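As described above, an object of tasks works too, and the results come back keyed by property name instead of by index. A small sketch (the file names are just placeholders):

```js
var parallel = require('run-parallel')
var fs = require('fs')

parallel({
  packageJson: function (callback) {
    fs.readFile('package.json', 'utf8', callback)
  },
  readme: function (callback) {
    fs.readFile('README.md', 'utf8', callback)
  }
}, function (err, results) {
  if (err) throw err
  // results.packageJson and results.readme hold the two file contents
  console.log(Object.keys(results).sort()) // ['packageJson', 'readme']
})
```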
+ +Works great in the browser with [browserify](http://browserify.org/)! + +### see also + +- [run-auto](https://github.com/feross/run-auto) +- [run-parallel-limit](https://github.com/feross/run-parallel-limit) +- [run-series](https://github.com/feross/run-series) +- [run-waterfall](https://github.com/feross/run-waterfall) + +### license + +MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). diff --git a/node_modules/run-parallel/index.js b/node_modules/run-parallel/index.js new file mode 100644 index 0000000..6307141 --- /dev/null +++ b/node_modules/run-parallel/index.js @@ -0,0 +1,51 @@ +/*! run-parallel. MIT License. Feross Aboukhadijeh */ +module.exports = runParallel + +const queueMicrotask = require('queue-microtask') + +function runParallel (tasks, cb) { + let results, pending, keys + let isSync = true + + if (Array.isArray(tasks)) { + results = [] + pending = tasks.length + } else { + keys = Object.keys(tasks) + results = {} + pending = keys.length + } + + function done (err) { + function end () { + if (cb) cb(err, results) + cb = null + } + if (isSync) queueMicrotask(end) + else end() + } + + function each (i, err, result) { + results[i] = result + if (--pending === 0 || err) { + done(err) + } + } + + if (!pending) { + // empty + done(null) + } else if (keys) { + // object + keys.forEach(function (key) { + tasks[key](function (err, result) { each(key, err, result) }) + }) + } else { + // array + tasks.forEach(function (task, i) { + task(function (err, result) { each(i, err, result) }) + }) + } + + isSync = false +} diff --git a/node_modules/run-parallel/package.json b/node_modules/run-parallel/package.json new file mode 100644 index 0000000..1f14757 --- /dev/null +++ b/node_modules/run-parallel/package.json @@ -0,0 +1,58 @@ +{ + "name": "run-parallel", + "description": "Run an array of functions in parallel", + "version": "1.2.0", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/run-parallel/issues" + }, + "dependencies": { + "queue-microtask": "^1.2.2" + }, + "devDependencies": { + "airtap": "^3.0.0", + "standard": "*", + "tape": "^5.0.1" + }, + "homepage": "https://github.com/feross/run-parallel", + "keywords": [ + "parallel", + "async", + "function", + "callback", + "asynchronous", + "run", + "array", + "run parallel" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/run-parallel.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "airtap -- test/*.js", + "test-browser-local": "airtap --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/safe-buffer/LICENSE b/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 
+copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/safe-buffer/README.md b/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. 
+ +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. 
+ +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). 
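As a quick check of the drop-in behaviour described earlier, a number sneaking in where a string was expected now throws instead of handing back uninitialized memory. A sketch (the helper is made up; the exact error message depends on whether the built-in `Buffer.from` or the polyfill is in use):

```js
var Buffer = require('safe-buffer').Buffer

function encodeHex (str) {
  return Buffer.from(str).toString('hex')
}

console.log(encodeHex('hey')) // '686579'

try {
  encodeHex(1000) // a number where a string was expected
} catch (err) {
  console.log(err instanceof TypeError) // true – no uninitialized memory is returned
}
```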
+ + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? 
+ +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. 
Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/safe-buffer/index.d.ts b/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | 
number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. 
+ * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/safe-buffer/index.js b/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..f8d3ec9 --- /dev/null +++ b/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/safe-buffer/package.json b/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..f2869e2 --- /dev/null +++ b/node_modules/safe-buffer/package.json @@ -0,0 +1,51 @@ +{ + 
"name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/seek-bzip/LICENSE b/node_modules/seek-bzip/LICENSE new file mode 100644 index 0000000..8bfd775 --- /dev/null +++ b/node_modules/seek-bzip/LICENSE @@ -0,0 +1,26 @@ +The MIT License (MIT) + +Copyright © 2013-2015 C. Scott Ananian + +Copyright © 2012-2015 Eli Skeggs + +Copyright © 2011 Kevin Kwok + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/seek-bzip/README.md b/node_modules/seek-bzip/README.md new file mode 100644 index 0000000..9a6f368 --- /dev/null +++ b/node_modules/seek-bzip/README.md @@ -0,0 +1,185 @@ +# seek-bzip + +[![Build Status][1]][2] [![dependency status][3]][4] [![dev dependency status][5]][6] + +`seek-bzip` is a pure-javascript Node.JS module adapted from [node-bzip](https://github.com/skeggse/node-bzip) and before that [antimatter15's pure-javascript bzip2 decoder](https://github.com/antimatter15/bzip2.js). Like these projects, `seek-bzip` only does decompression (see [compressjs](https://github.com/cscott/compressjs) if you need compression code). Unlike those other projects, `seek-bzip` can seek to and decode single blocks from the bzip2 file. + +`seek-bzip` primarily decodes buffers into other buffers, synchronously. +With the help of the [fibers](https://github.com/laverdet/node-fibers) +package, it can operate on node streams; see `test/stream.js` for an +example. + +## How to Install + +``` +npm install seek-bzip +``` + +This package uses +[Typed Arrays](https://developer.mozilla.org/en-US/docs/JavaScript/Typed_arrays), which are present in node.js >= 0.5.5. 
+ +## Usage + +After compressing some example data into `example.bz2`, the following will recreate that original data and save it to `example`: + +``` +var Bunzip = require('seek-bzip'); +var fs = require('fs'); + +var compressedData = fs.readFileSync('example.bz2'); +var data = Bunzip.decode(compressedData); + +fs.writeFileSync('example', data); +``` + +See the tests in the `tests/` directory for further usage examples. + +For uncompressing single blocks of bzip2-compressed data, you will need +an out-of-band index listing the start of each bzip2 block. (Presumably +you generate this at the same time as you index the start of the information +you wish to seek to inside the compressed file.) The `seek-bzip` module +has been designed to be compatible with the C implementation `seek-bzip2` +available from https://bitbucket.org/james_taylor/seek-bzip2. That codebase +contains a `bzip-table` tool which will generate bzip2 block start indices. +There is also a pure-JavaScript `seek-bzip-table` tool in this package's +`bin` directory. + +## Documentation + +`require('seek-bzip')` returns a `Bunzip` object. It contains three static +methods. The first is a function accepting one or two parameters: + +`Bunzip.decode = function(input, [Number expectedSize] or [output], [boolean multistream])` + +The `input` argument can be a "stream" object (which must implement the +`readByte` method), or a `Buffer`. + +If `expectedSize` is not present, `decodeBzip` simply decodes `input` and +returns the resulting `Buffer`. + +If `expectedSize` is present (and numeric), `decodeBzip` will store +the results in a `Buffer` of length `expectedSize`, and throw an error +in the case that the size of the decoded data does not match +`expectedSize`. + +If you pass a non-numeric second parameter, it can either be a `Buffer` +object (which must be of the correct length; an error will be thrown if +the size of the decoded data does not match the buffer length) or +a "stream" object (which must implement a `writeByte` method). + +The optional third `multistream` parameter, if true, attempts to continue +reading past the end of the bzip2 file. This supports "multistream" +bzip2 files, which are simply multiple bzip2 files concatenated together. +If this argument is true, the input stream must have an `eof` method +which returns true when the end of the input has been reached. + +The second exported method is a function accepting two or three parameters: + +`Bunzip.decodeBlock = function(input, Number blockStartBits, [Number expectedSize] or [output])` + +The `input` and `expectedSize`/`output` parameters are as above. +The `blockStartBits` parameter gives the start of the desired block, in bits. + +If passing a stream as the `input` parameter, it must implement the +`seek` method. + +The final exported method is a function accepting two or three parameters: + +`Bunzip.table = function(input, Function callback, [boolean multistream])` + +The `input` and `multistream` parameters are identical to those for the +`decode` method. + +This function will invoke `callback(position, size)` once per bzip2 block, +where `position` gives the starting position of the block (in *bits*), and +`size` gives the uncompressed size of the block (in bytes). + +This can be used to construct an index allowing direct access to a particular +block inside a bzip2 file, using the `decodeBlock` method. + +## Command-line +There are binaries available in bin. 
The first generates an index of all +the blocks in a bzip2-compressed file: +``` +$ bin/seek-bzip-table test/sample4.bz2 +32 99981 +320555 99981 +606348 99981 +847568 99981 +1089094 99981 +1343625 99981 +1596228 99981 +1843336 99981 +2090919 99981 +2342106 39019 +$ +``` +The first field is the starting position of the block, in bits, and the +second field is the length of the block, in bytes. + +The second binary decodes an arbitrary block of a bzip2 file: +``` +$ bin/seek-bunzip -d -b 2342106 test/sample4.bz2 | tail +élan's +émigré +émigré's +émigrés +épée +épée's +épées +étude +étude's +études +$ +``` + +Use `--help` to see other options. + +## Help wanted + +Improvements to this module would be generally useful. +Feel free to fork on github and submit pull requests! + +## Related projects + +* https://github.com/skeggse/node-bzip node-bzip (original upstream source) +* https://github.com/cscott/compressjs + Lots of compression/decompression algorithms from the same author as this + module, including bzip2 compression code. +* https://github.com/cscott/lzjb fast LZJB compression/decompression + +## License + +#### MIT License + +> Copyright © 2013-2015 C. Scott Ananian +> +> Copyright © 2012-2015 Eli Skeggs +> +> Copyright © 2011 Kevin Kwok +> +> Permission is hereby granted, free of charge, to any person obtaining +> a copy of this software and associated documentation files (the +> "Software"), to deal in the Software without restriction, including +> without limitation the rights to use, copy, modify, merge, publish, +> distribute, sublicense, and/or sell copies of the Software, and to +> permit persons to whom the Software is furnished to do so, subject to +> the following conditions: +> +> The above copyright notice and this permission notice shall be +> included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +> NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +> LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +> OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +> WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
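The Documentation section above describes `Bunzip.table` and `Bunzip.decodeBlock` only in prose; the following minimal sketch (an untested illustration with a hypothetical input file name) shows how the two are typically combined for random access:

```
var Bunzip = require('seek-bzip');
var fs = require('fs');

var compressed = fs.readFileSync('example.bz2'); // hypothetical input file

// Build the out-of-band block index: one (bit position, uncompressed size)
// pair per bzip2 block, as reported by Bunzip.table.
var blocks = [];
Bunzip.table(compressed, function (position, size) {
  blocks.push({ position: position, size: size });
});

// Decode only the last block, without decompressing the rest of the file.
var last = blocks[blocks.length - 1];
var data = Bunzip.decodeBlock(compressed, last.position, last.size);
```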
+ +[1]: https://travis-ci.org/cscott/seek-bzip.png +[2]: https://travis-ci.org/cscott/seek-bzip +[3]: https://david-dm.org/cscott/seek-bzip.png +[4]: https://david-dm.org/cscott/seek-bzip +[5]: https://david-dm.org/cscott/seek-bzip/dev-status.png +[6]: https://david-dm.org/cscott/seek-bzip#info=devDependencies diff --git a/node_modules/seek-bzip/bin/seek-bunzip b/node_modules/seek-bzip/bin/seek-bunzip new file mode 100755 index 0000000..da117fc --- /dev/null +++ b/node_modules/seek-bzip/bin/seek-bunzip @@ -0,0 +1,129 @@ +#!/usr/bin/env node + +var program = require('commander'); +var Bunzip = require('../'); +var fs = require('fs'); + +program + .version(Bunzip.version) + .usage('-d|-z [infile] [outfile]') + .option('-d, --decompress', + 'Decompress stdin to stdout') + //.option('-z, --compress', + // 'Compress stdin to stdout') + .option('-b, --block ', + 'Extract a single block, starting at bits.', undefined) + .option('-m, --multistream', + 'Read a multistream bzip2 file'); +program.on('--help', function() { + console.log(' If is omitted, reads from stdin.'); + console.log(' If is omitted, writes to stdout.'); +}); +program.parse(process.argv); + +if (!program.compress) { program.decompress = true; } + +if (program.compress && program.block !== undefined) { + console.error('--block can only be used with decompression'); + return 1; +} + +if (program.decompress && program.compress) { + console.error('Must specify either -d or -z.'); + return 1; +} + +var makeInStream = function(in_fd) { + var stat = fs.fstatSync(in_fd); + var stream = { + buffer: new Buffer(4096), + filePos: null, + pos: 0, + end: 0, + _fillBuffer: function() { + this.end = fs.readSync(in_fd, this.buffer, 0, this.buffer.length, + this.filePos); + this.pos = 0; + if (this.filePos !== null && this.end > 0) { + this.filePos += this.end; + } + }, + readByte: function() { + if (this.pos >= this.end) { this._fillBuffer(); } + if (this.pos < this.end) { + return this.buffer[this.pos++]; + } + return -1; + }, + read: function(buffer, bufOffset, length) { + if (this.pos >= this.end) { this._fillBuffer(); } + var bytesRead = 0; + while (bytesRead < length && this.pos < this.end) { + buffer[bufOffset++] = this.buffer[this.pos++]; + bytesRead++; + } + return bytesRead; + }, + seek: function(seek_pos) { + this.filePos = seek_pos; + this.pos = this.end = 0; + }, + eof: function() { + if (this.pos >= this.end) { this._fillBuffer(); } + return !(this.pos < this.end); + } + }; + if (stat.size) { + stream.size = stat.size; + } + return stream; +}; +var makeOutStream = function(out_fd) { + return { + buffer: new Buffer(4096), + pos: 0, + flush: function() { + fs.writeSync(out_fd, this.buffer, 0, this.pos); + this.pos = 0; + }, + writeByte: function(byte) { + if (this.pos >= this.buffer.length) { this.flush(); } + this.buffer[this.pos++] = byte; + } + }; +}; + +var in_fd = 0, close_in = function(){}; +var out_fd = 1, close_out = function(){}; +if (program.args.length > 0) { + in_fd = fs.openSync(program.args.shift(), 'r'); + close_in = function() { fs.closeSync(in_fd); }; +} +if (program.args.length > 0) { + out_fd = fs.openSync(program.args.shift(), 'w'); + close_out = function() { fs.closeSync(out_fd); }; +} + +var inStream = makeInStream(in_fd); +var outStream= makeOutStream(out_fd); + +if (program.decompress) { + try { + if (program.block !== undefined) { + Bunzip.decodeBlock(inStream, +program.block, outStream); + } else { + Bunzip.decode(inStream, outStream, program.multistream); + } + outStream.flush(); + } catch (e) { + if (e.code 
!== 'EPIPE') throw e; + } + close_in(); + close_out(); + return 0; +} +if (program.compress) { + console.error('Compression not yet implemented.'); + return 1; +} +return 1; diff --git a/node_modules/seek-bzip/bin/seek-bzip-table b/node_modules/seek-bzip/bin/seek-bzip-table new file mode 100755 index 0000000..9d852b3 --- /dev/null +++ b/node_modules/seek-bzip/bin/seek-bzip-table @@ -0,0 +1,71 @@ +#!/usr/bin/env node + +var program = require('commander'); +var Bunzip = require('../'); +var fs = require('fs'); + +program + .version(Bunzip.version) + .usage('[infile]') + .option('-m, --multistream', + 'Read a multistream bzip2 file'); +program.on('--help', function() { + console.log(' If is omitted, reads from stdin.'); +}); +program.parse(process.argv); + +var makeInStream = function(in_fd) { + var stat = fs.fstatSync(in_fd); + var stream = { + buffer: new Buffer(4096), + totalPos: 0, + pos: 0, + end: 0, + _fillBuffer: function() { + this.end = fs.readSync(in_fd, this.buffer, 0, this.buffer.length); + this.pos = 0; + }, + readByte: function() { + if (this.pos >= this.end) { this._fillBuffer(); } + if (this.pos < this.end) { + this.totalPos++; + return this.buffer[this.pos++]; + } + return -1; + }, + read: function(buffer, bufOffset, length) { + if (this.pos >= this.end) { this._fillBuffer(); } + var bytesRead = 0; + while (bytesRead < length && this.pos < this.end) { + buffer[bufOffset++] = this.buffer[this.pos++]; + bytesRead++; + } + this.totalPos += bytesRead; + return bytesRead; + }, + eof: function() { + if (this.pos >= this.end) { this._fillBuffer(); } + return !(this.pos < this.end); + } + }; + if (stat.size) { + stream.size = stat.size; + } + return stream; +}; + +var in_fd = 0, close_in = function(){}; +if (program.args.length > 0) { + in_fd = fs.openSync(program.args.shift(), 'r'); + close_in = function() { fs.closeSync(in_fd); }; +} + +var inStream = makeInStream(in_fd); + +var report = function(position, blocksize) { + console.log(position+'\t'+blocksize); +}; + +Bunzip.table(inStream, report, program.multistream); +close_in(); +return 0; diff --git a/node_modules/seek-bzip/lib/bitreader.js b/node_modules/seek-bzip/lib/bitreader.js new file mode 100644 index 0000000..7bcb396 --- /dev/null +++ b/node_modules/seek-bzip/lib/bitreader.js @@ -0,0 +1,94 @@ +/* +node-bzip - a pure-javascript Node.JS module for decoding bzip2 data + +Copyright (C) 2012 Eli Skeggs + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Adapted from bzip2.js, copyright 2011 antimatter15 (antimatter15@gmail.com). 
+ +Based on micro-bunzip by Rob Landley (rob@landley.net). + +Based on bzip2 decompression code by Julian R Seward (jseward@acm.org), +which also acknowledges contributions by Mike Burrows, David Wheeler, +Peter Fenwick, Alistair Moffat, Radford Neal, Ian H. Witten, +Robert Sedgewick, and Jon L. Bentley. +*/ + +var BITMASK = [0x00, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF]; + +// offset in bytes +var BitReader = function(stream) { + this.stream = stream; + this.bitOffset = 0; + this.curByte = 0; + this.hasByte = false; +}; + +BitReader.prototype._ensureByte = function() { + if (!this.hasByte) { + this.curByte = this.stream.readByte(); + this.hasByte = true; + } +}; + +// reads bits from the buffer +BitReader.prototype.read = function(bits) { + var result = 0; + while (bits > 0) { + this._ensureByte(); + var remaining = 8 - this.bitOffset; + // if we're in a byte + if (bits >= remaining) { + result <<= remaining; + result |= BITMASK[remaining] & this.curByte; + this.hasByte = false; + this.bitOffset = 0; + bits -= remaining; + } else { + result <<= bits; + var shift = remaining - bits; + result |= (this.curByte & (BITMASK[bits] << shift)) >> shift; + this.bitOffset += bits; + bits = 0; + } + } + return result; +}; + +// seek to an arbitrary point in the buffer (expressed in bits) +BitReader.prototype.seek = function(pos) { + var n_bit = pos % 8; + var n_byte = (pos - n_bit) / 8; + this.bitOffset = n_bit; + this.stream.seek(n_byte); + this.hasByte = false; +}; + +// reads 6 bytes worth of data using the read method +BitReader.prototype.pi = function() { + var buf = new Buffer(6), i; + for (i = 0; i < buf.length; i++) { + buf[i] = this.read(8); + } + return buf.toString('hex'); +}; + +module.exports = BitReader; diff --git a/node_modules/seek-bzip/lib/crc32.js b/node_modules/seek-bzip/lib/crc32.js new file mode 100644 index 0000000..8c9a169 --- /dev/null +++ b/node_modules/seek-bzip/lib/crc32.js @@ -0,0 +1,104 @@ +/* CRC32, used in Bzip2 implementation. + * This is a port of CRC32.java from the jbzip2 implementation at + * https://code.google.com/p/jbzip2 + * which is: + * Copyright (c) 2011 Matthew Francis + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + * OTHER DEALINGS IN THE SOFTWARE. + * This JavaScript implementation is: + * Copyright (c) 2013 C. Scott Ananian + * with the same licensing terms as Matthew Francis' original implementation. 
+ */ +module.exports = (function() { + + /** + * A static CRC lookup table + */ + var crc32Lookup = new Uint32Array([ + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, + 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 + ]); + + var CRC32 = function() { + /** + * The current CRC + */ + var crc = 0xffffffff; + + /** + * @return The current CRC + */ + this.getCRC = function() { + return (~crc) >>> 0; // return an unsigned value + }; + + /** + * Update the CRC with a single byte + * @param value The value to update 
the CRC with + */ + this.updateCRC = function(value) { + crc = (crc << 8) ^ crc32Lookup[((crc >>> 24) ^ value) & 0xff]; + }; + + /** + * Update the CRC with a sequence of identical bytes + * @param value The value to update the CRC with + * @param count The number of bytes + */ + this.updateCRCRun = function(value, count) { + while (count-- > 0) { + crc = (crc << 8) ^ crc32Lookup[((crc >>> 24) ^ value) & 0xff]; + } + }; + }; + return CRC32; +})(); diff --git a/node_modules/seek-bzip/lib/index.js b/node_modules/seek-bzip/lib/index.js new file mode 100644 index 0000000..f3bab49 --- /dev/null +++ b/node_modules/seek-bzip/lib/index.js @@ -0,0 +1,605 @@ +/* +seek-bzip - a pure-javascript module for seeking within bzip2 data + +Copyright (C) 2013 C. Scott Ananian +Copyright (C) 2012 Eli Skeggs +Copyright (C) 2011 Kevin Kwok + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Adapted from node-bzip, copyright 2012 Eli Skeggs. +Adapted from bzip2.js, copyright 2011 Kevin Kwok (antimatter15@gmail.com). + +Based on micro-bunzip by Rob Landley (rob@landley.net). + +Based on bzip2 decompression code by Julian R Seward (jseward@acm.org), +which also acknowledges contributions by Mike Burrows, David Wheeler, +Peter Fenwick, Alistair Moffat, Radford Neal, Ian H. Witten, +Robert Sedgewick, and Jon L. Bentley. 
+*/ + +var BitReader = require('./bitreader'); +var Stream = require('./stream'); +var CRC32 = require('./crc32'); +var pjson = require('../package.json'); + +var MAX_HUFCODE_BITS = 20; +var MAX_SYMBOLS = 258; +var SYMBOL_RUNA = 0; +var SYMBOL_RUNB = 1; +var MIN_GROUPS = 2; +var MAX_GROUPS = 6; +var GROUP_SIZE = 50; + +var WHOLEPI = "314159265359"; +var SQRTPI = "177245385090"; + +var mtf = function(array, index) { + var src = array[index], i; + for (i = index; i > 0; i--) { + array[i] = array[i-1]; + } + array[0] = src; + return src; +}; + +var Err = { + OK: 0, + LAST_BLOCK: -1, + NOT_BZIP_DATA: -2, + UNEXPECTED_INPUT_EOF: -3, + UNEXPECTED_OUTPUT_EOF: -4, + DATA_ERROR: -5, + OUT_OF_MEMORY: -6, + OBSOLETE_INPUT: -7, + END_OF_BLOCK: -8 +}; +var ErrorMessages = {}; +ErrorMessages[Err.LAST_BLOCK] = "Bad file checksum"; +ErrorMessages[Err.NOT_BZIP_DATA] = "Not bzip data"; +ErrorMessages[Err.UNEXPECTED_INPUT_EOF] = "Unexpected input EOF"; +ErrorMessages[Err.UNEXPECTED_OUTPUT_EOF] = "Unexpected output EOF"; +ErrorMessages[Err.DATA_ERROR] = "Data error"; +ErrorMessages[Err.OUT_OF_MEMORY] = "Out of memory"; +ErrorMessages[Err.OBSOLETE_INPUT] = "Obsolete (pre 0.9.5) bzip format not supported."; + +var _throw = function(status, optDetail) { + var msg = ErrorMessages[status] || 'unknown error'; + if (optDetail) { msg += ': '+optDetail; } + var e = new TypeError(msg); + e.errorCode = status; + throw e; +}; + +var Bunzip = function(inputStream, outputStream) { + this.writePos = this.writeCurrent = this.writeCount = 0; + + this._start_bunzip(inputStream, outputStream); +}; +Bunzip.prototype._init_block = function() { + var moreBlocks = this._get_next_block(); + if ( !moreBlocks ) { + this.writeCount = -1; + return false; /* no more blocks */ + } + this.blockCRC = new CRC32(); + return true; +}; +/* XXX micro-bunzip uses (inputStream, inputBuffer, len) as arguments */ +Bunzip.prototype._start_bunzip = function(inputStream, outputStream) { + /* Ensure that file starts with "BZh['1'-'9']." */ + var buf = new Buffer(4); + if (inputStream.read(buf, 0, 4) !== 4 || + String.fromCharCode(buf[0], buf[1], buf[2]) !== 'BZh') + _throw(Err.NOT_BZIP_DATA, 'bad magic'); + + var level = buf[3] - 0x30; + if (level < 1 || level > 9) + _throw(Err.NOT_BZIP_DATA, 'level out of range'); + + this.reader = new BitReader(inputStream); + + /* Fourth byte (ascii '1'-'9'), indicates block size in units of 100k of + uncompressed data. Allocate intermediate buffer for block. */ + this.dbufSize = 100000 * level; + this.nextoutput = 0; + this.outputStream = outputStream; + this.streamCRC = 0; +}; +Bunzip.prototype._get_next_block = function() { + var i, j, k; + var reader = this.reader; + // this is get_next_block() function from micro-bunzip: + /* Read in header signature and CRC, then validate signature. + (last block signature means CRC is for whole file, return now) */ + var h = reader.pi(); + if (h === SQRTPI) { // last block + return false; /* no more blocks */ + } + if (h !== WHOLEPI) + _throw(Err.NOT_BZIP_DATA); + this.targetBlockCRC = reader.read(32) >>> 0; // (convert to unsigned) + this.streamCRC = (this.targetBlockCRC ^ + ((this.streamCRC << 1) | (this.streamCRC>>>31))) >>> 0; + /* We can add support for blockRandomised if anybody complains. There was + some code for this in busybox 1.0.0-pre3, but nobody ever noticed that + it didn't actually work. 
*/ + if (reader.read(1)) + _throw(Err.OBSOLETE_INPUT); + var origPointer = reader.read(24); + if (origPointer > this.dbufSize) + _throw(Err.DATA_ERROR, 'initial position out of bounds'); + /* mapping table: if some byte values are never used (encoding things + like ascii text), the compression code removes the gaps to have fewer + symbols to deal with, and writes a sparse bitfield indicating which + values were present. We make a translation table to convert the symbols + back to the corresponding bytes. */ + var t = reader.read(16); + var symToByte = new Buffer(256), symTotal = 0; + for (i = 0; i < 16; i++) { + if (t & (1 << (0xF - i))) { + var o = i * 16; + k = reader.read(16); + for (j = 0; j < 16; j++) + if (k & (1 << (0xF - j))) + symToByte[symTotal++] = o + j; + } + } + + /* How many different huffman coding groups does this block use? */ + var groupCount = reader.read(3); + if (groupCount < MIN_GROUPS || groupCount > MAX_GROUPS) + _throw(Err.DATA_ERROR); + /* nSelectors: Every GROUP_SIZE many symbols we select a new huffman coding + group. Read in the group selector list, which is stored as MTF encoded + bit runs. (MTF=Move To Front, as each value is used it's moved to the + start of the list.) */ + var nSelectors = reader.read(15); + if (nSelectors === 0) + _throw(Err.DATA_ERROR); + + var mtfSymbol = new Buffer(256); + for (i = 0; i < groupCount; i++) + mtfSymbol[i] = i; + + var selectors = new Buffer(nSelectors); // was 32768... + + for (i = 0; i < nSelectors; i++) { + /* Get next value */ + for (j = 0; reader.read(1); j++) + if (j >= groupCount) _throw(Err.DATA_ERROR); + /* Decode MTF to get the next selector */ + selectors[i] = mtf(mtfSymbol, j); + } + + /* Read the huffman coding tables for each group, which code for symTotal + literal symbols, plus two run symbols (RUNA, RUNB) */ + var symCount = symTotal + 2; + var groups = [], hufGroup; + for (j = 0; j < groupCount; j++) { + var length = new Buffer(symCount), temp = new Uint16Array(MAX_HUFCODE_BITS + 1); + /* Read huffman code lengths for each symbol. They're stored in + a way similar to mtf; record a starting value for the first symbol, + and an offset from the previous value for everys symbol after that. */ + t = reader.read(5); // lengths + for (i = 0; i < symCount; i++) { + for (;;) { + if (t < 1 || t > MAX_HUFCODE_BITS) _throw(Err.DATA_ERROR); + /* If first bit is 0, stop. Else second bit indicates whether + to increment or decrement the value. */ + if(!reader.read(1)) + break; + if(!reader.read(1)) + t++; + else + t--; + } + length[i] = t; + } + + /* Find largest and smallest lengths in this group */ + var minLen, maxLen; + minLen = maxLen = length[0]; + for (i = 1; i < symCount; i++) { + if (length[i] > maxLen) + maxLen = length[i]; + else if (length[i] < minLen) + minLen = length[i]; + } + + /* Calculate permute[], base[], and limit[] tables from length[]. + * + * permute[] is the lookup table for converting huffman coded symbols + * into decoded symbols. base[] is the amount to subtract from the + * value of a huffman symbol of a given length when using permute[]. + * + * limit[] indicates the largest numerical value a symbol with a given + * number of bits can have. This is how the huffman codes can vary in + * length: each code with a value>limit[length] needs another bit. 
+ */ + hufGroup = {}; + groups.push(hufGroup); + hufGroup.permute = new Uint16Array(MAX_SYMBOLS); + hufGroup.limit = new Uint32Array(MAX_HUFCODE_BITS + 2); + hufGroup.base = new Uint32Array(MAX_HUFCODE_BITS + 1); + hufGroup.minLen = minLen; + hufGroup.maxLen = maxLen; + /* Calculate permute[]. Concurently, initialize temp[] and limit[]. */ + var pp = 0; + for (i = minLen; i <= maxLen; i++) { + temp[i] = hufGroup.limit[i] = 0; + for (t = 0; t < symCount; t++) + if (length[t] === i) + hufGroup.permute[pp++] = t; + } + /* Count symbols coded for at each bit length */ + for (i = 0; i < symCount; i++) + temp[length[i]]++; + /* Calculate limit[] (the largest symbol-coding value at each bit + * length, which is (previous limit<<1)+symbols at this level), and + * base[] (number of symbols to ignore at each bit length, which is + * limit minus the cumulative count of symbols coded for already). */ + pp = t = 0; + for (i = minLen; i < maxLen; i++) { + pp += temp[i]; + /* We read the largest possible symbol size and then unget bits + after determining how many we need, and those extra bits could + be set to anything. (They're noise from future symbols.) At + each level we're really only interested in the first few bits, + so here we set all the trailing to-be-ignored bits to 1 so they + don't affect the value>limit[length] comparison. */ + hufGroup.limit[i] = pp - 1; + pp <<= 1; + t += temp[i]; + hufGroup.base[i + 1] = pp - t; + } + hufGroup.limit[maxLen + 1] = Number.MAX_VALUE; /* Sentinal value for reading next sym. */ + hufGroup.limit[maxLen] = pp + temp[maxLen] - 1; + hufGroup.base[minLen] = 0; + } + /* We've finished reading and digesting the block header. Now read this + block's huffman coded symbols from the file and undo the huffman coding + and run length encoding, saving the result into dbuf[dbufCount++]=uc */ + + /* Initialize symbol occurrence counters and symbol Move To Front table */ + var byteCount = new Uint32Array(256); + for (i = 0; i < 256; i++) + mtfSymbol[i] = i; + /* Loop through compressed symbols. */ + var runPos = 0, dbufCount = 0, selector = 0, uc; + var dbuf = this.dbuf = new Uint32Array(this.dbufSize); + symCount = 0; + for (;;) { + /* Determine which huffman coding group to use. */ + if (!(symCount--)) { + symCount = GROUP_SIZE - 1; + if (selector >= nSelectors) { _throw(Err.DATA_ERROR); } + hufGroup = groups[selectors[selector++]]; + } + /* Read next huffman-coded symbol. */ + i = hufGroup.minLen; + j = reader.read(i); + for (;;i++) { + if (i > hufGroup.maxLen) { _throw(Err.DATA_ERROR); } + if (j <= hufGroup.limit[i]) + break; + j = (j << 1) | reader.read(1); + } + /* Huffman decode value to get nextSym (with bounds checking) */ + j -= hufGroup.base[i]; + if (j < 0 || j >= MAX_SYMBOLS) { _throw(Err.DATA_ERROR); } + var nextSym = hufGroup.permute[j]; + /* We have now decoded the symbol, which indicates either a new literal + byte, or a repeated run of the most recent literal byte. First, + check if nextSym indicates a repeated run, and if so loop collecting + how many times to repeat the last literal. */ + if (nextSym === SYMBOL_RUNA || nextSym === SYMBOL_RUNB) { + /* If this is the start of a new run, zero out counter */ + if (!runPos){ + runPos = 1; + t = 0; + } + /* Neat trick that saves 1 symbol: instead of or-ing 0 or 1 at + each bit position, add 1 or 2 instead. For example, + 1011 is 1<<0 + 1<<1 + 2<<2. 1010 is 2<<0 + 2<<1 + 1<<2. 
+ You can make any bit pattern that way using 1 less symbol than + the basic or 0/1 method (except all bits 0, which would use no + symbols, but a run of length 0 doesn't mean anything in this + context). Thus space is saved. */ + if (nextSym === SYMBOL_RUNA) + t += runPos; + else + t += 2 * runPos; + runPos <<= 1; + continue; + } + /* When we hit the first non-run symbol after a run, we now know + how many times to repeat the last literal, so append that many + copies to our buffer of decoded symbols (dbuf) now. (The last + literal used is the one at the head of the mtfSymbol array.) */ + if (runPos){ + runPos = 0; + if (dbufCount + t > this.dbufSize) { _throw(Err.DATA_ERROR); } + uc = symToByte[mtfSymbol[0]]; + byteCount[uc] += t; + while (t--) + dbuf[dbufCount++] = uc; + } + /* Is this the terminating symbol? */ + if (nextSym > symTotal) + break; + /* At this point, nextSym indicates a new literal character. Subtract + one to get the position in the MTF array at which this literal is + currently to be found. (Note that the result can't be -1 or 0, + because 0 and 1 are RUNA and RUNB. But another instance of the + first symbol in the mtf array, position 0, would have been handled + as part of a run above. Therefore 1 unused mtf position minus + 2 non-literal nextSym values equals -1.) */ + if (dbufCount >= this.dbufSize) { _throw(Err.DATA_ERROR); } + i = nextSym - 1; + uc = mtf(mtfSymbol, i); + uc = symToByte[uc]; + /* We have our literal byte. Save it into dbuf. */ + byteCount[uc]++; + dbuf[dbufCount++] = uc; + } + /* At this point, we've read all the huffman-coded symbols (and repeated + runs) for this block from the input stream, and decoded them into the + intermediate buffer. There are dbufCount many decoded bytes in dbuf[]. + Now undo the Burrows-Wheeler transform on dbuf. + See http://dogma.net/markn/articles/bwt/bwt.htm + */ + if (origPointer < 0 || origPointer >= dbufCount) { _throw(Err.DATA_ERROR); } + /* Turn byteCount into cumulative occurrence counts of 0 to n-1. */ + j = 0; + for (i = 0; i < 256; i++) { + k = j + byteCount[i]; + byteCount[i] = j; + j = k; + } + /* Figure out what order dbuf would be in if we sorted it. */ + for (i = 0; i < dbufCount; i++) { + uc = dbuf[i] & 0xff; + dbuf[byteCount[uc]] |= (i << 8); + byteCount[uc]++; + } + /* Decode first byte by hand to initialize "previous" byte. Note that it + doesn't get output, and if the first three characters are identical + it doesn't qualify as a run (hence writeRunCountdown=5). */ + var pos = 0, current = 0, run = 0; + if (dbufCount) { + pos = dbuf[origPointer]; + current = (pos & 0xff); + pos >>= 8; + run = -1; + } + this.writePos = pos; + this.writeCurrent = current; + this.writeCount = dbufCount; + this.writeRun = run; + + return true; /* more blocks to come */ +}; +/* Undo burrows-wheeler transform on intermediate buffer to produce output. + If start_bunzip was initialized with out_fd=-1, then up to len bytes of + data are written to outbuf. Return value is number of bytes written or + error (all errors are negative numbers). If out_fd!=-1, outbuf and len + are ignored, data is written to out_fd and return is RETVAL_OK or error. +*/ +Bunzip.prototype._read_bunzip = function(outputBuffer, len) { + var copies, previous, outbyte; + /* james@jamestaylor.org: writeCount goes to -1 when the buffer is fully + decoded, which results in this returning RETVAL_LAST_BLOCK, also + equal to -1... 
Confusing, I'm returning 0 here to indicate no + bytes written into the buffer */ + if (this.writeCount < 0) { return 0; } + + var gotcount = 0; + var dbuf = this.dbuf, pos = this.writePos, current = this.writeCurrent; + var dbufCount = this.writeCount, outputsize = this.outputsize; + var run = this.writeRun; + + while (dbufCount) { + dbufCount--; + previous = current; + pos = dbuf[pos]; + current = pos & 0xff; + pos >>= 8; + if (run++ === 3){ + copies = current; + outbyte = previous; + current = -1; + } else { + copies = 1; + outbyte = current; + } + this.blockCRC.updateCRCRun(outbyte, copies); + while (copies--) { + this.outputStream.writeByte(outbyte); + this.nextoutput++; + } + if (current != previous) + run = 0; + } + this.writeCount = dbufCount; + // check CRC + if (this.blockCRC.getCRC() !== this.targetBlockCRC) { + _throw(Err.DATA_ERROR, "Bad block CRC "+ + "(got "+this.blockCRC.getCRC().toString(16)+ + " expected "+this.targetBlockCRC.toString(16)+")"); + } + return this.nextoutput; +}; + +var coerceInputStream = function(input) { + if ('readByte' in input) { return input; } + var inputStream = new Stream(); + inputStream.pos = 0; + inputStream.readByte = function() { return input[this.pos++]; }; + inputStream.seek = function(pos) { this.pos = pos; }; + inputStream.eof = function() { return this.pos >= input.length; }; + return inputStream; +}; +var coerceOutputStream = function(output) { + var outputStream = new Stream(); + var resizeOk = true; + if (output) { + if (typeof(output)==='number') { + outputStream.buffer = new Buffer(output); + resizeOk = false; + } else if ('writeByte' in output) { + return output; + } else { + outputStream.buffer = output; + resizeOk = false; + } + } else { + outputStream.buffer = new Buffer(16384); + } + outputStream.pos = 0; + outputStream.writeByte = function(_byte) { + if (resizeOk && this.pos >= this.buffer.length) { + var newBuffer = new Buffer(this.buffer.length*2); + this.buffer.copy(newBuffer); + this.buffer = newBuffer; + } + this.buffer[this.pos++] = _byte; + }; + outputStream.getBuffer = function() { + // trim buffer + if (this.pos !== this.buffer.length) { + if (!resizeOk) + throw new TypeError('outputsize does not match decoded input'); + var newBuffer = new Buffer(this.pos); + this.buffer.copy(newBuffer, 0, 0, this.pos); + this.buffer = newBuffer; + } + return this.buffer; + }; + outputStream._coerced = true; + return outputStream; +}; + +/* Static helper functions */ +Bunzip.Err = Err; +// 'input' can be a stream or a buffer +// 'output' can be a stream or a buffer or a number (buffer size) +Bunzip.decode = function(input, output, multistream) { + // make a stream from a buffer, if necessary + var inputStream = coerceInputStream(input); + var outputStream = coerceOutputStream(output); + + var bz = new Bunzip(inputStream, outputStream); + while (true) { + if ('eof' in inputStream && inputStream.eof()) break; + if (bz._init_block()) { + bz._read_bunzip(); + } else { + var targetStreamCRC = bz.reader.read(32) >>> 0; // (convert to unsigned) + if (targetStreamCRC !== bz.streamCRC) { + _throw(Err.DATA_ERROR, "Bad stream CRC "+ + "(got "+bz.streamCRC.toString(16)+ + " expected "+targetStreamCRC.toString(16)+")"); + } + if (multistream && + 'eof' in inputStream && + !inputStream.eof()) { + // note that start_bunzip will also resync the bit reader to next byte + bz._start_bunzip(inputStream, outputStream); + } else break; + } + } + if ('getBuffer' in outputStream) + return outputStream.getBuffer(); +}; +Bunzip.decodeBlock = function(input, 
pos, output) { + // make a stream from a buffer, if necessary + var inputStream = coerceInputStream(input); + var outputStream = coerceOutputStream(output); + var bz = new Bunzip(inputStream, outputStream); + bz.reader.seek(pos); + /* Fill the decode buffer for the block */ + var moreBlocks = bz._get_next_block(); + if (moreBlocks) { + /* Init the CRC for writing */ + bz.blockCRC = new CRC32(); + + /* Zero this so the current byte from before the seek is not written */ + bz.writeCopies = 0; + + /* Decompress the block and write to stdout */ + bz._read_bunzip(); + // XXX keep writing? + } + if ('getBuffer' in outputStream) + return outputStream.getBuffer(); +}; +/* Reads bzip2 file from stream or buffer `input`, and invoke + * `callback(position, size)` once for each bzip2 block, + * where position gives the starting position (in *bits*) + * and size gives uncompressed size of the block (in *bytes*). */ +Bunzip.table = function(input, callback, multistream) { + // make a stream from a buffer, if necessary + var inputStream = new Stream(); + inputStream.delegate = coerceInputStream(input); + inputStream.pos = 0; + inputStream.readByte = function() { + this.pos++; + return this.delegate.readByte(); + }; + if (inputStream.delegate.eof) { + inputStream.eof = inputStream.delegate.eof.bind(inputStream.delegate); + } + var outputStream = new Stream(); + outputStream.pos = 0; + outputStream.writeByte = function() { this.pos++; }; + + var bz = new Bunzip(inputStream, outputStream); + var blockSize = bz.dbufSize; + while (true) { + if ('eof' in inputStream && inputStream.eof()) break; + + var position = inputStream.pos*8 + bz.reader.bitOffset; + if (bz.reader.hasByte) { position -= 8; } + + if (bz._init_block()) { + var start = outputStream.pos; + bz._read_bunzip(); + callback(position, outputStream.pos - start); + } else { + var crc = bz.reader.read(32); // (but we ignore the crc) + if (multistream && + 'eof' in inputStream && + !inputStream.eof()) { + // note that start_bunzip will also resync the bit reader to next byte + bz._start_bunzip(inputStream, outputStream); + console.assert(bz.dbufSize === blockSize, + "shouldn't change block size within multistream file"); + } else break; + } + } +}; + +Bunzip.Stream = Stream; + +Bunzip.version = pjson.version; +Bunzip.license = pjson.license; + +module.exports = Bunzip; diff --git a/node_modules/seek-bzip/lib/stream.js b/node_modules/seek-bzip/lib/stream.js new file mode 100644 index 0000000..90f6155 --- /dev/null +++ b/node_modules/seek-bzip/lib/stream.js @@ -0,0 +1,42 @@ +/* very simple input/output stream interface */ +var Stream = function() { +}; + +// input streams ////////////// +/** Returns the next byte, or -1 for EOF. */ +Stream.prototype.readByte = function() { + throw new Error("abstract method readByte() not implemented"); +}; +/** Attempts to fill the buffer; returns number of bytes read, or + * -1 for EOF. */ +Stream.prototype.read = function(buffer, bufOffset, length) { + var bytesRead = 0; + while (bytesRead < length) { + var c = this.readByte(); + if (c < 0) { // EOF + return (bytesRead===0) ? 
-1 : bytesRead; + } + buffer[bufOffset++] = c; + bytesRead++; + } + return bytesRead; +}; +Stream.prototype.seek = function(new_pos) { + throw new Error("abstract method seek() not implemented"); +}; + +// output streams /////////// +Stream.prototype.writeByte = function(_byte) { + throw new Error("abstract method readByte() not implemented"); +}; +Stream.prototype.write = function(buffer, bufOffset, length) { + var i; + for (i=0; i=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +You can also just load the module for the function that you care about if +you'd like to minimize your footprint. + +```js +// load the whole API at once in a single object +const semver = require('semver') + +// or just load the bits you need +// all of them listed here, just pick and choose what you want + +// classes +const SemVer = require('semver/classes/semver') +const Comparator = require('semver/classes/comparator') +const Range = require('semver/classes/range') + +// functions for working with versions +const semverParse = require('semver/functions/parse') +const semverValid = require('semver/functions/valid') +const semverClean = require('semver/functions/clean') +const semverInc = require('semver/functions/inc') +const semverDiff = require('semver/functions/diff') +const semverMajor = require('semver/functions/major') +const semverMinor = require('semver/functions/minor') +const semverPatch = require('semver/functions/patch') +const semverPrerelease = require('semver/functions/prerelease') +const semverCompare = require('semver/functions/compare') +const semverRcompare = require('semver/functions/rcompare') +const semverCompareLoose = require('semver/functions/compare-loose') +const semverCompareBuild = require('semver/functions/compare-build') +const semverSort = require('semver/functions/sort') +const semverRsort = require('semver/functions/rsort') + +// low-level comparators between versions +const semverGt = require('semver/functions/gt') +const semverLt = require('semver/functions/lt') +const semverEq = require('semver/functions/eq') +const semverNeq = require('semver/functions/neq') +const semverGte = require('semver/functions/gte') +const semverLte = require('semver/functions/lte') +const semverCmp = require('semver/functions/cmp') +const semverCoerce = require('semver/functions/coerce') + +// working with ranges +const semverSatisfies = require('semver/functions/satisfies') +const semverMaxSatisfying = require('semver/ranges/max-satisfying') +const semverMinSatisfying = require('semver/ranges/min-satisfying') +const semverToComparators = require('semver/ranges/to-comparators') +const semverMinVersion = require('semver/ranges/min-version') +const semverValidRange = require('semver/ranges/valid') +const semverOutside = require('semver/ranges/outside') +const semverGtr = require('semver/ranges/gtr') +const semverLtr = require('semver/ranges/ltr') +const semverIntersects = require('semver/ranges/intersects') +const semverSimplifyRange = require('semver/ranges/simplify') +const semverRangeSubset = require('semver/ranges/subset') +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. 
Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-n <0|1> + This is the base to be used for the prerelease identifier. + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` that specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and +would match the versions `2.0.0` and `3.1.0`, but not the versions +`1.0.1` or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. +Version `3.4.5` *would* satisfy the range because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose of this behavior is twofold. 
First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range-matching +semantics. + +Second, a user who has opted into using a prerelease version has +indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for range-matching) +by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +#### Prerelease Identifier Base + +The method `.inc` takes an optional parameter 'identifierBase' string +that will let you let your prerelease number as zero-based or one-based. +Set to `false` to omit the prerelease number altogether. +If you do not specify this parameter, it will default to zero-based. + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', '1') +// '1.2.4-beta.1' +``` + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', false) +// '1.2.4-beta' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n 1 +1.2.4-beta.1 +``` + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n false +1.2.4-beta +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless + `includePrerelease` is specified, in which case any version at all + satisfies) +* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. 
+ +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0-0` +* `^0.2.3` := `>=0.2.3 <0.3.0-0` +* `^0.0.3` := `>=0.0.3 <0.0.4-0` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0-0` +* `^0.0.x` := `>=0.0.0 <0.1.0-0` +* `^0.0` := `>=0.0.0 <0.1.0-0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0-0` +* `^0.x` := `>=0.0.0 <1.0.0-0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' 
part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose`: Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease`: Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release, options, identifier, identifierBase)`: + Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, `prerelease` will work the + same as `prepatch`. It increments the patch version and then makes a + prerelease. If the input version is already a prerelease it simply + increments it. + * `identifier` can be used to prefix `premajor`, `preminor`, + `prepatch`, or `prerelease` version increments. `identifierBase` + is the base to be used for the `prerelease` identifier. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of `compare`. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. +* `compareLoose(v1, v2)`: Short for ``compare(v1, v2, { loose: true })`. +* `diff(v1, v2)`: Returns the difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. 
+ +### Sorting + +* `sort(versions)`: Returns a sorted array of versions based on the `compareBuild` + function. +* `rsort(versions)`: The reverse of `sort`. Returns an array of versions based on + the `compareBuild` function in descending order. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can match + the given range. +* `gtr(version, range)`: Return `true` if the version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if the version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the range comparators intersect. +* `simplifyRange(versions, range)`: Return a "simplified" range that + matches the same items in the `versions` list as the range specified. Note + that it does *not* guarantee that it would match the same versions in all + cases, only for the set of versions provided. This is useful when + generating ranges by joining together multiple versions with `||` + programmatically, to provide the user with something a bit more + ergonomic. If the provided range is shorter in string-length than the + generated range, then that is returned. +* `subset(subRange, superRange)`: Return `true` if the `subRange` range is + entirely contained by the `superRange` range. + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). 
The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. + +If the `options.includePrerelease` flag is set, then the `coerce` result will contain +prerelease and build parts of a version. For example, `1.2.3.4-rc.1+rev.2` +will preserve prerelease `rc.1` and build `rev.2` in the result. + +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. If the provided +version is not valid a null will be returned. This does not work for +ranges. + +ex. +* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `'2.1.5'` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` + +## Constants + +As a convenience, helper constants are exported to provide information about what `node-semver` supports: + +### `RELEASE_TYPES` + +- major +- premajor +- minor +- preminor +- patch +- prepatch +- prerelease + +``` +const semver = require('semver'); + +if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) { + console.log('This is a valid release type!'); +} else { + console.warn('This is NOT a valid release type!'); +} +``` + +### `SEMVER_SPEC_VERSION` + +2.0.0 + +``` +const semver = require('semver'); + +console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION); +``` + +## Exported Modules + + + +You may pull in just the part of this semver utility that you need if you +are sensitive to packing and tree-shaking concerns. The main +`require('semver')` export uses getter functions to lazily load the parts +of the API that are used. 
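+
+For example, a consumer that cares about bundle size can require just the
+function it needs instead of the full index (a minimal sketch; the deep path
+used here is one of those listed below):
+
+```
+// Load only satisfies() rather than the whole lazily-loaded index.
+const satisfies = require('semver/functions/satisfies');
+
+satisfies('1.2.3', '^1.0.0'); // true
+```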
+ +The following modules are available: + +* `require('semver')` +* `require('semver/classes')` +* `require('semver/classes/comparator')` +* `require('semver/classes/range')` +* `require('semver/classes/semver')` +* `require('semver/functions/clean')` +* `require('semver/functions/cmp')` +* `require('semver/functions/coerce')` +* `require('semver/functions/compare')` +* `require('semver/functions/compare-build')` +* `require('semver/functions/compare-loose')` +* `require('semver/functions/diff')` +* `require('semver/functions/eq')` +* `require('semver/functions/gt')` +* `require('semver/functions/gte')` +* `require('semver/functions/inc')` +* `require('semver/functions/lt')` +* `require('semver/functions/lte')` +* `require('semver/functions/major')` +* `require('semver/functions/minor')` +* `require('semver/functions/neq')` +* `require('semver/functions/parse')` +* `require('semver/functions/patch')` +* `require('semver/functions/prerelease')` +* `require('semver/functions/rcompare')` +* `require('semver/functions/rsort')` +* `require('semver/functions/satisfies')` +* `require('semver/functions/sort')` +* `require('semver/functions/valid')` +* `require('semver/ranges/gtr')` +* `require('semver/ranges/intersects')` +* `require('semver/ranges/ltr')` +* `require('semver/ranges/max-satisfying')` +* `require('semver/ranges/min-satisfying')` +* `require('semver/ranges/min-version')` +* `require('semver/ranges/outside')` +* `require('semver/ranges/simplify')` +* `require('semver/ranges/subset')` +* `require('semver/ranges/to-comparators')` +* `require('semver/ranges/valid')` + diff --git a/node_modules/semver/bin/semver.js b/node_modules/semver/bin/semver.js new file mode 100755 index 0000000..f62b566 --- /dev/null +++ b/node_modules/semver/bin/semver.js @@ -0,0 +1,188 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
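+//
+// Illustrative invocations (not an exhaustive list; the full set of flags is
+// documented in the help() text below):
+//
+//   semver 1.2.4 1.2.3 -r '>=1.2.3'   -> prints 1.2.3 and 1.2.4 (ascending)
+//   semver -i minor 1.2.3             -> prints 1.3.0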
+ +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +let identifierBase + +const semver = require('../') +const parseOptions = require('../internal/parse-options') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-n': + identifierBase = argv.shift() + if (identifierBase === 'false') { + identifierBase = false + } + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = parseOptions({ loose, includePrerelease, rtl }) + + versions = versions.map((v) => { + return coerce ? (semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + versions + .sort((a, b) => semver[reverse ? 'rcompare' : 'compare'](a, b, options)) + .map(v => semver.clean(v, options)) + .map(v => inc ? semver.inc(v, inc, options, identifier, identifierBase) : v) + .forEach(v => console.log(v)) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. 
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +-n + Base number to be used for the prerelease identifier. + Can be either 0 or 1, or false to omit the number altogether. + Defaults to 0. + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/node_modules/semver/classes/comparator.js b/node_modules/semver/classes/comparator.js new file mode 100644 index 0000000..3d39c0e --- /dev/null +++ b/node_modules/semver/classes/comparator.js @@ -0,0 +1,141 @@ +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { safeRe: re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/node_modules/semver/classes/index.js b/node_modules/semver/classes/index.js new file mode 100644 index 0000000..5e3f5c9 --- /dev/null +++ b/node_modules/semver/classes/index.js @@ -0,0 +1,5 @@ +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/node_modules/semver/classes/range.js b/node_modules/semver/classes/range.js new file mode 100644 index 0000000..117b45a --- /dev/null +++ b/node_modules/semver/classes/range.js @@ -0,0 +1,540 @@ +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all 
whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const LRU = require('../internal/lrucache') +const cache = new LRU() + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
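+// For illustration, these desugarings follow the tables on the replace* helpers
+// below: '^1.2.3' -> '>=1.2.3 <2.0.0-0', '~1.2' -> '>=1.2.0 <1.3.0-0',
+// '1.2.x' -> '>=1.2.0 <1.3.0-0'.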
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +// TODO build? +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} diff --git a/node_modules/semver/classes/semver.js b/node_modules/semver/classes/semver.js new file mode 100644 index 0000000..13e66ce --- /dev/null +++ b/node_modules/semver/classes/semver.js @@ -0,0 +1,302 @@ +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { safeRe: re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('build compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. 
+ // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 1 : 0 + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer diff --git a/node_modules/semver/functions/clean.js b/node_modules/semver/functions/clean.js new file mode 100644 index 0000000..811fe6b --- /dev/null +++ b/node_modules/semver/functions/clean.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/node_modules/semver/functions/cmp.js b/node_modules/semver/functions/cmp.js new file mode 100644 index 0000000..4011909 --- /dev/null +++ b/node_modules/semver/functions/cmp.js @@ -0,0 +1,52 @@ +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/node_modules/semver/functions/coerce.js b/node_modules/semver/functions/coerce.js new file mode 100644 index 0000000..b378dce --- /dev/null +++ b/node_modules/semver/functions/coerce.js @@ -0,0 +1,60 @@ +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { safeRe: re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce diff --git a/node_modules/semver/functions/compare-build.js b/node_modules/semver/functions/compare-build.js new file mode 100644 index 0000000..9eb881b --- /dev/null +++ b/node_modules/semver/functions/compare-build.js @@ -0,0 +1,7 @@ +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/node_modules/semver/functions/compare-loose.js b/node_modules/semver/functions/compare-loose.js new file mode 100644 index 0000000..4881fbe --- /dev/null +++ b/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/node_modules/semver/functions/compare.js b/node_modules/semver/functions/compare.js new file mode 100644 index 0000000..748b7af --- /dev/null +++ b/node_modules/semver/functions/compare.js @@ -0,0 +1,5 @@ +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/node_modules/semver/functions/diff.js b/node_modules/semver/functions/diff.js new file mode 100644 index 0000000..fc224e3 --- /dev/null +++ b/node_modules/semver/functions/diff.js @@ -0,0 +1,65 @@ +const parse = require('./parse.js') + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 
'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff diff --git a/node_modules/semver/functions/eq.js b/node_modules/semver/functions/eq.js new file mode 100644 index 0000000..271fed9 --- /dev/null +++ b/node_modules/semver/functions/eq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/node_modules/semver/functions/gt.js b/node_modules/semver/functions/gt.js new file mode 100644 index 0000000..d9b2156 --- /dev/null +++ b/node_modules/semver/functions/gt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/node_modules/semver/functions/gte.js b/node_modules/semver/functions/gte.js new file mode 100644 index 0000000..5aeaa63 --- /dev/null +++ b/node_modules/semver/functions/gte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte diff --git a/node_modules/semver/functions/inc.js b/node_modules/semver/functions/inc.js new file mode 100644 index 0000000..7670b1b --- /dev/null +++ b/node_modules/semver/functions/inc.js @@ -0,0 +1,19 @@ +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/node_modules/semver/functions/lt.js b/node_modules/semver/functions/lt.js new file mode 100644 index 0000000..b440ab7 --- /dev/null +++ b/node_modules/semver/functions/lt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/node_modules/semver/functions/lte.js b/node_modules/semver/functions/lte.js new file mode 100644 index 0000000..6dcc956 --- /dev/null +++ b/node_modules/semver/functions/lte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/node_modules/semver/functions/major.js b/node_modules/semver/functions/major.js new file mode 100644 index 0000000..4283165 --- /dev/null +++ b/node_modules/semver/functions/major.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/node_modules/semver/functions/minor.js b/node_modules/semver/functions/minor.js new file mode 100644 index 0000000..57b3455 --- /dev/null +++ b/node_modules/semver/functions/minor.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor diff --git a/node_modules/semver/functions/neq.js b/node_modules/semver/functions/neq.js new file mode 100644 index 0000000..f944c01 --- /dev/null +++ b/node_modules/semver/functions/neq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git 
a/node_modules/semver/functions/parse.js b/node_modules/semver/functions/parse.js new file mode 100644 index 0000000..459b3b1 --- /dev/null +++ b/node_modules/semver/functions/parse.js @@ -0,0 +1,16 @@ +const SemVer = require('../classes/semver') +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse diff --git a/node_modules/semver/functions/patch.js b/node_modules/semver/functions/patch.js new file mode 100644 index 0000000..63afca2 --- /dev/null +++ b/node_modules/semver/functions/patch.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/node_modules/semver/functions/prerelease.js b/node_modules/semver/functions/prerelease.js new file mode 100644 index 0000000..06aa132 --- /dev/null +++ b/node_modules/semver/functions/prerelease.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} +module.exports = prerelease diff --git a/node_modules/semver/functions/rcompare.js b/node_modules/semver/functions/rcompare.js new file mode 100644 index 0000000..0ac509e --- /dev/null +++ b/node_modules/semver/functions/rcompare.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/node_modules/semver/functions/rsort.js b/node_modules/semver/functions/rsort.js new file mode 100644 index 0000000..82404c5 --- /dev/null +++ b/node_modules/semver/functions/rsort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/node_modules/semver/functions/satisfies.js b/node_modules/semver/functions/satisfies.js new file mode 100644 index 0000000..50af1c1 --- /dev/null +++ b/node_modules/semver/functions/satisfies.js @@ -0,0 +1,10 @@ +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/node_modules/semver/functions/sort.js b/node_modules/semver/functions/sort.js new file mode 100644 index 0000000..4d10917 --- /dev/null +++ b/node_modules/semver/functions/sort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort diff --git a/node_modules/semver/functions/valid.js b/node_modules/semver/functions/valid.js new file mode 100644 index 0000000..f27bae1 --- /dev/null +++ b/node_modules/semver/functions/valid.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid diff --git a/node_modules/semver/index.js b/node_modules/semver/index.js new file mode 100644 index 0000000..86d42ac --- /dev/null +++ b/node_modules/semver/index.js @@ -0,0 +1,89 @@ +// just pre-load all the stuff that index.js lazily exports +const internalRe = require('./internal/re') +const constants = require('./internal/constants') +const SemVer = require('./classes/semver') +const identifiers = require('./internal/identifiers') +const parse = require('./functions/parse') +const valid = require('./functions/valid') +const clean = require('./functions/clean') +const inc = require('./functions/inc') +const diff = require('./functions/diff') +const major = require('./functions/major') +const minor = require('./functions/minor') +const patch = require('./functions/patch') +const prerelease = require('./functions/prerelease') +const compare = require('./functions/compare') +const rcompare = require('./functions/rcompare') +const compareLoose = require('./functions/compare-loose') +const compareBuild = require('./functions/compare-build') +const sort = require('./functions/sort') +const rsort = require('./functions/rsort') +const gt = require('./functions/gt') +const lt = require('./functions/lt') +const eq = require('./functions/eq') +const neq = require('./functions/neq') +const gte = require('./functions/gte') +const lte = require('./functions/lte') +const cmp = require('./functions/cmp') +const coerce = require('./functions/coerce') +const Comparator = require('./classes/comparator') +const Range = require('./classes/range') +const satisfies = require('./functions/satisfies') +const toComparators = require('./ranges/to-comparators') +const maxSatisfying = require('./ranges/max-satisfying') +const minSatisfying = require('./ranges/min-satisfying') +const minVersion = require('./ranges/min-version') +const validRange = require('./ranges/valid') +const outside = require('./ranges/outside') +const gtr = require('./ranges/gtr') +const ltr = require('./ranges/ltr') +const intersects = require('./ranges/intersects') +const simplifyRange = require('./ranges/simplify') +const subset = require('./ranges/subset') +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/constants.js b/node_modules/semver/internal/constants.js new file mode 100644 index 0000000..94be1c5 --- /dev/null +++ b/node_modules/semver/internal/constants.js @@ -0,0 +1,35 @@ +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. 
The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} diff --git a/node_modules/semver/internal/debug.js b/node_modules/semver/internal/debug.js new file mode 100644 index 0000000..1c00e13 --- /dev/null +++ b/node_modules/semver/internal/debug.js @@ -0,0 +1,9 @@ +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/node_modules/semver/internal/identifiers.js b/node_modules/semver/internal/identifiers.js new file mode 100644 index 0000000..e612d0a --- /dev/null +++ b/node_modules/semver/internal/identifiers.js @@ -0,0 +1,23 @@ +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/lrucache.js b/node_modules/semver/internal/lrucache.js new file mode 100644 index 0000000..6d89ec9 --- /dev/null +++ b/node_modules/semver/internal/lrucache.js @@ -0,0 +1,40 @@ +class LRUCache { + constructor () { + this.max = 1000 + this.map = new Map() + } + + get (key) { + const value = this.map.get(key) + if (value === undefined) { + return undefined + } else { + // Remove the key from the map and add it to the end + this.map.delete(key) + this.map.set(key, value) + return value + } + } + + delete (key) { + return this.map.delete(key) + } + + set (key, value) { + const deleted = this.delete(key) + + if (!deleted && value !== undefined) { + // If cache is full, delete the least recently used item + if (this.map.size >= this.max) { + const firstKey = this.map.keys().next().value + this.delete(firstKey) + } + + this.map.set(key, value) + } + + return this + } +} + +module.exports = LRUCache diff --git a/node_modules/semver/internal/parse-options.js b/node_modules/semver/internal/parse-options.js new file mode 100644 index 0000000..10d64ce --- /dev/null +++ b/node_modules/semver/internal/parse-options.js @@ -0,0 +1,15 @@ +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions diff --git a/node_modules/semver/internal/re.js b/node_modules/semver/internal/re.js new file mode 100644 index 0000000..fd8920e --- /dev/null +++ b/node_modules/semver/internal/re.js @@ -0,0 +1,217 @@ +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = 
[] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. 
+ +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. 
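+// For example, the hyphen range '1.2.3 - 2.3.4' is captured here and later
+// rewritten by the Range parser to the comparator pair '>=1.2.3 <=2.3.4'.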
+createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json new file mode 100644 index 0000000..cb8def4 --- /dev/null +++ b/node_modules/semver/package.json @@ -0,0 +1,77 @@ +{ + "name": "semver", + "version": "7.6.2", + "description": "The semantic version parser used by npm.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "benchmark": "^2.1.4", + "tap": "^16.0.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "bin/semver.js" + }, + "files": [ + "bin/", + "lib/", + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "tap": { + "timeout": 30, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": ">=10" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "engines": ">=10", + "distPaths": [ + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "allowPaths": [ + "/classes/", + "/functions/", + "/internal/", + "/ranges/", + "/index.js", + "/preload.js", + "/range.bnf", + "/benchmarks" + ], + "publish": "true" + } +} diff --git a/node_modules/semver/preload.js b/node_modules/semver/preload.js new file mode 100644 index 0000000..947cd4f --- /dev/null +++ b/node_modules/semver/preload.js @@ -0,0 +1,2 @@ +// XXX remove in v8 or beyond +module.exports = require('./index.js') diff --git a/node_modules/semver/range.bnf b/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/semver/ranges/gtr.js b/node_modules/semver/ranges/gtr.js new file mode 100644 index 0000000..db7e355 --- /dev/null +++ b/node_modules/semver/ranges/gtr.js @@ -0,0 +1,4 @@ +// Determine if version is greater than all the versions possible in the range. 
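+// For example, gtr('2.0.0', '^1.2.3') is true, while gtr('1.5.0', '^1.2.3')
+// is false because 1.5.0 still falls inside the ^1.2.3 range.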
+const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/node_modules/semver/ranges/intersects.js b/node_modules/semver/ranges/intersects.js new file mode 100644 index 0000000..e0e9b7c --- /dev/null +++ b/node_modules/semver/ranges/intersects.js @@ -0,0 +1,7 @@ +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects diff --git a/node_modules/semver/ranges/ltr.js b/node_modules/semver/ranges/ltr.js new file mode 100644 index 0000000..528a885 --- /dev/null +++ b/node_modules/semver/ranges/ltr.js @@ -0,0 +1,4 @@ +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/node_modules/semver/ranges/max-satisfying.js b/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 0000000..6e3d993 --- /dev/null +++ b/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,25 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/node_modules/semver/ranges/min-satisfying.js b/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 0000000..9b60974 --- /dev/null +++ b/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,24 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/node_modules/semver/ranges/min-version.js b/node_modules/semver/ranges/min-version.js new file mode 100644 index 0000000..350e1f7 --- /dev/null +++ b/node_modules/semver/ranges/min-version.js @@ -0,0 +1,61 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. 
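+ // For '>' the smallest acceptable version is one step above the comparator:
+ // e.g. '>1.2.3' yields a minimum of 1.2.4, while '>1.2.3-alpha' yields
+ // 1.2.3-alpha.0 (illustrative of the switch below).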
+ const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/node_modules/semver/ranges/outside.js b/node_modules/semver/ranges/outside.js new file mode 100644 index 0000000..ae99b10 --- /dev/null +++ b/node_modules/semver/ranges/outside.js @@ -0,0 +1,80 @@ +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/node_modules/semver/ranges/simplify.js b/node_modules/semver/ranges/simplify.js new file mode 100644 index 0000000..618d5b6 --- /dev/null +++ b/node_modules/semver/ranges/simplify.js @@ -0,0 +1,47 @@ +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
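+// For example, given the versions ['1.2.3', '1.2.4', '1.2.5'] and the range
+// '1.2.3 || 1.2.4 || 1.2.5', every listed version is covered, so the
+// simplified result collapses to '*'.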
+const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? simplified : range +} diff --git a/node_modules/semver/ranges/subset.js b/node_modules/semver/ranges/subset.js new file mode 100644 index 0000000..1e5c268 --- /dev/null +++ b/node_modules/semver/ranges/subset.js @@ -0,0 +1,247 @@ +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, 
simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? 
gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset diff --git a/node_modules/semver/ranges/to-comparators.js b/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 0000000..6c8bc7e --- /dev/null +++ b/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,8 @@ +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/node_modules/semver/ranges/valid.js b/node_modules/semver/ranges/valid.js new file mode 100644 index 0000000..365f356 --- /dev/null +++ b/node_modules/semver/ranges/valid.js @@ -0,0 +1,11 @@ +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
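+ // For example, validRange('^1.2.3') returns the normalized comparator
+ // string for that range, while validRange('not a range') returns null.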
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/node_modules/shebang-command/index.js b/node_modules/shebang-command/index.js new file mode 100644 index 0000000..f35db30 --- /dev/null +++ b/node_modules/shebang-command/index.js @@ -0,0 +1,19 @@ +'use strict'; +const shebangRegex = require('shebang-regex'); + +module.exports = (string = '') => { + const match = string.match(shebangRegex); + + if (!match) { + return null; + } + + const [path, argument] = match[0].replace(/#! ?/, '').split(' '); + const binary = path.split('/').pop(); + + if (binary === 'env') { + return argument; + } + + return argument ? `${binary} ${argument}` : binary; +}; diff --git a/node_modules/shebang-command/license b/node_modules/shebang-command/license new file mode 100644 index 0000000..db6bc32 --- /dev/null +++ b/node_modules/shebang-command/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Kevin Mårtensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/shebang-command/package.json b/node_modules/shebang-command/package.json new file mode 100644 index 0000000..18e3c04 --- /dev/null +++ b/node_modules/shebang-command/package.json @@ -0,0 +1,34 @@ +{ + "name": "shebang-command", + "version": "2.0.0", + "description": "Get the command from a shebang", + "license": "MIT", + "repository": "kevva/shebang-command", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "github.com/kevva" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "cmd", + "command", + "parse", + "shebang" + ], + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "devDependencies": { + "ava": "^2.3.0", + "xo": "^0.24.0" + } +} diff --git a/node_modules/shebang-command/readme.md b/node_modules/shebang-command/readme.md new file mode 100644 index 0000000..84feb44 --- /dev/null +++ b/node_modules/shebang-command/readme.md @@ -0,0 +1,34 @@ +# shebang-command [![Build Status](https://travis-ci.org/kevva/shebang-command.svg?branch=master)](https://travis-ci.org/kevva/shebang-command) + +> Get the command from a shebang + + +## Install + +``` +$ npm install shebang-command +``` + + +## Usage + +```js +const shebangCommand = require('shebang-command'); + +shebangCommand('#!/usr/bin/env node'); +//=> 'node' + +shebangCommand('#!/bin/bash'); +//=> 'bash' +``` + + +## API + +### shebangCommand(string) + +#### string + +Type: `string` + +String containing a shebang. diff --git a/node_modules/shebang-regex/index.d.ts b/node_modules/shebang-regex/index.d.ts new file mode 100644 index 0000000..61d034b --- /dev/null +++ b/node_modules/shebang-regex/index.d.ts @@ -0,0 +1,22 @@ +/** +Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line. + +@example +``` +import shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` +*/ +declare const shebangRegex: RegExp; + +export = shebangRegex; diff --git a/node_modules/shebang-regex/index.js b/node_modules/shebang-regex/index.js new file mode 100644 index 0000000..63fc4a0 --- /dev/null +++ b/node_modules/shebang-regex/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = /^#!(.*)/; diff --git a/node_modules/shebang-regex/license b/node_modules/shebang-regex/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/shebang-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/shebang-regex/package.json b/node_modules/shebang-regex/package.json new file mode 100644 index 0000000..00ab30f --- /dev/null +++ b/node_modules/shebang-regex/package.json @@ -0,0 +1,35 @@ +{ + "name": "shebang-regex", + "version": "3.0.0", + "description": "Regular expression for matching a shebang line", + "license": "MIT", + "repository": "sindresorhus/shebang-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "regex", + "regexp", + "shebang", + "match", + "test", + "line" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/shebang-regex/readme.md b/node_modules/shebang-regex/readme.md new file mode 100644 index 0000000..5ecf863 --- /dev/null +++ b/node_modules/shebang-regex/readme.md @@ -0,0 +1,33 @@ +# shebang-regex [![Build Status](https://travis-ci.org/sindresorhus/shebang-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/shebang-regex) + +> Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line + + +## Install + +``` +$ npm install shebang-regex +``` + + +## Usage + +```js +const shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/slash/index.d.ts b/node_modules/slash/index.d.ts new file mode 100644 index 0000000..692c7fd --- /dev/null +++ b/node_modules/slash/index.d.ts @@ -0,0 +1,23 @@ +/** +Convert Windows backslash paths to slash paths: `foo\\bar` ➔ `foo/bar`. + +[Forward-slash paths can be used in Windows](http://superuser.com/a/176395/6877) as long as they're not extended-length paths. + +@param path - A Windows backslash path. +@returns A path with forward slashes. 
+ +@example +``` +import path from 'node:path'; +import slash from 'slash'; + +const string = path.join('foo', 'bar'); +// Unix => foo/bar +// Windows => foo\\bar + +slash(string); +// Unix => foo/bar +// Windows => foo/bar +``` +*/ +export default function slash(path: string): string; diff --git a/node_modules/slash/index.js b/node_modules/slash/index.js new file mode 100644 index 0000000..1b7ee1e --- /dev/null +++ b/node_modules/slash/index.js @@ -0,0 +1,9 @@ +export default function slash(path) { + const isExtendedLengthPath = path.startsWith('\\\\?\\'); + + if (isExtendedLengthPath) { + return path; + } + + return path.replace(/\\/g, '/'); +} diff --git a/node_modules/slash/license b/node_modules/slash/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/slash/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/slash/package.json b/node_modules/slash/package.json new file mode 100644 index 0000000..a0f507c --- /dev/null +++ b/node_modules/slash/package.json @@ -0,0 +1,39 @@ +{ + "name": "slash", + "version": "5.1.0", + "description": "Convert Windows backslash paths to slash paths", + "license": "MIT", + "repository": "sindresorhus/slash", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "types": "./index.d.ts", + "engines": { + "node": ">=14.16" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "path", + "seperator", + "slash", + "backslash", + "windows", + "convert" + ], + "devDependencies": { + "ava": "^5.2.0", + "tsd": "^0.28.1", + "xo": "^0.54.2" + } +} diff --git a/node_modules/slash/readme.md b/node_modules/slash/readme.md new file mode 100644 index 0000000..42f74f9 --- /dev/null +++ b/node_modules/slash/readme.md @@ -0,0 +1,36 @@ +# slash + +> Convert Windows backslash paths to slash paths: `foo\\bar` ➔ `foo/bar` + +[Forward-slash paths can be used in Windows](http://superuser.com/a/176395/6877) as long as they're not extended-length paths. + +This was created since the `path` methods in Node.js outputs `\\` paths on Windows. 
+ +## Install + +```sh +npm install slash +``` + +## Usage + +```js +import path from 'node:path'; +import slash from 'slash'; + +const string = path.join('foo', 'bar'); +// Unix => foo/bar +// Windows => foo\\bar + +slash(string); +// Unix => foo/bar +// Windows => foo/bar +``` + +## API + +### slash(path) + +Type: `string` + +Accepts a Windows backslash path and returns a path with forward slashes. diff --git a/node_modules/source-map-js/LICENSE b/node_modules/source-map-js/LICENSE new file mode 100644 index 0000000..ed1b7cf --- /dev/null +++ b/node_modules/source-map-js/LICENSE @@ -0,0 +1,28 @@ + +Copyright (c) 2009-2011, Mozilla Foundation and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the names of the Mozilla Foundation nor the names of project + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/source-map-js/README.md b/node_modules/source-map-js/README.md new file mode 100644 index 0000000..614962d --- /dev/null +++ b/node_modules/source-map-js/README.md @@ -0,0 +1,765 @@ +# Source Map JS + +[![NPM](https://nodei.co/npm/source-map-js.png?downloads=true&downloadRank=true)](https://www.npmjs.com/package/source-map-js) + +Difference between original [source-map](https://github.com/mozilla/source-map): + +> TL,DR: it's fork of original source-map@0.6, but with perfomance optimizations. + +This journey starts from [source-map@0.7.0](https://github.com/mozilla/source-map/blob/master/CHANGELOG.md#070). Some part of it was rewritten to Rust and WASM and API became async. + +It's still a major block for many libraries like PostCSS or Sass for example because they need to migrate the whole API to the async way. This is the reason why 0.6.1 has 2x more downloads than 0.7.3 while it's faster several times. + +![Downloads count](media/downloads.png) + +More important that WASM version has some optimizations in JS code too. This is why [community asked to create branch for 0.6 version](https://github.com/mozilla/source-map/issues/324) and port these optimizations but, sadly, the answer was «no». 
A bit later I discovered [the issue](https://github.com/mozilla/source-map/issues/370) created by [Ben Rothman (@benthemonkey)](https://github.com/benthemonkey) with no response at all. + +[Roman Dvornov (@lahmatiy)](https://github.com/lahmatiy) wrote a [serveral posts](https://t.me/gorshochekvarit/76) (russian, only, sorry) about source-map library in his own Telegram channel. He mentioned the article [«Maybe you don't need Rust and WASM to speed up your JS»](https://mrale.ph/blog/2018/02/03/maybe-you-dont-need-rust-to-speed-up-your-js.html) written by [Vyacheslav Egorov (@mraleph)](https://github.com/mraleph). This article contains optimizations and hacks that lead to almost the same performance compare to WASM implementation. + +I decided to fork the original source-map and port these optimizations from the article and several others PR from the original source-map. + +--------- + +This is a library to generate and consume the source map format +[described here][format]. + +[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit + +## Use with Node + + $ npm install source-map-js + + + +-------------------------------------------------------------------------------- + + + + + +## Table of Contents + +- [Examples](#examples) + - [Consuming a source map](#consuming-a-source-map) + - [Generating a source map](#generating-a-source-map) + - [With SourceNode (high level API)](#with-sourcenode-high-level-api) + - [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api) +- [API](#api) + - [SourceMapConsumer](#sourcemapconsumer) + - [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap) + - [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans) + - [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition) + - [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition) + - [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition) + - [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources) + - [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing) + - [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order) + - [SourceMapGenerator](#sourcemapgenerator) + - [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap) + - [SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer) + - [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping) + - [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath) + - [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring) + - [SourceNode](#sourcenode) + - [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name) + - 
[SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath) + - [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk) + - [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk) + - [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn) + - [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn) + - [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep) + - [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement) + - [SourceNode.prototype.toString()](#sourcenodeprototypetostring) + - [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap) + + + +## Examples + +### Consuming a source map + +```js +var rawSourceMap = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: ['one.js', 'two.js'], + sourceRoot: 'http://example.com/www/js/', + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' +}; + +var smc = new SourceMapConsumer(rawSourceMap); + +console.log(smc.sources); +// [ 'http://example.com/www/js/one.js', +// 'http://example.com/www/js/two.js' ] + +console.log(smc.originalPositionFor({ + line: 2, + column: 28 +})); +// { source: 'http://example.com/www/js/two.js', +// line: 2, +// column: 10, +// name: 'n' } + +console.log(smc.generatedPositionFor({ + source: 'http://example.com/www/js/two.js', + line: 2, + column: 10 +})); +// { line: 2, column: 28 } + +smc.eachMapping(function (m) { + // ... +}); +``` + +### Generating a source map + +In depth guide: +[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/) + +#### With SourceNode (high level API) + +```js +function compile(ast) { + switch (ast.type) { + case 'BinaryExpression': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + [compile(ast.left), " + ", compile(ast.right)] + ); + case 'Literal': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + String(ast.value) + ); + // ... + default: + throw new Error("Bad AST"); + } +} + +var ast = parse("40 + 2", "add.js"); +console.log(compile(ast).toStringWithSourceMap({ + file: 'add.js' +})); +// { code: '40 + 2', +// map: [object SourceMapGenerator] } +``` + +#### With SourceMapGenerator (low level API) + +```js +var map = new SourceMapGenerator({ + file: "source-mapped.js" +}); + +map.addMapping({ + generated: { + line: 10, + column: 35 + }, + source: "foo.js", + original: { + line: 33, + column: 2 + }, + name: "christopher" +}); + +console.log(map.toString()); +// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}' +``` + +## API + +Get a reference to the module: + +```js +// Node.js +var sourceMap = require('source-map'); + +// Browser builds +var sourceMap = window.sourceMap; + +// Inside Firefox +const sourceMap = require("devtools/toolkit/sourcemap/source-map.js"); +``` + +### SourceMapConsumer + +A SourceMapConsumer instance represents a parsed source map which we can query +for information about the original file positions by giving it a file position +in the generated source. 
+ +#### new SourceMapConsumer(rawSourceMap) + +The only parameter is the raw source map (either as a string which can be +`JSON.parse`'d, or an object). According to the spec, source maps have the +following attributes: + +* `version`: Which version of the source map spec this map is following. + +* `sources`: An array of URLs to the original source files. + +* `names`: An array of identifiers which can be referenced by individual + mappings. + +* `sourceRoot`: Optional. The URL root from which all sources are relative. + +* `sourcesContent`: Optional. An array of contents of the original source files. + +* `mappings`: A string of base64 VLQs which contain the actual mappings. + +* `file`: Optional. The generated filename this source map is associated with. + +```js +var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData); +``` + +#### SourceMapConsumer.prototype.computeColumnSpans() + +Compute the last column for each generated mapping. The last column is +inclusive. + +```js +// Before: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1 }, +// { line: 2, +// column: 10 }, +// { line: 2, +// column: 20 } ] + +consumer.computeColumnSpans(); + +// After: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1, +// lastColumn: 9 }, +// { line: 2, +// column: 10, +// lastColumn: 19 }, +// { line: 2, +// column: 20, +// lastColumn: Infinity } ] + +``` + +#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition) + +Returns the original source, line, and column information for the generated +source's line and column positions provided. The only argument is an object with +the following properties: + +* `line`: The line number in the generated source. Line numbers in + this library are 1-based (note that the underlying source map + specification uses 0-based line numbers -- this library handles the + translation). + +* `column`: The column number in the generated source. Column numbers + in this library are 0-based. + +* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or + `SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest + element that is smaller than or greater than the one we are searching for, + respectively, if the exact element cannot be found. Defaults to + `SourceMapConsumer.GREATEST_LOWER_BOUND`. + +and an object is returned with the following properties: + +* `source`: The original source file, or null if this information is not + available. + +* `line`: The line number in the original source, or null if this information is + not available. The line number is 1-based. + +* `column`: The column number in the original source, or null if this + information is not available. The column number is 0-based. + +* `name`: The original identifier, or null if this information is not available. + +```js +consumer.originalPositionFor({ line: 2, column: 10 }) +// { source: 'foo.coffee', +// line: 2, +// column: 2, +// name: null } + +consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 }) +// { source: null, +// line: null, +// column: null, +// name: null } +``` + +#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition) + +Returns the generated line and column information for the original source, +line, and column positions provided. The only argument is an object with +the following properties: + +* `source`: The filename of the original source. + +* `line`: The line number in the original source. 
The line number is + 1-based. + +* `column`: The column number in the original source. The column + number is 0-based. + +and an object is returned with the following properties: + +* `line`: The line number in the generated source, or null. The line + number is 1-based. + +* `column`: The column number in the generated source, or null. The + column number is 0-based. + +```js +consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 }) +// { line: 1, +// column: 56 } +``` + +#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition) + +Returns all generated line and column information for the original source, line, +and column provided. If no column is provided, returns all mappings +corresponding to a either the line we are searching for or the next closest line +that has any mappings. Otherwise, returns all mappings corresponding to the +given line and either the column we are searching for or the next closest column +that has any offsets. + +The only argument is an object with the following properties: + +* `source`: The filename of the original source. + +* `line`: The line number in the original source. The line number is + 1-based. + +* `column`: Optional. The column number in the original source. The + column number is 0-based. + +and an array of objects is returned, each with the following properties: + +* `line`: The line number in the generated source, or null. The line + number is 1-based. + +* `column`: The column number in the generated source, or null. The + column number is 0-based. + +```js +consumer.allGeneratedpositionsfor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1 }, +// { line: 2, +// column: 10 }, +// { line: 2, +// column: 20 } ] +``` + +#### SourceMapConsumer.prototype.hasContentsOfAllSources() + +Return true if we have the embedded source content for every source listed in +the source map, false otherwise. + +In other words, if this method returns `true`, then +`consumer.sourceContentFor(s)` will succeed for every source `s` in +`consumer.sources`. + +```js +// ... +if (consumer.hasContentsOfAllSources()) { + consumerReadyCallback(consumer); +} else { + fetchSources(consumer, consumerReadyCallback); +} +// ... +``` + +#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing]) + +Returns the original source content for the source provided. The only +argument is the URL of the original source file. + +If the source content for the given source is not found, then an error is +thrown. Optionally, pass `true` as the second param to have `null` returned +instead. + +```js +consumer.sources +// [ "my-cool-lib.clj" ] + +consumer.sourceContentFor("my-cool-lib.clj") +// "..." + +consumer.sourceContentFor("this is not in the source map"); +// Error: "this is not in the source map" is not in the source map + +consumer.sourceContentFor("this is not in the source map", true); +// null +``` + +#### SourceMapConsumer.prototype.eachMapping(callback, context, order) + +Iterate over each mapping between an original source/line/column and a +generated line/column in this source map. + +* `callback`: The function that is called with each mapping. Mappings have the + form `{ source, generatedLine, generatedColumn, originalLine, originalColumn, + name }` + +* `context`: Optional. If specified, this object will be the value of `this` + every time that `callback` is called. + +* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or + `SourceMapConsumer.ORIGINAL_ORDER`. 
Specifies whether you want to iterate over + the mappings sorted by the generated file's line/column order or the + original's source/line/column order, respectively. Defaults to + `SourceMapConsumer.GENERATED_ORDER`. + +```js +consumer.eachMapping(function (m) { console.log(m); }) +// ... +// { source: 'illmatic.js', +// generatedLine: 1, +// generatedColumn: 0, +// originalLine: 1, +// originalColumn: 0, +// name: null } +// { source: 'illmatic.js', +// generatedLine: 2, +// generatedColumn: 0, +// originalLine: 2, +// originalColumn: 0, +// name: null } +// ... +``` +### SourceMapGenerator + +An instance of the SourceMapGenerator represents a source map which is being +built incrementally. + +#### new SourceMapGenerator([startOfSourceMap]) + +You may pass an object with the following properties: + +* `file`: The filename of the generated source that this source map is + associated with. + +* `sourceRoot`: A root for all relative URLs in this source map. + +* `skipValidation`: Optional. When `true`, disables validation of mappings as + they are added. This can improve performance but should be used with + discretion, as a last resort. Even then, one should avoid using this flag when + running tests, if possible. + +* `ignoreInvalidMapping`: Optional. When `true`, instead of throwing error on + invalid mapping, it will be ignored. + +```js +var generator = new sourceMap.SourceMapGenerator({ + file: "my-generated-javascript-file.js", + sourceRoot: "http://example.com/app/js/" +}); +``` + +#### SourceMapGenerator.fromSourceMap(sourceMapConsumer, sourceMapGeneratorOptions) + +Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance. + +* `sourceMapConsumer` The SourceMap. + +* `sourceMapGeneratorOptions` options that will be passed to the SourceMapGenerator constructor which used under the hood. + +```js +var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer, { + ignoreInvalidMapping: true, +}); +``` + +#### SourceMapGenerator.prototype.addMapping(mapping) + +Add a single mapping from original source line and column to the generated +source's line and column for this source map being created. The mapping object +should have the following properties: + +* `generated`: An object with the generated line and column positions. + +* `original`: An object with the original line and column positions. + +* `source`: The original source file (relative to the sourceRoot). + +* `name`: An optional original token name for this mapping. + +```js +generator.addMapping({ + source: "module-one.scm", + original: { line: 128, column: 0 }, + generated: { line: 3, column: 456 } +}) +``` + +#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for an original source file. + +* `sourceFile` the URL of the original source file. + +* `sourceContent` the content of the source file. + +```js +generator.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]]) + +Applies a SourceMap for a source file to the SourceMap. +Each mapping to the supplied source file is rewritten using the +supplied SourceMap. Note: The resolution for the resulting mappings +is the minimum of this map and the supplied map. + +* `sourceMapConsumer`: The SourceMap to be applied. + +* `sourceFile`: Optional. The filename of the source file. + If omitted, sourceMapConsumer.file will be used, if it exists. 
+ Otherwise an error will be thrown. + +* `sourceMapPath`: Optional. The dirname of the path to the SourceMap + to be applied. If relative, it is relative to the SourceMap. + + This parameter is needed when the two SourceMaps aren't in the same + directory, and the SourceMap to be applied contains relative source + paths. If so, those relative source paths need to be rewritten + relative to the SourceMap. + + If omitted, it is assumed that both SourceMaps are in the same directory, + thus not needing any rewriting. (Supplying `'.'` has the same effect.) + +#### SourceMapGenerator.prototype.toString() + +Renders the source map being generated to a string. + +```js +generator.toString() +// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}' +``` + +### SourceNode + +SourceNodes provide a way to abstract over interpolating and/or concatenating +snippets of generated JavaScript source code, while maintaining the line and +column information associated between those snippets and the original source +code. This is useful as the final intermediate representation a compiler might +use before outputting the generated JS and source map. + +#### new SourceNode([line, column, source[, chunk[, name]]]) + +* `line`: The original line number associated with this source node, or null if + it isn't associated with an original line. The line number is 1-based. + +* `column`: The original column number associated with this source node, or null + if it isn't associated with an original column. The column number + is 0-based. + +* `source`: The original source's filename; null if no filename is provided. + +* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see + below. + +* `name`: Optional. The original identifier. + +```js +var node = new SourceNode(1, 2, "a.cpp", [ + new SourceNode(3, 4, "b.cpp", "extern int status;\n"), + new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"), + new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"), +]); +``` + +#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath]) + +Creates a SourceNode from generated code and a SourceMapConsumer. + +* `code`: The generated code + +* `sourceMapConsumer` The SourceMap for the generated code + +* `relativePath` The optional path that relative sources in `sourceMapConsumer` + should be relative to. + +```js +var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8")); +var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"), + consumer); +``` + +#### SourceNode.prototype.add(chunk) + +Add a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. + +```js +node.add(" + "); +node.add(otherNode); +node.add([leftHandOperandNode, " + ", rightHandOperandNode]); +``` + +#### SourceNode.prototype.prepend(chunk) + +Prepend a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. + +```js +node.prepend("/** Build Id: f783haef86324gf **/\n\n"); +``` + +#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for a source file. This will be added to the +`SourceMap` in the `sourcesContent` field. 
+
+* `sourceFile`: The filename of the source file
+
+* `sourceContent`: The content of the source file
+
+```js
+node.setSourceContent("module-one.scm",
+  fs.readFileSync("path/to/module-one.scm"))
+```
+
+#### SourceNode.prototype.walk(fn)
+
+Walk over the tree of JS snippets in this node and its children. The walking
+function is called once for each snippet of JS and is passed that snippet and
+its original associated source's line/column location.
+
+* `fn`: The traversal function.
+
+```js
+var node = new SourceNode(1, 2, "a.js", [
+  new SourceNode(3, 4, "b.js", "uno"),
+  "dos",
+  [
+    "tres",
+    new SourceNode(5, 6, "c.js", "quatro")
+  ]
+]);
+
+node.walk(function (code, loc) { console.log("WALK:", code, loc); })
+// WALK: uno { source: 'b.js', line: 3, column: 4, name: null }
+// WALK: dos { source: 'a.js', line: 1, column: 2, name: null }
+// WALK: tres { source: 'a.js', line: 1, column: 2, name: null }
+// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null }
+```
+
+#### SourceNode.prototype.walkSourceContents(fn)
+
+Walk over the tree of SourceNodes. The walking function is called for each
+source file content and is passed the filename and source content.
+
+* `fn`: The traversal function.
+
+```js
+var a = new SourceNode(1, 2, "a.js", "generated from a");
+a.setSourceContent("a.js", "original a");
+var b = new SourceNode(1, 2, "b.js", "generated from b");
+b.setSourceContent("b.js", "original b");
+var c = new SourceNode(1, 2, "c.js", "generated from c");
+c.setSourceContent("c.js", "original c");
+
+var node = new SourceNode(null, null, null, [a, b, c]);
+node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); })
+// WALK: a.js : original a
+// WALK: b.js : original b
+// WALK: c.js : original c
+```
+
+#### SourceNode.prototype.join(sep)
+
+Like `Array.prototype.join` except for SourceNodes. Inserts the separator
+between each of this source node's children.
+
+* `sep`: The separator.
+
+```js
+var lhs = new SourceNode(1, 2, "a.rs", "my_copy");
+var operand = new SourceNode(3, 4, "a.rs", "=");
+var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()");
+
+var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]);
+var joinedNode = node.join(" ");
+```
+
+#### SourceNode.prototype.replaceRight(pattern, replacement)
+
+Call `String.prototype.replace` on the very right-most source snippet. Useful
+for trimming white space from the end of a source node, etc.
+
+* `pattern`: The pattern to replace.
+
+* `replacement`: The thing to replace the pattern with.
+
+```js
+// Trim trailing white space.
+node.replaceRight(/\s*$/, "");
+```
+
+#### SourceNode.prototype.toString()
+
+Return the string representation of this source node. Walks over the tree and
+concatenates all the various snippets together into one string.
+
+```js
+var node = new SourceNode(1, 2, "a.js", [
+  new SourceNode(3, 4, "b.js", "uno"),
+  "dos",
+  [
+    "tres",
+    new SourceNode(5, 6, "c.js", "quatro")
+  ]
+]);
+
+node.toString()
+// 'unodostresquatro'
+```
+
+#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])
+
+Returns the string representation of this tree of source nodes, plus a
+SourceMapGenerator which contains all the mappings between the generated and
+original sources.
+
+The arguments are the same as those to `new SourceMapGenerator`.
+ +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toStringWithSourceMap({ file: "my-output-file.js" }) +// { code: 'unodostresquatro', +// map: [object SourceMapGenerator] } +``` diff --git a/node_modules/source-map-js/lib/array-set.js b/node_modules/source-map-js/lib/array-set.js new file mode 100644 index 0000000..fbd5c81 --- /dev/null +++ b/node_modules/source-map-js/lib/array-set.js @@ -0,0 +1,121 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var has = Object.prototype.hasOwnProperty; +var hasNativeMap = typeof Map !== "undefined"; + +/** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ +function ArraySet() { + this._array = []; + this._set = hasNativeMap ? new Map() : Object.create(null); +} + +/** + * Static method for creating ArraySet instances from an existing array. + */ +ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; +}; + +/** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ +ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; +}; + +/** + * Add the given string to this set. + * + * @param String aStr + */ +ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } +}; + +/** + * Is the given string a member of this set? + * + * @param String aStr + */ +ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } +}; + +/** + * What is the index of the given string in the array? + * + * @param String aStr + */ +ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); +}; + +/** + * What is the element at the given index? + * + * @param Number aIdx + */ +ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); +}; + +/** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). 
Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ +ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); +}; + +exports.ArraySet = ArraySet; diff --git a/node_modules/source-map-js/lib/base64-vlq.js b/node_modules/source-map-js/lib/base64-vlq.js new file mode 100644 index 0000000..612b404 --- /dev/null +++ b/node_modules/source-map-js/lib/base64-vlq.js @@ -0,0 +1,140 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +var base64 = require('./base64'); + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. +// +// Continuation +// | Sign +// | | +// V V +// 101011 + +var VLQ_BASE_SHIFT = 5; + +// binary: 100000 +var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +var VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +var VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? 
((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; + +/** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ +exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; +}; diff --git a/node_modules/source-map-js/lib/base64.js b/node_modules/source-map-js/lib/base64.js new file mode 100644 index 0000000..8aa86b3 --- /dev/null +++ b/node_modules/source-map-js/lib/base64.js @@ -0,0 +1,67 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ +exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); +}; + +/** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ +exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. 
+ return -1; +}; diff --git a/node_modules/source-map-js/lib/binary-search.js b/node_modules/source-map-js/lib/binary-search.js new file mode 100644 index 0000000..010ac94 --- /dev/null +++ b/node_modules/source-map-js/lib/binary-search.js @@ -0,0 +1,111 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +exports.GREATEST_LOWER_BOUND = 1; +exports.LEAST_UPPER_BOUND = 2; + +/** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ +function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } +} + +/** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. 
Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ +exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; +}; diff --git a/node_modules/source-map-js/lib/mapping-list.js b/node_modules/source-map-js/lib/mapping-list.js new file mode 100644 index 0000000..06d1274 --- /dev/null +++ b/node_modules/source-map-js/lib/mapping-list.js @@ -0,0 +1,79 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); + +/** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ +function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; +} + +/** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ +function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; +} + +/** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ +MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + +/** + * Add the given source mapping. + * + * @param Object aMapping + */ +MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } +}; + +/** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. 
+ */ +MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; +}; + +exports.MappingList = MappingList; diff --git a/node_modules/source-map-js/lib/quick-sort.js b/node_modules/source-map-js/lib/quick-sort.js new file mode 100644 index 0000000..23f9eda --- /dev/null +++ b/node_modules/source-map-js/lib/quick-sort.js @@ -0,0 +1,132 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +// It turns out that some (most?) JavaScript engines don't self-host +// `Array.prototype.sort`. This makes sense because C++ will likely remain +// faster than JS when doing raw CPU-intensive sorting. However, when using a +// custom comparator function, calling back and forth between the VM's C++ and +// JIT'd JS is rather slow *and* loses JIT type information, resulting in +// worse generated code for the comparator function than would be optimal. In +// fact, when sorting with a comparator, these costs outweigh the benefits of +// sorting in C++. By using our own JS-implemented Quick Sort (below), we get +// a ~3500ms mean speed-up in `bench/bench.html`. + +function SortTemplate(comparator) { + +/** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ +function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; +} + +/** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ +function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); +} + +/** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ +function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. 
+ for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot, false) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } +} + + return doQuickSort; +} + +function cloneSort(comparator) { + let template = SortTemplate.toString(); + let templateFn = new Function(`return ${template}`)(); + return templateFn(comparator); +} + +/** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + +let sortCache = new WeakMap(); +exports.quickSort = function (ary, comparator, start = 0) { + let doQuickSort = sortCache.get(comparator); + if (doQuickSort === void 0) { + doQuickSort = cloneSort(comparator); + sortCache.set(comparator, doQuickSort); + } + doQuickSort(ary, comparator, start, ary.length - 1); +}; diff --git a/node_modules/source-map-js/lib/source-map-consumer.js b/node_modules/source-map-js/lib/source-map-consumer.js new file mode 100644 index 0000000..db0a532 --- /dev/null +++ b/node_modules/source-map-js/lib/source-map-consumer.js @@ -0,0 +1,1184 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var binarySearch = require('./binary-search'); +var ArraySet = require('./array-set').ArraySet; +var base64VLQ = require('./base64-vlq'); +var quickSort = require('./quick-sort').quickSort; + +function SourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) + : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); +} + +SourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); +} + +/** + * The version of the source mapping spec that we are consuming. + */ +SourceMapConsumer.prototype._version = 3; + +// `__generatedMappings` and `__originalMappings` are arrays that hold the +// parsed mapping coordinates from the source map's "mappings" attribute. They +// are lazily instantiated, accessed via the `_generatedMappings` and +// `_originalMappings` getters respectively, and we only parse the mappings +// and create these arrays once queried for a source location. We jump through +// these hoops because there can be many thousands of mappings, and parsing +// them is expensive, so we only want to do it if we must. +// +// Each object in the arrays is of the form: +// +// { +// generatedLine: The line number in the generated code, +// generatedColumn: The column number in the generated code, +// source: The path to the original source file that generated this +// chunk of code, +// originalLine: The line number in the original source that +// corresponds to this chunk of generated code, +// originalColumn: The column number in the original source that +// corresponds to this chunk of generated code, +// name: The name of the original symbol which generated this chunk of +// code. +// } +// +// All properties except for `generatedLine` and `generatedColumn` can be +// `null`. 
+// +// `_generatedMappings` is ordered by the generated positions. +// +// `_originalMappings` is ordered by the original positions. + +SourceMapConsumer.prototype.__generatedMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } +}); + +SourceMapConsumer.prototype.__originalMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } +}); + +SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + +SourceMapConsumer.GENERATED_ORDER = 1; +SourceMapConsumer.ORIGINAL_ORDER = 2; + +SourceMapConsumer.GREATEST_LOWER_BOUND = 1; +SourceMapConsumer.LEAST_UPPER_BOUND = 2; + +/** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ +SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + var boundCallback = aCallback.bind(context); + var names = this._names; + var sources = this._sources; + var sourceMapURL = this._sourceMapURL; + + for (var i = 0, n = mappings.length; i < n; i++) { + var mapping = mappings[i]; + var source = mapping.source === null ? null : sources.at(mapping.source); + source = util.computeSourceURL(sourceRoot, source, sourceMapURL); + boundCallback({ + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : names.at(mapping.name) + }); + } + }; + +/** + * Returns all generated line and column information for the original source, + * line, and column provided. 
If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number is 1-based. + * - column: Optional. the column number in the original source. + * The column number is 0-based. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. + var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + needle.source = this._findSourceIndex(needle.source); + if (needle.source < 0) { + return []; + } + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + +exports.SourceMapConsumer = SourceMapConsumer; + +/** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The first parameter is the raw source map (either as a JSON string, or + * already parsed to an object). 
According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ +function BasicSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + if (sourceRoot) { + sourceRoot = util.normalize(sourceRoot); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. 
+ this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this._absoluteSources = this._sources.toArray().map(function (s) { + return util.computeSourceURL(sourceRoot, s, aSourceMapURL); + }); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this._sourceMapURL = aSourceMapURL; + this.file = file; +} + +BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + +/** + * Utility function to find the index of a source. Returns -1 if not + * found. + */ +BasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) { + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + if (this._sources.has(relativeSource)) { + return this._sources.indexOf(relativeSource); + } + + // Maybe aSource is an absolute URL as returned by |sources|. In + // this case we can't simply undo the transform. + var i; + for (i = 0; i < this._absoluteSources.length; ++i) { + if (this._absoluteSources[i] == aSource) { + return i; + } + } + + return -1; +}; + +/** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. + * @param String aSourceMapURL + * The URL at which the source map can be found (optional) + * @returns BasicSourceMapConsumer + */ +BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + smc._sourceMapURL = aSourceMapURL; + smc._absoluteSources = smc._sources.toArray().map(function (s) { + return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL); + }); + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + +/** + * The version of the source mapping spec that we are consuming. 
+ */ +BasicSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._absoluteSources.slice(); + } +}); + +/** + * Provide the JIT with a nice shape / hidden class. + */ +function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; +} + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + +const compareGenerated = util.compareByGeneratedPositionsDeflatedNoLine; +function sortGenerated(array, start) { + let l = array.length; + let n = array.length - start; + if (n <= 1) { + return; + } else if (n == 2) { + let a = array[start]; + let b = array[start + 1]; + if (compareGenerated(a, b) > 0) { + array[start] = b; + array[start + 1] = a; + } + } else if (n < 20) { + for (let i = start; i < l; i++) { + for (let j = i; j > start; j--) { + let a = array[j - 1]; + let b = array[j]; + if (compareGenerated(a, b) <= 0) { + break; + } + array[j - 1] = b; + array[j] = a; + } + } + } else { + quickSort(array, compareGenerated, start); + } +} +BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + let subarrayStart = 0; + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + + sortGenerated(generatedMappings, subarrayStart); + subarrayStart = generatedMappings.length; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. 
+ mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + let currentSource = mapping.source; + while (originalMappings.length <= currentSource) { + originalMappings.push(null); + } + if (originalMappings[currentSource] === null) { + originalMappings[currentSource] = []; + } + originalMappings[currentSource].push(mapping); + } + } + } + + sortGenerated(generatedMappings, subarrayStart); + this.__generatedMappings = generatedMappings; + + for (var i = 0; i < originalMappings.length; i++) { + if (originalMappings[i] != null) { + quickSort(originalMappings[i], util.compareByOriginalPositionsNoSource); + } + } + this.__originalMappings = [].concat(...originalMappings); + }; + +/** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ +BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + +/** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ +BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). + if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. 
+ * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ +BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + var index = this._findSourceIndex(aSource); + if (index >= 0) { + return this.sourcesContent[index]; + } + + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + relativeSource)) { + return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. 
+ if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + relativeSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + source = this._findSourceIndex(source); + if (source < 0) { + return { + line: null, + column: null, + lastColumn: null + }; + } + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + +exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + +/** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The first parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. 
+ * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ +function IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL) + } + }); +} + +IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + +/** + * The version of the source mapping spec that we are consuming. + */ +IndexedSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } +}); + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. 
+ */ +IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content || content === '') { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? 
section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL); + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = null; + if (mapping.name) { + name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + } + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + +exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; diff --git a/node_modules/source-map-js/lib/source-map-generator.js b/node_modules/source-map-js/lib/source-map-generator.js new file mode 100644 index 0000000..bab04ff --- /dev/null +++ b/node_modules/source-map-js/lib/source-map-generator.js @@ -0,0 +1,444 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var base64VLQ = require('./base64-vlq'); +var util = require('./util'); +var ArraySet = require('./array-set').ArraySet; +var MappingList = require('./mapping-list').MappingList; + +/** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. 
+ */ +function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._ignoreInvalidMapping = util.getArg(aArgs, 'ignoreInvalidMapping', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; +} + +SourceMapGenerator.prototype._version = 3; + +/** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ +SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer, generatorOps) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator(Object.assign(generatorOps || {}, { + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + })); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var sourceRelative = sourceFile; + if (sourceRoot !== null) { + sourceRelative = util.relative(sourceRoot, sourceFile); + } + + if (!generator._sources.has(sourceRelative)) { + generator._sources.add(sourceRelative); + } + + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + +/** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ +SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + if (this._validateMapping(generated, original, source, name) === false) { + return; + } + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + +/** + * Set the source content for a source file. 
+ */ +SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + +/** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ +SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. 
+ var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + +/** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ +SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + var message = 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + + if (this._ignoreInvalidMapping) { + if (typeof console !== 'undefined' && console.warn) { + console.warn(message); + } + return false; + } else { + throw new Error(message); + } + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. 
+ return; + } + else { + var message = 'Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + }); + + if (this._ignoreInvalidMapping) { + if (typeof console !== 'undefined' && console.warn) { + console.warn(message); + } + return false; + } else { + throw new Error(message) + } + } + }; + +/** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. + */ +SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + +SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + +/** + * Externalize the source map. + */ +SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + +/** + * Render the source map being generated to a string. 
+ */ +SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + +exports.SourceMapGenerator = SourceMapGenerator; diff --git a/node_modules/source-map-js/lib/source-node.js b/node_modules/source-map-js/lib/source-node.js new file mode 100644 index 0000000..8bcdbe3 --- /dev/null +++ b/node_modules/source-map-js/lib/source-node.js @@ -0,0 +1,413 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; +var util = require('./util'); + +// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other +// operating systems these days (capturing the result). +var REGEX_NEWLINE = /(\r?\n)/; + +// Newline character code for charCodeAt() comparisons +var NEWLINE_CODE = 10; + +// Private symbol for identifying `SourceNode`s when multiple versions of +// the source-map library are loaded. This MUST NOT CHANGE across +// versions! +var isSourceNode = "$$$isSourceNode$$$"; + +/** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ +function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); +} + +/** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ +SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. 
+ // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex] || ''; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex] || ''; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + +/** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Add a chunk of generated JS to the beginning of this source node. 
+ * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } +}; + +/** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ +SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; +}; + +/** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ +SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; +}; + +/** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ +SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + +/** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. 
+ */ +SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + +/** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ +SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; +}; + +/** + * Returns the string representation of this source node along with a source + * map. + */ +SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; +}; + +exports.SourceNode = SourceNode; diff --git a/node_modules/source-map-js/lib/util.js b/node_modules/source-map-js/lib/util.js new file mode 100644 index 0000000..430e2d0 --- /dev/null +++ b/node_modules/source-map-js/lib/util.js @@ -0,0 +1,594 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +/** + * This is a helper function for getting values from parameter/options + * objects. 
+ * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. + */ +function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } +} +exports.getArg = getArg; + +var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; +var dataUrlRegexp = /^data:.+\,.+$/; + +function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; +} +exports.urlParse = urlParse; + +function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; +} +exports.urlGenerate = urlGenerate; + +var MAX_CACHED_INPUTS = 32; + +/** + * Takes some function `f(input) -> result` and returns a memoized version of + * `f`. + * + * We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The + * memoization is a dumb-simple, linear least-recently-used cache. + */ +function lruMemoize(f) { + var cache = []; + + return function(input) { + for (var i = 0; i < cache.length; i++) { + if (cache[i].input === input) { + var temp = cache[0]; + cache[0] = cache[i]; + cache[i] = temp; + return cache[0].result; + } + } + + var result = f(input); + + cache.unshift({ + input, + result, + }); + + if (cache.length > MAX_CACHED_INPUTS) { + cache.pop(); + } + + return result; + }; +} + +/** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ +var normalize = lruMemoize(function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + // Split the path into parts between `/` characters. This is much faster than + // using `.split(/\/+/g)`. + var parts = []; + var start = 0; + var i = 0; + while (true) { + start = i; + i = path.indexOf("/", start); + if (i === -1) { + parts.push(path.slice(start)); + break; + } else { + parts.push(path.slice(start, i)); + while (i < path.length && path[i] === "/") { + i++; + } + } + } + + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? 
'/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; +}); +exports.normalize = normalize; + +/** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. + */ +function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; +} +exports.join = join; + +exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || urlRegexp.test(aPath); +}; + +/** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ +function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); +} +exports.relative = relative; + +var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); +}()); + +function identity (s) { + return s; +} + +/** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. 
+ * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ +function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; +} +exports.toSetString = supportsNullProto ? identity : toSetString; + +function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; +} +exports.fromSetString = supportsNullProto ? identity : fromSetString; + +function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; +} + +/** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ +function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByOriginalPositions = compareByOriginalPositions; + +function compareByOriginalPositionsNoSource(mappingA, mappingB, onlyCompareOriginal) { + var cmp + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByOriginalPositionsNoSource = compareByOriginalPositionsNoSource; + +/** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. 
+ */ +function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + +function compareByGeneratedPositionsDeflatedNoLine(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsDeflatedNoLine = compareByGeneratedPositionsDeflatedNoLine; + +function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 === null) { + return 1; // aStr2 !== null + } + + if (aStr2 === null) { + return -1; // aStr1 !== null + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; +} + +/** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. + */ +function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + +/** + * Strip any JSON XSSI avoidance prefix from the string (as documented + * in the source maps specification), and then parse the string as + * JSON. + */ +function parseSourceMapInput(str) { + return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, '')); +} +exports.parseSourceMapInput = parseSourceMapInput; + +/** + * Compute the URL of a source given the the source root, the source's + * URL, and the source map's URL. + */ +function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { + sourceURL = sourceURL || ''; + + if (sourceRoot) { + // This follows what Chrome does. + if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') { + sourceRoot += '/'; + } + // The spec says: + // Line 4: An optional source root, useful for relocating source + // files on a server or removing repeated values in the + // “sources” entry. This value is prepended to the individual + // entries in the “source” field. + sourceURL = sourceRoot + sourceURL; + } + + // Historically, SourceMapConsumer did not take the sourceMapURL as + // a parameter. 
This mode is still somewhat supported, which is why + // this code block is conditional. However, it's preferable to pass + // the source map URL to SourceMapConsumer, so that this function + // can implement the source URL resolution algorithm as outlined in + // the spec. This block is basically the equivalent of: + // new URL(sourceURL, sourceMapURL).toString() + // ... except it avoids using URL, which wasn't available in the + // older releases of node still supported by this library. + // + // The spec says: + // If the sources are not absolute URLs after prepending of the + // “sourceRoot”, the sources are resolved relative to the + // SourceMap (like resolving script src in a html document). + if (sourceMapURL) { + var parsed = urlParse(sourceMapURL); + if (!parsed) { + throw new Error("sourceMapURL could not be parsed"); + } + if (parsed.path) { + // Strip the last path component, but keep the "/". + var index = parsed.path.lastIndexOf('/'); + if (index >= 0) { + parsed.path = parsed.path.substring(0, index + 1); + } + } + sourceURL = join(urlGenerate(parsed), sourceURL); + } + + return normalize(sourceURL); +} +exports.computeSourceURL = computeSourceURL; diff --git a/node_modules/source-map-js/package.json b/node_modules/source-map-js/package.json new file mode 100644 index 0000000..f16eb36 --- /dev/null +++ b/node_modules/source-map-js/package.json @@ -0,0 +1,71 @@ +{ + "name": "source-map-js", + "description": "Generates and consumes source maps", + "version": "1.2.0", + "homepage": "https://github.com/7rulnik/source-map-js", + "author": "Valentin 7rulnik Semirulnik ", + "contributors": [ + "Nick Fitzgerald ", + "Tobias Koppers ", + "Duncan Beevers ", + "Stephen Crane ", + "Ryan Seddon ", + "Miles Elam ", + "Mihai Bazon ", + "Michael Ficarra ", + "Todd Wolfson ", + "Alexander Solovyov ", + "Felix Gnass ", + "Conrad Irwin ", + "usrbincc ", + "David Glasser ", + "Chase Douglas ", + "Evan Wallace ", + "Heather Arthur ", + "Hugh Kennedy ", + "David Glasser ", + "Simon Lydell ", + "Jmeas Smith ", + "Michael Z Goddard ", + "azu ", + "John Gozde ", + "Adam Kirkton ", + "Chris Montgomery ", + "J. 
Ryan Stinnett ", + "Jack Herrington ", + "Chris Truter ", + "Daniel Espeset ", + "Jamie Wong ", + "Eddy Bruël ", + "Hawken Rives ", + "Gilad Peleg ", + "djchie ", + "Gary Ye ", + "Nicolas Lalevée " + ], + "repository": "7rulnik/source-map-js", + "main": "./source-map.js", + "files": [ + "source-map.js", + "source-map.d.ts", + "lib/" + ], + "engines": { + "node": ">=0.10.0" + }, + "license": "BSD-3-Clause", + "scripts": { + "test": "npm run build && node test/run-tests.js", + "build": "webpack --color", + "toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md" + }, + "devDependencies": { + "clean-publish": "^3.1.0", + "doctoc": "^0.15.0", + "webpack": "^1.12.0" + }, + "clean-publish": { + "cleanDocs": true + }, + "typings": "source-map.d.ts" +} diff --git a/node_modules/source-map-js/source-map.d.ts b/node_modules/source-map-js/source-map.d.ts new file mode 100644 index 0000000..9f8a4b3 --- /dev/null +++ b/node_modules/source-map-js/source-map.d.ts @@ -0,0 +1,115 @@ +declare module 'source-map-js' { + export interface StartOfSourceMap { + file?: string; + sourceRoot?: string; + } + + export interface RawSourceMap extends StartOfSourceMap { + version: string; + sources: string[]; + names: string[]; + sourcesContent?: string[]; + mappings: string; + } + + export interface Position { + line: number; + column: number; + } + + export interface LineRange extends Position { + lastColumn: number; + } + + export interface FindPosition extends Position { + // SourceMapConsumer.GREATEST_LOWER_BOUND or SourceMapConsumer.LEAST_UPPER_BOUND + bias?: number; + } + + export interface SourceFindPosition extends FindPosition { + source: string; + } + + export interface MappedPosition extends Position { + source: string; + name?: string; + } + + export interface MappingItem { + source: string; + generatedLine: number; + generatedColumn: number; + originalLine: number; + originalColumn: number; + name: string; + } + + export class SourceMapConsumer { + static GENERATED_ORDER: number; + static ORIGINAL_ORDER: number; + + static GREATEST_LOWER_BOUND: number; + static LEAST_UPPER_BOUND: number; + + constructor(rawSourceMap: RawSourceMap); + computeColumnSpans(): void; + originalPositionFor(generatedPosition: FindPosition): MappedPosition; + generatedPositionFor(originalPosition: SourceFindPosition): LineRange; + allGeneratedPositionsFor(originalPosition: MappedPosition): Position[]; + hasContentsOfAllSources(): boolean; + sourceContentFor(source: string, returnNullOnMissing?: boolean): string; + eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void; + } + + export interface Mapping { + generated: Position; + original: Position; + source: string; + name?: string; + } + + export class SourceMapGenerator { + constructor(startOfSourceMap?: StartOfSourceMap); + static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator; + addMapping(mapping: Mapping): void; + setSourceContent(sourceFile: string, sourceContent: string): void; + applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void; + toString(): string; + } + + export interface CodeWithSourceMap { + code: string; + map: SourceMapGenerator; + } + + export class SourceNode { + constructor(); + constructor(line: number, column: number, source: string); + constructor(line: number, column: number, source: string, chunk?: string, name?: string); + static fromStringWithSourceMap(code: string, sourceMapConsumer: 
SourceMapConsumer, relativePath?: string): SourceNode; + add(chunk: string): void; + prepend(chunk: string): void; + setSourceContent(sourceFile: string, sourceContent: string): void; + walk(fn: (chunk: string, mapping: MappedPosition) => void): void; + walkSourceContents(fn: (file: string, content: string) => void): void; + join(sep: string): SourceNode; + replaceRight(pattern: string, replacement: string): SourceNode; + toString(): string; + toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap; + } +} + +declare module 'source-map-js/lib/source-map-generator' { + import { SourceMapGenerator } from 'source-map-js' + export { SourceMapGenerator } +} + +declare module 'source-map-js/lib/source-map-consumer' { + import { SourceMapConsumer } from 'source-map-js' + export { SourceMapConsumer } +} + +declare module 'source-map-js/lib/source-node' { + import { SourceNode } from 'source-map-js' + export { SourceNode } +} diff --git a/node_modules/source-map-js/source-map.js b/node_modules/source-map-js/source-map.js new file mode 100644 index 0000000..bc88fe8 --- /dev/null +++ b/node_modules/source-map-js/source-map.js @@ -0,0 +1,8 @@ +/* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ +exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator; +exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer; +exports.SourceNode = require('./lib/source-node').SourceNode; diff --git a/node_modules/spdx-correct/LICENSE b/node_modules/spdx-correct/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/spdx-correct/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/spdx-correct/README.md b/node_modules/spdx-correct/README.md new file mode 100644 index 0000000..af563d8 --- /dev/null +++ b/node_modules/spdx-correct/README.md @@ -0,0 +1,22 @@ +## Usage + +```javascript +var correct = require('spdx-correct') +var assert = require('assert') + +assert.strictEqual(correct('mit'), 'MIT') + +assert.strictEqual(correct('Apache 2'), 'Apache-2.0') + +assert(correct('No idea what license') === null) + +// disable upgrade option +assert(correct('GPL-3.0'), 'GPL-3.0-or-later') +assert(correct('GPL-3.0', { upgrade: false }), 'GPL-3.0') +``` + +## Contributors + +spdx-correct has benefited from the work of several contributors. +See [the GitHub repository](https://github.com/jslicense/spdx-correct.js/graphs/contributors) +for more information. 
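For reference, the `upgrade` behaviour shown in the spdx-correct README above can be checked with strict assertions; `assert(value, message)` only tests truthiness, so `assert.strictEqual` makes the expected results explicit. A minimal sketch against the vendored `spdx-correct@3.2.0`:

```javascript
// Minimal sketch of the `upgrade` option (on by default) in spdx-correct 3.2.0.
var correct = require('spdx-correct')
var assert = require('assert')

// Deprecated GPL-family identifiers are upgraded to their
// `-or-later` / `-only` replacements by default.
assert.strictEqual(correct('GPL-3.0'), 'GPL-3.0-or-later')
assert.strictEqual(correct('GPL-2.0'), 'GPL-2.0-only')

// Pass { upgrade: false } to keep the deprecated identifier as-is.
assert.strictEqual(correct('GPL-3.0', { upgrade: false }), 'GPL-3.0')

// Input that cannot be corrected still yields null.
assert.strictEqual(correct('No idea what license'), null)
```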
diff --git a/node_modules/spdx-correct/index.js b/node_modules/spdx-correct/index.js new file mode 100644 index 0000000..4d9037e --- /dev/null +++ b/node_modules/spdx-correct/index.js @@ -0,0 +1,386 @@ +/* +Copyright spdx-correct.js contributors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var parse = require('spdx-expression-parse') +var spdxLicenseIds = require('spdx-license-ids') + +function valid (string) { + try { + parse(string) + return true + } catch (error) { + return false + } +} + +// Sorting function that orders the given array of transpositions such +// that a transposition with the longer pattern comes before a transposition +// with a shorter pattern. This is to prevent e.g. the transposition +// ["General Public License", "GPL"] from matching to "Lesser General Public License" +// before a longer and more accurate transposition ["Lesser General Public License", "LGPL"] +// has a chance to be recognized. +function sortTranspositions(a, b) { + var length = b[0].length - a[0].length + if (length !== 0) return length + return a[0].toUpperCase().localeCompare(b[0].toUpperCase()) +} + +// Common transpositions of license identifier acronyms +var transpositions = [ + ['APGL', 'AGPL'], + ['Gpl', 'GPL'], + ['GLP', 'GPL'], + ['APL', 'Apache'], + ['ISD', 'ISC'], + ['GLP', 'GPL'], + ['IST', 'ISC'], + ['Claude', 'Clause'], + [' or later', '+'], + [' International', ''], + ['GNU', 'GPL'], + ['GUN', 'GPL'], + ['+', ''], + ['GNU GPL', 'GPL'], + ['GNU LGPL', 'LGPL'], + ['GNU/GPL', 'GPL'], + ['GNU GLP', 'GPL'], + ['GNU LESSER GENERAL PUBLIC LICENSE', 'LGPL'], + ['GNU Lesser General Public License', 'LGPL'], + ['GNU LESSER GENERAL PUBLIC LICENSE', 'LGPL-2.1'], + ['GNU Lesser General Public License', 'LGPL-2.1'], + ['LESSER GENERAL PUBLIC LICENSE', 'LGPL'], + ['Lesser General Public License', 'LGPL'], + ['LESSER GENERAL PUBLIC LICENSE', 'LGPL-2.1'], + ['Lesser General Public License', 'LGPL-2.1'], + ['GNU General Public License', 'GPL'], + ['Gnu public license', 'GPL'], + ['GNU Public License', 'GPL'], + ['GNU GENERAL PUBLIC LICENSE', 'GPL'], + ['MTI', 'MIT'], + ['Mozilla Public License', 'MPL'], + ['Universal Permissive License', 'UPL'], + ['WTH', 'WTF'], + ['WTFGPL', 'WTFPL'], + ['-License', ''] +].sort(sortTranspositions) + +var TRANSPOSED = 0 +var CORRECT = 1 + +// Simple corrections to nearly valid identifiers. +var transforms = [ + // e.g. 'mit' + function (argument) { + return argument.toUpperCase() + }, + // e.g. 'MIT ' + function (argument) { + return argument.trim() + }, + // e.g. 'M.I.T.' + function (argument) { + return argument.replace(/\./g, '') + }, + // e.g. 'Apache- 2.0' + function (argument) { + return argument.replace(/\s+/g, '') + }, + // e.g. 'CC BY 4.0'' + function (argument) { + return argument.replace(/\s+/g, '-') + }, + // e.g. 'LGPLv2.1' + function (argument) { + return argument.replace('v', '-') + }, + // e.g. 'Apache 2.0' + function (argument) { + return argument.replace(/,?\s*(\d)/, '-$1') + }, + // e.g. 
'GPL 2' + function (argument) { + return argument.replace(/,?\s*(\d)/, '-$1.0') + }, + // e.g. 'Apache Version 2.0' + function (argument) { + return argument + .replace(/,?\s*(V\.|v\.|V|v|Version|version)\s*(\d)/, '-$2') + }, + // e.g. 'Apache Version 2' + function (argument) { + return argument + .replace(/,?\s*(V\.|v\.|V|v|Version|version)\s*(\d)/, '-$2.0') + }, + // e.g. 'ZLIB' + function (argument) { + return argument[0].toUpperCase() + argument.slice(1) + }, + // e.g. 'MPL/2.0' + function (argument) { + return argument.replace('/', '-') + }, + // e.g. 'Apache 2' + function (argument) { + return argument + .replace(/\s*V\s*(\d)/, '-$1') + .replace(/(\d)$/, '$1.0') + }, + // e.g. 'GPL-2.0', 'GPL-3.0' + function (argument) { + if (argument.indexOf('3.0') !== -1) { + return argument + '-or-later' + } else { + return argument + '-only' + } + }, + // e.g. 'GPL-2.0-' + function (argument) { + return argument + 'only' + }, + // e.g. 'GPL2' + function (argument) { + return argument.replace(/(\d)$/, '-$1.0') + }, + // e.g. 'BSD 3' + function (argument) { + return argument.replace(/(-| )?(\d)$/, '-$2-Clause') + }, + // e.g. 'BSD clause 3' + function (argument) { + return argument.replace(/(-| )clause(-| )(\d)/, '-$3-Clause') + }, + // e.g. 'New BSD license' + function (argument) { + return argument.replace(/\b(Modified|New|Revised)(-| )?BSD((-| )License)?/i, 'BSD-3-Clause') + }, + // e.g. 'Simplified BSD license' + function (argument) { + return argument.replace(/\bSimplified(-| )?BSD((-| )License)?/i, 'BSD-2-Clause') + }, + // e.g. 'Free BSD license' + function (argument) { + return argument.replace(/\b(Free|Net)(-| )?BSD((-| )License)?/i, 'BSD-2-Clause-$1BSD') + }, + // e.g. 'Clear BSD license' + function (argument) { + return argument.replace(/\bClear(-| )?BSD((-| )License)?/i, 'BSD-3-Clause-Clear') + }, + // e.g. 'Old BSD License' + function (argument) { + return argument.replace(/\b(Old|Original)(-| )?BSD((-| )License)?/i, 'BSD-4-Clause') + }, + // e.g. 'BY-NC-4.0' + function (argument) { + return 'CC-' + argument + }, + // e.g. 'BY-NC' + function (argument) { + return 'CC-' + argument + '-4.0' + }, + // e.g. 'Attribution-NonCommercial' + function (argument) { + return argument + .replace('Attribution', 'BY') + .replace('NonCommercial', 'NC') + .replace('NoDerivatives', 'ND') + .replace(/ (\d)/, '-$1') + .replace(/ ?International/, '') + }, + // e.g. 'Attribution-NonCommercial' + function (argument) { + return 'CC-' + + argument + .replace('Attribution', 'BY') + .replace('NonCommercial', 'NC') + .replace('NoDerivatives', 'ND') + .replace(/ (\d)/, '-$1') + .replace(/ ?International/, '') + + '-4.0' + } +] + +var licensesWithVersions = spdxLicenseIds + .map(function (id) { + var match = /^(.*)-\d+\.\d+$/.exec(id) + return match + ? [match[0], match[1]] + : [id, null] + }) + .reduce(function (objectMap, item) { + var key = item[1] + objectMap[key] = objectMap[key] || [] + objectMap[key].push(item[0]) + return objectMap + }, {}) + +var licensesWithOneVersion = Object.keys(licensesWithVersions) + .map(function makeEntries (key) { + return [key, licensesWithVersions[key]] + }) + .filter(function identifySoleVersions (item) { + return ( + // Licenses has just one valid version suffix. 
+ item[1].length === 1 && + item[0] !== null && + // APL will be considered Apache, rather than APL-1.0 + item[0] !== 'APL' + ) + }) + .map(function createLastResorts (item) { + return [item[0], item[1][0]] + }) + +licensesWithVersions = undefined + +// If all else fails, guess that strings containing certain substrings +// meant to identify certain licenses. +var lastResorts = [ + ['UNLI', 'Unlicense'], + ['WTF', 'WTFPL'], + ['2 CLAUSE', 'BSD-2-Clause'], + ['2-CLAUSE', 'BSD-2-Clause'], + ['3 CLAUSE', 'BSD-3-Clause'], + ['3-CLAUSE', 'BSD-3-Clause'], + ['AFFERO', 'AGPL-3.0-or-later'], + ['AGPL', 'AGPL-3.0-or-later'], + ['APACHE', 'Apache-2.0'], + ['ARTISTIC', 'Artistic-2.0'], + ['Affero', 'AGPL-3.0-or-later'], + ['BEER', 'Beerware'], + ['BOOST', 'BSL-1.0'], + ['BSD', 'BSD-2-Clause'], + ['CDDL', 'CDDL-1.1'], + ['ECLIPSE', 'EPL-1.0'], + ['FUCK', 'WTFPL'], + ['GNU', 'GPL-3.0-or-later'], + ['LGPL', 'LGPL-3.0-or-later'], + ['GPLV1', 'GPL-1.0-only'], + ['GPL-1', 'GPL-1.0-only'], + ['GPLV2', 'GPL-2.0-only'], + ['GPL-2', 'GPL-2.0-only'], + ['GPL', 'GPL-3.0-or-later'], + ['MIT +NO-FALSE-ATTRIBS', 'MITNFA'], + ['MIT', 'MIT'], + ['MPL', 'MPL-2.0'], + ['X11', 'X11'], + ['ZLIB', 'Zlib'] +].concat(licensesWithOneVersion).sort(sortTranspositions) + +var SUBSTRING = 0 +var IDENTIFIER = 1 + +var validTransformation = function (identifier) { + for (var i = 0; i < transforms.length; i++) { + var transformed = transforms[i](identifier).trim() + if (transformed !== identifier && valid(transformed)) { + return transformed + } + } + return null +} + +var validLastResort = function (identifier) { + var upperCased = identifier.toUpperCase() + for (var i = 0; i < lastResorts.length; i++) { + var lastResort = lastResorts[i] + if (upperCased.indexOf(lastResort[SUBSTRING]) > -1) { + return lastResort[IDENTIFIER] + } + } + return null +} + +var anyCorrection = function (identifier, check) { + for (var i = 0; i < transpositions.length; i++) { + var transposition = transpositions[i] + var transposed = transposition[TRANSPOSED] + if (identifier.indexOf(transposed) > -1) { + var corrected = identifier.replace( + transposed, + transposition[CORRECT] + ) + var checked = check(corrected) + if (checked !== null) { + return checked + } + } + } + return null +} + +module.exports = function (identifier, options) { + options = options || {} + var upgrade = options.upgrade === undefined ? true : !!options.upgrade + function postprocess (value) { + return upgrade ? upgradeGPLs(value) : value + } + var validArugment = ( + typeof identifier === 'string' && + identifier.trim().length !== 0 + ) + if (!validArugment) { + throw Error('Invalid argument. 
Expected non-empty string.') + } + identifier = identifier.trim() + if (valid(identifier)) { + return postprocess(identifier) + } + var noPlus = identifier.replace(/\+$/, '').trim() + if (valid(noPlus)) { + return postprocess(noPlus) + } + var transformed = validTransformation(identifier) + if (transformed !== null) { + return postprocess(transformed) + } + transformed = anyCorrection(identifier, function (argument) { + if (valid(argument)) { + return argument + } + return validTransformation(argument) + }) + if (transformed !== null) { + return postprocess(transformed) + } + transformed = validLastResort(identifier) + if (transformed !== null) { + return postprocess(transformed) + } + transformed = anyCorrection(identifier, validLastResort) + if (transformed !== null) { + return postprocess(transformed) + } + return null +} + +function upgradeGPLs (value) { + if ([ + 'GPL-1.0', 'LGPL-1.0', 'AGPL-1.0', + 'GPL-2.0', 'LGPL-2.0', 'AGPL-2.0', + 'LGPL-2.1' + ].indexOf(value) !== -1) { + return value + '-only' + } else if ([ + 'GPL-1.0+', 'GPL-2.0+', 'GPL-3.0+', + 'LGPL-2.0+', 'LGPL-2.1+', 'LGPL-3.0+', + 'AGPL-1.0+', 'AGPL-3.0+' + ].indexOf(value) !== -1) { + return value.replace(/\+$/, '-or-later') + } else if (['GPL-3.0', 'LGPL-3.0', 'AGPL-3.0'].indexOf(value) !== -1) { + return value + '-or-later' + } else { + return value + } +} diff --git a/node_modules/spdx-correct/package.json b/node_modules/spdx-correct/package.json new file mode 100644 index 0000000..d776156 --- /dev/null +++ b/node_modules/spdx-correct/package.json @@ -0,0 +1,32 @@ +{ + "name": "spdx-correct", + "description": "correct invalid SPDX expressions", + "version": "3.2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + }, + "devDependencies": { + "defence-cli": "^3.0.1", + "replace-require-self": "^1.0.0", + "standard": "^14.3.4", + "standard-markdown": "^6.0.0", + "tape": "^5.0.1" + }, + "files": [ + "index.js" + ], + "keywords": [ + "SPDX", + "law", + "legal", + "license", + "metadata" + ], + "license": "Apache-2.0", + "repository": "jslicense/spdx-correct.js", + "scripts": { + "lint": "standard && standard-markdown README.md", + "test": "defence README.md | replace-require-self | node && node test.js" + } +} diff --git a/node_modules/spdx-exceptions/README.md b/node_modules/spdx-exceptions/README.md new file mode 100644 index 0000000..6c927ec --- /dev/null +++ b/node_modules/spdx-exceptions/README.md @@ -0,0 +1,36 @@ +The package exports an array of strings. Each string is an identifier +for a license exception under the [Software Package Data Exchange +(SPDX)][SPDX] software license metadata standard. + +[SPDX]: https://spdx.org + +## Copyright and Licensing + +### SPDX + +"SPDX" is a federally registered United States trademark of The Linux +Foundation Corporation. + +From version 2.0 of the [SPDX] specification: + +> Copyright © 2010-2015 Linux Foundation and its Contributors. Licensed +> under the Creative Commons Attribution License 3.0 Unported. All other +> rights are expressly reserved. + +The Linux Foundation and the SPDX working groups are good people. Only +they decide what "SPDX" means, as a standard and otherwise. I respect +their work and their rights. You should, too. + +### This Package + +> I created this package by copying exception identifiers out of the +> SPDX specification. That work was mechanical, routine, and required no +> creativity whatsoever. 
- Kyle Mitchell, package author + +United States users concerned about intellectual property may wish to +discuss the following Supreme Court decisions with their attorneys: + +- _Baker v. Selden_, 101 U.S. 99 (1879) + +- _Feist Publications, Inc., v. Rural Telephone Service Co._, + 499 U.S. 340 (1991) diff --git a/node_modules/spdx-exceptions/deprecated.json b/node_modules/spdx-exceptions/deprecated.json new file mode 100644 index 0000000..cba7e2b --- /dev/null +++ b/node_modules/spdx-exceptions/deprecated.json @@ -0,0 +1,3 @@ +[ + "Nokia-Qt-exception-1.1" +] diff --git a/node_modules/spdx-exceptions/index.json b/node_modules/spdx-exceptions/index.json new file mode 100644 index 0000000..d9549d3 --- /dev/null +++ b/node_modules/spdx-exceptions/index.json @@ -0,0 +1,68 @@ +[ + "389-exception", + "Asterisk-exception", + "Autoconf-exception-2.0", + "Autoconf-exception-3.0", + "Autoconf-exception-generic", + "Autoconf-exception-generic-3.0", + "Autoconf-exception-macro", + "Bison-exception-1.24", + "Bison-exception-2.2", + "Bootloader-exception", + "Classpath-exception-2.0", + "CLISP-exception-2.0", + "cryptsetup-OpenSSL-exception", + "DigiRule-FOSS-exception", + "eCos-exception-2.0", + "Fawkes-Runtime-exception", + "FLTK-exception", + "fmt-exception", + "Font-exception-2.0", + "freertos-exception-2.0", + "GCC-exception-2.0", + "GCC-exception-2.0-note", + "GCC-exception-3.1", + "Gmsh-exception", + "GNAT-exception", + "GNOME-examples-exception", + "GNU-compiler-exception", + "gnu-javamail-exception", + "GPL-3.0-interface-exception", + "GPL-3.0-linking-exception", + "GPL-3.0-linking-source-exception", + "GPL-CC-1.0", + "GStreamer-exception-2005", + "GStreamer-exception-2008", + "i2p-gpl-java-exception", + "KiCad-libraries-exception", + "LGPL-3.0-linking-exception", + "libpri-OpenH323-exception", + "Libtool-exception", + "Linux-syscall-note", + "LLGPL", + "LLVM-exception", + "LZMA-exception", + "mif-exception", + "OCaml-LGPL-linking-exception", + "OCCT-exception-1.0", + "OpenJDK-assembly-exception-1.0", + "openvpn-openssl-exception", + "PS-or-PDF-font-exception-20170817", + "QPL-1.0-INRIA-2004-exception", + "Qt-GPL-exception-1.0", + "Qt-LGPL-exception-1.1", + "Qwt-exception-1.0", + "SANE-exception", + "SHL-2.0", + "SHL-2.1", + "stunnel-exception", + "SWI-exception", + "Swift-exception", + "Texinfo-exception", + "u-boot-exception-2.0", + "UBDL-exception", + "Universal-FOSS-exception-1.0", + "vsftpd-openssl-exception", + "WxWindows-exception-3.1", + "x11vnc-openssl-exception" +] diff --git a/node_modules/spdx-exceptions/package.json b/node_modules/spdx-exceptions/package.json new file mode 100644 index 0000000..2f9a950 --- /dev/null +++ b/node_modules/spdx-exceptions/package.json @@ -0,0 +1,19 @@ +{ + "name": "spdx-exceptions", + "description": "list of SPDX standard license exceptions", + "version": "2.5.0", + "author": "The Linux Foundation", + "contributors": [ + "Kyle E. Mitchell (https://kemitchell.com/)" + ], + "license": "CC-BY-3.0", + "repository": "kemitchell/spdx-exceptions.json", + "files": [ + "index.json", + "deprecated.json" + ], + "scripts": { + "build": "node build.js", + "latest": "node latest.js" + } +} diff --git a/node_modules/spdx-expression-parse/AUTHORS b/node_modules/spdx-expression-parse/AUTHORS new file mode 100644 index 0000000..257a76b --- /dev/null +++ b/node_modules/spdx-expression-parse/AUTHORS @@ -0,0 +1,4 @@ +C. Scott Ananian (http://cscott.net) +Kyle E. 
Mitchell (https://kemitchell.com) +Shinnosuke Watanabe +Antoine Motet diff --git a/node_modules/spdx-expression-parse/LICENSE b/node_modules/spdx-expression-parse/LICENSE new file mode 100644 index 0000000..831618e --- /dev/null +++ b/node_modules/spdx-expression-parse/LICENSE @@ -0,0 +1,22 @@ +The MIT License + +Copyright (c) 2015 Kyle E. Mitchell & other authors listed in AUTHORS + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/spdx-expression-parse/README.md b/node_modules/spdx-expression-parse/README.md new file mode 100644 index 0000000..9406462 --- /dev/null +++ b/node_modules/spdx-expression-parse/README.md @@ -0,0 +1,91 @@ +This package parses [SPDX license expression](https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60) strings describing license terms, like [package.json license strings](https://docs.npmjs.com/files/package.json#license), into consistently structured ECMAScript objects. The npm command-line interface depends on this package, as do many automatic license-audit tools. + +In a nutshell: + +```javascript +var parse = require('spdx-expression-parse') +var assert = require('assert') + +assert.deepEqual( + // Licensed under the terms of the Two-Clause BSD License. + parse('BSD-2-Clause'), + {license: 'BSD-2-Clause'} +) + +assert.throws(function () { + // An invalid SPDX license expression. + // Should be `Apache-2.0`. + parse('Apache 2') +}) + +assert.deepEqual( + // Dual licensed under either: + // - LGPL 2.1 + // - a combination of Three-Clause BSD and MIT + parse('(LGPL-2.1 OR BSD-3-Clause AND MIT)'), + { + left: {license: 'LGPL-2.1'}, + conjunction: 'or', + right: { + left: {license: 'BSD-3-Clause'}, + conjunction: 'and', + right: {license: 'MIT'} + } + } +) +``` + +The syntax comes from the [Software Package Data eXchange (SPDX)](https://spdx.org/), a standard from the [Linux Foundation](https://www.linuxfoundation.org) for shareable data about software package license terms. SPDX aims to make sharing and auditing license data easy, especially for users of open-source software. + +The bulk of the SPDX standard describes syntax and semantics of XML metadata files. This package implements two lightweight, plain-text components of that larger standard: + +1. The [license list](https://spdx.org/licenses), a mapping from specific string identifiers, like `Apache-2.0`, to standard form license texts and bolt-on license exceptions. 
The [spdx-license-ids](https://www.npmjs.com/package/spdx-license-ids) and [spdx-exceptions](https://www.npmjs.com/package/spdx-exceptions) packages implement the license list. `spdx-expression-parse` depends on and `require()`s them. + + Any license identifier from the license list is a valid license expression: + + ```javascript + var identifiers = [] + .concat(require('spdx-license-ids')) + .concat(require('spdx-license-ids/deprecated')) + + identifiers.forEach(function (id) { + assert.deepEqual(parse(id), {license: id}) + }) + ``` + + So is any license identifier `WITH` a standardized license exception: + + ```javascript + identifiers.forEach(function (id) { + require('spdx-exceptions').forEach(function (e) { + assert.deepEqual( + parse(id + ' WITH ' + e), + {license: id, exception: e} + ) + }) + }) + ``` + +2. The license expression language, for describing simple and complex license terms, like `MIT` for MIT-licensed and `(GPL-2.0 OR Apache-2.0)` for dual-licensing under GPL 2.0 and Apache 2.0. `spdx-expression-parse` itself implements license expression language, exporting a parser. + + ```javascript + assert.deepEqual( + // Licensed under a combination of: + // - the MIT License AND + // - a combination of: + // - LGPL 2.1 (or a later version) AND + // - Three-Clause BSD + parse('(MIT AND (LGPL-2.1+ AND BSD-3-Clause))'), + { + left: {license: 'MIT'}, + conjunction: 'and', + right: { + left: {license: 'LGPL-2.1', plus: true}, + conjunction: 'and', + right: {license: 'BSD-3-Clause'} + } + } + ) + ``` + +The Linux Foundation and its contributors license the SPDX standard under the terms of [the Creative Commons Attribution License 3.0 Unported (SPDX: "CC-BY-3.0")](http://spdx.org/licenses/CC-BY-3.0). "SPDX" is a United States federally registered trademark of the Linux Foundation. The authors of this package license their work under the terms of the MIT License. diff --git a/node_modules/spdx-expression-parse/index.js b/node_modules/spdx-expression-parse/index.js new file mode 100644 index 0000000..52fab56 --- /dev/null +++ b/node_modules/spdx-expression-parse/index.js @@ -0,0 +1,8 @@ +'use strict' + +var scan = require('./scan') +var parse = require('./parse') + +module.exports = function (source) { + return parse(scan(source)) +} diff --git a/node_modules/spdx-expression-parse/package.json b/node_modules/spdx-expression-parse/package.json new file mode 100644 index 0000000..c9edc9f --- /dev/null +++ b/node_modules/spdx-expression-parse/package.json @@ -0,0 +1,39 @@ +{ + "name": "spdx-expression-parse", + "description": "parse SPDX license expressions", + "version": "3.0.1", + "author": "Kyle E. 
Mitchell (https://kemitchell.com)", + "files": [ + "AUTHORS", + "index.js", + "parse.js", + "scan.js" + ], + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + }, + "devDependencies": { + "defence-cli": "^3.0.1", + "replace-require-self": "^1.0.0", + "standard": "^14.1.0" + }, + "keywords": [ + "SPDX", + "law", + "legal", + "license", + "metadata", + "package", + "package.json", + "standards" + ], + "license": "MIT", + "repository": "jslicense/spdx-expression-parse.js", + "scripts": { + "lint": "standard", + "test:readme": "defence -i javascript README.md | replace-require-self | node", + "test:suite": "node test.js", + "test": "npm run test:suite && npm run test:readme" + } +} diff --git a/node_modules/spdx-expression-parse/parse.js b/node_modules/spdx-expression-parse/parse.js new file mode 100644 index 0000000..5a00b45 --- /dev/null +++ b/node_modules/spdx-expression-parse/parse.js @@ -0,0 +1,138 @@ +'use strict' + +// The ABNF grammar in the spec is totally ambiguous. +// +// This parser follows the operator precedence defined in the +// `Order of Precedence and Parentheses` section. + +module.exports = function (tokens) { + var index = 0 + + function hasMore () { + return index < tokens.length + } + + function token () { + return hasMore() ? tokens[index] : null + } + + function next () { + if (!hasMore()) { + throw new Error() + } + index++ + } + + function parseOperator (operator) { + var t = token() + if (t && t.type === 'OPERATOR' && operator === t.string) { + next() + return t.string + } + } + + function parseWith () { + if (parseOperator('WITH')) { + var t = token() + if (t && t.type === 'EXCEPTION') { + next() + return t.string + } + throw new Error('Expected exception after `WITH`') + } + } + + function parseLicenseRef () { + // TODO: Actually, everything is concatenated into one string + // for backward-compatibility but it could be better to return + // a nice structure. 
+ var begin = index + var string = '' + var t = token() + if (t.type === 'DOCUMENTREF') { + next() + string += 'DocumentRef-' + t.string + ':' + if (!parseOperator(':')) { + throw new Error('Expected `:` after `DocumentRef-...`') + } + } + t = token() + if (t.type === 'LICENSEREF') { + next() + string += 'LicenseRef-' + t.string + return { license: string } + } + index = begin + } + + function parseLicense () { + var t = token() + if (t && t.type === 'LICENSE') { + next() + var node = { license: t.string } + if (parseOperator('+')) { + node.plus = true + } + var exception = parseWith() + if (exception) { + node.exception = exception + } + return node + } + } + + function parseParenthesizedExpression () { + var left = parseOperator('(') + if (!left) { + return + } + + var expr = parseExpression() + + if (!parseOperator(')')) { + throw new Error('Expected `)`') + } + + return expr + } + + function parseAtom () { + return ( + parseParenthesizedExpression() || + parseLicenseRef() || + parseLicense() + ) + } + + function makeBinaryOpParser (operator, nextParser) { + return function parseBinaryOp () { + var left = nextParser() + if (!left) { + return + } + + if (!parseOperator(operator)) { + return left + } + + var right = parseBinaryOp() + if (!right) { + throw new Error('Expected expression') + } + return { + left: left, + conjunction: operator.toLowerCase(), + right: right + } + } + } + + var parseAnd = makeBinaryOpParser('AND', parseAtom) + var parseExpression = makeBinaryOpParser('OR', parseAnd) + + var node = parseExpression() + if (!node || hasMore()) { + throw new Error('Syntax error') + } + return node +} diff --git a/node_modules/spdx-expression-parse/scan.js b/node_modules/spdx-expression-parse/scan.js new file mode 100644 index 0000000..b74fce2 --- /dev/null +++ b/node_modules/spdx-expression-parse/scan.js @@ -0,0 +1,131 @@ +'use strict' + +var licenses = [] + .concat(require('spdx-license-ids')) + .concat(require('spdx-license-ids/deprecated')) +var exceptions = require('spdx-exceptions') + +module.exports = function (source) { + var index = 0 + + function hasMore () { + return index < source.length + } + + // `value` can be a regexp or a string. + // If it is recognized, the matching source string is returned and + // the index is incremented. Otherwise `undefined` is returned. 
+ function read (value) { + if (value instanceof RegExp) { + var chars = source.slice(index) + var match = chars.match(value) + if (match) { + index += match[0].length + return match[0] + } + } else { + if (source.indexOf(value, index) === index) { + index += value.length + return value + } + } + } + + function skipWhitespace () { + read(/[ ]*/) + } + + function operator () { + var string + var possibilities = ['WITH', 'AND', 'OR', '(', ')', ':', '+'] + for (var i = 0; i < possibilities.length; i++) { + string = read(possibilities[i]) + if (string) { + break + } + } + + if (string === '+' && index > 1 && source[index - 2] === ' ') { + throw new Error('Space before `+`') + } + + return string && { + type: 'OPERATOR', + string: string + } + } + + function idstring () { + return read(/[A-Za-z0-9-.]+/) + } + + function expectIdstring () { + var string = idstring() + if (!string) { + throw new Error('Expected idstring at offset ' + index) + } + return string + } + + function documentRef () { + if (read('DocumentRef-')) { + var string = expectIdstring() + return { type: 'DOCUMENTREF', string: string } + } + } + + function licenseRef () { + if (read('LicenseRef-')) { + var string = expectIdstring() + return { type: 'LICENSEREF', string: string } + } + } + + function identifier () { + var begin = index + var string = idstring() + + if (licenses.indexOf(string) !== -1) { + return { + type: 'LICENSE', + string: string + } + } else if (exceptions.indexOf(string) !== -1) { + return { + type: 'EXCEPTION', + string: string + } + } + + index = begin + } + + // Tries to read the next token. Returns `undefined` if no token is + // recognized. + function parseToken () { + // Ordering matters + return ( + operator() || + documentRef() || + licenseRef() || + identifier() + ) + } + + var tokens = [] + while (hasMore()) { + skipWhitespace() + if (!hasMore()) { + break + } + + var token = parseToken() + if (!token) { + throw new Error('Unexpected `' + source[index] + + '` at offset ' + index) + } + + tokens.push(token) + } + return tokens +} diff --git a/node_modules/spdx-license-ids/README.md b/node_modules/spdx-license-ids/README.md new file mode 100644 index 0000000..500e32d --- /dev/null +++ b/node_modules/spdx-license-ids/README.md @@ -0,0 +1,52 @@ +# spdx-license-ids + +[![npm version](https://img.shields.io/npm/v/spdx-license-ids.svg)](https://www.npmjs.com/package/spdx-license-ids) +[![Github Actions](https://action-badges.now.sh/jslicense/spdx-license-ids)](https://wdp9fww0r9.execute-api.us-west-2.amazonaws.com/production/results/jslicense/spdx-license-ids) + +A list of [SPDX license](https://spdx.org/licenses/) identifiers + +## Installation + +[Download JSON directly](https://raw.githubusercontent.com/jslicense/spdx-license-ids/main/index.json), or [use](https://docs.npmjs.com/cli/install) [npm](https://docs.npmjs.com/about-npm/): + +``` +npm install spdx-license-ids +``` + +## [Node.js](https://nodejs.org/) API + +### require('spdx-license-ids') + +Type: `string[]` + +All license IDs except for the currently deprecated ones. + +```javascript +const ids = require('spdx-license-ids'); +//=> ['0BSD', 'AAL', 'ADSL', 'AFL-1.1', 'AFL-1.2', 'AFL-2.0', 'AFL-2.1', 'AFL-3.0', 'AGPL-1.0-only', ...] + +ids.includes('BSD-3-Clause'); //=> true +ids.includes('CC-BY-1.0'); //=> true + +ids.includes('GPL-3.0'); //=> false +``` + +### require('spdx-license-ids/deprecated') + +Type: `string[]` + +Deprecated license IDs. 
+ +```javascript +const deprecatedIds = require('spdx-license-ids/deprecated'); +//=> ['AGPL-1.0', 'AGPL-3.0', 'GFDL-1.1', 'GFDL-1.2', 'GFDL-1.3', 'GPL-1.0', 'GPL-2.0', ...] + +deprecatedIds.includes('BSD-3-Clause'); //=> false +deprecatedIds.includes('CC-BY-1.0'); //=> false + +deprecatedIds.includes('GPL-3.0'); //=> true +``` + +## License + +[Creative Commons Zero v1.0 Universal](https://creativecommons.org/publicdomain/zero/1.0/deed) diff --git a/node_modules/spdx-license-ids/deprecated.json b/node_modules/spdx-license-ids/deprecated.json new file mode 100644 index 0000000..278531e --- /dev/null +++ b/node_modules/spdx-license-ids/deprecated.json @@ -0,0 +1,27 @@ +[ + "AGPL-1.0", + "AGPL-3.0", + "BSD-2-Clause-FreeBSD", + "BSD-2-Clause-NetBSD", + "GFDL-1.1", + "GFDL-1.2", + "GFDL-1.3", + "GPL-1.0", + "GPL-2.0", + "GPL-2.0-with-GCC-exception", + "GPL-2.0-with-autoconf-exception", + "GPL-2.0-with-bison-exception", + "GPL-2.0-with-classpath-exception", + "GPL-2.0-with-font-exception", + "GPL-3.0", + "GPL-3.0-with-GCC-exception", + "GPL-3.0-with-autoconf-exception", + "LGPL-2.0", + "LGPL-2.1", + "LGPL-3.0", + "Nunit", + "StandardML-NJ", + "bzip2-1.0.5", + "eCos-2.0", + "wxWindows" +] diff --git a/node_modules/spdx-license-ids/index.json b/node_modules/spdx-license-ids/index.json new file mode 100644 index 0000000..c7686a7 --- /dev/null +++ b/node_modules/spdx-license-ids/index.json @@ -0,0 +1,630 @@ +[ + "0BSD", + "3D-Slicer-1.0", + "AAL", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "AGPL-1.0-only", + "AGPL-1.0-or-later", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "AMD-newlib", + "AMDPLPA", + "AML", + "AML-glslang", + "AMPAS", + "ANTLR-PD", + "ANTLR-PD-fallback", + "APAFML", + "APL-1.0", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + "APSL-2.0", + "ASWF-Digital-Assets-1.0", + "ASWF-Digital-Assets-1.1", + "Abstyles", + "AdaCore-doc", + "Adobe-2006", + "Adobe-Display-PostScript", + "Adobe-Glyph", + "Adobe-Utopia", + "Afmparse", + "Aladdin", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "App-s2p", + "Arphic-1999", + "Artistic-1.0", + "Artistic-1.0-Perl", + "Artistic-1.0-cl8", + "Artistic-2.0", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-2-Clause-Darwin", + "BSD-2-Clause-Patent", + "BSD-2-Clause-Views", + "BSD-2-Clause-first-lines", + "BSD-3-Clause", + "BSD-3-Clause-Attribution", + "BSD-3-Clause-Clear", + "BSD-3-Clause-HP", + "BSD-3-Clause-LBNL", + "BSD-3-Clause-Modification", + "BSD-3-Clause-No-Military-License", + "BSD-3-Clause-No-Nuclear-License", + "BSD-3-Clause-No-Nuclear-License-2014", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause-Open-MPI", + "BSD-3-Clause-Sun", + "BSD-3-Clause-acpica", + "BSD-3-Clause-flex", + "BSD-4-Clause", + "BSD-4-Clause-Shortened", + "BSD-4-Clause-UC", + "BSD-4.3RENO", + "BSD-4.3TAHOE", + "BSD-Advertising-Acknowledgement", + "BSD-Attribution-HPND-disclaimer", + "BSD-Inferno-Nettverk", + "BSD-Protection", + "BSD-Source-Code", + "BSD-Source-beginning-file", + "BSD-Systemics", + "BSD-Systemics-W3Works", + "BSL-1.0", + "BUSL-1.1", + "Baekmuk", + "Bahyph", + "Barr", + "Beerware", + "BitTorrent-1.0", + "BitTorrent-1.1", + "Bitstream-Charter", + "Bitstream-Vera", + "BlueOak-1.0.0", + "Boehm-GC", + "Borceux", + "Brian-Gladman-2-Clause", + "Brian-Gladman-3-Clause", + "C-UDA-1.0", + "CAL-1.0", + "CAL-1.0-Combined-Work-Exception", + "CATOSL-1.1", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-2.5-AU", + "CC-BY-3.0", + "CC-BY-3.0-AT", + "CC-BY-3.0-AU", + "CC-BY-3.0-DE", + "CC-BY-3.0-IGO", + "CC-BY-3.0-NL", + "CC-BY-3.0-US", + 
"CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-3.0-DE", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-3.0-DE", + "CC-BY-NC-ND-3.0-IGO", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.0-DE", + "CC-BY-NC-SA-2.0-FR", + "CC-BY-NC-SA-2.0-UK", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-3.0-DE", + "CC-BY-NC-SA-3.0-IGO", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-3.0-DE", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-3.0-AT", + "CC-BY-SA-3.0-DE", + "CC-BY-SA-3.0-IGO", + "CC-BY-SA-4.0", + "CC-PDDC", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDL-1.0", + "CDLA-Permissive-1.0", + "CDLA-Permissive-2.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "CERN-OHL-1.1", + "CERN-OHL-1.2", + "CERN-OHL-P-2.0", + "CERN-OHL-S-2.0", + "CERN-OHL-W-2.0", + "CFITSIO", + "CMU-Mach", + "CMU-Mach-nodoc", + "CNRI-Jython", + "CNRI-Python", + "CNRI-Python-GPL-Compatible", + "COIL-1.0", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "CUA-OPL-1.0", + "Caldera", + "Caldera-no-preamble", + "Catharon", + "ClArtistic", + "Clips", + "Community-Spec-1.0", + "Condor-1.1", + "Cornell-Lossless-JPEG", + "Cronyx", + "Crossword", + "CrystalStacker", + "Cube", + "D-FSL-1.0", + "DEC-3-Clause", + "DL-DE-BY-2.0", + "DL-DE-ZERO-2.0", + "DOC", + "DRL-1.0", + "DRL-1.1", + "DSDP", + "Dotseqn", + "ECL-1.0", + "ECL-2.0", + "EFL-1.0", + "EFL-2.0", + "EPICS", + "EPL-1.0", + "EPL-2.0", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Elastic-2.0", + "Entessa", + "ErlPL-1.1", + "Eurosym", + "FBM", + "FDK-AAC", + "FSFAP", + "FSFAP-no-warranty-disclaimer", + "FSFUL", + "FSFULLR", + "FSFULLRWD", + "FTL", + "Fair", + "Ferguson-Twofish", + "Frameworx-1.0", + "FreeBSD-DOC", + "FreeImage", + "Furuseth", + "GCR-docs", + "GD", + "GFDL-1.1-invariants-only", + "GFDL-1.1-invariants-or-later", + "GFDL-1.1-no-invariants-only", + "GFDL-1.1-no-invariants-or-later", + "GFDL-1.1-only", + "GFDL-1.1-or-later", + "GFDL-1.2-invariants-only", + "GFDL-1.2-invariants-or-later", + "GFDL-1.2-no-invariants-only", + "GFDL-1.2-no-invariants-or-later", + "GFDL-1.2-only", + "GFDL-1.2-or-later", + "GFDL-1.3-invariants-only", + "GFDL-1.3-invariants-or-later", + "GFDL-1.3-no-invariants-only", + "GFDL-1.3-no-invariants-or-later", + "GFDL-1.3-only", + "GFDL-1.3-or-later", + "GL2PS", + "GLWTPL", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "Giftware", + "Glide", + "Glulxe", + "Graphics-Gems", + "Gutmann", + "HP-1986", + "HP-1989", + "HPND", + "HPND-DEC", + "HPND-Fenneberg-Livingston", + "HPND-INRIA-IMAG", + "HPND-Intel", + "HPND-Kevlin-Henney", + "HPND-MIT-disclaimer", + "HPND-Markus-Kuhn", + "HPND-Pbmplus", + "HPND-UC", + "HPND-UC-export-US", + "HPND-doc", + "HPND-doc-sell", + "HPND-export-US", + "HPND-export-US-acknowledgement", + "HPND-export-US-modify", + "HPND-export2-US", + "HPND-merchantability-variant", + "HPND-sell-MIT-disclaimer-xserver", + "HPND-sell-regexpr", + "HPND-sell-variant", + "HPND-sell-variant-MIT-disclaimer", + "HPND-sell-variant-MIT-disclaimer-rev", + "HTMLTIDY", + "HaskellReport", + "Hippocratic-2.1", + "IBM-pibs", + "ICU", + "IEC-Code-Components-EULA", + "IJG", + "IJG-short", + "IPA", + 
"IPL-1.0", + "ISC", + "ISC-Veillard", + "ImageMagick", + "Imlib2", + "Info-ZIP", + "Inner-Net-2.0", + "Intel", + "Intel-ACPI", + "Interbase-1.0", + "JPL-image", + "JPNIC", + "JSON", + "Jam", + "JasPer-2.0", + "Kastrup", + "Kazlib", + "Knuth-CTAN", + "LAL-1.2", + "LAL-1.3", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "LGPLLR", + "LOOP", + "LPD-document", + "LPL-1.0", + "LPL-1.02", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "LZMA-SDK-9.11-to-9.20", + "LZMA-SDK-9.22", + "Latex2e", + "Latex2e-translated-notice", + "Leptonica", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + "Libpng", + "Linux-OpenIB", + "Linux-man-pages-1-para", + "Linux-man-pages-copyleft", + "Linux-man-pages-copyleft-2-para", + "Linux-man-pages-copyleft-var", + "Lucida-Bitmap-Fonts", + "MIT", + "MIT-0", + "MIT-CMU", + "MIT-Festival", + "MIT-Khronos-old", + "MIT-Modern-Variant", + "MIT-Wu", + "MIT-advertising", + "MIT-enna", + "MIT-feh", + "MIT-open-group", + "MIT-testregex", + "MITNFA", + "MMIXware", + "MPEG-SSG", + "MPL-1.0", + "MPL-1.1", + "MPL-2.0", + "MPL-2.0-no-copyleft-exception", + "MS-LPL", + "MS-PL", + "MS-RL", + "MTLL", + "Mackerras-3-Clause", + "Mackerras-3-Clause-acknowledgment", + "MakeIndex", + "Martin-Birgmeier", + "McPhee-slideshow", + "Minpack", + "MirOS", + "Motosoto", + "MulanPSL-1.0", + "MulanPSL-2.0", + "Multics", + "Mup", + "NAIST-2003", + "NASA-1.3", + "NBPL-1.0", + "NCBI-PD", + "NCGL-UK-2.0", + "NCL", + "NCSA", + "NGPL", + "NICTA-1.0", + "NIST-PD", + "NIST-PD-fallback", + "NIST-Software", + "NLOD-1.0", + "NLOD-2.0", + "NLPL", + "NOSL", + "NPL-1.0", + "NPL-1.1", + "NPOSL-3.0", + "NRL", + "NTP", + "NTP-0", + "Naumen", + "Net-SNMP", + "NetCDF", + "Newsletr", + "Nokia", + "Noweb", + "O-UDA-1.0", + "OAR", + "OCCT-PL", + "OCLC-2.0", + "ODC-By-1.0", + "ODbL-1.0", + "OFFIS", + "OFL-1.0", + "OFL-1.0-RFN", + "OFL-1.0-no-RFN", + "OFL-1.1", + "OFL-1.1-RFN", + "OFL-1.1-no-RFN", + "OGC-1.0", + "OGDL-Taiwan-1.0", + "OGL-Canada-2.0", + "OGL-UK-1.0", + "OGL-UK-2.0", + "OGL-UK-3.0", + "OGTSL", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0", + "OLDAP-2.0.1", + "OLDAP-2.1", + "OLDAP-2.2", + "OLDAP-2.2.1", + "OLDAP-2.2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OLFL-1.3", + "OML", + "OPL-1.0", + "OPL-UK-3.0", + "OPUBL-1.0", + "OSET-PL-2.1", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "OpenPBS-2.3", + "OpenSSL", + "OpenSSL-standalone", + "OpenVision", + "PADL", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "PPL", + "PSF-2.0", + "Parity-6.0.0", + "Parity-7.0.0", + "Pixar", + "Plexus", + "PolyForm-Noncommercial-1.0.0", + "PolyForm-Small-Business-1.0.0", + "PostgreSQL", + "Python-2.0", + "Python-2.0.1", + "QPL-1.0", + "QPL-1.0-INRIA-2004", + "Qhull", + "RHeCos-1.1", + "RPL-1.1", + "RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "RSCPL", + "Rdisc", + "Ruby", + "SAX-PD", + "SAX-PD-2.0", + "SCEA", + "SGI-B-1.0", + "SGI-B-1.1", + "SGI-B-2.0", + "SGI-OpenGL", + "SGP4", + "SHL-0.5", + "SHL-0.51", + "SISSL", + "SISSL-1.2", + "SL", + "SMLNJ", + "SMPPL", + "SNIA", + "SPL-1.0", + "SSH-OpenSSH", + "SSH-short", + "SSLeay-standalone", + "SSPL-1.0", + "SWL", + "Saxpath", + "SchemeReport", + "Sendmail", + "Sendmail-8.23", + "SimPL-2.0", + "Sleepycat", + "Soundex", + "Spencer-86", + "Spencer-94", + "Spencer-99", + "SugarCRM-1.1.3", + "Sun-PPP", + "Sun-PPP-2000", + "SunPro", + "Symlinks", + "TAPR-OHL-1.0", + "TCL", + 
"TCP-wrappers", + "TGPPL-1.0", + "TMate", + "TORQUE-1.1", + "TOSL", + "TPDL", + "TPL-1.0", + "TTWL", + "TTYP0", + "TU-Berlin-1.0", + "TU-Berlin-2.0", + "TermReadKey", + "UCAR", + "UCL-1.0", + "UMich-Merit", + "UPL-1.0", + "URT-RLE", + "Unicode-3.0", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "UnixCrypt", + "Unlicense", + "VOSTROM", + "VSL-1.0", + "Vim", + "W3C", + "W3C-19980720", + "W3C-20150513", + "WTFPL", + "Watcom-1.0", + "Widget-Workshop", + "Wsuipa", + "X11", + "X11-distribute-modifications-variant", + "XFree86-1.1", + "XSkat", + "Xdebug-1.03", + "Xerox", + "Xfig", + "Xnet", + "YPL-1.0", + "YPL-1.1", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1", + "Zed", + "Zeeff", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "Zlib", + "any-OSI", + "bcrypt-Solar-Designer", + "blessing", + "bzip2-1.0.6", + "check-cvs", + "checkmk", + "copyleft-next-0.3.0", + "copyleft-next-0.3.1", + "curl", + "cve-tou", + "diffmark", + "dtoa", + "dvipdfm", + "eGenix", + "etalab-2.0", + "fwlw", + "gSOAP-1.3b", + "gnuplot", + "gtkbook", + "hdparm", + "iMatix", + "libpng-2.0", + "libselinux-1.0", + "libtiff", + "libutil-David-Nugent", + "lsof", + "magaz", + "mailprio", + "metamail", + "mpi-permissive", + "mpich2", + "mplus", + "pkgconf", + "pnmstitch", + "psfrag", + "psutils", + "python-ldap", + "radvd", + "snprintf", + "softSurfer", + "ssh-keyscan", + "swrule", + "threeparttable", + "ulem", + "w3m", + "xinetd", + "xkeyboard-config-Zinoviev", + "xlock", + "xpp", + "xzoom", + "zlib-acknowledgement" +] diff --git a/node_modules/spdx-license-ids/package.json b/node_modules/spdx-license-ids/package.json new file mode 100644 index 0000000..5f5ed95 --- /dev/null +++ b/node_modules/spdx-license-ids/package.json @@ -0,0 +1,39 @@ +{ + "name": "spdx-license-ids", + "version": "3.0.18", + "description": "A list of SPDX license identifiers", + "repository": "jslicense/spdx-license-ids", + "author": "Shinnosuke Watanabe (https://github.com/shinnn)", + "license": "CC0-1.0", + "scripts": { + "build": "node build.js", + "pretest": "eslint .", + "latest": "node latest.js", + "test": "node test.js" + }, + "files": [ + "deprecated.json", + "index.json" + ], + "keywords": [ + "spdx", + "license", + "licenses", + "id", + "identifier", + "identifiers", + "json", + "array", + "oss" + ], + "devDependencies": { + "@shinnn/eslint-config": "^7.0.0", + "eslint": "^8.49.0", + "eslint-formatter-codeframe": "^7.32.1", + "rmfr": "^2.0.0", + "tape": "^5.6.6" + }, + "eslintConfig": { + "extends": "@shinnn" + } +} diff --git a/node_modules/string-width/index.d.ts b/node_modules/string-width/index.d.ts new file mode 100644 index 0000000..12b5309 --- /dev/null +++ b/node_modules/string-width/index.d.ts @@ -0,0 +1,29 @@ +declare const stringWidth: { + /** + Get the visual width of a string - the number of columns required to display it. + + Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. 
+ + @example + ``` + import stringWidth = require('string-width'); + + stringWidth('a'); + //=> 1 + + stringWidth('古'); + //=> 2 + + stringWidth('\u001B[1m古\u001B[22m'); + //=> 2 + ``` + */ + (string: string): number; + + // TODO: remove this in the next major version, refactor the whole definition to: + // declare function stringWidth(string: string): number; + // export = stringWidth; + default: typeof stringWidth; +} + +export = stringWidth; diff --git a/node_modules/string-width/index.js b/node_modules/string-width/index.js new file mode 100644 index 0000000..f4d261a --- /dev/null +++ b/node_modules/string-width/index.js @@ -0,0 +1,47 @@ +'use strict'; +const stripAnsi = require('strip-ansi'); +const isFullwidthCodePoint = require('is-fullwidth-code-point'); +const emojiRegex = require('emoji-regex'); + +const stringWidth = string => { + if (typeof string !== 'string' || string.length === 0) { + return 0; + } + + string = stripAnsi(string); + + if (string.length === 0) { + return 0; + } + + string = string.replace(emojiRegex(), ' '); + + let width = 0; + + for (let i = 0; i < string.length; i++) { + const code = string.codePointAt(i); + + // Ignore control characters + if (code <= 0x1F || (code >= 0x7F && code <= 0x9F)) { + continue; + } + + // Ignore combining characters + if (code >= 0x300 && code <= 0x36F) { + continue; + } + + // Surrogates + if (code > 0xFFFF) { + i++; + } + + width += isFullwidthCodePoint(code) ? 2 : 1; + } + + return width; +}; + +module.exports = stringWidth; +// TODO: remove this in the next major version +module.exports.default = stringWidth; diff --git a/node_modules/string-width/license b/node_modules/string-width/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/string-width/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
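As the string-width implementation above spells out, ANSI escape sequences are stripped before counting, control characters and combining marks in the U+0300–U+036F range contribute no width, and fullwidth code points count as two columns. A small sketch of those cases with the vendored `string-width@4.2.3`:

```javascript
// Sketch of the width rules implemented in string-width 4.2.3.
const stringWidth = require('string-width');

// Fullwidth CJK counts as two columns; ANSI escapes are stripped first.
stringWidth('古');                    //=> 2
stringWidth('\u001B[1m古\u001B[22m'); //=> 2

// Combining marks (U+0300–U+036F) add no width: 'e' + U+0301 is one column.
stringWidth('e\u0301');               //=> 1

// Control characters are ignored entirely.
stringWidth('\u0007');                //=> 0
```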
diff --git a/node_modules/string-width/package.json b/node_modules/string-width/package.json new file mode 100644 index 0000000..28ba7b4 --- /dev/null +++ b/node_modules/string-width/package.json @@ -0,0 +1,56 @@ +{ + "name": "string-width", + "version": "4.2.3", + "description": "Get the visual width of a string - the number of columns required to display it", + "license": "MIT", + "repository": "sindresorhus/string-width", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "string", + "character", + "unicode", + "width", + "visual", + "column", + "columns", + "fullwidth", + "full-width", + "full", + "ansi", + "escape", + "codes", + "cli", + "command-line", + "terminal", + "console", + "cjk", + "chinese", + "japanese", + "korean", + "fixed-width" + ], + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/string-width/readme.md b/node_modules/string-width/readme.md new file mode 100644 index 0000000..bdd3141 --- /dev/null +++ b/node_modules/string-width/readme.md @@ -0,0 +1,50 @@ +# string-width + +> Get the visual width of a string - the number of columns required to display it + +Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. + +Useful to be able to measure the actual width of command-line output. + + +## Install + +``` +$ npm install string-width +``` + + +## Usage + +```js +const stringWidth = require('string-width'); + +stringWidth('a'); +//=> 1 + +stringWidth('古'); +//=> 2 + +stringWidth('\u001B[1m古\u001B[22m'); +//=> 2 +``` + + +## Related + +- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module +- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string +- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string + + +--- + +
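Since the readme notes that measuring the visual width of command-line output is the main use case, here is a small usage sketch (the table rows are made up for illustration) that pads a label column by visual width rather than `.length`, so fullwidth and ANSI-styled labels still line up:

```javascript
// Illustrative only: align a label column by visual width, not string.length.
const stringWidth = require('string-width');

const rows = [
	['name', 'docsy-example'],
	['ビルド', 'ok'],                          // fullwidth label (width 6)
	['\u001B[1mstatus\u001B[22m', 'passing']  // ANSI-styled label (width 6)
];

const labelWidth = Math.max(...rows.map(([label]) => stringWidth(label)));

for (const [label, value] of rows) {
	const padding = ' '.repeat(labelWidth - stringWidth(label));
	console.log(label + padding + '  ' + value);
}
```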
diff --git a/node_modules/string_decoder/.travis.yml b/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000..3347a72 --- /dev/null +++ b/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/string_decoder/lib/string_decoder.js b/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? 
-1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/string_decoder/node_modules/safe-buffer/LICENSE b/node_modules/string_decoder/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
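The `lib/string_decoder.js` implementation added above buffers the trailing bytes of an incomplete multi-byte character and only emits that character once the remaining bytes arrive. A minimal usage sketch of that behaviour, using the package's public `StringDecoder` class (splitting a '€' across two chunks is an illustrative assumption, not something taken from this diff):

```js
const { StringDecoder } = require('string_decoder');

const decoder = new StringDecoder('utf8');

// '€' is 0xE2 0x82 0xAC in UTF-8; feed the first two bytes, then the last one.
const first = decoder.write(Buffer.from([0xe2, 0x82])); // '' (bytes are buffered)
const second = decoder.write(Buffer.from([0xac]));      // '€' (character completed)

console.log(JSON.stringify(first));  // ""
console.log(JSON.stringify(second)); // "€"

// end() flushes a dangling partial character as the replacement character.
console.log(decoder.end(Buffer.from([0xe2])));          // '�'
```

This buffering is what lets stream consumers call `setEncoding('utf8')` without ever seeing a character split across chunk boundaries.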
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/README.md b/node_modules/string_decoder/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. 
+ +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts b/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + 
fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.js b/node_modules/string_decoder/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..22438da --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/string_decoder/node_modules/safe-buffer/package.json b/node_modules/string_decoder/node_modules/safe-buffer/package.json new file mode 100644 index 
0000000..623fbc3 --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 0000000..518c3eb --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,31 @@ +{ + "name": "string_decoder", + "version": "1.1.1", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/strip-ansi/index.d.ts b/node_modules/strip-ansi/index.d.ts new file mode 100644 index 0000000..907fccc --- /dev/null +++ b/node_modules/strip-ansi/index.d.ts @@ -0,0 +1,17 @@ +/** +Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string. + +@example +``` +import stripAnsi = require('strip-ansi'); + +stripAnsi('\u001B[4mUnicorn\u001B[0m'); +//=> 'Unicorn' + +stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007'); +//=> 'Click' +``` +*/ +declare function stripAnsi(string: string): string; + +export = stripAnsi; diff --git a/node_modules/strip-ansi/index.js b/node_modules/strip-ansi/index.js new file mode 100644 index 0000000..9a593df --- /dev/null +++ b/node_modules/strip-ansi/index.js @@ -0,0 +1,4 @@ +'use strict'; +const ansiRegex = require('ansi-regex'); + +module.exports = string => typeof string === 'string' ? 
string.replace(ansiRegex(), '') : string; diff --git a/node_modules/strip-ansi/license b/node_modules/strip-ansi/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/strip-ansi/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/strip-ansi/package.json b/node_modules/strip-ansi/package.json new file mode 100644 index 0000000..1a41108 --- /dev/null +++ b/node_modules/strip-ansi/package.json @@ -0,0 +1,54 @@ +{ + "name": "strip-ansi", + "version": "6.0.1", + "description": "Strip ANSI escape codes from a string", + "license": "MIT", + "repository": "chalk/strip-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "strip", + "trim", + "remove", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "devDependencies": { + "ava": "^2.4.0", + "tsd": "^0.10.0", + "xo": "^0.25.3" + } +} diff --git a/node_modules/strip-ansi/readme.md b/node_modules/strip-ansi/readme.md new file mode 100644 index 0000000..7c4b56d --- /dev/null +++ b/node_modules/strip-ansi/readme.md @@ -0,0 +1,46 @@ +# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi) + +> Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string + + +## Install + +``` +$ npm install strip-ansi +``` + + +## Usage + +```js +const stripAnsi = require('strip-ansi'); + +stripAnsi('\u001B[4mUnicorn\u001B[0m'); +//=> 'Unicorn' + +stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007'); +//=> 'Click' +``` + + +## strip-ansi for enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of strip-ansi and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-strip-ansi?utm_source=npm-strip-ansi&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + + +## Related + +- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module +- [strip-ansi-stream](https://github.com/chalk/strip-ansi-stream) - Streaming version of this module +- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes +- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + diff --git a/node_modules/strip-dirs/LICENSE b/node_modules/strip-dirs/LICENSE new file mode 100644 index 0000000..3b7a190 --- /dev/null +++ b/node_modules/strip-dirs/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014 - 2016 Shinnosuke Watanabe + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/strip-dirs/README.md b/node_modules/strip-dirs/README.md new file mode 100644 index 0000000..7449982 --- /dev/null +++ b/node_modules/strip-dirs/README.md @@ -0,0 +1,75 @@ +# strip-dirs + +[![NPM version](https://img.shields.io/npm/v/strip-dirs.svg)](https://www.npmjs.com/package/strip-dirs) +[![Build Status](https://img.shields.io/travis/shinnn/node-strip-dirs.svg)](https://travis-ci.org/shinnn/node-strip-dirs) +[![Build status](https://ci.appveyor.com/api/projects/status/pr5edbtg59f6xfgn?svg=true)](https://ci.appveyor.com/project/ShinnosukeWatanabe/node-strip-dirs) +[![Coverage Status](https://img.shields.io/coveralls/shinnn/node-strip-dirs.svg)](https://coveralls.io/r/shinnn/node-strip-dirs) +[![Dependency Status](https://david-dm.org/shinnn/node-strip-dirs.svg)](https://david-dm.org/shinnn/node-strip-dirs) +[![devDependency Status](https://david-dm.org/shinnn/node-strip-dirs/dev-status.svg)](https://david-dm.org/shinnn/node-strip-dirs#info=devDependencies) + +Remove leading directory components from a path, like [tar(1)](http://linuxcommand.org/man_pages/tar1.html)'s `--strip-components` option + +```javascript +const stripDirs = require('strip-dirs'); + +stripDirs('foo/bar/baz', 1); //=> 'bar/baz' +stripDirs('foo/bar/baz', 2); //=> 'baz' +stripDirs('foo/bar/baz', 999); //=> 'baz' +``` + +## Installation + +[Use npm](https://docs.npmjs.com/cli/install). 
+ +``` +npm install --save strip-dirs +``` + +## API + +```javascript +const stripDirs = require('strip-dirs'); +``` + +### stripDirs(*path*, *count* [, *option*]) + +*path*: `String` (A relative path) +*count*: `Number` (0, 1, 2, ...) +*option*: `Object` +Return: `String` + +It removes directory components from the beginning of the *path* by *count*. + +```javascript +const stripDirs = require('strip-dirs'); + +stripDirs('foo/bar', 1); //=> 'bar' +stripDirs('foo/bar/baz', 2); //=> 'bar' +stripDirs('foo/././/bar/./', 1); //=> 'bar' +stripDirs('foo/bar', 0); //=> 'foo/bar' + +stripDirs('/foo/bar', 1) // throw an error because the path is an absolute path +``` + +If you want to remove all directory components certainly, use [`path.basename`](https://nodejs.org/api/path.html#path_path_basename_path_ext) instead of this module. + +#### option.disallowOverflow + +Type: `Boolean` +Default: `false` + +By default, it keeps the last path component when path components are fewer than the *count*. + +If this option is enabled, it throws an error in this situation. + +```javascript +stripDirs('foo/bar/baz', 9999); //=> 'baz' + +stripDirs('foo/bar/baz', 9999, {disallowOverflow: true}); // throws an range error +``` + +## License + +Copyright (c) 2014 - 2016 [Shinnosuke Watanabe](https://github.com/shinnn) + +Licensed under [the MIT License](./LICENSE). diff --git a/node_modules/strip-dirs/index.js b/node_modules/strip-dirs/index.js new file mode 100755 index 0000000..974af89 --- /dev/null +++ b/node_modules/strip-dirs/index.js @@ -0,0 +1,72 @@ +/*! + * strip-dirs | MIT (c) Shinnosuke Watanabe + * https://github.com/shinnn/node-strip-dirs +*/ +'use strict'; + +const path = require('path'); +const util = require('util'); + +const isNaturalNumber = require('is-natural-number'); + +module.exports = function stripDirs(pathStr, count, option) { + if (typeof pathStr !== 'string') { + throw new TypeError( + util.inspect(pathStr) + + ' is not a string. First argument to strip-dirs must be a path string.' + ); + } + + if (path.posix.isAbsolute(pathStr) || path.win32.isAbsolute(pathStr)) { + throw new Error(`${pathStr} is an absolute path. strip-dirs requires a relative path.`); + } + + if (!isNaturalNumber(count, {includeZero: true})) { + throw new Error( + 'The Second argument of strip-dirs must be a natural number or 0, but received ' + + util.inspect(count) + + '.' + ); + } + + if (option) { + if (typeof option !== 'object') { + throw new TypeError( + util.inspect(option) + + ' is not an object. Expected an object with a boolean `disallowOverflow` property.' + ); + } + + if (Array.isArray(option)) { + throw new TypeError( + util.inspect(option) + + ' is an array. Expected an object with a boolean `disallowOverflow` property.' + ); + } + + if ('disallowOverflow' in option && typeof option.disallowOverflow !== 'boolean') { + throw new TypeError( + util.inspect(option.disallowOverflow) + + ' is neither true nor false. `disallowOverflow` option must be a Boolean value.' 
+ ); + } + } else { + option = {disallowOverflow: false}; + } + + const pathComponents = path.normalize(pathStr).split(path.sep); + + if (pathComponents.length > 1 && pathComponents[0] === '.') { + pathComponents.shift(); + } + + if (count > pathComponents.length - 1) { + if (option.disallowOverflow) { + throw new RangeError('Cannot strip more directories than there are.'); + } + + count = pathComponents.length - 1; + } + + return path.join.apply(null, pathComponents.slice(count)); +}; diff --git a/node_modules/strip-dirs/package.json b/node_modules/strip-dirs/package.json new file mode 100644 index 0000000..7f1a3db --- /dev/null +++ b/node_modules/strip-dirs/package.json @@ -0,0 +1,39 @@ +{ + "name": "strip-dirs", + "version": "2.1.0", + "description": "Remove leading directory components from a path, like tar's --strip-components option", + "repository": "shinnn/node-strip-dirs", + "author": "Shinnosuke Watanabe (https://github.com/shinnn)", + "files": [ + "index.js" + ], + "scripts": { + "pretest": "eslint --fix --format=codeframe index.js test.js", + "test": "node --throw-deprecation --track-heap-objects test.js | tap-spec", + "coverage": "istanbul cover test.js" + }, + "license": "MIT", + "keywords": [ + "filepath", + "file-path", + "path", + "dir", + "directory", + "strip", + "strip-components" + ], + "dependencies": { + "is-natural-number": "^4.0.1" + }, + "devDependencies": { + "@shinnn/eslint-config-node": "^3.0.0", + "eslint": "^3.10.0", + "istanbul": "^0.4.5", + "istanbul-coveralls": "^1.0.3", + "tap-spec": "^4.1.1", + "tape": "^4.6.2" + }, + "eslintConfig": { + "extends": "@shinnn/node" + } +} diff --git a/node_modules/supports-color/browser.js b/node_modules/supports-color/browser.js new file mode 100644 index 0000000..62afa3a --- /dev/null +++ b/node_modules/supports-color/browser.js @@ -0,0 +1,5 @@ +'use strict'; +module.exports = { + stdout: false, + stderr: false +}; diff --git a/node_modules/supports-color/index.js b/node_modules/supports-color/index.js new file mode 100644 index 0000000..1704131 --- /dev/null +++ b/node_modules/supports-color/index.js @@ -0,0 +1,131 @@ +'use strict'; +const os = require('os'); +const hasFlag = require('has-flag'); + +const env = process.env; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false')) { + forceColor = false; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = true; +} +if ('FORCE_COLOR' in env) { + forceColor = env.FORCE_COLOR.length === 0 || parseInt(env.FORCE_COLOR, 10) !== 0; +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(stream) { + if (forceColor === false) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (stream && !stream.isTTY && forceColor !== true) { + return 0; + } + + const min = forceColor ? 1 : 0; + + if (process.platform === 'win32') { + // Node.js 7.5.0 is the first version of Node.js to include a patch to + // libuv that enables 256 color output on Windows. Anything earlier and it + // won't work. However, here we target Node.js 8 at minimum as it is an LTS + // release, and Node.js 7 is not. Windows 10 build 10586 is the first Windows + // release that supports 256 colors. 
Windows 10 build 14931 is the first release + // that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(process.versions.node.split('.')[0]) >= 8 && + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + if (env.TERM === 'dumb') { + return min; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: getSupportLevel(process.stdout), + stderr: getSupportLevel(process.stderr) +}; diff --git a/node_modules/supports-color/license b/node_modules/supports-color/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/supports-color/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/supports-color/package.json b/node_modules/supports-color/package.json new file mode 100644 index 0000000..ad199f5 --- /dev/null +++ b/node_modules/supports-color/package.json @@ -0,0 +1,53 @@ +{ + "name": "supports-color", + "version": "5.5.0", + "description": "Detect whether a terminal supports color", + "license": "MIT", + "repository": "chalk/supports-color", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "browser.js" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "ansi", + "styles", + "tty", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "support", + "supports", + "capability", + "detect", + "truecolor", + "16m" + ], + "dependencies": { + "has-flag": "^3.0.0" + }, + "devDependencies": { + "ava": "^0.25.0", + "import-fresh": "^2.0.0", + "xo": "^0.20.0" + }, + "browser": "browser.js" +} diff --git a/node_modules/supports-color/readme.md b/node_modules/supports-color/readme.md new file mode 100644 index 0000000..f6e4019 --- /dev/null +++ b/node_modules/supports-color/readme.md @@ -0,0 +1,66 @@ +# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color) + +> Detect whether a terminal supports color + + +## Install + +``` +$ npm install supports-color +``` + + +## Usage + +```js +const supportsColor = require('supports-color'); + +if (supportsColor.stdout) { + console.log('Terminal stdout supports color'); +} + +if (supportsColor.stdout.has256) { + console.log('Terminal stdout supports 256 colors'); +} + +if (supportsColor.stderr.has16m) { + console.log('Terminal stderr supports 16 million colors (truecolor)'); +} +``` + + +## API + +Returns an `Object` with a `stdout` and `stderr` property for testing either streams. Each property is an `Object`, or `false` if color is not supported. + +The `stdout`/`stderr` objects specifies a level of support for color through a `.level` property and a corresponding flag: + +- `.level = 1` and `.hasBasic = true`: Basic color support (16 colors) +- `.level = 2` and `.has256 = true`: 256 color support +- `.level = 3` and `.has16m = true`: Truecolor support (16 million colors) + + +## Info + +It obeys the `--color` and `--no-color` CLI flags. + +Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, add the environment variable `FORCE_COLOR=1` to forcefully enable color or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks. + +Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively. 
+ + +## Related + +- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/tar-stream/LICENSE b/node_modules/tar-stream/LICENSE new file mode 100644 index 0000000..757562e --- /dev/null +++ b/node_modules/tar-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/tar-stream/README.md b/node_modules/tar-stream/README.md new file mode 100644 index 0000000..96abbca --- /dev/null +++ b/node_modules/tar-stream/README.md @@ -0,0 +1,168 @@ +# tar-stream + +tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system. + +Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this. + +``` +npm install tar-stream +``` + +[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream) +[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://opensource.org/licenses/MIT) + +## Usage + +tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both. + + +It implementes USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc) + +## Related + +If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module. + +## Packing + +To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries. + +``` js +var tar = require('tar-stream') +var pack = tar.pack() // pack is a streams2 stream + +// add a file called my-test.txt with the content "Hello World!" 
+pack.entry({ name: 'my-test.txt' }, 'Hello World!') + +// add a file called my-stream-test.txt from a stream +var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) { + // the stream was added + // no more entries + pack.finalize() +}) + +entry.write('hello') +entry.write(' ') +entry.write('world') +entry.end() + +// pipe the pack stream somewhere +pack.pipe(process.stdout) +``` + +## Extracting + +To extract a stream use `tar.extract()` and listen for `extract.on('entry', (header, stream, next) )` + +``` js +var extract = tar.extract() + +extract.on('entry', function(header, stream, next) { + // header is the tar header + // stream is the content body (might be an empty stream) + // call next when you are done with this entry + + stream.on('end', function() { + next() // ready for next entry + }) + + stream.resume() // just auto drain the stream +}) + +extract.on('finish', function() { + // all entries read +}) + +pack.pipe(extract) +``` + +The tar archive is streamed sequentially, meaning you **must** drain each entry's stream as you get them or else the main extract stream will receive backpressure and stop reading. + +## Headers + +The header object using in `entry` should contain the following properties. +Most of these values can be found by stat'ing a file. + +``` js +{ + name: 'path/to/this/entry.txt', + size: 1314, // entry size. defaults to 0 + mode: 0644, // entry mode. defaults to to 0755 for dirs and 0644 otherwise + mtime: new Date(), // last modified date for entry. defaults to now. + type: 'file', // type of entry. defaults to file. can be: + // file | link | symlink | directory | block-device + // character-device | fifo | contiguous-file + linkname: 'path', // linked file name + uid: 0, // uid of entry owner. defaults to 0 + gid: 0, // gid of entry owner. defaults to 0 + uname: 'maf', // uname of entry owner. defaults to null + gname: 'staff', // gname of entry owner. defaults to null + devmajor: 0, // device major version. defaults to 0 + devminor: 0 // device minor version. defaults to 0 +} +``` + +## Modifying existing tarballs + +Using tar-stream it is easy to rewrite paths / change modes etc in an existing tarball. + +``` js +var extract = tar.extract() +var pack = tar.pack() +var path = require('path') + +extract.on('entry', function(header, stream, callback) { + // let's prefix all names with 'tmp' + header.name = path.join('tmp', header.name) + // write the new entry to the pack stream + stream.pipe(pack.entry(header, callback)) +}) + +extract.on('finish', function() { + // all entries done - lets finalize it + pack.finalize() +}) + +// pipe the old tarball to the extractor +oldTarballStream.pipe(extract) + +// pipe the new tarball the another stream +pack.pipe(newTarballStream) +``` + +## Saving tarball to fs + + +``` js +var fs = require('fs') +var tar = require('tar-stream') + +var pack = tar.pack() // pack is a streams2 stream +var path = 'YourTarBall.tar' +var yourTarball = fs.createWriteStream(path) + +// add a file called YourFile.txt with the content "Hello World!" 
+pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) { + if (err) throw err + pack.finalize() +}) + +// pipe the pack stream to your file +pack.pipe(yourTarball) + +yourTarball.on('close', function () { + console.log(path + ' has been written') + fs.stat(path, function(err, stats) { + if (err) throw err + console.log(stats) + console.log('Got file info successfully!') + }) +}) +``` + +## Performance + +[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance) + +# License + +MIT diff --git a/node_modules/tar-stream/extract.js b/node_modules/tar-stream/extract.js new file mode 100644 index 0000000..19a4255 --- /dev/null +++ b/node_modules/tar-stream/extract.js @@ -0,0 +1,258 @@ +var util = require('util') +var bl = require('bl') +var xtend = require('xtend') +var headers = require('./headers') + +var Writable = require('readable-stream').Writable +var PassThrough = require('readable-stream').PassThrough + +var noop = function () {} + +var overflow = function (size) { + size &= 511 + return size && 512 - size +} + +var emptyStream = function (self, offset) { + var s = new Source(self, offset) + s.end() + return s +} + +var mixinPax = function (header, pax) { + if (pax.path) header.name = pax.path + if (pax.linkpath) header.linkname = pax.linkpath + if (pax.size) header.size = parseInt(pax.size, 10) + header.pax = pax + return header +} + +var Source = function (self, offset) { + this._parent = self + this.offset = offset + PassThrough.call(this) +} + +util.inherits(Source, PassThrough) + +Source.prototype.destroy = function (err) { + this._parent.destroy(err) +} + +var Extract = function (opts) { + if (!(this instanceof Extract)) return new Extract(opts) + Writable.call(this, opts) + + opts = opts || {} + + this._offset = 0 + this._buffer = bl() + this._missing = 0 + this._partial = false + this._onparse = noop + this._header = null + this._stream = null + this._overflow = null + this._cb = null + this._locked = false + this._destroyed = false + this._pax = null + this._paxGlobal = null + this._gnuLongPath = null + this._gnuLongLinkPath = null + + var self = this + var b = self._buffer + + var oncontinue = function () { + self._continue() + } + + var onunlock = function (err) { + self._locked = false + if (err) return self.destroy(err) + if (!self._stream) oncontinue() + } + + var onstreamend = function () { + self._stream = null + var drain = overflow(self._header.size) + if (drain) self._parse(drain, ondrain) + else self._parse(512, onheader) + if (!self._locked) oncontinue() + } + + var ondrain = function () { + self._buffer.consume(overflow(self._header.size)) + self._parse(512, onheader) + oncontinue() + } + + var onpaxglobalheader = function () { + var size = self._header.size + self._paxGlobal = headers.decodePax(b.slice(0, size)) + b.consume(size) + onstreamend() + } + + var onpaxheader = function () { + var size = self._header.size + self._pax = headers.decodePax(b.slice(0, size)) + if (self._paxGlobal) self._pax = xtend(self._paxGlobal, self._pax) + b.consume(size) + onstreamend() + } + + var ongnulongpath = function () { + var size = self._header.size + this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding) + b.consume(size) + onstreamend() + } + + var ongnulonglinkpath = function () { + var size = self._header.size + this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding) + b.consume(size) + onstreamend() + } + + var onheader = function 
() { + var offset = self._offset + var header + try { + header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding) + } catch (err) { + self.emit('error', err) + } + b.consume(512) + + if (!header) { + self._parse(512, onheader) + oncontinue() + return + } + if (header.type === 'gnu-long-path') { + self._parse(header.size, ongnulongpath) + oncontinue() + return + } + if (header.type === 'gnu-long-link-path') { + self._parse(header.size, ongnulonglinkpath) + oncontinue() + return + } + if (header.type === 'pax-global-header') { + self._parse(header.size, onpaxglobalheader) + oncontinue() + return + } + if (header.type === 'pax-header') { + self._parse(header.size, onpaxheader) + oncontinue() + return + } + + if (self._gnuLongPath) { + header.name = self._gnuLongPath + self._gnuLongPath = null + } + + if (self._gnuLongLinkPath) { + header.linkname = self._gnuLongLinkPath + self._gnuLongLinkPath = null + } + + if (self._pax) { + self._header = header = mixinPax(header, self._pax) + self._pax = null + } + + self._locked = true + + if (!header.size || header.type === 'directory') { + self._parse(512, onheader) + self.emit('entry', header, emptyStream(self, offset), onunlock) + return + } + + self._stream = new Source(self, offset) + + self.emit('entry', header, self._stream, onunlock) + self._parse(header.size, onstreamend) + oncontinue() + } + + this._onheader = onheader + this._parse(512, onheader) +} + +util.inherits(Extract, Writable) + +Extract.prototype.destroy = function (err) { + if (this._destroyed) return + this._destroyed = true + + if (err) this.emit('error', err) + this.emit('close') + if (this._stream) this._stream.emit('close') +} + +Extract.prototype._parse = function (size, onparse) { + if (this._destroyed) return + this._offset += size + this._missing = size + if (onparse === this._onheader) this._partial = false + this._onparse = onparse +} + +Extract.prototype._continue = function () { + if (this._destroyed) return + var cb = this._cb + this._cb = noop + if (this._overflow) this._write(this._overflow, undefined, cb) + else cb() +} + +Extract.prototype._write = function (data, enc, cb) { + if (this._destroyed) return + + var s = this._stream + var b = this._buffer + var missing = this._missing + if (data.length) this._partial = true + + // we do not reach end-of-chunk now. just forward it + + if (data.length < missing) { + this._missing -= data.length + this._overflow = null + if (s) return s.write(data, cb) + b.append(data) + return cb() + } + + // end-of-chunk. the parser should call cb. 
+ + this._cb = cb + this._missing = 0 + + var overflow = null + if (data.length > missing) { + overflow = data.slice(missing) + data = data.slice(0, missing) + } + + if (s) s.end(data) + else b.append(data) + + this._overflow = overflow + this._onparse() +} + +Extract.prototype._final = function (cb) { + if (this._partial) return this.destroy(new Error('Unexpected end of data')) + cb() +} + +module.exports = Extract diff --git a/node_modules/tar-stream/headers.js b/node_modules/tar-stream/headers.js new file mode 100644 index 0000000..6efbc4a --- /dev/null +++ b/node_modules/tar-stream/headers.js @@ -0,0 +1,283 @@ +var toBuffer = require('to-buffer') +var alloc = require('buffer-alloc') + +var ZEROS = '0000000000000000000' +var SEVENS = '7777777777777777777' +var ZERO_OFFSET = '0'.charCodeAt(0) +var USTAR = 'ustar\x0000' +var MASK = parseInt('7777', 8) + +var clamp = function (index, len, defaultValue) { + if (typeof index !== 'number') return defaultValue + index = ~~index // Coerce to integer. + if (index >= len) return len + if (index >= 0) return index + index += len + if (index >= 0) return index + return 0 +} + +var toType = function (flag) { + switch (flag) { + case 0: + return 'file' + case 1: + return 'link' + case 2: + return 'symlink' + case 3: + return 'character-device' + case 4: + return 'block-device' + case 5: + return 'directory' + case 6: + return 'fifo' + case 7: + return 'contiguous-file' + case 72: + return 'pax-header' + case 55: + return 'pax-global-header' + case 27: + return 'gnu-long-link-path' + case 28: + case 30: + return 'gnu-long-path' + } + + return null +} + +var toTypeflag = function (flag) { + switch (flag) { + case 'file': + return 0 + case 'link': + return 1 + case 'symlink': + return 2 + case 'character-device': + return 3 + case 'block-device': + return 4 + case 'directory': + return 5 + case 'fifo': + return 6 + case 'contiguous-file': + return 7 + case 'pax-header': + return 72 + } + + return 0 +} + +var indexOf = function (block, num, offset, end) { + for (; offset < end; offset++) { + if (block[offset] === num) return offset + } + return end +} + +var cksum = function (block) { + var sum = 8 * 32 + for (var i = 0; i < 148; i++) sum += block[i] + for (var j = 156; j < 512; j++) sum += block[j] + return sum +} + +var encodeOct = function (val, n) { + val = val.toString(8) + if (val.length > n) return SEVENS.slice(0, n) + ' ' + else return ZEROS.slice(0, n - val.length) + val + ' ' +} + +/* Copied from the node-tar repo and modified to meet + * tar-stream coding standard. + * + * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349 + */ +function parse256 (buf) { + // first byte MUST be either 80 or FF + // 80 for positive, FF for 2's comp + var positive + if (buf[0] === 0x80) positive = true + else if (buf[0] === 0xFF) positive = false + else return null + + // build up a base-256 tuple from the least sig to the highest + var zero = false + var tuple = [] + for (var i = buf.length - 1; i > 0; i--) { + var byte = buf[i] + if (positive) tuple.push(byte) + else if (zero && byte === 0) tuple.push(0) + else if (zero) { + zero = false + tuple.push(0x100 - byte) + } else tuple.push(0xFF - byte) + } + + var sum = 0 + var l = tuple.length + for (i = 0; i < l; i++) { + sum += tuple[i] * Math.pow(256, i) + } + + return positive ? 
sum : -1 * sum +} + +var decodeOct = function (val, offset, length) { + val = val.slice(offset, offset + length) + offset = 0 + + // If prefixed with 0x80 then parse as a base-256 integer + if (val[offset] & 0x80) { + return parse256(val) + } else { + // Older versions of tar can prefix with spaces + while (offset < val.length && val[offset] === 32) offset++ + var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length) + while (offset < end && val[offset] === 0) offset++ + if (end === offset) return 0 + return parseInt(val.slice(offset, end).toString(), 8) + } +} + +var decodeStr = function (val, offset, length, encoding) { + return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding) +} + +var addLength = function (str) { + var len = Buffer.byteLength(str) + var digits = Math.floor(Math.log(len) / Math.log(10)) + 1 + if (len + digits >= Math.pow(10, digits)) digits++ + + return (len + digits) + str +} + +exports.decodeLongPath = function (buf, encoding) { + return decodeStr(buf, 0, buf.length, encoding) +} + +exports.encodePax = function (opts) { // TODO: encode more stuff in pax + var result = '' + if (opts.name) result += addLength(' path=' + opts.name + '\n') + if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n') + var pax = opts.pax + if (pax) { + for (var key in pax) { + result += addLength(' ' + key + '=' + pax[key] + '\n') + } + } + return toBuffer(result) +} + +exports.decodePax = function (buf) { + var result = {} + + while (buf.length) { + var i = 0 + while (i < buf.length && buf[i] !== 32) i++ + var len = parseInt(buf.slice(0, i).toString(), 10) + if (!len) return result + + var b = buf.slice(i + 1, len - 1).toString() + var keyIndex = b.indexOf('=') + if (keyIndex === -1) return result + result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1) + + buf = buf.slice(len) + } + + return result +} + +exports.encode = function (opts) { + var buf = alloc(512) + var name = opts.name + var prefix = '' + + if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/' + if (Buffer.byteLength(name) !== name.length) return null // utf-8 + + while (Buffer.byteLength(name) > 100) { + var i = name.indexOf('/') + if (i === -1) return null + prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i) + name = name.slice(i + 1) + } + + if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null + if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null + + buf.write(name) + buf.write(encodeOct(opts.mode & MASK, 6), 100) + buf.write(encodeOct(opts.uid, 6), 108) + buf.write(encodeOct(opts.gid, 6), 116) + buf.write(encodeOct(opts.size, 11), 124) + buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136) + + buf[156] = ZERO_OFFSET + toTypeflag(opts.type) + + if (opts.linkname) buf.write(opts.linkname, 157) + + buf.write(USTAR, 257) + if (opts.uname) buf.write(opts.uname, 265) + if (opts.gname) buf.write(opts.gname, 297) + buf.write(encodeOct(opts.devmajor || 0, 6), 329) + buf.write(encodeOct(opts.devminor || 0, 6), 337) + + if (prefix) buf.write(prefix, 345) + + buf.write(encodeOct(cksum(buf), 6), 148) + + return buf +} + +exports.decode = function (buf, filenameEncoding) { + var typeflag = buf[156] === 0 ? 
0 : buf[156] - ZERO_OFFSET + + var name = decodeStr(buf, 0, 100, filenameEncoding) + var mode = decodeOct(buf, 100, 8) + var uid = decodeOct(buf, 108, 8) + var gid = decodeOct(buf, 116, 8) + var size = decodeOct(buf, 124, 12) + var mtime = decodeOct(buf, 136, 12) + var type = toType(typeflag) + var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding) + var uname = decodeStr(buf, 265, 32) + var gname = decodeStr(buf, 297, 32) + var devmajor = decodeOct(buf, 329, 8) + var devminor = decodeOct(buf, 337, 8) + + if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name + + // to support old tar versions that use trailing / to indicate dirs + if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5 + + var c = cksum(buf) + + // checksum is still initial value if header was null. + if (c === 8 * 32) return null + + // valid checksum + if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?') + + return { + name: name, + mode: mode, + uid: uid, + gid: gid, + size: size, + mtime: new Date(1000 * mtime), + type: type, + linkname: linkname, + uname: uname, + gname: gname, + devmajor: devmajor, + devminor: devminor + } +} diff --git a/node_modules/tar-stream/index.js b/node_modules/tar-stream/index.js new file mode 100644 index 0000000..6481704 --- /dev/null +++ b/node_modules/tar-stream/index.js @@ -0,0 +1,2 @@ +exports.extract = require('./extract') +exports.pack = require('./pack') diff --git a/node_modules/tar-stream/pack.js b/node_modules/tar-stream/pack.js new file mode 100644 index 0000000..72d96a0 --- /dev/null +++ b/node_modules/tar-stream/pack.js @@ -0,0 +1,255 @@ +var constants = require('fs-constants') +var eos = require('end-of-stream') +var util = require('util') +var alloc = require('buffer-alloc') +var toBuffer = require('to-buffer') + +var Readable = require('readable-stream').Readable +var Writable = require('readable-stream').Writable +var StringDecoder = require('string_decoder').StringDecoder + +var headers = require('./headers') + +var DMODE = parseInt('755', 8) +var FMODE = parseInt('644', 8) + +var END_OF_TAR = alloc(1024) + +var noop = function () {} + +var overflow = function (self, size) { + size &= 511 + if (size) self.push(END_OF_TAR.slice(0, 512 - size)) +} + +function modeToType (mode) { + switch (mode & constants.S_IFMT) { + case constants.S_IFBLK: return 'block-device' + case constants.S_IFCHR: return 'character-device' + case constants.S_IFDIR: return 'directory' + case constants.S_IFIFO: return 'fifo' + case constants.S_IFLNK: return 'symlink' + } + + return 'file' +} + +var Sink = function (to) { + Writable.call(this) + this.written = 0 + this._to = to + this._destroyed = false +} + +util.inherits(Sink, Writable) + +Sink.prototype._write = function (data, enc, cb) { + this.written += data.length + if (this._to.push(data)) return cb() + this._to._drain = cb +} + +Sink.prototype.destroy = function () { + if (this._destroyed) return + this._destroyed = true + this.emit('close') +} + +var LinkSink = function () { + Writable.call(this) + this.linkname = '' + this._decoder = new StringDecoder('utf-8') + this._destroyed = false +} + +util.inherits(LinkSink, Writable) + +LinkSink.prototype._write = function (data, enc, cb) { + this.linkname += this._decoder.write(data) + cb() +} + +LinkSink.prototype.destroy = function () { + if (this._destroyed) return + this._destroyed = true + this.emit('close') +} + +var Void = function () { + 
Writable.call(this) + this._destroyed = false +} + +util.inherits(Void, Writable) + +Void.prototype._write = function (data, enc, cb) { + cb(new Error('No body allowed for this entry')) +} + +Void.prototype.destroy = function () { + if (this._destroyed) return + this._destroyed = true + this.emit('close') +} + +var Pack = function (opts) { + if (!(this instanceof Pack)) return new Pack(opts) + Readable.call(this, opts) + + this._drain = noop + this._finalized = false + this._finalizing = false + this._destroyed = false + this._stream = null +} + +util.inherits(Pack, Readable) + +Pack.prototype.entry = function (header, buffer, callback) { + if (this._stream) throw new Error('already piping an entry') + if (this._finalized || this._destroyed) return + + if (typeof buffer === 'function') { + callback = buffer + buffer = null + } + + if (!callback) callback = noop + + var self = this + + if (!header.size || header.type === 'symlink') header.size = 0 + if (!header.type) header.type = modeToType(header.mode) + if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE + if (!header.uid) header.uid = 0 + if (!header.gid) header.gid = 0 + if (!header.mtime) header.mtime = new Date() + + if (typeof buffer === 'string') buffer = toBuffer(buffer) + if (Buffer.isBuffer(buffer)) { + header.size = buffer.length + this._encode(header) + this.push(buffer) + overflow(self, header.size) + process.nextTick(callback) + return new Void() + } + + if (header.type === 'symlink' && !header.linkname) { + var linkSink = new LinkSink() + eos(linkSink, function (err) { + if (err) { // stream was closed + self.destroy() + return callback(err) + } + + header.linkname = linkSink.linkname + self._encode(header) + callback() + }) + + return linkSink + } + + this._encode(header) + + if (header.type !== 'file' && header.type !== 'contiguous-file') { + process.nextTick(callback) + return new Void() + } + + var sink = new Sink(this) + + this._stream = sink + + eos(sink, function (err) { + self._stream = null + + if (err) { // stream was closed + self.destroy() + return callback(err) + } + + if (sink.written !== header.size) { // corrupting tar + self.destroy() + return callback(new Error('size mismatch')) + } + + overflow(self, header.size) + if (self._finalizing) self.finalize() + callback() + }) + + return sink +} + +Pack.prototype.finalize = function () { + if (this._stream) { + this._finalizing = true + return + } + + if (this._finalized) return + this._finalized = true + this.push(END_OF_TAR) + this.push(null) +} + +Pack.prototype.destroy = function (err) { + if (this._destroyed) return + this._destroyed = true + + if (err) this.emit('error', err) + this.emit('close') + if (this._stream && this._stream.destroy) this._stream.destroy() +} + +Pack.prototype._encode = function (header) { + if (!header.pax) { + var buf = headers.encode(header) + if (buf) { + this.push(buf) + return + } + } + this._encodePax(header) +} + +Pack.prototype._encodePax = function (header) { + var paxHeader = headers.encodePax({ + name: header.name, + linkname: header.linkname, + pax: header.pax + }) + + var newHeader = { + name: 'PaxHeader', + mode: header.mode, + uid: header.uid, + gid: header.gid, + size: paxHeader.length, + mtime: header.mtime, + type: 'pax-header', + linkname: header.linkname && 'PaxHeader', + uname: header.uname, + gname: header.gname, + devmajor: header.devmajor, + devminor: header.devminor + } + + this.push(headers.encode(newHeader)) + this.push(paxHeader) + overflow(this, paxHeader.length) + + 
newHeader.size = header.size + newHeader.type = header.type + this.push(headers.encode(newHeader)) +} + +Pack.prototype._read = function (n) { + var drain = this._drain + this._drain = noop + drain() +} + +module.exports = Pack diff --git a/node_modules/tar-stream/package.json b/node_modules/tar-stream/package.json new file mode 100644 index 0000000..e15eb27 --- /dev/null +++ b/node_modules/tar-stream/package.json @@ -0,0 +1,60 @@ +{ + "name": "tar-stream", + "version": "1.6.2", + "description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.", + "author": "Mathias Buus ", + "engines": { + "node": ">= 0.8.0" + }, + "dependencies": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + }, + "devDependencies": { + "concat-stream": "^1.6.2", + "standard": "^11.0.1", + "tape": "^4.9.0" + }, + "scripts": { + "test": "standard && tape test/extract.js test/pack.js", + "test-all": "standard && tape test/*.js" + }, + "keywords": [ + "tar", + "tarball", + "parse", + "parser", + "generate", + "generator", + "stream", + "stream2", + "streams", + "streams2", + "streaming", + "pack", + "extract", + "modify" + ], + "bugs": { + "url": "https://github.com/mafintosh/tar-stream/issues" + }, + "homepage": "https://github.com/mafintosh/tar-stream", + "main": "index.js", + "files": [ + "*.js", + "LICENSE" + ], + "directories": { + "test": "test" + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/mafintosh/tar-stream.git" + } +} diff --git a/node_modules/temp-dir/index.d.ts b/node_modules/temp-dir/index.d.ts new file mode 100644 index 0000000..fea8417 --- /dev/null +++ b/node_modules/temp-dir/index.d.ts @@ -0,0 +1,22 @@ +/** +Get the real path of the system temp directory. 
+ +@example +``` +import temporaryDirectory from 'temp-dir'; + +console.log(temporaryDirectory); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T' +``` + +@example +``` +import os from 'node:os'; + +console.log(os.tmpdir()); +//=> '/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T' // <= Symlink +``` +*/ +declare const temporaryDirectory: string; + +export default temporaryDirectory; diff --git a/node_modules/temp-dir/index.js b/node_modules/temp-dir/index.js new file mode 100644 index 0000000..45040dc --- /dev/null +++ b/node_modules/temp-dir/index.js @@ -0,0 +1,6 @@ +import {promises as fs} from 'node:fs'; +import os from 'node:os'; + +const temporaryDirectory = await fs.realpath(os.tmpdir()); + +export default temporaryDirectory; diff --git a/node_modules/temp-dir/license b/node_modules/temp-dir/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/temp-dir/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/temp-dir/package.json b/node_modules/temp-dir/package.json new file mode 100644 index 0000000..9481053 --- /dev/null +++ b/node_modules/temp-dir/package.json @@ -0,0 +1,49 @@ +{ + "name": "temp-dir", + "version": "3.0.0", + "description": "Get the real path of the system temp directory", + "license": "MIT", + "repository": "sindresorhus/temp-dir", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "types": "./index.d.ts", + "engines": { + "node": ">=14.16" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "temp", + "tmpdir", + "os", + "system", + "real", + "path", + "realpath", + "resolved", + "temporary", + "directory", + "folder" + ], + "devDependencies": { + "ava": "^4.3.3", + "quibble": "^0.6.14", + "tsd": "^0.24.1", + "xo": "^0.52.3" + }, + "ava": { + "nodeArguments": [ + "--loader=quibble" + ] + } +} diff --git a/node_modules/temp-dir/readme.md b/node_modules/temp-dir/readme.md new file mode 100644 index 0000000..d0d7a1b --- /dev/null +++ b/node_modules/temp-dir/readme.md @@ -0,0 +1,27 @@ +# temp-dir + +> Get the real path of the system temp directory + +[The `os.tmpdir()` built-in doesn't return the real path.](https://github.com/nodejs/node/issues/11422) That can cause problems when the returned path is a symlink, which is the case on macOS. 
Use this module to get the resolved path. + +## Install + +```sh +npm install temp-dir +``` + +## Usage + +```js +import temporaryDirectory from 'temp-dir'; + +console.log(temporaryDirectory); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T' +``` + +```js +import os from 'node:os'; + +console.log(os.tmpdir()); +//=> '/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T' // <= Symlink +``` diff --git a/node_modules/tempy/index.d.ts b/node_modules/tempy/index.d.ts new file mode 100644 index 0000000..f66efbd --- /dev/null +++ b/node_modules/tempy/index.d.ts @@ -0,0 +1,156 @@ +import {type Buffer} from 'node:buffer'; +import {type MergeExclusive, type TypedArray} from 'type-fest'; + +export type FileOptions = MergeExclusive< +{ + /** + File extension. + + Mutually exclusive with the `name` option. + + _You usually won't need this option. Specify it only when actually needed._ + */ + readonly extension?: string; +}, +{ + /** + Filename. + + Mutually exclusive with the `extension` option. + + _You usually won't need this option. Specify it only when actually needed._ + */ + readonly name?: string; +} +>; + +export type DirectoryOptions = { + /** + Directory prefix. + + _You usually won't need this option. Specify it only when actually needed._ + + Useful for testing by making it easier to identify cache directories that are created. + */ + readonly prefix?: string; +}; + +/** +The temporary path created by the function. Can be asynchronous. +*/ +export type TaskCallback = (temporaryPath: string) => Promise | ReturnValueType; + +/** +Get a temporary file path you can write to. + +@example +``` +import {temporaryFile, temporaryDirectory} from 'tempy'; + +temporaryFile(); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/4f504b9edb5ba0e89451617bf9f971dd' + +temporaryFile({extension: 'png'}); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/a9fb0decd08179eb6cf4691568aa2018.png' + +temporaryFile({name: 'unicorn.png'}); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/f7f62bfd4e2a05f1589947647ed3f9ec/unicorn.png' + +temporaryDirectory(); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/2f3d094aec2cb1b93bb0f4cffce5ebd6' +``` +*/ +export function temporaryFile(options?: FileOptions): string; + +/** +The `callback` resolves with a temporary file path you can write to. The file is automatically cleaned up after the callback is executed. + +@returns A promise that resolves after the callback is executed and the file is cleaned up. + +@example +``` +import {temporaryFileTask} from 'tempy'; + +await temporaryFileTask(tempFile => { + console.log(tempFile); + //=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/4f504b9edb5ba0e89451617bf9f971dd' +}); +``` +*/ +export function temporaryFileTask(callback: TaskCallback, options?: FileOptions): Promise ; + +/** +Get a temporary directory path. The directory is created for you. + +@example +``` +import {temporaryDirectory} from 'tempy'; + +temporaryDirectory(); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/2f3d094aec2cb1b93bb0f4cffce5ebd6' + +temporaryDirectory({prefix: 'a'}); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/name_3c085674ad31223b9653c88f725d6b41' +``` +*/ +export function temporaryDirectory(options?: DirectoryOptions): string; + +/** +The `callback` resolves with a temporary directory path you can write to. The directory is automatically cleaned up after the callback is executed. 
+ +@returns A promise that resolves after the callback is executed and the directory is cleaned up. + +@example +``` +import {temporaryDirectoryTask} from 'tempy'; + +await temporaryDirectoryTask(tempDirectory => { + //=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/2f3d094aec2cb1b93bb0f4cffce5ebd6' +}) +``` +*/ +export function temporaryDirectoryTask(callback: TaskCallback, options?: DirectoryOptions): Promise; + +/** +Write data to a random temp file. + +@example +``` +import {temporaryWrite} from 'tempy'; + +await temporaryWrite('🦄'); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/2f3d094aec2cb1b93bb0f4cffce5ebd6' +``` +*/ +export function temporaryWrite(fileContent: string | Buffer | TypedArray | DataView | NodeJS.ReadableStream, options?: FileOptions): Promise; + +/** +Write data to a random temp file. The file is automatically cleaned up after the callback is executed. + +@returns A promise that resolves after the callback is executed and the file is cleaned up. + +@example +``` +import {temporaryWriteTask} from 'tempy'; + +await temporaryWriteTask('🦄', tempFile => { + //=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/4f504b9edb5ba0e89451617bf9f971dd' +}); +``` +*/ +export function temporaryWriteTask(fileContent: string | Buffer | TypedArray | DataView | NodeJS.ReadableStream, callback: TaskCallback, options?: FileOptions): Promise; + +/** +Synchronously write data to a random temp file. + +@example +``` +import {temporaryWriteSync} from 'tempy'; + +temporaryWriteSync('🦄'); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/2f3d094aec2cb1b93bb0f4cffce5ebd6' +``` +*/ +export function temporaryWriteSync(fileContent: string | Buffer | TypedArray | DataView, options?: FileOptions): string; + +export {default as rootTemporaryDirectory} from 'temp-dir'; diff --git a/node_modules/tempy/index.js b/node_modules/tempy/index.js new file mode 100644 index 0000000..b259b15 --- /dev/null +++ b/node_modules/tempy/index.js @@ -0,0 +1,61 @@ +import fs from 'node:fs'; +import fsPromises from 'node:fs/promises'; +import path from 'node:path'; +import stream from 'node:stream'; +import {promisify} from 'node:util'; +import uniqueString from 'unique-string'; +import tempDir from 'temp-dir'; +import {isStream} from 'is-stream'; + +const pipeline = promisify(stream.pipeline); // TODO: Use `node:stream/promises` when targeting Node.js 16. + +const getPath = (prefix = '') => path.join(tempDir, prefix + uniqueString()); + +const writeStream = async (filePath, data) => pipeline(data, fs.createWriteStream(filePath)); + +async function runTask(temporaryPath, callback) { + try { + return await callback(temporaryPath); + } finally { + await fsPromises.rm(temporaryPath, {recursive: true, force: true, maxRetries: 2}); + } +} + +export function temporaryFile({name, extension} = {}) { + if (name) { + if (extension !== undefined && extension !== null) { + throw new Error('The `name` and `extension` options are mutually exclusive'); + } + + return path.join(temporaryDirectory(), name); + } + + return getPath() + (extension === undefined || extension === null ? '' : '.' 
+ extension.replace(/^\./, '')); +} + +export const temporaryFileTask = async (callback, options) => runTask(temporaryFile(options), callback); + +export function temporaryDirectory({prefix = ''} = {}) { + const directory = getPath(prefix); + fs.mkdirSync(directory); + return directory; +} + +export const temporaryDirectoryTask = async (callback, options) => runTask(temporaryDirectory(options), callback); + +export async function temporaryWrite(fileContent, options) { + const filename = temporaryFile(options); + const write = isStream(fileContent) ? writeStream : fsPromises.writeFile; + await write(filename, fileContent); + return filename; +} + +export const temporaryWriteTask = async (fileContent, callback, options) => runTask(await temporaryWrite(fileContent, options), callback); + +export function temporaryWriteSync(fileContent, options) { + const filename = temporaryFile(options); + fs.writeFileSync(filename, fileContent); + return filename; +} + +export {default as rootTemporaryDirectory} from 'temp-dir'; diff --git a/node_modules/tempy/license b/node_modules/tempy/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/tempy/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/tempy/node_modules/is-stream/index.d.ts b/node_modules/tempy/node_modules/is-stream/index.d.ts new file mode 100644 index 0000000..df994e0 --- /dev/null +++ b/node_modules/tempy/node_modules/is-stream/index.d.ts @@ -0,0 +1,81 @@ +import { + Stream, + Writable as WritableStream, + Readable as ReadableStream, + Duplex as DuplexStream, + Transform as TransformStream, +} from 'node:stream'; + +/** +@returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + +@example +``` +import fs from 'node:fs'; +import {isStream} from 'is-stream'; + +isStream(fs.createReadStream('unicorn.png')); +//=> true + +isStream({}); +//=> false +``` +*/ +export function isStream(stream: unknown): stream is Stream; + +/** +@returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). 
+ +@example +``` +import fs from 'node:fs'; +import {isWritableStream} from 'is-stream'; + +isWritableStream(fs.createWriteStrem('unicorn.txt')); +//=> true +``` +*/ +export function isWritableStream(stream: unknown): stream is WritableStream; + +/** +@returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + +@example +``` +import fs from 'node:fs'; +import {isReadableStream} from 'is-stream'; + +isReadableStream(fs.createReadStream('unicorn.png')); +//=> true +``` +*/ +export function isReadableStream(stream: unknown): stream is ReadableStream; + +/** +@returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + +@example +``` +import {Duplex as DuplexStream} from 'node:stream'; +import {isDuplexStream} from 'is-stream'; + +isDuplexStream(new DuplexStream()); +//=> true +``` +*/ +export function isDuplexStream(stream: unknown): stream is DuplexStream; + +/** +@returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). + +@example +``` +import fs from 'node:fs'; +import StringifyStream from 'streaming-json-stringify'; +import {isTransformStream} from 'is-stream'; + +isTransformStream(StringifyStream()); +//=> true +``` +*/ +export function isTransformStream(stream: unknown): stream is TransformStream; diff --git a/node_modules/tempy/node_modules/is-stream/index.js b/node_modules/tempy/node_modules/is-stream/index.js new file mode 100644 index 0000000..887e601 --- /dev/null +++ b/node_modules/tempy/node_modules/is-stream/index.js @@ -0,0 +1,29 @@ +export function isStream(stream) { + return stream !== null + && typeof stream === 'object' + && typeof stream.pipe === 'function'; +} + +export function isWritableStream(stream) { + return isStream(stream) + && stream.writable !== false + && typeof stream._write === 'function' + && typeof stream._writableState === 'object'; +} + +export function isReadableStream(stream) { + return isStream(stream) + && stream.readable !== false + && typeof stream._read === 'function' + && typeof stream._readableState === 'object'; +} + +export function isDuplexStream(stream) { + return isWritableStream(stream) + && isReadableStream(stream); +} + +export function isTransformStream(stream) { + return isDuplexStream(stream) + && typeof stream._transform === 'function'; +} diff --git a/node_modules/tempy/node_modules/is-stream/license b/node_modules/tempy/node_modules/is-stream/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/tempy/node_modules/is-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/tempy/node_modules/is-stream/package.json b/node_modules/tempy/node_modules/is-stream/package.json new file mode 100644 index 0000000..47d5761 --- /dev/null +++ b/node_modules/tempy/node_modules/is-stream/package.json @@ -0,0 +1,44 @@ +{ + "name": "is-stream", + "version": "3.0.0", + "description": "Check if something is a Node.js stream", + "license": "MIT", + "repository": "sindresorhus/is-stream", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "stream", + "type", + "streams", + "writable", + "readable", + "duplex", + "transform", + "check", + "detect", + "is" + ], + "devDependencies": { + "@types/node": "^16.4.13", + "ava": "^3.15.0", + "tempy": "^1.0.1", + "tsd": "^0.17.0", + "xo": "^0.44.0" + } +} diff --git a/node_modules/tempy/node_modules/is-stream/readme.md b/node_modules/tempy/node_modules/is-stream/readme.md new file mode 100644 index 0000000..c6f8c1b --- /dev/null +++ b/node_modules/tempy/node_modules/is-stream/readme.md @@ -0,0 +1,60 @@ +# is-stream + +> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) + +## Install + +``` +$ npm install is-stream +``` + +## Usage + +```js +import fs from 'node:fs'; +import {isStream} from 'is-stream'; + +isStream(fs.createReadStream('unicorn.png')); +//=> true + +isStream({}); +//=> false +``` + +## API + +### isStream(stream) + +Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + +#### isWritableStream(stream) + +Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + +#### isReadableStream(stream) + +Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + +#### isDuplexStream(stream) + +Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + +#### isTransformStream(stream) + +Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). + +## Related + +- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream + +--- + +
+ + Get professional support for this package with a Tidelift subscription + +
+ + Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. +
+
diff --git a/node_modules/tempy/package.json b/node_modules/tempy/package.json new file mode 100644 index 0000000..32ad877 --- /dev/null +++ b/node_modules/tempy/package.json @@ -0,0 +1,58 @@ +{ + "name": "tempy", + "version": "3.1.0", + "description": "Get a random temporary file or directory path", + "license": "MIT", + "repository": "sindresorhus/tempy", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=14.16" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "temp", + "temporary", + "path", + "file", + "directory", + "folder", + "tempfile", + "tempdir", + "tmpdir", + "tmpfile", + "random", + "unique" + ], + "dependencies": { + "is-stream": "^3.0.0", + "temp-dir": "^3.0.0", + "type-fest": "^2.12.2", + "unique-string": "^3.0.0" + }, + "devDependencies": { + "@types/node": "^20.4.1", + "ava": "^5.3.1", + "path-exists": "^5.0.0", + "touch": "^3.1.0", + "tsd": "^0.28.1", + "xo": "^0.54.2" + }, + "xo": { + "rules": { + "@typescript-eslint/no-redundant-type-constituents": "off" + } + } +} diff --git a/node_modules/tempy/readme.md b/node_modules/tempy/readme.md new file mode 100644 index 0000000..2e46d46 --- /dev/null +++ b/node_modules/tempy/readme.md @@ -0,0 +1,134 @@ +# tempy + +> Get a random temporary file or directory path + +## Install + +```sh +npm install tempy +``` + +## Usage + +```js +import {temporaryFile, temporaryDirectory} from 'tempy'; + +temporaryFile(); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/4f504b9edb5ba0e89451617bf9f971dd' + +temporaryFile({extension: 'png'}); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/a9fb0decd08179eb6cf4691568aa2018.png' + +temporaryFile({name: 'unicorn.png'}); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/f7f62bfd4e2a05f1589947647ed3f9ec/unicorn.png' + +temporaryDirectory(); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/2f3d094aec2cb1b93bb0f4cffce5ebd6' + +temporaryDirectory({prefix: 'name'}); +//=> '/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T/name_3c085674ad31223b9653c88f725d6b41' +``` + +## API + +### temporaryFile(options?) + +Get a temporary file path you can write to. + +### temporaryFileTask(callback, options?) + +The `callback` resolves with a temporary file path you can write to. The file is automatically cleaned up after the callback is executed. Returns a promise that resolves with the return value of the callback after it is executed and the file is cleaned up. + +#### callback + +Type: `(tempPath: string) => void` + +A callback that is executed with the temp file path. Can be asynchronous. + +#### options + +Type: `object` + +*You usually won't need either the `extension` or `name` option. Specify them only when actually needed.* + +##### extension + +Type: `string` + +File extension. + +##### name + +Type: `string` + +Filename. Mutually exclusive with the `extension` option. + +### temporaryDirectory(options?) + +Get a temporary directory path. The directory is created for you. + +### temporaryDirectoryTask(callback, options?) + +The `callback` resolves with a temporary directory path you can write to. The directory is automatically cleaned up after the callback is executed. 
Returns a promise that resolves with the return value of the callback after it is executed and the directory is cleaned up. + +##### callback + +Type: `(tempPath: string) => void` + +A callback that is executed with the temp directory path. Can be asynchronous. + +#### options + +Type: `Object` + +##### prefix + +Type: `string` + +Directory prefix. + +Useful for testing by making it easier to identify cache directories that are created. + +*You usually won't need this option. Specify it only when actually needed.* + +### temporaryWrite(fileContent, options?) + +Write data to a random temp file. + +### temporaryWriteTask(fileContent, callback, options?) + +Write data to a random temp file. The file is automatically cleaned up after the callback is executed. Returns a promise that resolves with the return value of the callback after it is executed and the file is cleaned up. + +##### fileContent + +Type: `string | Buffer | TypedArray | DataView | stream.Readable` + +Data to write to the temp file. + +##### callback + +Type: `(tempPath: string) => void` + +A callback that is executed with the temp file path. Can be asynchronous. + +##### options + +See [options](#options). + +### temporaryWriteSync(fileContent, options?) + +Synchronously write data to a random temp file. + +##### fileContent + +Type: `string | Buffer | TypedArray | DataView` + +Data to write to the temp file. + +##### options + +See [options](#options). + +### rootTemporaryDirectory + +Get the root temporary directory path. For example: `/private/var/folders/3x/jf5977fn79jbglr7rk0tq4d00000gn/T` diff --git a/node_modules/thenby/LICENSE.TXT b/node_modules/thenby/LICENSE.TXT new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/node_modules/thenby/LICENSE.TXT @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
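Looking back at the tempy API documented a few files above (before the thenby license just shown), here is a minimal usage sketch. It is illustrative only and not part of the vendored files; it assumes an ESM context with top-level await, and the `scratch.txt` name is made up for the example.

```js
import fs from 'node:fs/promises';
import {temporaryFile, temporaryDirectoryTask, temporaryWrite} from 'tempy';

// A random path ending in `.log`; temporaryFile only returns a path.
const logPath = temporaryFile({extension: 'log'});
console.log(logPath);

// The directory exists only while the callback runs and is cleaned up afterwards.
await temporaryDirectoryTask(async tempDir => {
  await fs.writeFile(`${tempDir}/scratch.txt`, 'hello');
});

// Write data to a random temp file; this resolves with the new file's path.
const written = await temporaryWrite('hello');
console.log(written);
```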
diff --git a/node_modules/thenby/README.md b/node_modules/thenby/README.md new file mode 100644 index 0000000..7b7aac0 --- /dev/null +++ b/node_modules/thenby/README.md @@ -0,0 +1,159 @@ + + + + +- [thenBy.js usage](#thenbyjs-usage) + - [Sort by property names](#sort-by-property-names) + - [Sort by unary functions](#sort-by-unary-functions) + - [Extra options](#extra-options) + - [Sort descending](#sort-descending) + - [Case insensitive sorting](#case-insensitive-sorting) + - [Custom compare function](#custom-compare-function) + - [Internationalization: Using javascripts native `Intl.Collator`](#internationalization-using-javascripts-native-intlcollator) + - [A word on performance](#a-word-on-performance) + - [Installing](#installing) + - [Install in your HTML](#install-in-your-html) + - [Install using npm or yarn](#install-using-npm-or-yarn) + + + +# thenBy.js usage + + [![NPM Version][npm-image]][npm-url] + [![NPM Downloads][downloads-image]][downloads-url] + +`thenBy` is a javascript micro library that helps sorting arrays on multiple keys. It allows you to use the [native Array::sort() method](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort) of javascript, but pass in multiple functions to sort that are composed with `firstBy().thenBy().thenBy()` style. + +Example: +```javascript +// first by length of name, then by population, then by ID +data.sort( + firstBy(function (v1, v2) { return v1.name.length - v2.name.length; }) + .thenBy(function (v1, v2) { return v1.population - v2.population; }) + .thenBy(function (v1, v2) { return v1.id - v2.id; }) +); +``` +`thenBy` also offers some nice shortcuts that make the most common ways of sorting even easier and more readable. + +### Sort by property names +Javascript sorting relies heavily on passing discriminator functions that return -1, 0 or 1 for a pair of items. While this is very flexible, often you want to sort on the value of a simple property. As a convenience, thenBy.js builds the appropriate compare function for you if you pass in a property name (instead of a function). The example above would then look like this: +```javascript +// first by length of name, then by population, then by ID +data.sort( + firstBy(function (v1, v2) { return v1.name.length - v2.name.length; }) + .thenBy("population") + .thenBy("id") +); +``` + +If an element doesn't have the property defined, it will sort like the empty string (""). Typically, this will be at the top. + +### Sort by unary functions +You can also pass a function that takes a single item and returns its sorting key. This turns the above expression into: +```javascript +// first by length of name, then by population, then by ID +data.sort( + firstBy(function (v) { return v.name.length; }) + .thenBy("population") + .thenBy("id") +); +``` + +Note that javascript contains a number of standard functions that can be passed in here as well. The Number() function will make your sorting sort on numeric values instead of lexical values: +```javascript +var values = ["2", "20", "03", "-2", "0", 200, "2"]; +var sorted = values.sort(firstBy(Number)); +``` +## Extra options +### Sort descending +thenBy.js allows you to pass in a second parameter for `direction`. If you pass in 'desc' or -1, the sorting will be reversed. 
So: +```javascript +// first by length of name descending, then by population descending, then by ID ascending +data.sort( + firstBy(function (v1, v2) { return v1.name.length - v2.name.length; }, -1) + .thenBy("population", "desc") + .thenBy("id") +); +``` + +### Case insensitive sorting +(as of v1.2.0) All of the shortcut methods allow you to sort case insensitive as well. The second parameter expects an options object (if it is a number, it is interpreted as `direction` as above). The ignoreCase property can be set to true, like this: +```javascript +// first by name, case insensitive, then by population +data.sort( + firstBy("name", {ignoreCase:true}) + .thenBy("population") +); +``` +If you want to use both descending and ignoreCase, you have to use the options syntax for direction as well: +```javascript +// sort by name, case insensitive and descending +data.sort(firstBy("name", {ignoreCase:true, direction:"desc"})); +``` +### Custom compare function +If you have more specific wishes for the exact sort order, but still want to use the convenience of unary functions or sorting on property names, you can pass in your own compare function in the options. Here we use a compare function that knows about the relative values of playing cards: + +```javascript +const cards = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K', 'A']; +var cardCompare = (c1, c2) =>{ + return cards.indexOf(c1) - cards.indexOf(c2); +} +var handOfCards = [ + { id: 7, suit:"c", card:"A" }, + { id: 8, suit:"d", card:"10" }, + // etc + ]; +handOfCards.sort(firstBy("card", {cmp: cardCompare, direction: "desc"})); + +``` +You can use the `cmp` function together with `direction`, but not with `ignoreCase` (for obvious reasons). + +### Internationalization: Using JavaScript's native `Intl.Collator` +One of the more interesting custom compare functions you may want to pass in is the native `compare` function that is exposed by `Intl.Collator`. This compare function knows about the different sorting rules in different cultures. Many browsers have these implemented, but in NodeJS, the API is implemented, but only for the English culture. You would use it with thenBy like this: + +```javascript +// in German, ä sorts with a +var germanCompare = new Intl.Collator('de').compare; +// in Swedish, ä sorts after z +var swedishCompare = new Intl.Collator('sv').compare; +data.sort( + firstBy("name", {cmp: swedishCompare}) +); +``` +Check the [details on using Intl.Collator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Collator). + +## A word on performance +thenBy constructs a comparer function for you. It does this by combining the functions you pass in with a number of small utility functions that perform tasks like "reverting", "combining the current sort order with the previous one", etc. Also, these operations try to work correctly, no matter what content is in the sorted array. There are two steps here that cost time: constructing the über-function and running it. The construction time should always be negligible. The run time however can be slower than when you carefully handcraft the compare function. Still, *normally you shouldn't worry about this*, but if you're sorting very large sets, it could matter. For example, there is some overhead in making several small functions call each other instead of creating one piece of code.
Also, if you know your data well, and know that a specific field is *alwways present* and is *always a number*, you could code a significantly faster compare function then thenBy's results. The unit tests contain an extreme example. + +If you use thenBy to combine multiple compare functions into one (where each function expects two parameters), the difference is small. Using unary functions adds some overhead, using direction:desc adds some, using only a property name adds a little, but will check for missing values, which could be optimized. Ignoring case will slow down, but not more so than when handcoded. + +## Installing +### Install in your HTML +To include it into your page/project, just paste the minified code from https://raw.github.com/Teun/thenBy.js/master/thenBy.min.js into yours (699 characters). If you don't want the `firstBy` function in your global namespace, you can assign it to a local variable (see sample.htm). + +### Install using npm or yarn +```npm install thenby``` + +or + +```yarn add thenby``` + +then in your app: + +```var firstBy = require('thenby');``` + +or in TypeScript/ES6: + +```import {firstBy} from "thenby";``` + +For a small demo of how TypeScript support looks in a good editor (i.e. VS Code), [check this short video](https://youtu.be/mKJovFLyxro). + + +Thanks a lot to [bergus](https://github.com/bergus), [hagabaka](https://github.com/hagabaka), [infolyzer](https://github.com/infolyzer) and [Foxhoundn](https://github.com/Foxhoundn) for their improvements. +Thanks to [jsgoupil](https://github.com/jsgoupil) and [HonoluluHenk](https://github.com/HonoluluHenk) for their help on the TypeScript declaration. + + +[npm-image]: https://img.shields.io/npm/v/thenby.svg +[npm-url]: https://npmjs.org/package/thenby +[downloads-image]: https://img.shields.io/npm/dm/thenby.svg +[downloads-url]: https://npmjs.org/package/thenby diff --git a/node_modules/thenby/package.json b/node_modules/thenby/package.json new file mode 100644 index 0000000..4efb912 --- /dev/null +++ b/node_modules/thenby/package.json @@ -0,0 +1,45 @@ +{ + "name": "thenby", + "version": "1.3.4", + "description": "Micro library for sorting arrays using the firstBy().thenBy().thenBy() syntax", + "main": "thenBy.module.js", + "types": "thenBy.module.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/Teun/thenBy.js.git" + }, + "keywords": [ + "sort", + "order", + "sorting", + "arrays", + "multiple" + ], + "files": [ + "thenBy.module.js", + "thenBy.module.d.ts", + "thenBy.min.js" + ], + "scripts": { + "test": "gulp", + "build": "gulp build" + }, + "author": "Teun Duynstee", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/Teun/thenBy.js/issues" + }, + "homepage": "https://github.com/Teun/thenBy.js", + "devDependencies": { + "chai": "^3.5.0", + "gulp": "^4.0.2", + "gulp-insert": "^0.5.0", + "gulp-mocha": "^7.0.2", + "gulp-rename": "^1.2.2", + "gulp-replace": "^0.5.4", + "gulp-uglify": "^2.1.2", + "gulp-umd": "^2.0.0", + "performance-now": "^0.2.0" + }, + "dependencies": {} +} diff --git a/node_modules/thenby/thenBy.min.js b/node_modules/thenby/thenBy.min.js new file mode 100644 index 0000000..ba9ef0b --- /dev/null +++ b/node_modules/thenby/thenBy.min.js @@ -0,0 +1,2 @@ +/*** Copyright 2020 Teun Duynstee Licensed under the Apache License, Version 2.0 ***/ +!function(n,t){"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?module.exports=t():n.firstBy=t()}(this,function(){return function(){function n(n){return n}function t(n){return"string"==typeof 
n?n.toLowerCase():n}function e(e,r){if(r="object"==typeof r?r:{direction:r},"function"!=typeof e){var i=e;e=function(n){return n[i]?n[i]:""}}if(1===e.length){var o=e,f=r.ignoreCase?t:n,u=r.cmp||function(n,t){return nt?1:0};e=function(n,t){return u(f(o(n)),f(o(t)))}}const c={"-1":"",desc:""};return r.direction in c?function(n,t){return-e(n,t)}:e}function r(n,t){var i="function"==typeof this&&!this.firstBy&&this,o=e(n,t),f=i?function(n,t){return i(n,t)||o(n,t)}:o;return f.thenBy=r,f}return r.firstBy=r,r}()}); \ No newline at end of file diff --git a/node_modules/thenby/thenBy.module.d.ts b/node_modules/thenby/thenBy.module.d.ts new file mode 100644 index 0000000..becb6d5 --- /dev/null +++ b/node_modules/thenby/thenBy.module.d.ts @@ -0,0 +1,51 @@ +// Type definitions for thenBy +// Definitions by: Teun Duynstee (with significant help from @HonoluluHenk) +type SortOrder = "asc" | "desc" | -1 | 1; +declare class opt { + direction?:SortOrder; + ignoreCase?:boolean; +} +declare class typedOpt extends opt { + cmp?: (a:T, b:T)=> number; +} +interface IThenBy { + (v1: T, v2: T) : number; + /** + * Full format to compare two elements and determine which sorts first. + * @param compare function that receives two values from the sorted array and returns a number indicating which comes first: < 0: first comes first, 0: doesn't matter, > 0: second comes first. + * @param direction can be used to reverse the sorting by passing -1 + **/ + thenBy(compare: ((v1: T, v2: T) => number), direction?: SortOrder | opt): IThenBy; + /** + * Shorthand for selecting a value to sort on from the sorted element. + * @param select function that receives a value from the sorted array and selects the thing to sort on + * @param direction reverse by passing -1. opt for other options + **/ + thenBy(select: ((v: T) => U), direction?: SortOrder | typedOpt): IThenBy; + /** + * Shorthand for sorting on a simple property. + * @param byPropertyName is the name of the property to sort on as a string + * @param direction reverse by passing -1. opt for other options + **/ + thenBy(byPropertyName: (keyof T), direction?: SortOrder | typedOpt): IThenBy; +} +declare module "thenby" { + /** + * Full format to compare two elements and determine which sorts first. + * @param compare function that receives two values from the sorted array and returns a number indicating which comes first: < 0: first comes first, 0: doesn't matter, > 0: second comes first. + * @param direction can be used to reverse the sorting by passing -1 + **/ + export function firstBy(compare: ((v1: T, v2: T) => number), direction?: SortOrder | opt): IThenBy; + /** + * Shorthand for selecting a value to sort on from the sorted element. + * @param select function that receives a value from the sorted array and selects the thing to sort on + * @param direction reverse by passing -1. opt for other options + **/ + export function firstBy(select: ((v: T) => U), direction?: SortOrder | typedOpt): IThenBy; + /** + * Shorthand for sorting on a simple property. + * @param byPropertyName is the name of the property to sort on as a string + * @param direction reverse by passing -1. 
opt for other options + **/ + export function firstBy(byPropertyName: (keyof T), direction?: SortOrder | typedOpt): IThenBy; + } diff --git a/node_modules/thenby/thenBy.module.js b/node_modules/thenby/thenBy.module.js new file mode 100644 index 0000000..bd1dccb --- /dev/null +++ b/node_modules/thenby/thenBy.module.js @@ -0,0 +1,60 @@ +/*** + Copyright 2013 Teun Duynstee + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +module.exports = (function() { + + function identity(v){return v;} + + function ignoreCase(v){return typeof(v)==="string" ? v.toLowerCase() : v;} + + function makeCompareFunction(f, opt){ + opt = typeof(opt)==="object" ? opt : {direction:opt}; + + if(typeof(f)!="function"){ + var prop = f; + // make unary function + f = function(v1){return !!v1[prop] ? v1[prop] : "";} + } + if(f.length === 1) { + // f is a unary function mapping a single item to its sort score + var uf = f; + var preprocess = opt.ignoreCase?ignoreCase:identity; + var cmp = opt.cmp || function(v1,v2) {return v1 < v2 ? -1 : v1 > v2 ? 1 : 0;} + f = function(v1,v2) {return cmp(preprocess(uf(v1)), preprocess(uf(v2)));} + } + const descTokens = {"-1":'', desc:''}; + if(opt.direction in descTokens) return function(v1,v2){return -f(v1,v2)}; + return f; + } + + /* adds a secondary compare function to the target function (`this` context) + which is applied in case the first one returns 0 (equal) + returns a new compare function, which has a `thenBy` method as well */ + function tb(func, opt) { + /* should get value false for the first call. This can be done by calling the + exported function, or the firstBy property on it (for es6 module compatibility) + */ + var x = (typeof(this) == "function" && !this.firstBy) ? this : false; + var y = makeCompareFunction(func, opt); + var f = x ? function(a, b) { + return x(a,b) || y(a,b); + } + : y; + f.thenBy = tb; + return f; + } + tb.firstBy = tb; + return tb; +})(); diff --git a/node_modules/through/.travis.yml b/node_modules/through/.travis.yml new file mode 100644 index 0000000..c693a93 --- /dev/null +++ b/node_modules/through/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - 0.6 + - 0.8 + - "0.10" diff --git a/node_modules/through/LICENSE.APACHE2 b/node_modules/through/LICENSE.APACHE2 new file mode 100644 index 0000000..6366c04 --- /dev/null +++ b/node_modules/through/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
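Looking back at the thenby files added above, here is a brief sketch of how the documented shortcuts (property names, unary key functions, `direction`, and `ignoreCase`) combine into one comparer. It is illustrative only; the data is made up for the example.

```js
const {firstBy} = require('thenby');

const people = [
  {name: 'carol', city: 'Oslo', age: 31},
  {name: 'Bob', city: 'oslo', age: 25},
  {name: 'alice', city: 'Bergen', age: 31},
];

people.sort(
  firstBy('city', {ignoreCase: true})    // property-name shortcut plus options
    .thenBy(p => p.age, -1)              // unary key function, descending
    .thenBy('name', {ignoreCase: true})  // final tie-breaker
);

console.log(people.map(p => [p.city, p.age, p.name].join('/')));
//=> [ 'Bergen/31/alice', 'Oslo/31/carol', 'oslo/25/Bob' ]
```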
diff --git a/node_modules/through/LICENSE.MIT b/node_modules/through/LICENSE.MIT new file mode 100644 index 0000000..6eafbd7 --- /dev/null +++ b/node_modules/through/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/through/index.js b/node_modules/through/index.js new file mode 100644 index 0000000..ca5fc59 --- /dev/null +++ b/node_modules/through/index.js @@ -0,0 +1,108 @@ +var Stream = require('stream') + +// through +// +// a stream that does nothing but re-emit the input. +// useful for aggregating a series of changing but not ending streams into one stream) + +exports = module.exports = through +through.through = through + +//create a readable writable stream. + +function through (write, end, opts) { + write = write || function (data) { this.queue(data) } + end = end || function () { this.queue(null) } + + var ended = false, destroyed = false, buffer = [], _ended = false + var stream = new Stream() + stream.readable = stream.writable = true + stream.paused = false + +// stream.autoPause = !(opts && opts.autoPause === false) + stream.autoDestroy = !(opts && opts.autoDestroy === false) + + stream.write = function (data) { + write.call(this, data) + return !stream.paused + } + + function drain() { + while(buffer.length && !stream.paused) { + var data = buffer.shift() + if(null === data) + return stream.emit('end') + else + stream.emit('data', data) + } + } + + stream.queue = stream.push = function (data) { +// console.error(ended) + if(_ended) return stream + if(data === null) _ended = true + buffer.push(data) + drain() + return stream + } + + //this will be registered as the first 'end' listener + //must call destroy next tick, to make sure we're after any + //stream piped from here. + //this is only a problem if end is not emitted synchronously. 
+ //a nicer way to do this is to make sure this is the last listener for 'end' + + stream.on('end', function () { + stream.readable = false + if(!stream.writable && stream.autoDestroy) + process.nextTick(function () { + stream.destroy() + }) + }) + + function _end () { + stream.writable = false + end.call(stream) + if(!stream.readable && stream.autoDestroy) + stream.destroy() + } + + stream.end = function (data) { + if(ended) return + ended = true + if(arguments.length) stream.write(data) + _end() // will emit or queue + return stream + } + + stream.destroy = function () { + if(destroyed) return + destroyed = true + ended = true + buffer.length = 0 + stream.writable = stream.readable = false + stream.emit('close') + return stream + } + + stream.pause = function () { + if(stream.paused) return + stream.paused = true + return stream + } + + stream.resume = function () { + if(stream.paused) { + stream.paused = false + stream.emit('resume') + } + drain() + //may have become paused again, + //as drain emits 'data'. + if(!stream.paused) + stream.emit('drain') + return stream + } + return stream +} + diff --git a/node_modules/through/package.json b/node_modules/through/package.json new file mode 100644 index 0000000..9862189 --- /dev/null +++ b/node_modules/through/package.json @@ -0,0 +1,36 @@ +{ + "name": "through", + "version": "2.3.8", + "description": "simplified stream construction", + "main": "index.js", + "scripts": { + "test": "set -e; for t in test/*.js; do node $t; done" + }, + "devDependencies": { + "stream-spec": "~0.3.5", + "tape": "~2.3.2", + "from": "~0.1.3" + }, + "keywords": [ + "stream", + "streams", + "user-streams", + "pipe" + ], + "author": "Dominic Tarr (dominictarr.com)", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/dominictarr/through.git" + }, + "homepage": "https://github.com/dominictarr/through", + "testling": { + "browsers": [ + "ie/8..latest", + "ff/15..latest", + "chrome/20..latest", + "safari/5.1..latest" + ], + "files": "test/*.js" + } +} diff --git a/node_modules/through/readme.markdown b/node_modules/through/readme.markdown new file mode 100644 index 0000000..cb34c81 --- /dev/null +++ b/node_modules/through/readme.markdown @@ -0,0 +1,64 @@ +#through + +[![build status](https://secure.travis-ci.org/dominictarr/through.png)](http://travis-ci.org/dominictarr/through) +[![testling badge](https://ci.testling.com/dominictarr/through.png)](https://ci.testling.com/dominictarr/through) + +Easy way to create a `Stream` that is both `readable` and `writable`. + +* Pass in optional `write` and `end` methods. +* `through` takes care of pause/resume logic if you use `this.queue(data)` instead of `this.emit('data', data)`. +* Use `this.pause()` and `this.resume()` to manage flow. +* Check `this.paused` to see current flow state. (`write` always returns `!this.paused`). + +This function is the basis for most of the synchronous streams in +[event-stream](http://github.com/dominictarr/event-stream). 
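As a small illustrative sketch of the `write`/`end` hooks and `this.queue()` buffering described above (not part of the package; the readme's own minimal examples follow), here is a pass-through stream that upper-cases whatever flows through it:

``` js
var through = require('through')

// Upper-case each chunk; this.queue() takes care of pause/resume buffering.
var upper = through(function write (data) {
  this.queue(String(data).toUpperCase())
}, function end () { // optional
  this.queue(null)
})

process.stdin.pipe(upper).pipe(process.stdout)
```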
+ +``` js +var through = require('through') + +through(function write(data) { + this.queue(data) //data *must* not be null + }, + function end () { //optional + this.queue(null) + }) +``` + +Or, can also be used _without_ buffering on pause, use `this.emit('data', data)`, +and this.emit('end') + +``` js +var through = require('through') + +through(function write(data) { + this.emit('data', data) + //this.pause() + }, + function end () { //optional + this.emit('end') + }) +``` + +## Extended Options + +You will probably not need these 99% of the time. + +### autoDestroy=false + +By default, `through` emits close when the writable +and readable side of the stream has ended. +If that is not desired, set `autoDestroy=false`. + +``` js +var through = require('through') + +//like this +var ts = through(write, end, {autoDestroy: false}) +//or like this +var ts = through(write, end) +ts.autoDestroy = false +``` + +## License + +MIT / Apache2 diff --git a/node_modules/through/test/async.js b/node_modules/through/test/async.js new file mode 100644 index 0000000..46bdbae --- /dev/null +++ b/node_modules/through/test/async.js @@ -0,0 +1,28 @@ +var from = require('from') +var through = require('../') + +var tape = require('tape') + +tape('simple async example', function (t) { + + var n = 0, expected = [1,2,3,4,5], actual = [] + from(expected) + .pipe(through(function(data) { + this.pause() + n ++ + setTimeout(function(){ + console.log('pushing data', data) + this.push(data) + this.resume() + }.bind(this), 300) + })).pipe(through(function(data) { + console.log('pushing data second time', data); + this.push(data) + })).on('data', function (d) { + actual.push(d) + }).on('end', function() { + t.deepEqual(actual, expected) + t.end() + }) + +}) diff --git a/node_modules/through/test/auto-destroy.js b/node_modules/through/test/auto-destroy.js new file mode 100644 index 0000000..9a8fd00 --- /dev/null +++ b/node_modules/through/test/auto-destroy.js @@ -0,0 +1,30 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. + +test('end before close', function (assert) { + var ts = through() + ts.autoDestroy = false + var ended = false, closed = false + + ts.on('end', function () { + assert.ok(!closed) + ended = true + }) + ts.on('close', function () { + assert.ok(ended) + closed = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.ok(ended) + assert.notOk(closed) + ts.destroy() + assert.ok(closed) + assert.end() +}) + diff --git a/node_modules/through/test/buffering.js b/node_modules/through/test/buffering.js new file mode 100644 index 0000000..b0084bf --- /dev/null +++ b/node_modules/through/test/buffering.js @@ -0,0 +1,71 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. 
+ +test('buffering', function(assert) { + var ts = through(function (data) { + this.queue(data) + }, function () { + this.queue(null) + }) + + var ended = false, actual = [] + + ts.on('data', actual.push.bind(actual)) + ts.on('end', function () { + ended = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + assert.deepEqual(actual, [1, 2, 3]) + ts.pause() + ts.write(4) + ts.write(5) + ts.write(6) + assert.deepEqual(actual, [1, 2, 3]) + ts.resume() + assert.deepEqual(actual, [1, 2, 3, 4, 5, 6]) + ts.pause() + ts.end() + assert.ok(!ended) + ts.resume() + assert.ok(ended) + assert.end() +}) + +test('buffering has data in queue, when ends', function (assert) { + + /* + * If stream ends while paused with data in the queue, + * stream should still emit end after all data is written + * on resume. + */ + + var ts = through(function (data) { + this.queue(data) + }, function () { + this.queue(null) + }) + + var ended = false, actual = [] + + ts.on('data', actual.push.bind(actual)) + ts.on('end', function () { + ended = true + }) + + ts.pause() + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.deepEqual(actual, [], 'no data written yet, still paused') + assert.ok(!ended, 'end not emitted yet, still paused') + ts.resume() + assert.deepEqual(actual, [1, 2, 3], 'resumed, all data should be delivered') + assert.ok(ended, 'end should be emitted once all data was delivered') + assert.end(); +}) diff --git a/node_modules/through/test/end.js b/node_modules/through/test/end.js new file mode 100644 index 0000000..fa113f5 --- /dev/null +++ b/node_modules/through/test/end.js @@ -0,0 +1,45 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. + +test('end before close', function (assert) { + var ts = through() + var ended = false, closed = false + + ts.on('end', function () { + assert.ok(!closed) + ended = true + }) + ts.on('close', function () { + assert.ok(ended) + closed = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.ok(ended) + assert.ok(closed) + assert.end() +}) + +test('end only once', function (t) { + + var ts = through() + var ended = false, closed = false + + ts.on('end', function () { + t.equal(ended, false) + ended = true + }) + + ts.queue(null) + ts.queue(null) + ts.queue(null) + + ts.resume() + + t.end() +}) diff --git a/node_modules/through/test/index.js b/node_modules/through/test/index.js new file mode 100644 index 0000000..96da82f --- /dev/null +++ b/node_modules/through/test/index.js @@ -0,0 +1,133 @@ + +var test = require('tape') +var spec = require('stream-spec') +var through = require('../') + +/* + I'm using these two functions, and not streams and pipe + so there is less to break. 
if this test fails it must be + the implementation of _through_ +*/ + +function write(array, stream) { + array = array.slice() + function next() { + while(array.length) + if(stream.write(array.shift()) === false) + return stream.once('drain', next) + + stream.end() + } + + next() +} + +function read(stream, callback) { + var actual = [] + stream.on('data', function (data) { + actual.push(data) + }) + stream.once('end', function () { + callback(null, actual) + }) + stream.once('error', function (err) { + callback(err) + }) +} + +test('simple defaults', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l * Math.random()) + + var t = through() + var s = spec(t).through().pausable() + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected) + assert.end() + }) + + t.on('close', s.validate) + + write(expected, t) +}); + +test('simple functions', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l * Math.random()) + + var t = through(function (data) { + this.emit('data', data*2) + }) + var s = spec(t).through().pausable() + + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected.map(function (data) { + return data*2 + })) + assert.end() + }) + + t.on('close', s.validate) + + write(expected, t) +}) + +test('pauses', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l) //Math.random()) + + var t = through() + + var s = spec(t) + .through() + .pausable() + + t.on('data', function () { + if(Math.random() > 0.1) return + t.pause() + process.nextTick(function () { + t.resume() + }) + }) + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected) + }) + + t.on('close', function () { + s.validate() + assert.end() + }) + + write(expected, t) +}) + +test('does not soft-end on `undefined`', function(assert) { + var stream = through() + , count = 0 + + stream.on('data', function (data) { + count++ + }) + + stream.write(undefined) + stream.write(undefined) + + assert.equal(count, 2) + + assert.end() +}) diff --git a/node_modules/to-buffer/.travis.yml b/node_modules/to-buffer/.travis.yml new file mode 100644 index 0000000..ac46872 --- /dev/null +++ b/node_modules/to-buffer/.travis.yml @@ -0,0 +1,9 @@ +sudo: false + +language: node_js + +node_js: + - "5" + - "4" + - "0.12" + - "0.10" diff --git a/node_modules/to-buffer/LICENSE b/node_modules/to-buffer/LICENSE new file mode 100644 index 0000000..bae9da7 --- /dev/null +++ b/node_modules/to-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/to-buffer/README.md b/node_modules/to-buffer/README.md new file mode 100644 index 0000000..fe334a4 --- /dev/null +++ b/node_modules/to-buffer/README.md @@ -0,0 +1,23 @@ +# to-buffer + +Pass in a string, get a buffer back. Pass in a buffer, get the same buffer back. + +``` +npm install to-buffer +``` + +[![build status](https://travis-ci.org/mafintosh/to-buffer.svg?branch=master)](https://travis-ci.org/mafintosh/to-buffer) + +## Usage + +``` js +var toBuffer = require('to-buffer') +console.log(toBuffer('hi')) // +console.log(toBuffer(Buffer('hi'))) // +console.log(toBuffer('6869', 'hex')) // +console.log(toBuffer(43)) // throws +``` + +## License + +MIT diff --git a/node_modules/to-buffer/index.js b/node_modules/to-buffer/index.js new file mode 100644 index 0000000..9bd4978 --- /dev/null +++ b/node_modules/to-buffer/index.js @@ -0,0 +1,14 @@ +module.exports = toBuffer + +var makeBuffer = Buffer.from && Buffer.from !== Uint8Array.from ? Buffer.from : bufferFrom + +function bufferFrom (buf, enc) { + return new Buffer(buf, enc) +} + +function toBuffer (buf, enc) { + if (Buffer.isBuffer(buf)) return buf + if (typeof buf === 'string') return makeBuffer(buf, enc) + if (Array.isArray(buf)) return makeBuffer(buf) + throw new Error('Input should be a buffer or a string') +} diff --git a/node_modules/to-buffer/package.json b/node_modules/to-buffer/package.json new file mode 100644 index 0000000..fdc9c61 --- /dev/null +++ b/node_modules/to-buffer/package.json @@ -0,0 +1,24 @@ +{ + "name": "to-buffer", + "version": "1.1.1", + "description": "Pass in a string, get a buffer back. Pass in a buffer, get the same buffer back", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "standard": "^6.0.5", + "tape": "^4.4.0" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/to-buffer.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/to-buffer/issues" + }, + "homepage": "https://github.com/mafintosh/to-buffer" +} diff --git a/node_modules/to-buffer/test.js b/node_modules/to-buffer/test.js new file mode 100644 index 0000000..f5e97d6 --- /dev/null +++ b/node_modules/to-buffer/test.js @@ -0,0 +1,26 @@ +var tape = require('tape') +var toBuffer = require('./') + +tape('buffer returns buffer', function (t) { + t.same(toBuffer(Buffer('hi')), Buffer('hi')) + t.end() +}) + +tape('string returns buffer', function (t) { + t.same(toBuffer('hi'), Buffer('hi')) + t.end() +}) + +tape('string + enc returns buffer', function (t) { + t.same(toBuffer('6869', 'hex'), Buffer('hi')) + t.end() +}) + +tape('other input throws', function (t) { + try { + toBuffer(42) + } catch (err) { + t.same(err.message, 'Input should be a buffer or a string') + t.end() + } +}) diff --git a/node_modules/to-regex-range/LICENSE b/node_modules/to-regex-range/LICENSE new file mode 100644 index 0000000..7cccaf9 --- /dev/null +++ b/node_modules/to-regex-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-present, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/to-regex-range/README.md b/node_modules/to-regex-range/README.md new file mode 100644 index 0000000..38887da --- /dev/null +++ b/node_modules/to-regex-range/README.md @@ -0,0 +1,305 @@ +# to-regex-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) + +> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save to-regex-range +``` + +
+ +**What does this do?** +
+ +This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers. + +**Example** + +```js +const toRegexRange = require('to-regex-range'); +const regex = new RegExp(toRegexRange('15', '95')); +``` + +A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string). +
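For instance, the returned source can be anchored or given flags before compiling. A small illustrative sketch (the sample sentence is made up; the generated source for `'15'`/`'95'` is the one shown in the Usage section below):

```js
const toRegexRange = require('to-regex-range');

const source = toRegexRange('15', '95');                 //=> 1[5-9]|[2-8][0-9]|9[0-5]
const exact = new RegExp(`^(?:${source})$`);             // whole-string match
const finder = new RegExp(`\\b(?:${source})\\b`, 'g');   // find all occurrences

console.log(exact.test('42'));                     //=> true
console.log('rooms 20, 96 and 80'.match(finder));  //=> [ '20', '80' ]
```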
+ +**Why use this library?** +
+ +### Convenience + +Creating regular expressions for matching numbers gets deceptively complicated pretty fast. + +For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: + +* regex for matching `1` => `/1/` (easy enough) +* regex for matching `1` through `5` => `/[1-5]/` (not bad...) +* regex for matching `1` or `5` => `/(1|5)/` (still easy...) +* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) +* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) +* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) +* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) + +The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. + +**Learn more** + +If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. + +### Heavily tested + +As of April 07, 2019, this library runs [>1m test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are correct. + +Tests run in ~280ms on my MacBook Pro, 2.5 GHz Intel Core i7. + +### Optimized + +Generated regular expressions are optimized: + +* duplicate sequences and character classes are reduced using quantifiers +* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative +* uses fragment caching to avoid processing the same exact string more than once + +
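To make the "Convenience" point above concrete, here is an illustrative sketch (not part of the package docs) that builds an IPv4-octet matcher instead of hand-writing the 0 to 255 alternation:

```js
const toRegexRange = require('to-regex-range');

// Source string matching 0 through 255, generated instead of written by hand.
const octet = toRegexRange(0, 255);
const ipv4 = new RegExp(`^(?:${octet})(?:\\.(?:${octet})){3}$`);

console.log(ipv4.test('192.168.0.1')); //=> true
console.log(ipv4.test('256.1.1.1'));   //=> false, 256 is out of range
```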
+ +## Usage + +Add this library to your javascript application with the following line of code + +```js +const toRegexRange = require('to-regex-range'); +``` + +The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers). + +```js +const source = toRegexRange('15', '95'); +//=> 1[5-9]|[2-8][0-9]|9[0-5] + +const regex = new RegExp(`^${source}$`); +console.log(regex.test('14')); //=> false +console.log(regex.test('50')); //=> true +console.log(regex.test('94')); //=> true +console.log(regex.test('96')); //=> false +``` + +## Options + +### options.capture + +**Type**: `boolean` + +**Deafault**: `undefined` + +Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges. + +```js +console.log(toRegexRange('-10', '10')); +//=> -[1-9]|-?10|[0-9] + +console.log(toRegexRange('-10', '10', { capture: true })); +//=> (-[1-9]|-?10|[0-9]) +``` + +### options.shorthand + +**Type**: `boolean` + +**Deafault**: `undefined` + +Use the regex shorthand for `[0-9]`: + +```js +console.log(toRegexRange('0', '999999')); +//=> [0-9]|[1-9][0-9]{1,5} + +console.log(toRegexRange('0', '999999', { shorthand: true })); +//=> \d|[1-9]\d{1,5} +``` + +### options.relaxZeros + +**Type**: `boolean` + +**Default**: `true` + +This option relaxes matching for leading zeros when when ranges are zero-padded. + +```js +const source = toRegexRange('-0010', '0010'); +const regex = new RegExp(`^${source}$`); +console.log(regex.test('-10')); //=> true +console.log(regex.test('-010')); //=> true +console.log(regex.test('-0010')); //=> true +console.log(regex.test('10')); //=> true +console.log(regex.test('010')); //=> true +console.log(regex.test('0010')); //=> true +``` + +When `relaxZeros` is false, matching is strict: + +```js +const source = toRegexRange('-0010', '0010', { relaxZeros: false }); +const regex = new RegExp(`^${source}$`); +console.log(regex.test('-10')); //=> false +console.log(regex.test('-010')); //=> false +console.log(regex.test('-0010')); //=> true +console.log(regex.test('10')); //=> false +console.log(regex.test('010')); //=> false +console.log(regex.test('0010')); //=> true +``` + +## Examples + +| **Range** | **Result** | **Compile time** | +| --- | --- | --- | +| `toRegexRange(-10, 10)` | `-[1-9]\|-?10\|[0-9]` | _132μs_ | +| `toRegexRange(-100, -10)` | `-1[0-9]\|-[2-9][0-9]\|-100` | _50μs_ | +| `toRegexRange(-100, 100)` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _42μs_ | +| `toRegexRange(001, 100)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|100` | _109μs_ | +| `toRegexRange(001, 555)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _51μs_ | +| `toRegexRange(0010, 1000)` | `0{0,2}1[0-9]\|0{0,2}[2-9][0-9]\|0?[1-9][0-9]{2}\|1000` | _31μs_ | +| `toRegexRange(1, 50)` | `[1-9]\|[1-4][0-9]\|50` | _24μs_ | +| `toRegexRange(1, 55)` | `[1-9]\|[1-4][0-9]\|5[0-5]` | _23μs_ | +| `toRegexRange(1, 555)` | `[1-9]\|[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _30μs_ | +| `toRegexRange(1, 5555)` | `[1-9]\|[1-9][0-9]{1,2}\|[1-4][0-9]{3}\|5[0-4][0-9]{2}\|55[0-4][0-9]\|555[0-5]` | _43μs_ | +| `toRegexRange(111, 555)` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _38μs_ | +| `toRegexRange(29, 51)` | `29\|[34][0-9]\|5[01]` | _24μs_ | +| `toRegexRange(31, 877)` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _32μs_ | +| `toRegexRange(5, 5)` | `5` | _8μs_ | +| `toRegexRange(5, 6)` | `5\|6` | _11μs_ | +| `toRegexRange(1, 2)` | `1\|2` | _6μs_ | +| 
`toRegexRange(1, 5)` | `[1-5]` | _15μs_ | +| `toRegexRange(1, 10)` | `[1-9]\|10` | _22μs_ | +| `toRegexRange(1, 100)` | `[1-9]\|[1-9][0-9]\|100` | _25μs_ | +| `toRegexRange(1, 1000)` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _31μs_ | +| `toRegexRange(1, 10000)` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _34μs_ | +| `toRegexRange(1, 100000)` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _36μs_ | +| `toRegexRange(1, 1000000)` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _42μs_ | +| `toRegexRange(1, 10000000)` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _42μs_ | + +## Heads up! + +**Order of arguments** + +When the `min` is larger than the `max`, values will be flipped to create a valid range: + +```js +toRegexRange('51', '29'); +``` + +Is effectively flipped to: + +```js +toRegexRange('29', '51'); +//=> 29|[3-4][0-9]|5[0-1] +``` + +**Steps / increments** + +This library does not support steps (increments). A pr to add support would be welcome. + +## History + +### v2.0.0 - 2017-04-21 + +**New features** + +Adds support for zero-padding! + +### v1.0.0 + +**Optimizations** + +Repeating ranges are now grouped using quantifiers. rocessing time is roughly the same, but the generated regex is much smaller, which should result in faster matching. + +## Attribution + +Inspired by the python library [range-regex](https://github.com/dimka665/range-regex). + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
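+If you only want a quick smoke test before digging into the full suite, a tiny script built on Node's built-in `assert` module is enough. This is an illustrative sketch (the range values are arbitrary), not part of the project's test suite:
+
+```js
+const assert = require('assert');
+const toRegexRange = require('to-regex-range');
+
+// Build an anchored matcher for the inclusive range 29..51.
+const regex = new RegExp(`^${toRegexRange('29', '51')}$`);
+
+assert.ok(regex.test('29'));  // lower bound matches
+assert.ok(regex.test('30'));  // a value inside the range matches
+assert.ok(!regex.test('52')); // a value outside the range does not match
+console.log('smoke test passed');
+```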
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used by micromatch.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") +* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") +* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 63 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [doowb](https://github.com/doowb) | +| 2 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 07, 2019._ \ No newline at end of file diff --git a/node_modules/to-regex-range/index.js b/node_modules/to-regex-range/index.js new file mode 100644 index 0000000..77fbace --- /dev/null +++ b/node_modules/to-regex-range/index.js @@ -0,0 +1,288 @@ +/*! + * to-regex-range + * + * Copyright (c) 2015-present, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +const isNumber = require('is-number'); + +const toRegexRange = (min, max, options) => { + if (isNumber(min) === false) { + throw new TypeError('toRegexRange: expected the first argument to be a number'); + } + + if (max === void 0 || min === max) { + return String(min); + } + + if (isNumber(max) === false) { + throw new TypeError('toRegexRange: expected the second argument to be a number.'); + } + + let opts = { relaxZeros: true, ...options }; + if (typeof opts.strictZeros === 'boolean') { + opts.relaxZeros = opts.strictZeros === false; + } + + let relax = String(opts.relaxZeros); + let shorthand = String(opts.shorthand); + let capture = String(opts.capture); + let wrap = String(opts.wrap); + let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; + + if (toRegexRange.cache.hasOwnProperty(cacheKey)) { + return toRegexRange.cache[cacheKey].result; + } + + let a = Math.min(min, max); + let b = Math.max(min, max); + + if (Math.abs(a - b) === 1) { + let result = min + '|' + max; + if (opts.capture) { + return `(${result})`; + } + if (opts.wrap === false) { + return result; + } + return `(?:${result})`; + } + + let isPadded = hasPadding(min) || hasPadding(max); + let state = { min, max, a, b }; + let positives = []; + let negatives = []; + + if (isPadded) { + state.isPadded = isPadded; + state.maxLen = String(state.max).length; + } + + if (a < 0) { + let newMin = b < 0 ? Math.abs(b) : 1; + negatives = splitToPatterns(newMin, Math.abs(a), state, opts); + a = state.a = 0; + } + + if (b >= 0) { + positives = splitToPatterns(a, b, state, opts); + } + + state.negatives = negatives; + state.positives = positives; + state.result = collatePatterns(negatives, positives, opts); + + if (opts.capture === true) { + state.result = `(${state.result})`; + } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { + state.result = `(?:${state.result})`; + } + + toRegexRange.cache[cacheKey] = state; + return state.result; +}; + +function collatePatterns(neg, pos, options) { + let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; + let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; + let intersected = filterPatterns(neg, pos, '-?', true, options) || []; + let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); + return subpatterns.join('|'); +} + +function splitToRanges(min, max) { + let nines = 1; + let zeros = 1; + + let stop = countNines(min, nines); + let stops = new Set([max]); + + while (min <= stop && stop <= max) { + stops.add(stop); + nines += 1; + stop = countNines(min, nines); + } + + stop = countZeros(max + 1, zeros) - 1; + + while (min < stop && stop <= max) { + stops.add(stop); + zeros += 1; + stop = countZeros(max + 1, zeros) - 1; + } + + stops = [...stops]; + stops.sort(compare); + return stops; +} + +/** + * Convert a range to a regex pattern + * @param {Number} `start` + * @param {Number} `stop` + * @return {String} + */ + +function rangeToPattern(start, stop, options) { + if (start === stop) { + return { pattern: start, count: [], digits: 0 }; + } + + let zipped = zip(start, stop); + let digits = zipped.length; + let pattern = ''; + let count = 0; + + for (let i = 0; i < digits; i++) { + let [startDigit, stopDigit] = zipped[i]; + + if (startDigit === stopDigit) { + pattern += startDigit; + + } else if (startDigit !== '0' || stopDigit !== '9') { + pattern += toCharacterClass(startDigit, stopDigit, options); + + } else { + count++; + } + } + + if (count) { + pattern += 
options.shorthand === true ? '\\d' : '[0-9]'; + } + + return { pattern, count: [count], digits }; +} + +function splitToPatterns(min, max, tok, options) { + let ranges = splitToRanges(min, max); + let tokens = []; + let start = min; + let prev; + + for (let i = 0; i < ranges.length; i++) { + let max = ranges[i]; + let obj = rangeToPattern(String(start), String(max), options); + let zeros = ''; + + if (!tok.isPadded && prev && prev.pattern === obj.pattern) { + if (prev.count.length > 1) { + prev.count.pop(); + } + + prev.count.push(obj.count[0]); + prev.string = prev.pattern + toQuantifier(prev.count); + start = max + 1; + continue; + } + + if (tok.isPadded) { + zeros = padZeros(max, tok, options); + } + + obj.string = zeros + obj.pattern + toQuantifier(obj.count); + tokens.push(obj); + start = max + 1; + prev = obj; + } + + return tokens; +} + +function filterPatterns(arr, comparison, prefix, intersection, options) { + let result = []; + + for (let ele of arr) { + let { string } = ele; + + // only push if _both_ are negative... + if (!intersection && !contains(comparison, 'string', string)) { + result.push(prefix + string); + } + + // or _both_ are positive + if (intersection && contains(comparison, 'string', string)) { + result.push(prefix + string); + } + } + return result; +} + +/** + * Zip strings + */ + +function zip(a, b) { + let arr = []; + for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); + return arr; +} + +function compare(a, b) { + return a > b ? 1 : b > a ? -1 : 0; +} + +function contains(arr, key, val) { + return arr.some(ele => ele[key] === val); +} + +function countNines(min, len) { + return Number(String(min).slice(0, -len) + '9'.repeat(len)); +} + +function countZeros(integer, zeros) { + return integer - (integer % Math.pow(10, zeros)); +} + +function toQuantifier(digits) { + let [start = 0, stop = ''] = digits; + if (stop || start > 1) { + return `{${start + (stop ? ',' + stop : '')}}`; + } + return ''; +} + +function toCharacterClass(a, b, options) { + return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; +} + +function hasPadding(str) { + return /^-?(0+)\d/.test(str); +} + +function padZeros(value, tok, options) { + if (!tok.isPadded) { + return value; + } + + let diff = Math.abs(tok.maxLen - String(value).length); + let relax = options.relaxZeros !== false; + + switch (diff) { + case 0: + return ''; + case 1: + return relax ? '0?' : '0'; + case 2: + return relax ? '0{0,2}' : '00'; + default: { + return relax ? `0{0,${diff}}` : `0{${diff}}`; + } + } +} + +/** + * Cache + */ + +toRegexRange.cache = {}; +toRegexRange.clearCache = () => (toRegexRange.cache = {}); + +/** + * Expose `toRegexRange` + */ + +module.exports = toRegexRange; diff --git a/node_modules/to-regex-range/package.json b/node_modules/to-regex-range/package.json new file mode 100644 index 0000000..4ef194f --- /dev/null +++ b/node_modules/to-regex-range/package.json @@ -0,0 +1,88 @@ +{ + "name": "to-regex-range", + "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", + "version": "5.0.1", + "homepage": "https://github.com/micromatch/to-regex-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "micromatch/to-regex-range", + "bugs": { + "url": "https://github.com/micromatch/to-regex-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-number": "^7.0.0" + }, + "devDependencies": { + "fill-range": "^6.0.0", + "gulp-format-md": "^2.0.0", + "mocha": "^6.0.2", + "text-table": "^0.2.0", + "time-diff": "^0.3.1" + }, + "keywords": [ + "bash", + "date", + "expand", + "expansion", + "expression", + "glob", + "match", + "match date", + "match number", + "match numbers", + "match year", + "matches", + "matching", + "number", + "numbers", + "numerical", + "range", + "ranges", + "regex", + "regexp", + "regular", + "regular expression", + "sequence" + ], + "verb": { + "layout": "default", + "toc": false, + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "helpers": { + "examples": { + "displayName": "examples" + } + }, + "related": { + "list": [ + "expand-range", + "fill-range", + "micromatch", + "repeat-element", + "repeat-string" + ] + } + } +} diff --git a/node_modules/type-fest/index.d.ts b/node_modules/type-fest/index.d.ts new file mode 100644 index 0000000..ee20662 --- /dev/null +++ b/node_modules/type-fest/index.d.ts @@ -0,0 +1,95 @@ +// Basic +export * from './source/primitive'; +export * from './source/typed-array'; +export * from './source/basic'; +export * from './source/observable-like'; + +// Utilities +export {Except} from './source/except'; +export {Mutable} from './source/mutable'; +export {Writable} from './source/writable'; +export {Merge} from './source/merge'; +export {MergeExclusive} from './source/merge-exclusive'; +export {RequireAtLeastOne} from './source/require-at-least-one'; +export {RequireExactlyOne} from './source/require-exactly-one'; +export {RequireAllOrNone} from './source/require-all-or-none'; +export {RemoveIndexSignature} from './source/remove-index-signature'; +export {PartialDeep, PartialDeepOptions} from './source/partial-deep'; +export {PartialOnUndefinedDeep, PartialOnUndefinedDeepOptions} from './source/partial-on-undefined-deep'; +export {ReadonlyDeep} from './source/readonly-deep'; +export {LiteralUnion} from './source/literal-union'; +export {Promisable} from './source/promisable'; +export {Opaque, UnwrapOpaque} from './source/opaque'; +export {InvariantOf} from './source/invariant-of'; +export {SetOptional} from './source/set-optional'; +export {SetRequired} from './source/set-required'; +export {SetNonNullable} from './source/set-non-nullable'; +export {ValueOf} from './source/value-of'; +export {PromiseValue} from './source/promise-value'; +export {AsyncReturnType} from './source/async-return-type'; +export {ConditionalExcept} from './source/conditional-except'; +export {ConditionalKeys} from './source/conditional-keys'; +export {ConditionalPick} from './source/conditional-pick'; +export {UnionToIntersection} from './source/union-to-intersection'; +export {Stringified} from './source/stringified'; +export {FixedLengthArray} from './source/fixed-length-array'; +export {MultidimensionalArray} from 
'./source/multidimensional-array'; +export {MultidimensionalReadonlyArray} from './source/multidimensional-readonly-array'; +export {IterableElement} from './source/iterable-element'; +export {Entry} from './source/entry'; +export {Entries} from './source/entries'; +export {SetReturnType} from './source/set-return-type'; +export {Asyncify} from './source/asyncify'; +export {Simplify, SimplifyOptions} from './source/simplify'; +export {Jsonify} from './source/jsonify'; +export {Schema} from './source/schema'; +export {LiteralToPrimitive} from './source/literal-to-primitive'; +export { + PositiveInfinity, + NegativeInfinity, + Finite, + Integer, + Float, + NegativeFloat, + Negative, + NonNegative, + NegativeInteger, + NonNegativeInteger, +} from './source/numeric'; +export {StringKeyOf} from './source/string-key-of'; +export {Exact} from './source/exact'; +export {ReadonlyTuple} from './source/readonly-tuple'; +export {OptionalKeysOf} from './source/optional-keys-of'; +export {HasOptionalKeys} from './source/has-optional-keys'; +export {RequiredKeysOf} from './source/required-keys-of'; +export {HasRequiredKeys} from './source/has-required-keys'; +export {Spread} from './source/spread'; + +// Template literal types +export {CamelCase} from './source/camel-case'; +export {CamelCasedProperties} from './source/camel-cased-properties'; +export {CamelCasedPropertiesDeep} from './source/camel-cased-properties-deep'; +export {KebabCase} from './source/kebab-case'; +export {KebabCasedProperties} from './source/kebab-cased-properties'; +export {KebabCasedPropertiesDeep} from './source/kebab-cased-properties-deep'; +export {PascalCase} from './source/pascal-case'; +export {PascalCasedProperties} from './source/pascal-cased-properties'; +export {PascalCasedPropertiesDeep} from './source/pascal-cased-properties-deep'; +export {SnakeCase} from './source/snake-case'; +export {SnakeCasedProperties} from './source/snake-cased-properties'; +export {SnakeCasedPropertiesDeep} from './source/snake-cased-properties-deep'; +export {ScreamingSnakeCase} from './source/screaming-snake-case'; +export {DelimiterCase} from './source/delimiter-case'; +export {DelimiterCasedProperties} from './source/delimiter-cased-properties'; +export {DelimiterCasedPropertiesDeep} from './source/delimiter-cased-properties-deep'; +export {Join} from './source/join'; +export {Split} from './source/split'; +export {Trim} from './source/trim'; +export {Replace} from './source/replace'; +export {Includes} from './source/includes'; +export {Get} from './source/get'; +export {LastArrayElement} from './source/last-array-element'; + +// Miscellaneous +export {PackageJson} from './source/package-json'; +export {TsConfigJson} from './source/tsconfig-json'; diff --git a/node_modules/type-fest/package.json b/node_modules/type-fest/package.json new file mode 100644 index 0000000..319558a --- /dev/null +++ b/node_modules/type-fest/package.json @@ -0,0 +1,52 @@ +{ + "name": "type-fest", + "version": "2.19.0", + "description": "A collection of essential TypeScript types", + "license": "(MIT OR CC0-1.0)", + "repository": "sindresorhus/type-fest", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=12.20" + }, + "scripts": { + "test": "xo && tsd && tsc && node script/test/source-files-extension.js" + }, + "files": [ + "index.d.ts", + "source" + ], + "keywords": [ + "typescript", + "ts", + "types", + 
"utility", + "util", + "utilities", + "omit", + "merge", + "json" + ], + "devDependencies": { + "@sindresorhus/tsconfig": "~0.7.0", + "expect-type": "^0.13.0", + "tsd": "^0.20.0", + "typescript": "^4.6.3", + "xo": "^0.43.0" + }, + "types": "./index.d.ts", + "xo": { + "rules": { + "@typescript-eslint/ban-types": "off", + "@typescript-eslint/indent": "off", + "node/no-unsupported-features/es-builtins": "off", + "import/extensions": "off", + "@typescript-eslint/no-redeclare": "off", + "@typescript-eslint/no-confusing-void-expression": "off" + } + } +} diff --git a/node_modules/type-fest/readme.md b/node_modules/type-fest/readme.md new file mode 100644 index 0000000..0d310aa --- /dev/null +++ b/node_modules/type-fest/readme.md @@ -0,0 +1,905 @@ +
+# type-fest
+
+> A collection of essential TypeScript types
+
+[![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://giphy.com/gifs/illustration-rainbow-unicorn-26AHG5KGFxSkUWw1i)
+[![npm dependents](https://badgen.net/npm/dependents/type-fest)](https://www.npmjs.com/package/type-fest?activeTab=dependents)
+[![npm downloads](https://badgen.net/npm/dt/type-fest)](https://www.npmjs.com/package/type-fest)
+[![Docs](https://paka.dev/badges/v0/cute.svg)](https://paka.dev/npm/type-fest)
+
+Many of the types here should have been built-in. You can help by suggesting some of them to the [TypeScript project](https://github.com/Microsoft/TypeScript/blob/main/CONTRIBUTING.md).
+
+Either add this package as a dependency or copy-paste the needed types. No credit required. 👌
+
+PR welcome for additional commonly needed types and docs improvements. Read the [contributing guidelines](.github/contributing.md) first.
+
+**Help wanted with reviewing [proposals](https://github.com/sindresorhus/type-fest/issues) and [pull requests](https://github.com/sindresorhus/type-fest/pulls).**
+
+## Install
+
+```sh
+npm install type-fest
+```
+
+*Requires TypeScript >=4.2*
+
+## Usage
+
+```ts
+import type {Except} from 'type-fest';
+
+type Foo = {
+	unicorn: string;
+	rainbow: boolean;
+};
+
+type FooWithoutRainbow = Except<Foo, 'rainbow'>;
+//=> {unicorn: string}
+```
+
+## API
+
+Click the type names for complete docs.
+
+### Basic
+
+- [`Primitive`](source/primitive.d.ts) - Matches any [primitive value](https://developer.mozilla.org/en-US/docs/Glossary/Primitive).
+- [`Class`](source/basic.d.ts) - Matches a [`class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes).
+- [`Constructor`](source/basic.d.ts) - Matches a [`class` constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes).
+- [`TypedArray`](source/typed-array.d.ts) - Matches any [typed array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray), like `Uint8Array` or `Float64Array`.
+- [`ObservableLike`](source/observable-like.d.ts) - Matches a value that is like an [Observable](https://github.com/tc39/proposal-observable).
+
+### Utilities
+
+- [`Except`](source/except.d.ts) - Create a type from an object type without certain keys. This is a stricter version of [`Omit`](https://www.typescriptlang.org/docs/handbook/utility-types.html#omittype-keys).
+- [`Writable`](source/writable.d.ts) - Create a type that strips `readonly` from all or some of an object's keys. The inverse of `Readonly<T>`. Formerly named `Mutable`.
+- [`Merge`](source/merge.d.ts) - Merge two types into a new type. Keys of the second type override keys of the first type.
+- [`MergeExclusive`](source/merge-exclusive.d.ts) - Create a type that has mutually exclusive keys.
+- [`RequireAtLeastOne`](source/require-at-least-one.d.ts) - Create a type that requires at least one of the given keys.
+- [`RequireExactlyOne`](source/require-exactly-one.d.ts) - Create a type that requires exactly a single key of the given keys and disallows more.
+- [`RequireAllOrNone`](source/require-all-or-none.d.ts) - Create a type that requires all of the given keys or none of the given keys.
+- [`RemoveIndexSignature`](source/remove-index-signature.d.ts) - Create a type that only has explicitly defined properties, absent of any index signatures.
+- [`PartialDeep`](source/partial-deep.d.ts) - Create a deeply optional version of another type. Use [`Partial`](https://www.typescriptlang.org/docs/handbook/utility-types.html#partialtype) if you only need one level deep. 
+- [`PartialOnUndefinedDeep`](source/partial-on-undefined-deep.d.ts) - Create a deep version of another type where all keys accepting `undefined` type are set to optional. +- [`ReadonlyDeep`](source/readonly-deep.d.ts) - Create a deeply immutable version of an `object`/`Map`/`Set`/`Array` type. Use [`Readonly`](https://www.typescriptlang.org/docs/handbook/utility-types.html#readonlytype) if you only need one level deep. +- [`LiteralUnion`](source/literal-union.d.ts) - Create a union type by combining primitive types and literal types without sacrificing auto-completion in IDEs for the literal type part of the union. Workaround for [Microsoft/TypeScript#29729](https://github.com/Microsoft/TypeScript/issues/29729). +- [`Opaque`](source/opaque.d.ts) - Create an [opaque type](https://codemix.com/opaque-types-in-javascript/). +- [`UnwrapOpaque`](source/opaque.d.ts) - Revert an [opaque type](https://codemix.com/opaque-types-in-javascript/) back to its original type. +- [`InvariantOf`](source/invariant-of.d.ts) - Create an [invariant type](https://basarat.gitbook.io/typescript/type-system/type-compatibility#footnote-invariance), which is a type that does not accept supertypes and subtypes. +- [`SetOptional`](source/set-optional.d.ts) - Create a type that makes the given keys optional. +- [`SetRequired`](source/set-required.d.ts) - Create a type that makes the given keys required. +- [`SetNonNullable`](source/set-non-nullable.d.ts) - Create a type that makes the given keys non-nullable. +- [`ValueOf`](source/value-of.d.ts) - Create a union of the given object's values, and optionally specify which keys to get the values from. +- [`ConditionalKeys`](source/conditional-keys.d.ts) - Extract keys from a shape where values extend the given `Condition` type. +- [`ConditionalPick`](source/conditional-pick.d.ts) - Like `Pick` except it selects properties from a shape where the values extend the given `Condition` type. +- [`ConditionalExcept`](source/conditional-except.d.ts) - Like `Omit` except it removes properties from a shape where the values extend the given `Condition` type. +- [`UnionToIntersection`](source/union-to-intersection.d.ts) - Convert a union type to an intersection type. +- [`LiteralToPrimitive`](source/literal-to-primitive.d.ts) - Convert a [literal type](https://www.typescriptlang.org/docs/handbook/2/everyday-types.html#literal-types) to the [primitive type](source/primitive.d.ts) it belongs to. +- [`Stringified`](source/stringified.d.ts) - Create a type with the keys of the given type changed to `string` type. +- [`IterableElement`](source/iterable-element.d.ts) - Get the element type of an `Iterable`/`AsyncIterable`. For example, an array or a generator. +- [`Entry`](source/entry.d.ts) - Create a type that represents the type of an entry of a collection. +- [`Entries`](source/entries.d.ts) - Create a type that represents the type of the entries of a collection. +- [`SetReturnType`](source/set-return-type.d.ts) - Create a function type with a return type of your choice and the same parameters as the given function type. +- [`Simplify`](source/simplify.d.ts) - Useful to flatten the type output to improve type hints shown in editors. And also to transform an interface into a type to aide with assignability. +- [`Get`](source/get.d.ts) - Get a deeply-nested property from an object using a key path, like [Lodash's `.get()`](https://lodash.com/docs/latest#get) function. +- [`StringKeyOf`](source/string-key-of.d.ts) - Get keys of the given type as strings. 
+- [`Schema`](source/schema.d.ts) - Create a deep version of another object type where property values are recursively replaced into a given value type. +- [`Exact`](source/exact.d.ts) - Create a type that does not allow extra properties. +- [`OptionalKeysOf`](source/optional-keys-of.d.ts) - Extract all optional keys from the given type. +- [`HasOptionalKeys`](source/has-optional-keys.d.ts) - Create a `true`/`false` type depending on whether the given type has any optional fields. +- [`RequiredKeysOf`](source/required-keys-of.d.ts) - Extract all required keys from the given type. +- [`HasRequiredKeys`](source/has-required-keys.d.ts) - Create a `true`/`false` type depending on whether the given type has any required fields. +- [`Spread`](source/spread.d.ts) - Mimic the type inferred by TypeScript when merging two objects or two arrays/tuples using the spread syntax. + +### JSON + +- [`Jsonify`](source/jsonify.d.ts) - Transform a type to one that is assignable to the `JsonValue` type. +- [`JsonPrimitive`](source/basic.d.ts) - Matches a JSON primitive. +- [`JsonObject`](source/basic.d.ts) - Matches a JSON object. +- [`JsonArray`](source/basic.d.ts) - Matches a JSON array. +- [`JsonValue`](source/basic.d.ts) - Matches any valid JSON value. + +### Async + +- [`Promisable`](source/promisable.d.ts) - Create a type that represents either the value or the value wrapped in `PromiseLike`. +- [`AsyncReturnType`](source/async-return-type.d.ts) - Unwrap the return type of a function that returns a `Promise`. +- [`Asyncify`](source/asyncify.d.ts) - Create an async version of the given function type. + +### String + +- [`Trim`](source/trim.d.ts) - Remove leading and trailing spaces from a string. +- [`Split`](source/split.d.ts) - Represents an array of strings split using a given character or character set. +- [`Replace`](source/replace.d.ts) - Represents a string with some or all matches replaced by a replacement. + +### Array + +- [`Includes`](source/includes.d.ts) - Returns a boolean for whether the given array includes the given item. +- [`Join`](source/join.d.ts) - Join an array of strings and/or numbers using the given string as a delimiter. +- [`LastArrayElement`](source/last-array-element.d.ts) - Extracts the type of the last element of an array. +- [`FixedLengthArray`](source/fixed-length-array.d.ts) - Create a type that represents an array of the given type and length. +- [`MultidimensionalArray`](source/multidimensional-array.d.ts) - Create a type that represents a multidimensional array of the given type and dimensions. +- [`MultidimensionalReadonlyArray`](source/multidimensional-readonly-array.d.ts) - Create a type that represents a multidimensional readonly array of the given type and dimensions. +- [`ReadonlyTuple`](source/readonly-tuple.d.ts) - Create a type that represents a read-only tuple of the given type and length. + +### Numeric + +- [`PositiveInfinity`](source/numeric.d.ts) - Matches the hidden `Infinity` type. +- [`NegativeInfinity`](source/numeric.d.ts) - Matches the hidden `-Infinity` type. +- [`Finite`](source/numeric.d.ts) - A finite `number`. +- [`Integer`](source/numeric.d.ts) - A `number` that is an integer. +- [`Float`](source/numeric.d.ts) - A `number` that is not an integer. +- [`NegativeFloat`](source/numeric.d.ts) - A negative (`-∞ < x < 0`) `number` that is not an integer. +- [`Negative`](source/numeric.d.ts) - A negative `number`/`bigint` (`-∞ < x < 0`) +- [`NonNegative`](source/numeric.d.ts) - A non-negative `number`/`bigint` (`0 <= x < ∞`). 
+- [`NegativeInteger`](source/numeric.d.ts) - A negative (`-∞ < x < 0`) `number` that is an integer. +- [`NonNegativeInteger`](source/numeric.d.ts) - A non-negative (`0 <= x < ∞`) `number` that is an integer. + +### Change case + +- [`CamelCase`](source/camel-case.d.ts) - Convert a string literal to camel-case (`fooBar`). +- [`CamelCasedProperties`](source/camel-cased-properties.d.ts) - Convert object properties to camel-case (`fooBar`). +- [`CamelCasedPropertiesDeep`](source/camel-cased-properties-deep.d.ts) - Convert object properties to camel-case recursively (`fooBar`). +- [`KebabCase`](source/kebab-case.d.ts) - Convert a string literal to kebab-case (`foo-bar`). +- [`KebabCasedProperties`](source/kebab-cased-properties.d.ts) - Convert a object properties to kebab-case recursively (`foo-bar`). +- [`KebabCasedPropertiesDeep`](source/kebab-cased-properties-deep.d.ts) - Convert object properties to kebab-case (`foo-bar`). +- [`PascalCase`](source/pascal-case.d.ts) - Converts a string literal to pascal-case (`FooBar`) +- [`PascalCasedProperties`](source/pascal-cased-properties.d.ts) - Converts object properties to pascal-case (`FooBar`) +- [`PascalCasedPropertiesDeep`](source/pascal-cased-properties-deep.d.ts) - Converts object properties to pascal-case (`FooBar`) +- [`SnakeCase`](source/snake-case.d.ts) - Convert a string literal to snake-case (`foo_bar`). +- [`SnakeCasedProperties`](source/snake-cased-properties-deep.d.ts) - Convert object properties to snake-case (`foo_bar`). +- [`SnakeCasedPropertiesDeep`](source/snake-cased-properties-deep.d.ts) - Convert object properties to snake-case recursively (`foo_bar`). +- [`ScreamingSnakeCase`](source/screaming-snake-case.d.ts) - Convert a string literal to screaming-snake-case (`FOO_BAR`). +- [`DelimiterCase`](source/delimiter-case.d.ts) - Convert a string literal to a custom string delimiter casing. +- [`DelimiterCasedProperties`](source/delimiter-cased-properties.d.ts) - Convert object properties to a custom string delimiter casing. +- [`DelimiterCasedPropertiesDeep`](source/delimiter-cased-properties-deep.d.ts) - Convert object properties to a custom string delimiter casing recursively. + +### Miscellaneous + +- [`PackageJson`](source/package-json.d.ts) - Type for [npm's `package.json` file](https://docs.npmjs.com/creating-a-package-json-file). It also includes support for [TypeScript Declaration Files](https://www.typescriptlang.org/docs/handbook/declaration-files/publishing.html) and [Yarn Workspaces](https://classic.yarnpkg.com/lang/en/docs/workspaces/). +- [`TsConfigJson`](source/tsconfig-json.d.ts) - Type for [TypeScript's `tsconfig.json` file](https://www.typescriptlang.org/docs/handbook/tsconfig-json.html) (TypeScript 4.4). + +## Declined types + +*If we decline a type addition, we will make sure to document the better solution here.* + +- [`Diff` and `Spread`](https://github.com/sindresorhus/type-fest/pull/7) - The pull request author didn't provide any real-world use-cases and the PR went stale. If you think this type is useful, provide some real-world use-cases and we might reconsider. +- [`Dictionary`](https://github.com/sindresorhus/type-fest/issues/33) - You only save a few characters (`Dictionary` vs `Record`) from [`Record`](https://www.typescriptlang.org/docs/handbook/utility-types.html#recordkeys-type), which is more flexible and well-known. Also, you shouldn't use an object as a dictionary. We have `Map` in JavaScript now. 
+- [`ExtractProperties` and `ExtractMethods`](https://github.com/sindresorhus/type-fest/pull/4) - The types violate the single responsibility principle. Instead, refine your types into more granular type hierarchies. +- [`Url2Json`](https://github.com/sindresorhus/type-fest/pull/262) - Inferring search parameters from a URL string is a cute idea, but not very useful in practice, since search parameters are usually dynamic and defined separately. +- [`Nullish`](https://github.com/sindresorhus/type-fest/pull/318) - The type only saves a couple of characters, not everyone knows what "nullish" means, and I'm also trying to [get away from `null`](https://github.com/sindresorhus/meta/discussions/7). +- [`TitleCase`](https://github.com/sindresorhus/type-fest/pull/303) - It's not solving a common need and is a better fit for a separate package. +- [`ExtendOr` and `ExtendAnd`](https://github.com/sindresorhus/type-fest/pull/247) - The benefits don't outweigh having to learn what they mean. +- [`PackageJsonExtras`](https://github.com/sindresorhus/type-fest/issues/371) - There are too many possible configurations that can be put into `package.json`. If you would like to extend `PackageJson` to support an additional configuration in your project, please see the *Extending existing types* section below. + +## Alternative type names + +*If you know one of our types by a different name, add it here for discovery.* + +- `PartialBy` - See [`SetOptional`](https://github.com/sindresorhus/type-fest/blob/main/source/set-optional.d.ts) +- `RecordDeep`- See [`Schema`](https://github.com/sindresorhus/type-fest/blob/main/source/schema.d.ts) + +## Tips + +### Extending existing types + +- [`PackageJson`](source/package-json.d.ts) - There are a lot of tools that place extra configurations inside the `package.json` file. You can extend `PackageJson` to support these additional configurations. +
+ + Example + + + [Playground](https://www.typescriptlang.org/play?#code/JYWwDg9gTgLgBDAnmApnA3gBQIYGMDW2A5igFIDOEAdnNuXAEJ0o4HFmVUC+cAZlBBBwA5ElQBaXinIxhAbgCwAKFCRYCZGnQAZYFRgooPfoJHSANntmKlysWlaESFanAC8jZo-YuaAMgwLKwBhal5gIgB+AC44XX1DADpQqnCiLhsgA) + + ```ts + import type {PackageJson as BasePackageJson} from 'type-fest'; + import type {Linter} from 'eslint'; + + type PackageJson = BasePackageJson & {eslintConfig?: Linter.Config}; + ``` +
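+
+  As a rough usage sketch of the extended type, you could then read a parsed `package.json` and get a typed `eslintConfig` field. The `node:fs` call and the field access below are illustrative assumptions, not part of type-fest:
+
+  ```ts
+  import {readFileSync} from 'node:fs';
+  import type {Linter} from 'eslint';
+  import type {PackageJson as BasePackageJson} from 'type-fest';
+
+  type PackageJson = BasePackageJson & {eslintConfig?: Linter.Config};
+
+  // Parse the manifest and treat it as the extended type.
+  const pkg = JSON.parse(readFileSync('package.json', 'utf8')) as PackageJson;
+
+  // `pkg.eslintConfig` is now typed as `Linter.Config | undefined`.
+  console.log(pkg.eslintConfig?.rules);
+  ```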
+ +### Related + +- [typed-query-selector](https://github.com/g-plane/typed-query-selector) - Enhances `document.querySelector` and `document.querySelectorAll` with a template literal type that matches element types returned from an HTML element query selector. +- [`Linter.Config`](https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/eslint/index.d.ts) - Definitions for the [ESLint configuration schema](https://eslint.org/docs/user-guide/configuring/language-options). + +### Built-in types + +There are many advanced types most users don't know about. + +- [`Partial`](https://www.typescriptlang.org/docs/handbook/utility-types.html#partialtype) - Make all properties in `T` optional. +
+ + Example + + + [Playground](https://www.typescriptlang.org/play/#code/JYOwLgpgTgZghgYwgAgHIHsAmEDC6QzADmyA3gLABQyycADnanALYQBcyAzmFKEQNxUaddFDAcQAV2YAjaIMoBfKlQQAbOJ05osEAIIMAQpOBrsUMkOR1eANziRkCfISKSoD4Pg4ZseAsTIALyW1DS0DEysHADkvvoMMQA0VsKi4sgAzAAMuVaKClY2wPaOknSYDrguADwA0sgQAB6QIJjaANYQAJ7oMDp+LsQAfAAUXd0cdUnI9mo+uv6uANp1ALoAlKHhyGAAFsCcAHTOAW4eYF4gyxNrwbNwago0ypRWp66jH8QcAApwYmAjxq8SWIy2FDCNDA3ToKFBQyIdR69wmfQG1TOhShyBgomQX3w3GQE2Q6IA8jIAFYQBBgI4TTiEs5bTQYsFInrLTbbHZOIlgZDlSqQABqj0kKBC3yINx6a2xfOQwH6o2FVXFaklwSCIUkbQghBAEEwENSfNOlykEGefNe5uhB2O6sgS3GPRmLogmslG1tLxUOKgEDA7hAuydtteryAA) + + ```ts + interface NodeConfig { + appName: string; + port: number; + } + + class NodeAppBuilder { + private configuration: NodeConfig = { + appName: 'NodeApp', + port: 3000 + }; + + private updateConfig(key: Key, value: NodeConfig[Key]) { + this.configuration[key] = value; + } + + config(config: Partial) { + type NodeConfigKey = keyof NodeConfig; + + for (const key of Object.keys(config) as NodeConfigKey[]) { + const updateValue = config[key]; + + if (updateValue === undefined) { + continue; + } + + this.updateConfig(key, updateValue); + } + + return this; + } + } + + // `Partial`` allows us to provide only a part of the + // NodeConfig interface. + new NodeAppBuilder().config({appName: 'ToDoApp'}); + ``` +
+ +- [`Required`](https://www.typescriptlang.org/docs/handbook/utility-types.html#requiredtype) - Make all properties in `T` required. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4SwOwFwUwJwGYEMDGNgGED21VQGJZwC2wA3gFCjXAzFJgA2A-AFzADOUckA5gNxUaIYjA4ckvGG07c+g6gF8KQkAgCuEFFDA5O6gEbEwUbLm2ESwABQIixACJIoSdgCUYAR3Vg4MACYAPGYuFvYAfACU5Ko0APRxwADKMBD+wFAAFuh2Vv7OSBlYGdmc8ABu8LHKsRyGxqY4oQT21pTCIHQMjOwA5DAAHgACxAAOjDAAdChYxL0ANLHUouKSMH0AEmAAhJhY6ozpAJ77GTCMjMCiV0ToSAb7UJPPC9WRgrEJwAAqR6MwSRQPFGUFocDgRHYxnEfGAowh-zgUCOwF6KwkUl6tXqJhCeEsxDaS1AXSYfUGI3GUxmc0WSneQA) + + ```ts + interface ContactForm { + email?: string; + message?: string; + } + + function submitContactForm(formData: Required) { + // Send the form data to the server. + } + + submitContactForm({ + email: 'ex@mple.com', + message: 'Hi! Could you tell me more about…', + }); + + // TypeScript error: missing property 'message' + submitContactForm({ + email: 'ex@mple.com', + }); + ``` +
+ +- [`Readonly`](https://www.typescriptlang.org/docs/handbook/utility-types.html#readonlytype) - Make all properties in `T` readonly. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4UwOwVwW2AZA9gc3mAbmANsA3gKFCOAHkAzMgGkOJABEwAjKZa2kAUQCcvEu32AMQCGAF2FYBIAL4BufDRABLCKLBcywgMZgEKZOoDCiCGSXI8i4hGEwwALmABnUVxXJ57YFgzZHSVF8sT1BpBSItLGEnJz1kAy5LLy0TM2RHACUwYQATEywATwAeAITjU3MAPnkrCJMXLigtUT4AClxgGztKbyDgaX99I1TzAEokr1BRAAslJwA6FIqLAF48TtswHp9MHDla9hJGACswZvmyLjAwAC8wVpm5xZHkUZDaMKIwqyWXYCW0oN4sNlsA1h0ug5gAByACyBQAggAHJHQ7ZBIFoXbzBjMCz7OoQP5YIaJNYQMAAdziCVaALGNSIAHomcAACoFJFgADKWjcSNEwG4vC4ji0wggEEQguiTnMEGALWAV1yAFp8gVgEjeFyuKICvMrCTgVxnst5jtsGC4ljsPNhXxGaAWcAAOq6YRXYDCRg+RWIcA5JSC+kWdCepQ+v3RYCU3RInzRMCGwlpC19NYBW1Ye08R1AA) + + ```ts + enum LogLevel { + Off, + Debug, + Error, + Fatal + }; + + interface LoggerConfig { + name: string; + level: LogLevel; + } + + class Logger { + config: Readonly; + + constructor({name, level}: LoggerConfig) { + this.config = {name, level}; + Object.freeze(this.config); + } + } + + const config: LoggerConfig = { + name: 'MyApp', + level: LogLevel.Debug + }; + + const logger = new Logger(config); + + // TypeScript Error: cannot assign to read-only property. + logger.config.level = LogLevel.Error; + + // We are able to edit config variable as we please. + config.level = LogLevel.Error; + ``` +
+ +- [`Pick`](https://www.typescriptlang.org/docs/handbook/utility-types.html#picktype-keys) - From `T`, pick a set of properties whose keys are in the union `K`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4SwOwFwUwJwGYEMDGNgEE5TCgNugN4BQoZwOUBAXMAM5RyQDmA3KeSFABYCuAtgCMISMHloMmENh04oA9tBjQJjFuzIBfYrOAB6PcADCcGElh1gEGAHcKATwAO6ebyjB5CTNlwFwSxFR0BX5HeToYABNgBDh5fm8cfBg6AHIKG3ldA2BHOOcfFNpUygJ0pAhokr4hETFUgDpswywkggAFUwA3MFtgAF5gQgowKhhVKTYKGuFRcXo1aVZgbTIoJ3RW3xhOmB6+wfbcAGsAHi3kgBpgEtGy4AAfG54BWfqAPnZm4AAlZUj4MAkMA8GAGB4vEgfMlLLw6CwPBA8PYRmMgZVgAC6CgmI4cIommQELwICh8RBgKZKvALh1ur0bHQABR5PYMui0Wk7em2ADaAF0AJS0AASABUALIAGQAogR+Mp3CROCAFBBwVC2ikBpj5CgBIqGjizLA5TAFdAmalImAuqlBRoVQh5HBgEy1eDWfs7J5cjzGYKhroVfpDEhHM4MV6GRR5NN0JrtnRg6BVirTFBeHAKYmYY6QNpdB73LmCJZBlSAXAubtvczeSmQMNSuMbmKNgBlHFgPEUNwusBIPAAQlS1xetTmxT0SDoESgdD0C4aACtHMwxytLrohawgA) + + ```ts + interface Article { + title: string; + thumbnail: string; + content: string; + } + + // Creates new type out of the `Article` interface composed + // from the Articles' two properties: `title` and `thumbnail`. + // `ArticlePreview = {title: string; thumbnail: string}` + type ArticlePreview = Pick; + + // Render a list of articles using only title and description. + function renderArticlePreviews(previews: ArticlePreview[]): HTMLElement { + const articles = document.createElement('div'); + + for (const preview of previews) { + // Append preview to the articles. + } + + return articles; + } + + const articles = renderArticlePreviews([ + { + title: 'TypeScript tutorial!', + thumbnail: '/assets/ts.jpg' + } + ]); + ``` +
+ +- [`Record`](https://www.typescriptlang.org/docs/handbook/utility-types.html#recordkeys-type) - Construct a type with a set of properties `K` of type `T`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4ejYAUHsGcCWAXBMB2dgwGbAKYC2ADgDYwCeeemCaWArgE7ADGMxAhmuQHQBQoYEnJE8wALKEARnkaxEKdMAC8wAOS0kstGuAAfdQBM8ANzxlRjXQbVaWACwC0JPB0NqA3HwGgIwAJJoWozYHCxixnAsjAhStADmwESMMJYo1Fi4HMCIaPEu+MRklHj8gpqyoeHAAKJFFFTAAN4+giDYCIxwSAByHAR4AFw5SDF5Xm2gJBzdfQPD3WPxE5PAlBxdAPLYNQAelgh4aOHDaPQEMowrIAC+3oJ+AMKMrlrAXFhSAFZ4LEhC9g4-0BmA4JBISXgiCkBQABpILrJ5MhUGhYcATGD6Bk4Hh-jNgABrPDkOBlXyQAAq9ngYmJpOAAHcEOCRjAXqwYODfoo6DhakUSph+Uh7GI4P0xER4Cj0OSQGwMP8tP1hgAlX7swwAHgRl2RvIANALSA08ABtAC6AD4VM1Wm0Kow0MMrYaHYJjGYLLJXZb3at1HYnC43Go-QHQDcvA6-JsmEJXARgCDgMYWAhjIYhDAU+YiMAAFIwex0ZmilMITCGF79TLAGRsAgJYAAZRwSEZGzEABFTOZUrJ5Yn+jwnWgeER6HB7AAKJrADpdXqS4ZqYultTG6azVfqHswPBbtauLY7fayQ7HIbAAAMwBuAEoYw9IBq2Ixs9h2eFMOQYPQObALQKJgggABeYhghCIpikkKRpOQRIknAsZUiIeCttECBEP8NSMCkjDDAARMGziuIYxHwYOjDCMBmDNnAuTxA6irdCOBB1Lh5Dqpqn66tISIykawBnOCtqqC0gbjqc9DgpGkxegOliyfJDrRkAA) + + ```ts + // Positions of employees in our company. + type MemberPosition = 'intern' | 'developer' | 'tech-lead'; + + // Interface describing properties of a single employee. + interface Employee { + firstName: string; + lastName: string; + yearsOfExperience: number; + } + + // Create an object that has all possible `MemberPosition` values set as keys. + // Those keys will store a collection of Employees of the same position. + const team: Record = { + intern: [], + developer: [], + 'tech-lead': [], + }; + + // Our team has decided to help John with his dream of becoming Software Developer. + team.intern.push({ + firstName: 'John', + lastName: 'Doe', + yearsOfExperience: 0 + }); + + // `Record` forces you to initialize all of the property keys. + // TypeScript Error: "tech-lead" property is missing + const teamEmpty: Record = { + intern: null, + developer: null, + }; + ``` +
+ +- [`Exclude`](https://www.typescriptlang.org/docs/handbook/utility-types.html#excludetype-excludedunion) - Exclude from `T` those types that are assignable to `U`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/JYOwLgpgTgZghgYwgAgMrQG7QMIHsQzADmyA3gFDLIAOuUYAXMiAK4A2byAPsgM5hRQJHqwC2AI2gBucgF9y5MAE9qKAEoQAjiwj8AEnBAATNtGQBeZAAooWphu26wAGmS3e93bRC8IASgsAPmRDJRlyAHoI5ABRAA8ENhYjFFYOZGVVZBgoXFFkAAM0zh5+QRBhZhYJaAKAOkjogEkQZAQ4X2QAdwALCFbaemRgXmQtFjhOMFwq9K6ULuB0lk6U+HYwZAxJnQaYFhAEMGB8ZCIIMAAFOjAANR2IK0HGWISklIAedCgsKDwCYgAbQA5M9gQBdVzFQJ+JhiSRQMiUYYwayZCC4VHPCzmSzAspCYEBWxgFhQAZwKC+FpgJ43VwARgADH4ZFQSWSBjcZPJyPtDsdTvxKWBvr8rD1DCZoJ5HPopaYoK4EPhCEQmGKcKriLCtrhgEYkVQVT5Nr4fmZLLZtMBbFZgT0wGBqES6ghbHBIJqoBKFdBWQpjfh+DQbhY2tqiHVsbjLMVkAB+ZAAZiZaeQTHOVxu9ySjxNaujNwDVHNvzqbBGkBAdPoAfkQA) + + ```ts + interface ServerConfig { + port: null | string | number; + } + + type RequestHandler = (request: Request, response: Response) => void; + + // Exclude `null` type from `null | string | number`. + // In case the port is equal to `null`, we will use default value. + function getPortValue(port: Exclude): number { + if (typeof port === 'string') { + return parseInt(port, 10); + } + + return port; + } + + function startServer(handler: RequestHandler, config: ServerConfig): void { + const server = require('http').createServer(handler); + + const port = config.port === null ? 3000 : getPortValue(config.port); + server.listen(port); + } + ``` +
+ +- [`Extract`](https://www.typescriptlang.org/docs/handbook/utility-types.html#extracttype-union) - Extract from `T` those types that are assignable to `U`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/CYUwxgNghgTiAEAzArgOzAFwJYHtXzSwEdkQBJYACgEoAueVZAWwCMQYBuAKDDwGcM8MgBF4AXngBlAJ6scESgHIRi6ty5ZUGdoihgEABXZ888AN5d48ANoiAuvUat23K6ihMQ9ATE0BzV3goPy8GZjZOLgBfLi4Aejj4AEEICBwAdz54MAALKFQQ+BxEeAAHY1NgKAwoIKy0grr4DByEUpgccpgMaXgAaxBerCzi+B9-ZulygDouFHRsU1z8kKMYE1RhaqgAHkt4AHkWACt4EAAPbVRgLLWNgBp9gGlBs8uQa6yAUUuYPQwdgNpKM7nh7mMML4CgA+R5WABqUAgpDeVxuhxO1he0jsXGh8EoOBO9COx3BQPo2PBADckaR6IjkSA6PBqTgsMBzPsicdrEC7OJWXSQNwYvFEgAVTS9JLXODpeDpKBZFg4GCoWa8VACIJykAKiQWKy2YQOAioYikCg0OEMDyhRSy4DyxS24KhAAMjyi6gS8AAwjh5OD0iBFHAkJoEOksC1mnkMJq8gUQKDNttKPlnfrwYp3J5XfBHXqoKpfYkAOI4ansTxaeDADmoRSCCBYAbxhC6TDx6rwYHIRX5bScjA4bLJwoDmDwDkfbA9JMrVMVdM1TN69LgkTgwgkchUahqIA) + + ```ts + declare function uniqueId(): number; + + const ID = Symbol('ID'); + + interface Person { + [ID]: number; + name: string; + age: number; + } + + // Allows changing the person data as long as the property key is of string type. + function changePersonData< + Obj extends Person, + Key extends Extract, + Value extends Obj[Key] + > (obj: Obj, key: Key, value: Value): void { + obj[key] = value; + } + + // Tiny Andrew was born. + const andrew = { + [ID]: uniqueId(), + name: 'Andrew', + age: 0, + }; + + // Cool, we're fine with that. + changePersonData(andrew, 'name', 'Pony'); + + // Goverment didn't like the fact that you wanted to change your identity. + changePersonData(andrew, ID, uniqueId()); + ``` +
+ +- [`NonNullable`](https://www.typescriptlang.org/docs/handbook/utility-types.html#nonnullabletype) - Exclude `null` and `undefined` from `T`. +
+ + Example + + Works with strictNullChecks set to true. + + [Playground](https://typescript-play.js.org/?target=6#code/C4TwDgpgBACg9gJ2AOQK4FsBGEFQLxQDOwCAlgHYDmUAPlORtrnQwDasDcAUFwPQBU-WAEMkUOADMowqAGNWwwoSgATCBIqlgpOOSjAAFsOBRSy1IQgr9cKJlSlW1mZYQA3HFH68u8xcoBlHA8EACEHJ08Aby4oKDBUTFZSWXjEFEYcAEIALihkXTR2YSSIAB54JDQsHAA+blj4xOTUsHSACkMzPKD3HHDHNQQAGjSkPMqMmoQASh7g-oihqBi4uNIpdraxPAI2VhmVxrX9AzMAOm2ppnwoAA4ABifuE4BfKAhWSyOTuK7CS7pao3AhXF5rV48E4ICDAVAIPT-cGQyG+XTEIgLMJLTx7CAAdygvRCA0iCHaMwarhJOIQjUBSHaACJHk8mYdeLwxtdcVAAOSsh58+lXdr7Dlcq7A3n3J4PEUdADMcspUE53OluAIUGVTx46oAKuAIAFZGQwCYAKIIBCILjUxaDHAMnla+iodjcIA) + + ```ts + type PortNumber = string | number | null; + + /** Part of a class definition that is used to build a server */ + class ServerBuilder { + portNumber!: NonNullable; + + port(this: ServerBuilder, port: PortNumber): ServerBuilder { + if (port == null) { + this.portNumber = 8000; + } else { + this.portNumber = port; + } + + return this; + } + } + + const serverBuilder = new ServerBuilder(); + + serverBuilder + .port('8000') // portNumber = '8000' + .port(null) // portNumber = 8000 + .port(3000); // portNumber = 3000 + + // TypeScript error + serverBuilder.portNumber = null; + ``` +
+ +- [`Parameters`](https://www.typescriptlang.org/docs/handbook/utility-types.html#parameterstype) - Obtain the parameters of a function type in a tuple. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/GYVwdgxgLglg9mABAZwBYmMANgUwBQxgAOIUAXIgIZgCeA2gLoCUFAbnDACaIDeAUIkQB6IYgCypSlBxUATrMo1ECsJzgBbLEoipqAc0J7EMKMgDkiHLnU4wp46pwAPHMgB0fAL58+oSLARECEosLAA5ABUYG2QAHgAxJGdpVWREPDdMylk9ZApqemZEAF4APipacrw-CApEgBogkKwAYThwckQwEHUAIxxZJl4BYVEImiIZKF0oZRwiWVdbeygJmThgOYgcGFYcbhqApCJsyhtpWXcR1cnEePBoeDAABVPzgbTixFeFd8uEsClADcIxGiygIFkSEOT3SmTc2VydQeRx+ZxwF2QQ34gkEwDgsnSuFmMBKiAADEDjIhYk1Qm0OlSYABqZnYka4xA1DJZHJYkGc7yCbyeRA+CAIZCzNAYbA4CIAdxg2zJwVCkWirjwMswuEaACYmCCgA) + + ```ts + function shuffle(input: any[]): void { + // Mutate array randomly changing its' elements indexes. + } + + function callNTimes any> (func: Fn, callCount: number) { + // Type that represents the type of the received function parameters. + type FunctionParameters = Parameters; + + return function (...args: FunctionParameters) { + for (let i = 0; i < callCount; i++) { + func(...args); + } + } + } + + const shuffleTwice = callNTimes(shuffle, 2); + ``` +
+ +- [`ConstructorParameters`](https://www.typescriptlang.org/docs/handbook/utility-types.html#constructorparameterstype) - Obtain the parameters of a constructor function type in a tuple. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/MYGwhgzhAECCBOAXAlqApgWQPYBM0mgG8AoaaFRENALmgkXmQDsBzAblOmCycTV4D8teo1YdO3JiICuwRFngAKClWENmLAJRFOZRAAtkEAHQq00ALzlklNBzIBfYk+KhIMAJJTEYJsDQAwmDA+mgAPAAq0GgAHnxMODCKTGgA7tCKxllg8CwQtL4AngDaALraFgB80EWa1SRkAA6MAG5gfNAB4FABPDJyCrQR9tDNyG0dwMGhtBhgjWEiGgA00F70vv4RhY3hEZXVVinpc42KmuJkkv3y8Bly8EPaDWTkhiZd7r3e8LK3llwGCMXGQWGhEOsfH5zJlsrl8p0+gw-goAAo5MAAW3BaHgEEilU0tEhmzQ212BJ0ry4SOg+kg+gBBiMximIGA0nAfAQLGk2N4EAAEgzYcYcnkLsRdDTvNEYkYUKwSdCme9WdM0MYwYhFPSIPpJdTkAAzDKxBUaZX+aAAQgsVmkCTQxuYaBw2ng4Ok8CYcotSu8pMur09iG9vuObxZnx6SN+AyUWTF8MN0CcZE4Ywm5jZHK5aB5fP4iCFIqT4oRRTKRLo6lYVNeAHpG50wOzOe1zHr9NLQ+HoABybsD4HOKXXRA1JCoKhBELmI5pNaB6Fz0KKBAodDYPAgSUTmqYsAALx4m5nC6nW9nGq14KtaEUA9gR9PvuNCjQ9BgACNvcwNBtAcLiAA) + + ```ts + class ArticleModel { + title: string; + content?: string; + + constructor(title: string) { + this.title = title; + } + } + + class InstanceCache any)> { + private ClassConstructor: T; + private cache: Map> = new Map(); + + constructor (ctr: T) { + this.ClassConstructor = ctr; + } + + getInstance (...args: ConstructorParameters): InstanceType { + const hash = this.calculateArgumentsHash(...args); + + const existingInstance = this.cache.get(hash); + if (existingInstance !== undefined) { + return existingInstance; + } + + return new this.ClassConstructor(...args); + } + + private calculateArgumentsHash(...args: any[]): string { + // Calculate hash. + return 'hash'; + } + } + + const articleCache = new InstanceCache(ArticleModel); + const amazonArticle = articleCache.getInstance('Amazon forests burining!'); + ``` +
+ +- [`ReturnType`](https://www.typescriptlang.org/docs/handbook/utility-types.html#returntypetype) - Obtain the return type of a function type. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/MYGwhgzhAECSAmICmBlJAnAbgS2E6A3gFDTTwD2AcuQC4AW2AdgOYAUAlAFzSbnbyEAvkWFFQkGJSQB3GMVI1sNZNwg10TZgG4S0YOUY0kh1es07d+xmvQBXYDXLpWi5UlMaWAGj0GjJ6BtNdkJdBQYIADpXZGgAXmgYpB1ScOwoq38aeN9DYxoU6GFRKzVoJjUwRjwAYXJbPPRuAFkwAAcAHgAxBodsAx9GWwBbACMMAD4cxhloVraOCyYjdAAzMDxoOut1e0d0UNIZ6WhWSPOwdGYIbiqATwBtAF0uaHudUQB6ACpv6ABpJBINqJdAbADW0Do5BOw3u5R2VTwMHIq2gAANtjZ0bkbHsnFCwJh8ONjHp0EgwEZ4JFoN9PkRVr1FAZoMwkDRYIjqkgOrosepoEgAB7+eAwAV2BxOLy6ACCVxgIrFEoMeOl6AACpcwMMORgIB1JRMiBNWKVdhruJKfOdIpdrtwFddXlzKjyACp3Nq842HaDIbL6BrZBIVGhIpB1EMYSLsmjmtWW-YhAA+qegAAYLKQLQj3ZsEsdccmnGcLor2Dn8xGedHGpEIBzEzspfsfMHDNAANTQACMVaIljV5GQkRA5DYmIpVKQAgAJARO9le33BDXIyi0YuLW2nJFGLqkOvxFB0YPdBSaLZ0IwNzyPkO8-xkGgsLh8Al427a3hWAhXwwHA8EHT5PmgAB1bAQBAANJ24adKWpft72RaBUTgRBUCAj89HAM8xCTaBjggABRQx0DuHJv25P9dCkWRZVIAAiBjoFImpmjlFBgA0NpsjadByDacgIDAEAIAAQmYpjoGYgAZSBsmGPw6DtZiiFA8CoJguDmAQmoZ2QvtUKQLdoAYmBTwgdEiCAA) + + ```ts + /** Provides every element of the iterable `iter` into the `callback` function and stores the results in an array. */ + function mapIter< + Elem, + Func extends (elem: Elem) => any, + Ret extends ReturnType + >(iter: Iterable, callback: Func): Ret[] { + const mapped: Ret[] = []; + + for (const elem of iter) { + mapped.push(callback(elem)); + } + + return mapped; + } + + const setObject: Set = new Set(); + const mapObject: Map = new Map(); + + mapIter(setObject, (value: string) => value.indexOf('Foo')); // number[] + + mapIter(mapObject, ([key, value]: [number, string]) => { + return key % 2 === 0 ? value : 'Odd'; + }); // string[] + ``` +
+ +- [`InstanceType`](https://www.typescriptlang.org/docs/handbook/utility-types.html#instancetypetype) - Obtain the instance type of a constructor function type. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/MYGwhgzhAECSAmICmBlJAnAbgS2E6A3gFDTTwD2AcuQC4AW2AdgOYAUAlAFzSbnbyEAvkWFFQkGJSQB3GMVI1sNZNwg10TZgG4S0YOUY0kh1es07d+xmvQBXYDXLpWi5UlMaWAGj0GjJ6BtNdkJdBQYIADpXZGgAXmgYpB1ScOwoq38aeN9DYxoU6GFRKzVoJjUwRjwAYXJbPPRuAFkwAAcAHgAxBodsAx9GWwBbACMMAD4cxhloVraOCyYjdAAzMDxoOut1e0d0UNIZ6WhWSPOwdGYIbiqATwBtAF0uaHudUQB6ACpv6ABpJBINqJdAbADW0Do5BOw3u5R2VTwMHIq2gAANtjZ0bkbHsnFCwJh8ONjHp0EgwEZ4JFoN9PkRVr1FAZoMwkDRYIjqkgOrosepoEgAB7+eAwAV2BxOLy6ACCVxgIrFEoMeOl6AACpcwMMORgIB1JRMiBNWKVdhruJKfOdIpdrtwFddXlzKjyACp3Nq842HaDIbL6BrZBIVGhIpB1EMYSLsmjmtWW-YhAA+qegAAYLKQLQj3ZsEsdccmnGcLor2Dn8xGedHGpEIBzEzspfsfMHDNAANTQACMVaIljV5GQkRA5DYmIpVKQAgAJARO9le33BDXIyi0YuLW2nJFGLqkOvxFB0YPdBSaLZ0IwNzyPkO8-xkGgsLh8Al427a3hWAhXwwHA8EHT5PmgAB1bAQBAANJ24adKWpft72RaBUTgRBUCAj89HAM8xCTaBjggABRQx0DuHJv25P9dCkWRZVIAAiBjoFImpmjlFBgA0NpsjadByDacgIDAEAIAAQmYpjoGYgAZSBsmGPw6DtZiiFA8CoJguDmAQmoZ2QvtUKQLdoAYmBTwgdEiCAA) + + ```ts + class IdleService { + doNothing (): void {} + } + + class News { + title: string; + content: string; + + constructor(title: string, content: string) { + this.title = title; + this.content = content; + } + } + + const instanceCounter: Map = new Map(); + + interface Constructor { + new(...args: any[]): any; + } + + // Keep track how many instances of `Constr` constructor have been created. + function getInstance< + Constr extends Constructor, + Args extends ConstructorParameters + >(constructor: Constr, ...args: Args): InstanceType { + let count = instanceCounter.get(constructor) || 0; + + const instance = new constructor(...args); + + instanceCounter.set(constructor, count + 1); + + console.log(`Created ${count + 1} instances of ${Constr.name} class`); + + return instance; + } + + + const idleService = getInstance(IdleService); + // Will log: `Created 1 instances of IdleService class` + const newsEntry = getInstance(News, 'New ECMAScript proposals!', 'Last month...'); + // Will log: `Created 1 instances of News class` + ``` +
+ +- [`Omit`](https://www.typescriptlang.org/docs/handbook/utility-types.html#omittype-keys) - Constructs a type by picking all properties from T and then removing K. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/JYOwLgpgTgZghgYwgAgIImAWzgG2QbwChlks4BzCAVShwC5kBnMKUcgbmKYAcIFgIjBs1YgOXMpSFMWbANoBdTiW5woFddwAW0kfKWEAvoUIB6U8gDCUCHEiNkICAHdkYAJ69kz4GC3JcPG4oAHteKDABBxCYNAxsPFBIWEQUCAAPJG4wZABySUFcgJAAEzMLXNV1ck0dIuCw6EjBADpy5AB1FAQ4EGQAV0YUP2AHDy8wEOQbUugmBLwtEIA3OcmQnEjuZBgQqE7gAGtgZAhwKHdkHFGwNvGUdDIcAGUliIBJEF3kAF5kAHlML4ADyPBIAGjyBUYRQAPnkqho4NoYQA+TiEGD9EAISIhPozErQMG4AASK2gn2+AApek9pCSXm8wFSQooAJQMUkAFQAsgAZACiOAgmDOOSIJAQ+OYyGl4DgoDmf2QJRCCH6YvALQQNjsEGFovF1NyJWAy1y7OUyHMyE+yRAuFImG4Iq1YDswHxbRINjA-SgfXlHqVUE4xiAA) + + ```ts + interface Animal { + imageUrl: string; + species: string; + images: string[]; + paragraphs: string[]; + } + + // Creates new type with all properties of the `Animal` interface + // except 'images' and 'paragraphs' properties. We can use this + // type to render small hover tooltip for a wiki entry list. + type AnimalShortInfo = Omit; + + function renderAnimalHoverInfo (animals: AnimalShortInfo[]): HTMLElement { + const container = document.createElement('div'); + // Internal implementation. + return container; + } + ``` +
+ +- [`Uppercase`](https://www.typescriptlang.org/docs/handbook/utility-types.html#uppercasestringtype) - Transforms every character in a string into uppercase. +
+ + Example + + + ```ts + type T = Uppercase<'hello'>; // 'HELLO' + + type T2 = Uppercase<'foo' | 'bar'>; // 'FOO' | 'BAR' + + type T3 = Uppercase<`aB${S}`>; + type T4 = T3<'xYz'>; // 'ABXYZ' + + type T5 = Uppercase; // string + type T6 = Uppercase; // any + type T7 = Uppercase; // never + type T8 = Uppercase<42>; // Error, type 'number' does not satisfy the constraint 'string' + ``` +
+ +- [`Lowercase`](https://www.typescriptlang.org/docs/handbook/utility-types.html#lowercasestringtype) - Transforms every character in a string into lowercase. +
+ + Example + + + ```ts + type T = Lowercase<'HELLO'>; // 'hello' + + type T2 = Lowercase<'FOO' | 'BAR'>; // 'foo' | 'bar' + + type T3 = Lowercase<`aB${S}`>; + type T4 = T3<'xYz'>; // 'abxyz' + + type T5 = Lowercase; // string + type T6 = Lowercase; // any + type T7 = Lowercase; // never + type T8 = Lowercase<42>; // Error, type 'number' does not satisfy the constraint 'string' + ``` +
+ +- [`Capitalize`](https://www.typescriptlang.org/docs/handbook/utility-types.html#capitalizestringtype) - Transforms the first character in a string into uppercase. +
+ + Example + + + ```ts + type T = Capitalize<'hello'>; // 'Hello' + + type T2 = Capitalize<'foo' | 'bar'>; // 'Foo' | 'Bar' + + type T3 = Capitalize<`aB${S}`>; + type T4 = T3<'xYz'>; // 'ABxYz' + + type T5 = Capitalize; // string + type T6 = Capitalize; // any + type T7 = Capitalize; // never + type T8 = Capitalize<42>; // Error, type 'number' does not satisfy the constraint 'string' + ``` +
+
+- [`Uncapitalize`](https://www.typescriptlang.org/docs/handbook/utility-types.html#uncapitalizestringtype) - Transforms the first character in a string into lowercase.
+  <details>
+  <summary>Example</summary>
+
+  ```ts
+  type T = Uncapitalize<'Hello'>; // 'hello'
+
+  type T2 = Uncapitalize<'Foo' | 'Bar'>; // 'foo' | 'bar'
+
+  type T3<S extends string> = Uncapitalize<`AB${S}`>;
+  type T4 = T3<'xYz'>; // 'aBxYz'
+
+  type T5 = Uncapitalize<string>; // string
+  type T6 = Uncapitalize<any>; // any
+  type T7 = Uncapitalize<never>; // never
+  type T8 = Uncapitalize<42>; // Error, type 'number' does not satisfy the constraint 'string'
+  ```
+  </details>
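+
+As a rough sketch of how these built-in helpers compose (the `ArticleProps` shape and the setter naming below are invented for illustration and are not part of type-fest), `Omit` and `Capitalize` can be combined with key remapping in mapped types:
+
+```ts
+interface ArticleProps {
+  title: string;
+  body: string;
+  onSave: () => void;
+}
+
+// Drop the callback, keeping only the data fields.
+type ArticleData = Omit<ArticleProps, 'onSave'>;
+
+// Derive setter names such as `setTitle` from the remaining keys.
+// The `& string` intersection keeps symbol and number keys out of the template literal.
+type Setters<T> = {
+  [K in keyof T & string as `set${Capitalize<K>}`]: (value: T[K]) => void;
+};
+
+const articleSetters: Setters<ArticleData> = {
+  setTitle: value => console.log(value),
+  setBody: value => console.log(value),
+};
+```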
+ +You can find some examples in the [TypeScript docs](https://www.typescriptlang.org/docs/handbook/utility-types.html). + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Jarek Radosz](https://github.com/CvX) +- [Dimitri Benin](https://github.com/BendingBender) +- [Pelle Wessman](https://github.com/voxpelli) + +## License + +SPDX-License-Identifier: (MIT OR CC0-1.0) + +--- + +
+
+Get professional support for this package with a Tidelift subscription.
+
+Tidelift helps make open source sustainable for maintainers while giving companies
+assurances about security, maintenance, and licensing for their dependencies.
+
diff --git a/node_modules/type-fest/source/async-return-type.d.ts b/node_modules/type-fest/source/async-return-type.d.ts new file mode 100644 index 0000000..57cc363 --- /dev/null +++ b/node_modules/type-fest/source/async-return-type.d.ts @@ -0,0 +1,25 @@ +import type {PromiseValue} from './promise-value'; + +type AsyncFunction = (...args: any[]) => Promise; + +/** +Unwrap the return type of a function that returns a `Promise`. + +There has been [discussion](https://github.com/microsoft/TypeScript/pull/35998) about implementing this type in TypeScript. + +@example +```ts +import type {AsyncReturnType} from 'type-fest'; +import {asyncFunction} from 'api'; + +// This type resolves to the unwrapped return type of `asyncFunction`. +type Value = AsyncReturnType; + +async function doSomething(value: Value) {} + +asyncFunction().then(value => doSomething(value)); +``` + +@category Async +*/ +export type AsyncReturnType = PromiseValue>; diff --git a/node_modules/type-fest/source/asyncify.d.ts b/node_modules/type-fest/source/asyncify.d.ts new file mode 100644 index 0000000..6726b7e --- /dev/null +++ b/node_modules/type-fest/source/asyncify.d.ts @@ -0,0 +1,33 @@ +import type {PromiseValue} from './promise-value'; +import type {SetReturnType} from './set-return-type'; + +/** +Create an async version of the given function type, by boxing the return type in `Promise` while keeping the same parameter types. + +Use-case: You have two functions, one synchronous and one asynchronous that do the same thing. Instead of having to duplicate the type definition, you can use `Asyncify` to reuse the synchronous type. + +@example +``` +import type {Asyncify} from 'type-fest'; + +// Synchronous function. +function getFooSync(someArg: SomeType): Foo { + // … +} + +type AsyncifiedFooGetter = Asyncify; +//=> type AsyncifiedFooGetter = (someArg: SomeType) => Promise; + +// Same as `getFooSync` but asynchronous. +const getFooAsync: AsyncifiedFooGetter = (someArg) => { + // TypeScript now knows that `someArg` is `SomeType` automatically. + // It also knows that this function must return `Promise`. + // If you have `@typescript-eslint/promise-function-async` linter rule enabled, it will even report that "Functions that return promises must be async.". + + // … +} +``` + +@category Async +*/ +export type Asyncify any> = SetReturnType>>>; diff --git a/node_modules/type-fest/source/basic.d.ts b/node_modules/type-fest/source/basic.d.ts new file mode 100644 index 0000000..ac74fdc --- /dev/null +++ b/node_modules/type-fest/source/basic.d.ts @@ -0,0 +1,45 @@ +/** +Matches a [`class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes). + +@category Class +*/ +export type Class = Constructor & {prototype: T}; + +/** +Matches a [`class` constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes). + +@category Class +*/ +export type Constructor = new(...arguments_: Arguments) => T; + +/** +Matches a JSON object. + +This type can be useful to enforce some input to be JSON-compatible or as a super-type to be extended from. Don't use this as a direct return type as the user would have to double-cast it: `jsonObject as unknown as CustomResponse`. Instead, you could extend your CustomResponse type from it to ensure your type only uses JSON-compatible types: `interface CustomResponse extends JsonObject { … }`. + +@category JSON +*/ +export type JsonObject = {[Key in string]?: JsonValue}; + +/** +Matches a JSON array. 
+ +@category JSON +*/ +export type JsonArray = JsonValue[]; + +/** +Matches any valid JSON primitive value. + +@category JSON +*/ +export type JsonPrimitive = string | number | boolean | null; + +/** +Matches any valid JSON value. + +@see `Jsonify` if you need to transform a type to one that is assignable to `JsonValue`. + +@category JSON +*/ +export type JsonValue = JsonPrimitive | JsonObject | JsonArray; diff --git a/node_modules/type-fest/source/camel-case.d.ts b/node_modules/type-fest/source/camel-case.d.ts new file mode 100644 index 0000000..bcf54fc --- /dev/null +++ b/node_modules/type-fest/source/camel-case.d.ts @@ -0,0 +1,73 @@ +import type {WordSeparators} from '../source/internal'; +import type {Split} from './split'; + +/** +Step by step takes the first item in an array literal, formats it and adds it to a string literal, and then recursively appends the remainder. + +Only to be used by `CamelCaseStringArray<>`. + +@see CamelCaseStringArray +*/ +type InnerCamelCaseStringArray = + Parts extends [`${infer FirstPart}`, ...infer RemainingParts] + ? FirstPart extends undefined + ? '' + : FirstPart extends '' + ? InnerCamelCaseStringArray + : `${PreviousPart extends '' ? FirstPart : Capitalize}${InnerCamelCaseStringArray}` + : ''; + +/** +Starts fusing the output of `Split<>`, an array literal of strings, into a camel-cased string literal. + +It's separate from `InnerCamelCaseStringArray<>` to keep a clean API outwards to the rest of the code. + +@see Split +*/ +type CamelCaseStringArray = + Parts extends [`${infer FirstPart}`, ...infer RemainingParts] + ? Uncapitalize<`${FirstPart}${InnerCamelCaseStringArray}`> + : never; + +/** +Convert a string literal to camel-case. + +This can be useful when, for example, converting some kebab-cased command-line flags or a snake-cased database result. + +@example +``` +import type {CamelCase} from 'type-fest'; + +// Simple + +const someVariable: CamelCase<'foo-bar'> = 'fooBar'; + +// Advanced + +type CamelCasedProperties = { + [K in keyof T as CamelCase]: T[K] +}; + +interface RawOptions { + 'dry-run': boolean; + 'full_family_name': string; + foo: number; + BAR: string; + QUZ_QUX: number; + 'OTHER-FIELD': boolean; +} + +const dbResult: CamelCasedProperties = { + dryRun: true, + fullFamilyName: 'bar.js', + foo: 123, + bar: 'foo', + quzQux: 6, + otherField: false +}; +``` + +@category Change case +@category Template literal +*/ +export type CamelCase = K extends string ? CamelCaseStringArray ? Lowercase : K, WordSeparators>> : K; diff --git a/node_modules/type-fest/source/camel-cased-properties-deep.d.ts b/node_modules/type-fest/source/camel-cased-properties-deep.d.ts new file mode 100644 index 0000000..c821dc4 --- /dev/null +++ b/node_modules/type-fest/source/camel-cased-properties-deep.d.ts @@ -0,0 +1,54 @@ +import type {CamelCase} from './camel-case'; + +/** +Convert object properties to camel case recursively. + +This can be useful when, for example, converting some API types from a different style. 
+ +@see CamelCasedProperties +@see CamelCase + +@example +``` +import type {CamelCasedPropertiesDeep} from 'type-fest'; + +interface User { + UserId: number; + UserName: string; +} + +interface UserWithFriends { + UserInfo: User; + UserFriends: User[]; +} + +const result: CamelCasedPropertiesDeep = { + userInfo: { + userId: 1, + userName: 'Tom', + }, + userFriends: [ + { + userId: 2, + userName: 'Jerry', + }, + { + userId: 3, + userName: 'Spike', + }, + ], +}; +``` + +@category Change case +@category Template literal +@category Object +*/ +export type CamelCasedPropertiesDeep = Value extends Function + ? Value + : Value extends Array + ? Array> + : Value extends Set + ? Set> : { + [K in keyof Value as CamelCase]: CamelCasedPropertiesDeep; + }; diff --git a/node_modules/type-fest/source/camel-cased-properties.d.ts b/node_modules/type-fest/source/camel-cased-properties.d.ts new file mode 100644 index 0000000..afe0629 --- /dev/null +++ b/node_modules/type-fest/source/camel-cased-properties.d.ts @@ -0,0 +1,36 @@ +import type {CamelCase} from './camel-case'; + +/** +Convert object properties to camel case but not recursively. + +This can be useful when, for example, converting some API types from a different style. + +@see CamelCasedPropertiesDeep +@see CamelCase + +@example +``` +import type {CamelCasedProperties} from 'type-fest'; + +interface User { + UserId: number; + UserName: string; +} + +const result: CamelCasedProperties = { + userId: 1, + userName: 'Tom', +}; +``` + +@category Change case +@category Template literal +@category Object +*/ +export type CamelCasedProperties = Value extends Function + ? Value + : Value extends Array + ? Value + : { + [K in keyof Value as CamelCase]: Value[K]; + }; diff --git a/node_modules/type-fest/source/conditional-except.d.ts b/node_modules/type-fest/source/conditional-except.d.ts new file mode 100644 index 0000000..d61792a --- /dev/null +++ b/node_modules/type-fest/source/conditional-except.d.ts @@ -0,0 +1,45 @@ +import type {Except} from './except'; +import type {ConditionalKeys} from './conditional-keys'; + +/** +Exclude keys from a shape that matches the given `Condition`. + +This is useful when you want to create a new type with a specific set of keys from a shape. For example, you might want to exclude all the primitive properties from a class and form a new shape containing everything but the primitive properties. + +@example +``` +import type {Primitive, ConditionalExcept} from 'type-fest'; + +class Awesome { + name: string; + successes: number; + failures: bigint; + + run() {} +} + +type ExceptPrimitivesFromAwesome = ConditionalExcept; +//=> {run: () => void} +``` + +@example +``` +import type {ConditionalExcept} from 'type-fest'; + +interface Example { + a: string; + b: string | number; + c: () => void; + d: {}; +} + +type NonStringKeysOnly = ConditionalExcept; +//=> {b: string | number; c: () => void; d: {}} +``` + +@category Object +*/ +export type ConditionalExcept = Except< + Base, + ConditionalKeys +>; diff --git a/node_modules/type-fest/source/conditional-keys.d.ts b/node_modules/type-fest/source/conditional-keys.d.ts new file mode 100644 index 0000000..c337dbb --- /dev/null +++ b/node_modules/type-fest/source/conditional-keys.d.ts @@ -0,0 +1,47 @@ +/** +Extract the keys from a type where the value type of the key extends the given `Condition`. + +Internally this is used for the `ConditionalPick` and `ConditionalExcept` types. 
+ +@example +``` +import type {ConditionalKeys} from 'type-fest'; + +interface Example { + a: string; + b: string | number; + c?: string; + d: {}; +} + +type StringKeysOnly = ConditionalKeys; +//=> 'a' +``` + +To support partial types, make sure your `Condition` is a union of undefined (for example, `string | undefined`) as demonstrated below. + +@example +``` +import type {ConditionalKeys} from 'type-fest'; + +type StringKeysAndUndefined = ConditionalKeys; +//=> 'a' | 'c' +``` + +@category Object +*/ +export type ConditionalKeys = NonNullable< + // Wrap in `NonNullable` to strip away the `undefined` type from the produced union. + { + // Map through all the keys of the given base type. + [Key in keyof Base]: + // Pick only keys with types extending the given `Condition` type. + Base[Key] extends Condition + // Retain this key since the condition passes. + ? Key + // Discard this key since the condition fails. + : never; + + // Convert the produced object into a union type of the keys which passed the conditional test. + }[keyof Base] +>; diff --git a/node_modules/type-fest/source/conditional-pick.d.ts b/node_modules/type-fest/source/conditional-pick.d.ts new file mode 100644 index 0000000..f90e399 --- /dev/null +++ b/node_modules/type-fest/source/conditional-pick.d.ts @@ -0,0 +1,44 @@ +import type {ConditionalKeys} from './conditional-keys'; + +/** +Pick keys from the shape that matches the given `Condition`. + +This is useful when you want to create a new type from a specific subset of an existing type. For example, you might want to pick all the primitive properties from a class and form a new automatically derived type. + +@example +``` +import type {Primitive, ConditionalPick} from 'type-fest'; + +class Awesome { + name: string; + successes: number; + failures: bigint; + + run() {} +} + +type PickPrimitivesFromAwesome = ConditionalPick; +//=> {name: string; successes: number; failures: bigint} +``` + +@example +``` +import type {ConditionalPick} from 'type-fest'; + +interface Example { + a: string; + b: string | number; + c: () => void; + d: {}; +} + +type StringKeysOnly = ConditionalPick; +//=> {a: string} +``` + +@category Object +*/ +export type ConditionalPick = Pick< + Base, + ConditionalKeys +>; diff --git a/node_modules/type-fest/source/delimiter-case.d.ts b/node_modules/type-fest/source/delimiter-case.d.ts new file mode 100644 index 0000000..770707e --- /dev/null +++ b/node_modules/type-fest/source/delimiter-case.d.ts @@ -0,0 +1,93 @@ +import type {UpperCaseCharacters, WordSeparators} from '../source/internal'; + +/** +Unlike a simpler split, this one includes the delimiter splitted on in the resulting array literal. This is to enable splitting on, for example, upper-case characters. + +@category Template literal +*/ +export type SplitIncludingDelimiters = + Source extends '' ? [] : + Source extends `${infer FirstPart}${Delimiter}${infer SecondPart}` ? + ( + Source extends `${FirstPart}${infer UsedDelimiter}${SecondPart}` + ? UsedDelimiter extends Delimiter + ? Source extends `${infer FirstPart}${UsedDelimiter}${infer SecondPart}` + ? [...SplitIncludingDelimiters, UsedDelimiter, ...SplitIncludingDelimiters] + : never + : never + : never + ) : + [Source]; + +/** +Format a specific part of the splitted string literal that `StringArrayToDelimiterCase<>` fuses together, ensuring desired casing. + +@see StringArrayToDelimiterCase +*/ +type StringPartToDelimiterCase = + StringPart extends UsedWordSeparators ? Delimiter : + Start extends true ? 
Lowercase : + StringPart extends UsedUpperCaseCharacters ? `${Delimiter}${Lowercase}` : + StringPart; + +/** +Takes the result of a splitted string literal and recursively concatenates it together into the desired casing. + +It receives `UsedWordSeparators` and `UsedUpperCaseCharacters` as input to ensure it's fully encapsulated. + +@see SplitIncludingDelimiters +*/ +type StringArrayToDelimiterCase = + Parts extends [`${infer FirstPart}`, ...infer RemainingParts] + ? `${StringPartToDelimiterCase}${StringArrayToDelimiterCase}` + : Parts extends [string] + ? string + : ''; + +/** +Convert a string literal to a custom string delimiter casing. + +This can be useful when, for example, converting a camel-cased object property to an oddly cased one. + +@see KebabCase +@see SnakeCase + +@example +``` +import type {DelimiterCase} from 'type-fest'; + +// Simple + +const someVariable: DelimiterCase<'fooBar', '#'> = 'foo#bar'; + +// Advanced + +type OddlyCasedProperties = { + [K in keyof T as DelimiterCase]: T[K] +}; + +interface SomeOptions { + dryRun: boolean; + includeFile: string; + foo: number; +} + +const rawCliOptions: OddlyCasedProperties = { + 'dry#run': true, + 'include#file': 'bar.js', + foo: 123 +}; +``` + +@category Change case +@category Template literal +*/ +export type DelimiterCase = Value extends string + ? StringArrayToDelimiterCase< + SplitIncludingDelimiters, + true, + WordSeparators, + UpperCaseCharacters, + Delimiter + > + : Value; diff --git a/node_modules/type-fest/source/delimiter-cased-properties-deep.d.ts b/node_modules/type-fest/source/delimiter-cased-properties-deep.d.ts new file mode 100644 index 0000000..36b4725 --- /dev/null +++ b/node_modules/type-fest/source/delimiter-cased-properties-deep.d.ts @@ -0,0 +1,60 @@ +import type {DelimiterCase} from './delimiter-case'; + +/** +Convert object properties to delimiter case recursively. + +This can be useful when, for example, converting some API types from a different style. + +@see DelimiterCase +@see DelimiterCasedProperties + +@example +``` +import type {DelimiterCasedPropertiesDeep} from 'type-fest'; + +interface User { + userId: number; + userName: string; +} + +interface UserWithFriends { + userInfo: User; + userFriends: User[]; +} + +const result: DelimiterCasedPropertiesDeep = { + 'user-info': { + 'user-id': 1, + 'user-name': 'Tom', + }, + 'user-friends': [ + { + 'user-id': 2, + 'user-name': 'Jerry', + }, + { + 'user-id': 3, + 'user-name': 'Spike', + }, + ], +}; +``` + +@category Change case +@category Template literal +@category Object +*/ +export type DelimiterCasedPropertiesDeep< + Value, + Delimiter extends string, +> = Value extends Function | Date | RegExp + ? Value + : Value extends Array + ? Array> + : Value extends Set + ? Set> : { + [K in keyof Value as DelimiterCase< + K, + Delimiter + >]: DelimiterCasedPropertiesDeep; + }; diff --git a/node_modules/type-fest/source/delimiter-cased-properties.d.ts b/node_modules/type-fest/source/delimiter-cased-properties.d.ts new file mode 100644 index 0000000..03b76af --- /dev/null +++ b/node_modules/type-fest/source/delimiter-cased-properties.d.ts @@ -0,0 +1,37 @@ +import type {DelimiterCase} from './delimiter-case'; + +/** +Convert object properties to delimiter case but not recursively. + +This can be useful when, for example, converting some API types from a different style. 
+ +@see DelimiterCase +@see DelimiterCasedPropertiesDeep + +@example +``` +import type {DelimiterCasedProperties} from 'type-fest'; + +interface User { + userId: number; + userName: string; +} + +const result: DelimiterCasedProperties = { + 'user-id': 1, + 'user-name': 'Tom', +}; +``` + +@category Change case +@category Template literal +@category Object +*/ +export type DelimiterCasedProperties< + Value, + Delimiter extends string, +> = Value extends Function + ? Value + : Value extends Array + ? Value + : {[K in keyof Value as DelimiterCase]: Value[K]}; diff --git a/node_modules/type-fest/source/entries.d.ts b/node_modules/type-fest/source/entries.d.ts new file mode 100644 index 0000000..9fe2fd3 --- /dev/null +++ b/node_modules/type-fest/source/entries.d.ts @@ -0,0 +1,62 @@ +import type {ArrayEntry, MapEntry, ObjectEntry, SetEntry} from './entry'; + +type ArrayEntries = Array>; +type MapEntries = Array>; +type ObjectEntries = Array>; +type SetEntries> = Array>; + +/** +Many collections have an `entries` method which returns an array of a given object's own enumerable string-keyed property [key, value] pairs. The `Entries` type will return the type of that collection's entries. + +For example the {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/entries|`Object`}, {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/entries|`Map`}, {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/entries|`Array`}, and {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set/entries|`Set`} collections all have this method. Note that `WeakMap` and `WeakSet` do not have this method since their entries are not enumerable. + +@see `Entry` if you want to just access the type of a single entry. + +@example +``` +import type {Entries} from 'type-fest'; + +interface Example { + someKey: number; +} + +const manipulatesEntries = (examples: Entries) => examples.map(example => [ + // Does some arbitrary processing on the key (with type information available) + example[0].toUpperCase(), + + // Does some arbitrary processing on the value (with type information available) + example[1].toFixed() +]); + +const example: Example = {someKey: 1}; +const entries = Object.entries(example) as Entries; +const output = manipulatesEntries(entries); + +// Objects +const objectExample = {a: 1}; +const objectEntries: Entries = [['a', 1]]; + +// Arrays +const arrayExample = ['a', 1]; +const arrayEntries: Entries = [[0, 'a'], [1, 1]]; + +// Maps +const mapExample = new Map([['a', 1]]); +const mapEntries: Entries = [['a', 1]]; + +// Sets +const setExample = new Set(['a', 1]); +const setEntries: Entries = [['a', 'a'], [1, 1]]; +``` + +@category Object +@category Map +@category Set +@category Array +*/ +export type Entries = + BaseType extends Map ? MapEntries + : BaseType extends Set ? SetEntries + : BaseType extends readonly unknown[] ? ArrayEntries + : BaseType extends object ? ObjectEntries + : never; diff --git a/node_modules/type-fest/source/entry.d.ts b/node_modules/type-fest/source/entry.d.ts new file mode 100644 index 0000000..678b67a --- /dev/null +++ b/node_modules/type-fest/source/entry.d.ts @@ -0,0 +1,65 @@ +type MapKey = BaseType extends Map ? KeyType : never; +type MapValue = BaseType extends Map ? 
ValueType : never; + +export type ArrayEntry = [number, BaseType[number]]; +export type MapEntry = [MapKey, MapValue]; +export type ObjectEntry = [keyof BaseType, BaseType[keyof BaseType]]; +export type SetEntry = BaseType extends Set ? [ItemType, ItemType] : never; + +/** +Many collections have an `entries` method which returns an array of a given object's own enumerable string-keyed property [key, value] pairs. The `Entry` type will return the type of that collection's entry. + +For example the {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/entries|`Object`}, {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/entries|`Map`}, {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/entries|`Array`}, and {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set/entries|`Set`} collections all have this method. Note that `WeakMap` and `WeakSet` do not have this method since their entries are not enumerable. + +@see `Entries` if you want to just access the type of the array of entries (which is the return of the `.entries()` method). + +@example +``` +import type {Entry} from 'type-fest'; + +interface Example { + someKey: number; +} + +const manipulatesEntry = (example: Entry) => [ + // Does some arbitrary processing on the key (with type information available) + example[0].toUpperCase(), + + // Does some arbitrary processing on the value (with type information available) + example[1].toFixed(), +]; + +const example: Example = {someKey: 1}; +const entry = Object.entries(example)[0] as Entry; +const output = manipulatesEntry(entry); + +// Objects +const objectExample = {a: 1}; +const objectEntry: Entry = ['a', 1]; + +// Arrays +const arrayExample = ['a', 1]; +const arrayEntryString: Entry = [0, 'a']; +const arrayEntryNumber: Entry = [1, 1]; + +// Maps +const mapExample = new Map([['a', 1]]); +const mapEntry: Entry = ['a', 1]; + +// Sets +const setExample = new Set(['a', 1]); +const setEntryString: Entry = ['a', 'a']; +const setEntryNumber: Entry = [1, 1]; +``` + +@category Object +@category Map +@category Array +@category Set +*/ +export type Entry = + BaseType extends Map ? MapEntry + : BaseType extends Set ? SetEntry + : BaseType extends readonly unknown[] ? ArrayEntry + : BaseType extends object ? ObjectEntry + : never; diff --git a/node_modules/type-fest/source/exact.d.ts b/node_modules/type-fest/source/exact.d.ts new file mode 100644 index 0000000..0227f3d --- /dev/null +++ b/node_modules/type-fest/source/exact.d.ts @@ -0,0 +1,73 @@ +import type {KeysOfUnion} from './internal'; + +/** +Extract the element of an array that also works for array union. + +Returns `never` if T is not an array. + +It creates a type-safe way to access the element type of `unknown` type. +*/ +type ArrayElement = T extends readonly unknown[] ? T[0] : never; + +/** +Extract the object field type if T is an object and K is a key of T, return `never` otherwise. + +It creates a type-safe way to access the member type of `unknown` type. +*/ +type ObjectValue = K extends keyof T ? T[K] : never; + +/** +Create a type from `ParameterType` and `InputType` and change keys exclusive to `InputType` to `never`. +- Generate a list of keys that exists in `InputType` but not in `ParameterType`. +- Mark these excess keys as `never`. 
+*/ +type ExactObject = {[Key in keyof ParameterType]: Exact>} + & Record>, never>; + +/** +Create a type that does not allow extra properties, meaning it only allows properties that are explicitly declared. + +This is useful for function type-guarding to reject arguments with excess properties. Due to the nature of TypeScript, it does not complain if excess properties are provided unless the provided value is an object literal. + +*Please upvote [this issue](https://github.com/microsoft/TypeScript/issues/12936) if you want to have this type as a built-in in TypeScript.* + +@example +``` +type OnlyAcceptName = {name: string}; + +function onlyAcceptName(args: OnlyAcceptName) {} + +// TypeScript complains about excess properties when an object literal is provided. +onlyAcceptName({name: 'name', id: 1}); +//=> `id` is excess + +// TypeScript does not complain about excess properties when the provided value is a variable (not an object literal). +const invalidInput = {name: 'name', id: 1}; +onlyAcceptName(invalidInput); // No errors +``` + +Having `Exact` allows TypeScript to reject excess properties. + +@example +``` +import {Exact} from 'type-fest'; + +type OnlyAcceptName = {name: string}; + +function onlyAcceptNameImproved>(args: T) {} + +const invalidInput = {name: 'name', id: 1}; +onlyAcceptNameImproved(invalidInput); // Compilation error +``` + +[Read more](https://stackoverflow.com/questions/49580725/is-it-possible-to-restrict-typescript-object-to-contain-only-properties-defined) + +@category Utilities +*/ +export type Exact = + // Convert union of array to array of union: A[] & B[] => (A & B)[] + ParameterType extends unknown[] ? Array, ArrayElement>> + // In TypeScript, Array is a subtype of ReadonlyArray, so always test Array before ReadonlyArray. + : ParameterType extends readonly unknown[] ? ReadonlyArray, ArrayElement>> + : ParameterType extends object ? ExactObject + : ParameterType; diff --git a/node_modules/type-fest/source/except.d.ts b/node_modules/type-fest/source/except.d.ts new file mode 100644 index 0000000..dab21d5 --- /dev/null +++ b/node_modules/type-fest/source/except.d.ts @@ -0,0 +1,57 @@ +import type {IsEqual} from './internal'; + +/** +Filter out keys from an object. + +Returns `never` if `Exclude` is strictly equal to `Key`. +Returns `never` if `Key` extends `Exclude`. +Returns `Key` otherwise. + +@example +``` +type Filtered = Filter<'foo', 'foo'>; +//=> never +``` + +@example +``` +type Filtered = Filter<'bar', string>; +//=> never +``` + +@example +``` +type Filtered = Filter<'bar', 'foo'>; +//=> 'bar' +``` + +@see {Except} +*/ +type Filter = IsEqual extends true ? never : (KeyType extends ExcludeType ? never : KeyType); + +/** +Create a type from an object type without certain keys. + +This type is a stricter version of [`Omit`](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-5.html#the-omit-helper-type). The `Omit` type does not restrict the omitted keys to be keys present on the given type, while `Except` does. The benefits of a stricter type are avoiding typos and allowing the compiler to pick up on rename refactors automatically. + +This type was proposed to the TypeScript team, which declined it, saying they prefer that libraries implement stricter versions of the built-in types ([microsoft/TypeScript#30825](https://github.com/microsoft/TypeScript/issues/30825#issuecomment-523668235)). 
+ +@example +``` +import type {Except} from 'type-fest'; + +type Foo = { + a: number; + b: string; + c: boolean; +}; + +type FooWithoutA = Except; +//=> {b: string}; +``` + +@category Object +*/ +export type Except = { + [KeyType in keyof ObjectType as Filter]: ObjectType[KeyType]; +}; diff --git a/node_modules/type-fest/source/fixed-length-array.d.ts b/node_modules/type-fest/source/fixed-length-array.d.ts new file mode 100644 index 0000000..9152e90 --- /dev/null +++ b/node_modules/type-fest/source/fixed-length-array.d.ts @@ -0,0 +1,43 @@ +/** +Methods to exclude. +*/ +type ArrayLengthMutationKeys = 'splice' | 'push' | 'pop' | 'shift' | 'unshift'; + +/** +Create a type that represents an array of the given type and length. The array's length and the `Array` prototype methods that manipulate its length are excluded in the resulting type. + +Please participate in [this issue](https://github.com/microsoft/TypeScript/issues/26223) if you want to have a similiar type built into TypeScript. + +Use-cases: +- Declaring fixed-length tuples or arrays with a large number of items. +- Creating a range union (for example, `0 | 1 | 2 | 3 | 4` from the keys of such a type) without having to resort to recursive types. +- Creating an array of coordinates with a static length, for example, length of 3 for a 3D vector. + +Note: This type does not prevent out-of-bounds access. Prefer `ReadonlyTuple` unless you need mutability. + +@example +``` +import type {FixedLengthArray} from 'type-fest'; + +type FencingTeam = FixedLengthArray; + +const guestFencingTeam: FencingTeam = ['Josh', 'Michael', 'Robert']; + +const homeFencingTeam: FencingTeam = ['George', 'John']; +//=> error TS2322: Type string[] is not assignable to type 'FencingTeam' + +guestFencingTeam.push('Sam'); +//=> error TS2339: Property 'push' does not exist on type 'FencingTeam' +``` + +@category Array +@see ReadonlyTuple +*/ +export type FixedLengthArray = Pick< + ArrayPrototype, + Exclude +> & { + [index: number]: Element; + [Symbol.iterator]: () => IterableIterator; + readonly length: Length; +}; diff --git a/node_modules/type-fest/source/get.d.ts b/node_modules/type-fest/source/get.d.ts new file mode 100644 index 0000000..a95394c --- /dev/null +++ b/node_modules/type-fest/source/get.d.ts @@ -0,0 +1,184 @@ +import type {StringDigit} from '../source/internal'; +import type {Split} from './split'; +import type {StringKeyOf} from './string-key-of'; + +type GetOptions = { + strict?: boolean; +}; + +/** +Like the `Get` type but receives an array of strings as a path parameter. +*/ +type GetWithPath = + Keys extends [] + ? BaseType + : Keys extends readonly [infer Head, ...infer Tail] + ? GetWithPath< + PropertyOf, Options>, + Extract, + Options + > + : never; + +/** +Adds `undefined` to `Type` if `strict` is enabled. +*/ +type Strictify = + Options['strict'] extends true ? Type | undefined : Type; + +/** +If `Options['strict']` is `true`, includes `undefined` in the returned type when accessing properties on `Record`. + +Known limitations: +- Does not include `undefined` in the type on object types with an index signature (for example, `{a: string; [key: string]: string}`). +*/ +type StrictPropertyOf = + Record extends BaseType + ? string extends keyof BaseType + ? Strictify // Record + : BaseType[Key] // Record<'a' | 'b', any> (Records with a string union as keys have required properties) + : BaseType[Key]; + +/** +Splits a dot-prop style path into a tuple comprised of the properties in the path. Handles square-bracket notation. 
+ +@example +``` +ToPath<'foo.bar.baz'> +//=> ['foo', 'bar', 'baz'] + +ToPath<'foo[0].bar.baz'> +//=> ['foo', '0', 'bar', 'baz'] +``` +*/ +type ToPath = Split, '.'>; + +/** +Replaces square-bracketed dot notation with dots, for example, `foo[0].bar` -> `foo.0.bar`. +*/ +type FixPathSquareBrackets = + Path extends `[${infer Head}]${infer Tail}` + ? Tail extends `[${string}` + ? `${Head}.${FixPathSquareBrackets}` + : `${Head}${FixPathSquareBrackets}` + : Path extends `${infer Head}[${infer Middle}]${infer Tail}` + ? `${Head}.${FixPathSquareBrackets<`[${Middle}]${Tail}`>}` + : Path; + +/** +Returns true if `LongString` is made up out of `Substring` repeated 0 or more times. + +@example +``` +ConsistsOnlyOf<'aaa', 'a'> //=> true +ConsistsOnlyOf<'ababab', 'ab'> //=> true +ConsistsOnlyOf<'aBa', 'a'> //=> false +ConsistsOnlyOf<'', 'a'> //=> true +``` +*/ +type ConsistsOnlyOf = + LongString extends '' + ? true + : LongString extends `${Substring}${infer Tail}` + ? ConsistsOnlyOf + : false; + +/** +Convert a type which may have number keys to one with string keys, making it possible to index using strings retrieved from template types. + +@example +``` +type WithNumbers = {foo: string; 0: boolean}; +type WithStrings = WithStringKeys; + +type WithNumbersKeys = keyof WithNumbers; +//=> 'foo' | 0 +type WithStringsKeys = keyof WithStrings; +//=> 'foo' | '0' +``` +*/ +type WithStringKeys = { + [Key in StringKeyOf]: UncheckedIndex +}; + +/** +Perform a `T[U]` operation if `T` supports indexing. +*/ +type UncheckedIndex = [T] extends [Record] ? T[U] : never; + +/** +Get a property of an object or array. Works when indexing arrays using number-literal-strings, for example, `PropertyOf = number`, and when indexing objects with number keys. + +Note: +- Returns `unknown` if `Key` is not a property of `BaseType`, since TypeScript uses structural typing, and it cannot be guaranteed that extra properties unknown to the type system will exist at runtime. +- Returns `undefined` from nullish values, to match the behaviour of most deep-key libraries like `lodash`, `dot-prop`, etc. +*/ +type PropertyOf = + BaseType extends null | undefined + ? undefined + : Key extends keyof BaseType + ? StrictPropertyOf + : BaseType extends [] | [unknown, ...unknown[]] + ? unknown // It's a tuple, but `Key` did not extend `keyof BaseType`. So the index is out of bounds. + : BaseType extends { + [n: number]: infer Item; + length: number; // Note: This is needed to avoid being too lax with records types using number keys like `{0: string; 1: boolean}`. + } + ? ( + ConsistsOnlyOf extends true + ? Strictify + : unknown + ) + : Key extends keyof WithStringKeys + ? StrictPropertyOf, Key, Options> + : unknown; + +// This works by first splitting the path based on `.` and `[...]` characters into a tuple of string keys. Then it recursively uses the head key to get the next property of the current object, until there are no keys left. Number keys extract the item type from arrays, or are converted to strings to extract types from tuples and dictionaries with number keys. +/** +Get a deeply-nested property from an object using a key path, like Lodash's `.get()` function. + +Use-case: Retrieve a property from deep inside an API response or some other complex object. 
+ +@example +``` +import type {Get} from 'type-fest'; +import * as lodash from 'lodash'; + +const get = (object: BaseType, path: Path): Get => + lodash.get(object, path); + +interface ApiResponse { + hits: { + hits: Array<{ + _id: string + _source: { + name: Array<{ + given: string[] + family: string + }> + birthDate: string + } + }> + } +} + +const getName = (apiResponse: ApiResponse) => + get(apiResponse, 'hits.hits[0]._source.name'); + //=> Array<{given: string[]; family: string}> + +// Path also supports a readonly array of strings +const getNameWithPathArray = (apiResponse: ApiResponse) => + get(apiResponse, ['hits','hits', '0', '_source', 'name'] as const); + //=> Array<{given: string[]; family: string}> + +// Strict mode: +Get //=> string | undefined +Get, 'foo', {strict: true}> // => string | undefined +``` + +@category Object +@category Array +@category Template literal +*/ +export type Get = + GetWithPath : Path, Options>; diff --git a/node_modules/type-fest/source/has-optional-keys.d.ts b/node_modules/type-fest/source/has-optional-keys.d.ts new file mode 100644 index 0000000..597e8aa --- /dev/null +++ b/node_modules/type-fest/source/has-optional-keys.d.ts @@ -0,0 +1,21 @@ +import {OptionalKeysOf} from './optional-keys-of'; + +/** +Creates a type that represents `true` or `false` depending on whether the given type has any optional fields. + +This is useful when you want to create an API whose behavior depends on the presence or absence of optional fields. + +@example +``` +import type {HasOptionalKeys, OptionalKeysOf} from 'type-fest'; + +type UpdateService = { + removeField: HasOptionalKeys extends true + ? (field: OptionalKeysOf) => Promise + : never +} +``` + +@category Utilities +*/ +export type HasOptionalKeys = OptionalKeysOf extends never ? false : true; diff --git a/node_modules/type-fest/source/has-required-keys.d.ts b/node_modules/type-fest/source/has-required-keys.d.ts new file mode 100644 index 0000000..ec9daf3 --- /dev/null +++ b/node_modules/type-fest/source/has-required-keys.d.ts @@ -0,0 +1,59 @@ +import {RequiredKeysOf} from './required-keys-of'; + +/** +Creates a type that represents `true` or `false` depending on whether the given type has any required fields. + +This is useful when you want to create an API whose behavior depends on the presence or absence of required fields. + +@example +``` +import type {HasRequiredKeys} from 'type-fest'; + +type GeneratorOptions