Refactored the project
This commit is contained in:
parent
7b7e6ce838
commit
9c495876d5
27
Cargo.toml
27
Cargo.toml
@ -1,27 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "mini-game"
|
|
||||||
version = "0.1.0"
|
|
||||||
authors = ["lanxu <lanxu@posteo.net>"]
|
|
||||||
edition = "2018"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
crate-type = ["cdylib", "rlib"]
|
|
||||||
|
|
||||||
[features]
|
|
||||||
default = ["console_error_panic_hook"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
wasm-bindgen = "0.2.84"
|
|
||||||
|
|
||||||
# The `console_error_panic_hook` crate provides better debugging of panics by
|
|
||||||
# logging them with `console.error`. This is great for development, but requires
|
|
||||||
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
|
|
||||||
# code size when deploying.
|
|
||||||
console_error_panic_hook = { version = "0.1.7", optional = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
wasm-bindgen-test = "0.3.34"
|
|
||||||
|
|
||||||
[profile.release]
|
|
||||||
# Tell `rustc` to optimize for small code size.
|
|
||||||
opt-level = "s"
|
|
201
LICENSE_APACHE
201
LICENSE_APACHE
@ -1,201 +0,0 @@
|
|||||||
Apache License
|
|
||||||
Version 2.0, January 2004
|
|
||||||
http://www.apache.org/licenses/
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|
||||||
|
|
||||||
1. Definitions.
|
|
||||||
|
|
||||||
"License" shall mean the terms and conditions for use, reproduction,
|
|
||||||
and distribution as defined by Sections 1 through 9 of this document.
|
|
||||||
|
|
||||||
"Licensor" shall mean the copyright owner or entity authorized by
|
|
||||||
the copyright owner that is granting the License.
|
|
||||||
|
|
||||||
"Legal Entity" shall mean the union of the acting entity and all
|
|
||||||
other entities that control, are controlled by, or are under common
|
|
||||||
control with that entity. For the purposes of this definition,
|
|
||||||
"control" means (i) the power, direct or indirect, to cause the
|
|
||||||
direction or management of such entity, whether by contract or
|
|
||||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
|
||||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
|
||||||
|
|
||||||
"You" (or "Your") shall mean an individual or Legal Entity
|
|
||||||
exercising permissions granted by this License.
|
|
||||||
|
|
||||||
"Source" form shall mean the preferred form for making modifications,
|
|
||||||
including but not limited to software source code, documentation
|
|
||||||
source, and configuration files.
|
|
||||||
|
|
||||||
"Object" form shall mean any form resulting from mechanical
|
|
||||||
transformation or translation of a Source form, including but
|
|
||||||
not limited to compiled object code, generated documentation,
|
|
||||||
and conversions to other media types.
|
|
||||||
|
|
||||||
"Work" shall mean the work of authorship, whether in Source or
|
|
||||||
Object form, made available under the License, as indicated by a
|
|
||||||
copyright notice that is included in or attached to the work
|
|
||||||
(an example is provided in the Appendix below).
|
|
||||||
|
|
||||||
"Derivative Works" shall mean any work, whether in Source or Object
|
|
||||||
form, that is based on (or derived from) the Work and for which the
|
|
||||||
editorial revisions, annotations, elaborations, or other modifications
|
|
||||||
represent, as a whole, an original work of authorship. For the purposes
|
|
||||||
of this License, Derivative Works shall not include works that remain
|
|
||||||
separable from, or merely link (or bind by name) to the interfaces of,
|
|
||||||
the Work and Derivative Works thereof.
|
|
||||||
|
|
||||||
"Contribution" shall mean any work of authorship, including
|
|
||||||
the original version of the Work and any modifications or additions
|
|
||||||
to that Work or Derivative Works thereof, that is intentionally
|
|
||||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
|
||||||
or by an individual or Legal Entity authorized to submit on behalf of
|
|
||||||
the copyright owner. For the purposes of this definition, "submitted"
|
|
||||||
means any form of electronic, verbal, or written communication sent
|
|
||||||
to the Licensor or its representatives, including but not limited to
|
|
||||||
communication on electronic mailing lists, source code control systems,
|
|
||||||
and issue tracking systems that are managed by, or on behalf of, the
|
|
||||||
Licensor for the purpose of discussing and improving the Work, but
|
|
||||||
excluding communication that is conspicuously marked or otherwise
|
|
||||||
designated in writing by the copyright owner as "Not a Contribution."
|
|
||||||
|
|
||||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
|
||||||
on behalf of whom a Contribution has been received by Licensor and
|
|
||||||
subsequently incorporated within the Work.
|
|
||||||
|
|
||||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
copyright license to reproduce, prepare Derivative Works of,
|
|
||||||
publicly display, publicly perform, sublicense, and distribute the
|
|
||||||
Work and such Derivative Works in Source or Object form.
|
|
||||||
|
|
||||||
3. Grant of Patent License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
(except as stated in this section) patent license to make, have made,
|
|
||||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
|
||||||
where such license applies only to those patent claims licensable
|
|
||||||
by such Contributor that are necessarily infringed by their
|
|
||||||
Contribution(s) alone or by combination of their Contribution(s)
|
|
||||||
with the Work to which such Contribution(s) was submitted. If You
|
|
||||||
institute patent litigation against any entity (including a
|
|
||||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
|
||||||
or a Contribution incorporated within the Work constitutes direct
|
|
||||||
or contributory patent infringement, then any patent licenses
|
|
||||||
granted to You under this License for that Work shall terminate
|
|
||||||
as of the date such litigation is filed.
|
|
||||||
|
|
||||||
4. Redistribution. You may reproduce and distribute copies of the
|
|
||||||
Work or Derivative Works thereof in any medium, with or without
|
|
||||||
modifications, and in Source or Object form, provided that You
|
|
||||||
meet the following conditions:
|
|
||||||
|
|
||||||
(a) You must give any other recipients of the Work or
|
|
||||||
Derivative Works a copy of this License; and
|
|
||||||
|
|
||||||
(b) You must cause any modified files to carry prominent notices
|
|
||||||
stating that You changed the files; and
|
|
||||||
|
|
||||||
(c) You must retain, in the Source form of any Derivative Works
|
|
||||||
that You distribute, all copyright, patent, trademark, and
|
|
||||||
attribution notices from the Source form of the Work,
|
|
||||||
excluding those notices that do not pertain to any part of
|
|
||||||
the Derivative Works; and
|
|
||||||
|
|
||||||
(d) If the Work includes a "NOTICE" text file as part of its
|
|
||||||
distribution, then any Derivative Works that You distribute must
|
|
||||||
include a readable copy of the attribution notices contained
|
|
||||||
within such NOTICE file, excluding those notices that do not
|
|
||||||
pertain to any part of the Derivative Works, in at least one
|
|
||||||
of the following places: within a NOTICE text file distributed
|
|
||||||
as part of the Derivative Works; within the Source form or
|
|
||||||
documentation, if provided along with the Derivative Works; or,
|
|
||||||
within a display generated by the Derivative Works, if and
|
|
||||||
wherever such third-party notices normally appear. The contents
|
|
||||||
of the NOTICE file are for informational purposes only and
|
|
||||||
do not modify the License. You may add Your own attribution
|
|
||||||
notices within Derivative Works that You distribute, alongside
|
|
||||||
or as an addendum to the NOTICE text from the Work, provided
|
|
||||||
that such additional attribution notices cannot be construed
|
|
||||||
as modifying the License.
|
|
||||||
|
|
||||||
You may add Your own copyright statement to Your modifications and
|
|
||||||
may provide additional or different license terms and conditions
|
|
||||||
for use, reproduction, or distribution of Your modifications, or
|
|
||||||
for any such Derivative Works as a whole, provided Your use,
|
|
||||||
reproduction, and distribution of the Work otherwise complies with
|
|
||||||
the conditions stated in this License.
|
|
||||||
|
|
||||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
|
||||||
any Contribution intentionally submitted for inclusion in the Work
|
|
||||||
by You to the Licensor shall be under the terms and conditions of
|
|
||||||
this License, without any additional terms or conditions.
|
|
||||||
Notwithstanding the above, nothing herein shall supersede or modify
|
|
||||||
the terms of any separate license agreement you may have executed
|
|
||||||
with Licensor regarding such Contributions.
|
|
||||||
|
|
||||||
6. Trademarks. This License does not grant permission to use the trade
|
|
||||||
names, trademarks, service marks, or product names of the Licensor,
|
|
||||||
except as required for reasonable and customary use in describing the
|
|
||||||
origin of the Work and reproducing the content of the NOTICE file.
|
|
||||||
|
|
||||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
|
||||||
agreed to in writing, Licensor provides the Work (and each
|
|
||||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
implied, including, without limitation, any warranties or conditions
|
|
||||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
|
||||||
appropriateness of using or redistributing the Work and assume any
|
|
||||||
risks associated with Your exercise of permissions under this License.
|
|
||||||
|
|
||||||
8. Limitation of Liability. In no event and under no legal theory,
|
|
||||||
whether in tort (including negligence), contract, or otherwise,
|
|
||||||
unless required by applicable law (such as deliberate and grossly
|
|
||||||
negligent acts) or agreed to in writing, shall any Contributor be
|
|
||||||
liable to You for damages, including any direct, indirect, special,
|
|
||||||
incidental, or consequential damages of any character arising as a
|
|
||||||
result of this License or out of the use or inability to use the
|
|
||||||
Work (including but not limited to damages for loss of goodwill,
|
|
||||||
work stoppage, computer failure or malfunction, or any and all
|
|
||||||
other commercial damages or losses), even if such Contributor
|
|
||||||
has been advised of the possibility of such damages.
|
|
||||||
|
|
||||||
9. Accepting Warranty or Additional Liability. While redistributing
|
|
||||||
the Work or Derivative Works thereof, You may choose to offer,
|
|
||||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
|
||||||
or other liability obligations and/or rights consistent with this
|
|
||||||
License. However, in accepting such obligations, You may act only
|
|
||||||
on Your own behalf and on Your sole responsibility, not on behalf
|
|
||||||
of any other Contributor, and only if You agree to indemnify,
|
|
||||||
defend, and hold each Contributor harmless for any liability
|
|
||||||
incurred by, or claims asserted against, such Contributor by reason
|
|
||||||
of your accepting any such warranty or additional liability.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
APPENDIX: How to apply the Apache License to your work.
|
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
|
||||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
|
||||||
replaced with your own identifying information. (Don't include
|
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
|
||||||
comment syntax for the file format. We also recommend that a
|
|
||||||
file or class name and description of purpose be included on the
|
|
||||||
same "printed page" as the copyright notice for easier
|
|
||||||
identification within third-party archives.
|
|
||||||
|
|
||||||
Copyright [yyyy] [name of copyright owner]
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
25
LICENSE_MIT
25
LICENSE_MIT
@ -1,25 +0,0 @@
|
|||||||
Copyright (c) 2018 lanxu <lanxu@posteo.net>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any
|
|
||||||
person obtaining a copy of this software and associated
|
|
||||||
documentation files (the "Software"), to deal in the
|
|
||||||
Software without restriction, including without
|
|
||||||
limitation the rights to use, copy, modify, merge,
|
|
||||||
publish, distribute, sublicense, and/or sell copies of
|
|
||||||
the Software, and to permit persons to whom the Software
|
|
||||||
is furnished to do so, subject to the following
|
|
||||||
conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice
|
|
||||||
shall be included in all copies or substantial portions
|
|
||||||
of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
|
||||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
|
||||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
|
||||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
|
||||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
|
||||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
|
||||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
|
||||||
DEALINGS IN THE SOFTWARE.
|
|
11
README.md
11
README.md
@ -1,11 +0,0 @@
|
|||||||
# Silver Fang mini game
|
|
||||||
|
|
||||||
```sh
|
|
||||||
wargo watch -i .gitignore -i "pkg/*" -s "wasm-pack build --target=bundler --dev --out-name index"
|
|
||||||
```
|
|
||||||
|
|
||||||
And run in www
|
|
||||||
|
|
||||||
```sh
|
|
||||||
npm start
|
|
||||||
```
|
|
1
node_modules/.bin/acorn
generated
vendored
Symbolic link
1
node_modules/.bin/acorn
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../acorn/bin/acorn
|
1
node_modules/.bin/esbuild
generated
vendored
Symbolic link
1
node_modules/.bin/esbuild
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../esbuild/bin/esbuild
|
1
node_modules/.bin/nanoid
generated
vendored
Symbolic link
1
node_modules/.bin/nanoid
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../nanoid/bin/nanoid.cjs
|
1
node_modules/.bin/rollup
generated
vendored
Symbolic link
1
node_modules/.bin/rollup
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../rollup/dist/bin/rollup
|
1
node_modules/.bin/terser
generated
vendored
Symbolic link
1
node_modules/.bin/terser
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../terser/bin/terser
|
1
node_modules/.bin/tsc
generated
vendored
Symbolic link
1
node_modules/.bin/tsc
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../typescript/bin/tsc
|
1
node_modules/.bin/tsserver
generated
vendored
Symbolic link
1
node_modules/.bin/tsserver
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../typescript/bin/tsserver
|
1
node_modules/.bin/vite
generated
vendored
Symbolic link
1
node_modules/.bin/vite
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../vite/bin/vite.js
|
459
node_modules/.package-lock.json
generated
vendored
Normal file
459
node_modules/.package-lock.json
generated
vendored
Normal file
@ -0,0 +1,459 @@
|
|||||||
|
{
|
||||||
|
"name": "silver-fang-mini-game",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"../pkg": {
|
||||||
|
"name": "mini-game",
|
||||||
|
"version": "0.1.0"
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/gen-mapping": {
|
||||||
|
"version": "0.3.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz",
|
||||||
|
"integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/set-array": "^1.2.1",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.24"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/resolve-uri": {
|
||||||
|
"version": "3.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
|
||||||
|
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/set-array": {
|
||||||
|
"version": "1.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
|
||||||
|
"integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/source-map": {
|
||||||
|
"version": "0.3.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz",
|
||||||
|
"integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/gen-mapping": "^0.3.5",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.25"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/sourcemap-codec": {
|
||||||
|
"version": "1.4.15",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
|
||||||
|
"integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/trace-mapping": {
|
||||||
|
"version": "0.3.25",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
|
||||||
|
"integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/resolve-uri": "^3.1.0",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-x64-gnu": {
|
||||||
|
"version": "4.24.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.24.0.tgz",
|
||||||
|
"integrity": "sha512-ZXFk7M72R0YYFN5q13niV0B7G8/5dcQ9JDp8keJSfr3GoZeXEoMHP/HlvqROA3OMbMdfr19IjCeNAnPUG93b6A==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-x64-musl": {
|
||||||
|
"version": "4.24.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.24.0.tgz",
|
||||||
|
"integrity": "sha512-w1i+L7kAXZNdYl+vFvzSZy8Y1arS7vMgIy8wusXJzRrPyof5LAb02KGr1PD2EkRcl73kHulIID0M501lN+vobQ==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@types/estree": {
|
||||||
|
"version": "1.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
|
||||||
|
"integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/node": {
|
||||||
|
"version": "20.12.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz",
|
||||||
|
"integrity": "sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"undici-types": "~5.26.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/acorn": {
|
||||||
|
"version": "8.11.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
|
||||||
|
"integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"bin": {
|
||||||
|
"acorn": "bin/acorn"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.4.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/buffer-from": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true
|
||||||
|
},
|
||||||
|
"node_modules/commander": {
|
||||||
|
"version": "2.20.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
|
||||||
|
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true
|
||||||
|
},
|
||||||
|
"node_modules/esbuild": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
|
||||||
|
"dev": true,
|
||||||
|
"hasInstallScript": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"bin": {
|
||||||
|
"esbuild": "bin/esbuild"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@esbuild/aix-ppc64": "0.21.5",
|
||||||
|
"@esbuild/android-arm": "0.21.5",
|
||||||
|
"@esbuild/android-arm64": "0.21.5",
|
||||||
|
"@esbuild/android-x64": "0.21.5",
|
||||||
|
"@esbuild/darwin-arm64": "0.21.5",
|
||||||
|
"@esbuild/darwin-x64": "0.21.5",
|
||||||
|
"@esbuild/freebsd-arm64": "0.21.5",
|
||||||
|
"@esbuild/freebsd-x64": "0.21.5",
|
||||||
|
"@esbuild/linux-arm": "0.21.5",
|
||||||
|
"@esbuild/linux-arm64": "0.21.5",
|
||||||
|
"@esbuild/linux-ia32": "0.21.5",
|
||||||
|
"@esbuild/linux-loong64": "0.21.5",
|
||||||
|
"@esbuild/linux-mips64el": "0.21.5",
|
||||||
|
"@esbuild/linux-ppc64": "0.21.5",
|
||||||
|
"@esbuild/linux-riscv64": "0.21.5",
|
||||||
|
"@esbuild/linux-s390x": "0.21.5",
|
||||||
|
"@esbuild/linux-x64": "0.21.5",
|
||||||
|
"@esbuild/netbsd-x64": "0.21.5",
|
||||||
|
"@esbuild/openbsd-x64": "0.21.5",
|
||||||
|
"@esbuild/sunos-x64": "0.21.5",
|
||||||
|
"@esbuild/win32-arm64": "0.21.5",
|
||||||
|
"@esbuild/win32-ia32": "0.21.5",
|
||||||
|
"@esbuild/win32-x64": "0.21.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mini-game": {
|
||||||
|
"resolved": "../pkg",
|
||||||
|
"link": true
|
||||||
|
},
|
||||||
|
"node_modules/nanoid": {
|
||||||
|
"version": "3.3.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
|
||||||
|
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
|
||||||
|
"dev": true,
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/ai"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"bin": {
|
||||||
|
"nanoid": "bin/nanoid.cjs"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/phaser": {
|
||||||
|
"version": "3.86.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/phaser/-/phaser-3.86.0.tgz",
|
||||||
|
"integrity": "sha512-A7rog3GhXYcWsPimteQmudyY6W0fx0eviOoo9Cwpzae1RPeJBDkhmWv5IFrQO9dL+T907E4dprQPnd2D4WhoEw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"eventemitter3": "^5.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/phaser/node_modules/eventemitter3": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz",
|
||||||
|
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/picocolors": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
|
"node_modules/postcss": {
|
||||||
|
"version": "8.4.47",
|
||||||
|
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz",
|
||||||
|
"integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==",
|
||||||
|
"dev": true,
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/postcss/"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "tidelift",
|
||||||
|
"url": "https://tidelift.com/funding/github/npm/postcss"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/ai"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"nanoid": "^3.3.7",
|
||||||
|
"picocolors": "^1.1.0",
|
||||||
|
"source-map-js": "^1.2.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^10 || ^12 || >=14"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/rollup": {
|
||||||
|
"version": "4.24.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.24.0.tgz",
|
||||||
|
"integrity": "sha512-DOmrlGSXNk1DM0ljiQA+i+o0rSLhtii1je5wgk60j49d1jHT5YYttBv1iWOnYSTG+fZZESUOSNiAl89SIet+Cg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/estree": "1.0.6"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"rollup": "dist/bin/rollup"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18.0.0",
|
||||||
|
"npm": ">=8.0.0"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@rollup/rollup-android-arm-eabi": "4.24.0",
|
||||||
|
"@rollup/rollup-android-arm64": "4.24.0",
|
||||||
|
"@rollup/rollup-darwin-arm64": "4.24.0",
|
||||||
|
"@rollup/rollup-darwin-x64": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-arm-gnueabihf": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-arm-musleabihf": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-arm64-gnu": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-arm64-musl": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-powerpc64le-gnu": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-riscv64-gnu": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-s390x-gnu": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-x64-gnu": "4.24.0",
|
||||||
|
"@rollup/rollup-linux-x64-musl": "4.24.0",
|
||||||
|
"@rollup/rollup-win32-arm64-msvc": "4.24.0",
|
||||||
|
"@rollup/rollup-win32-ia32-msvc": "4.24.0",
|
||||||
|
"@rollup/rollup-win32-x64-msvc": "4.24.0",
|
||||||
|
"fsevents": "~2.3.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/source-map": {
|
||||||
|
"version": "0.6.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||||
|
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/source-map-js": {
|
||||||
|
"version": "1.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
||||||
|
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "BSD-3-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/source-map-support": {
|
||||||
|
"version": "0.5.21",
|
||||||
|
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
|
||||||
|
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"buffer-from": "^1.0.0",
|
||||||
|
"source-map": "^0.6.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/terser": {
|
||||||
|
"version": "5.31.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/terser/-/terser-5.31.0.tgz",
|
||||||
|
"integrity": "sha512-Q1JFAoUKE5IMfI4Z/lkE/E6+SwgzO+x4tq4v1AyBLRj8VSYvRO6A/rQrPg1yud4g0En9EKI1TvFRF2tQFcoUkg==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/source-map": "^0.3.3",
|
||||||
|
"acorn": "^8.8.2",
|
||||||
|
"commander": "^2.20.0",
|
||||||
|
"source-map-support": "~0.5.20"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"terser": "bin/terser"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/typescript": {
|
||||||
|
"version": "5.6.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz",
|
||||||
|
"integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"bin": {
|
||||||
|
"tsc": "bin/tsc",
|
||||||
|
"tsserver": "bin/tsserver"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14.17"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/undici-types": {
|
||||||
|
"version": "5.26.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
|
||||||
|
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"peer": true
|
||||||
|
},
|
||||||
|
"node_modules/vite": {
|
||||||
|
"version": "5.4.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.9.tgz",
|
||||||
|
"integrity": "sha512-20OVpJHh0PAM0oSOELa5GaZNWeDjcAvQjGXy2Uyr+Tp+/D2/Hdz6NLgpJLsarPTA2QJ6v8mX2P1ZfbsSKvdMkg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"esbuild": "^0.21.3",
|
||||||
|
"postcss": "^8.4.43",
|
||||||
|
"rollup": "^4.20.0"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"vite": "bin/vite.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^18.0.0 || >=20.0.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/vitejs/vite?sponsor=1"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"fsevents": "~2.3.3"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@types/node": "^18.0.0 || >=20.0.0",
|
||||||
|
"less": "*",
|
||||||
|
"lightningcss": "^1.21.0",
|
||||||
|
"sass": "*",
|
||||||
|
"sass-embedded": "*",
|
||||||
|
"stylus": "*",
|
||||||
|
"sugarss": "*",
|
||||||
|
"terser": "^5.4.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"@types/node": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"less": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"lightningcss": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"sass": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"sass-embedded": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"stylus": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"sugarss": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"terser": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
15
node_modules/.vite/deps/_metadata.json
generated
vendored
Normal file
15
node_modules/.vite/deps/_metadata.json
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"hash": "98f77b91",
|
||||||
|
"configHash": "4bc9e537",
|
||||||
|
"lockfileHash": "2d1ddbde",
|
||||||
|
"browserHash": "b405ef05",
|
||||||
|
"optimized": {
|
||||||
|
"phaser": {
|
||||||
|
"src": "../../phaser/dist/phaser.js",
|
||||||
|
"file": "phaser.js",
|
||||||
|
"fileHash": "0b1a0411",
|
||||||
|
"needsInterop": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"chunks": {}
|
||||||
|
}
|
3
node_modules/.vite/deps/package.json
generated
vendored
Normal file
3
node_modules/.vite/deps/package.json
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"type": "module"
|
||||||
|
}
|
138736
node_modules/.vite/deps/phaser.js
generated
vendored
Normal file
138736
node_modules/.vite/deps/phaser.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7
node_modules/.vite/deps/phaser.js.map
generated
vendored
Normal file
7
node_modules/.vite/deps/phaser.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/@esbuild/linux-x64/README.md
generated
vendored
Normal file
3
node_modules/@esbuild/linux-x64/README.md
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# esbuild
|
||||||
|
|
||||||
|
This is the Linux 64-bit binary for esbuild, a JavaScript bundler and minifier. See https://github.com/evanw/esbuild for details.
|
20
node_modules/@esbuild/linux-x64/package.json
generated
vendored
Normal file
20
node_modules/@esbuild/linux-x64/package.json
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"name": "@esbuild/linux-x64",
|
||||||
|
"version": "0.21.5",
|
||||||
|
"description": "The Linux 64-bit binary for esbuild, a JavaScript bundler.",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/evanw/esbuild.git"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"preferUnplugged": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
]
|
||||||
|
}
|
19
node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
227
node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
227
node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,227 @@
|
|||||||
|
# @jridgewell/gen-mapping
|
||||||
|
|
||||||
|
> Generate source maps
|
||||||
|
|
||||||
|
`gen-mapping` allows you to generate a source map during transpilation or minification.
|
||||||
|
With a source map, you're able to trace the original location in the source file, either in Chrome's
|
||||||
|
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
|
||||||
|
provides the same `addMapping` and `setSourceContent` API.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/gen-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const map = new GenMapping({
|
||||||
|
file: 'output.js',
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
});
|
||||||
|
|
||||||
|
setSourceContent(map, 'input.js', `function foo() {}`);
|
||||||
|
|
||||||
|
addMapping(map, {
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
addMapping(map, {
|
||||||
|
generated: { line: 1, column: 9 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 9 },
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toDecodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
file: 'output.js',
|
||||||
|
names: ['foo'],
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['function foo() {}'],
|
||||||
|
mappings: [
|
||||||
|
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toEncodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
file: 'output.js',
|
||||||
|
names: ['foo'],
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['function foo() {}'],
|
||||||
|
mappings: 'AAAA,SAASA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Smaller Sourcemaps
|
||||||
|
|
||||||
|
Not everything needs to be added to a sourcemap, and needless markings can cause signficantly
|
||||||
|
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
|
||||||
|
intelligently determine if this marking adds useful information. If not, the marking will be
|
||||||
|
skipped.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { maybeAddMapping } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const map = new GenMapping();
|
||||||
|
|
||||||
|
// Adding a sourceless marking at the beginning of a line isn't useful.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Adding a new source marking is useful.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
// But adding another marking pointing to the exact same original location isn't, even if the
|
||||||
|
// generated column changed.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 9 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toEncodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
names: [],
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: [null],
|
||||||
|
mappings: 'AAAA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 5852872 bytes
|
||||||
|
gen-mapping: addMapping 7716042 bytes
|
||||||
|
source-map-js 6143250 bytes
|
||||||
|
source-map-0.6.1 6124102 bytes
|
||||||
|
source-map-0.8.0 6121173 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
|
||||||
|
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
|
||||||
|
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
|
||||||
|
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 37578063 bytes
|
||||||
|
gen-mapping: addMapping 37212897 bytes
|
||||||
|
source-map-js 47638527 bytes
|
||||||
|
source-map-0.6.1 47690503 bytes
|
||||||
|
source-map-0.8.0 47470188 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addMapping
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
|
||||||
|
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
|
||||||
|
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
|
||||||
|
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
|
||||||
|
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 416247 bytes
|
||||||
|
gen-mapping: addMapping 419824 bytes
|
||||||
|
source-map-js 1024619 bytes
|
||||||
|
source-map-0.6.1 1146004 bytes
|
||||||
|
source-map-0.8.0 1113250 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
|
||||||
|
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
|
||||||
|
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
|
||||||
|
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
|
||||||
|
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 975096 bytes
|
||||||
|
gen-mapping: addMapping 1102981 bytes
|
||||||
|
source-map-js 2918836 bytes
|
||||||
|
source-map-0.6.1 2885435 bytes
|
||||||
|
source-map-0.8.0 2874336 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
|
||||||
|
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
|
||||||
|
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
|
||||||
|
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
||||||
|
[trace-mapping]: https://github.com/jridgewell/trace-mapping
|
230
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
230
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,230 @@
|
|||||||
|
import { SetArray, put, remove } from '@jridgewell/set-array';
|
||||||
|
import { encode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
|
||||||
|
const NO_NAME = -1;
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
class GenMapping {
|
||||||
|
constructor({ file, sourceRoot } = {}) {
|
||||||
|
this._names = new SetArray();
|
||||||
|
this._sources = new SetArray();
|
||||||
|
this._sourcesContent = [];
|
||||||
|
this._mappings = [];
|
||||||
|
this.file = file;
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this._ignoreList = new SetArray();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||||
|
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
}
|
||||||
|
function addMapping(map, mapping) {
|
||||||
|
return addMappingInternal(false, map, mapping);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||||
|
* not add a segment with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
const maybeAddMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(true, map, mapping);
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
function setSourceContent(map, source, content) {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
|
||||||
|
const index = put(sources, source);
|
||||||
|
sourcesContent[index] = content;
|
||||||
|
}
|
||||||
|
function setIgnore(map, source, ignore = true) {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
|
||||||
|
const index = put(sources, source);
|
||||||
|
if (index === sourcesContent.length)
|
||||||
|
sourcesContent[index] = null;
|
||||||
|
if (ignore)
|
||||||
|
put(ignoreList, index);
|
||||||
|
else
|
||||||
|
remove(ignoreList, index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function toDecodedMap(map) {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
|
||||||
|
removeEmptyFinalLines(mappings);
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: map.file || undefined,
|
||||||
|
names: names.array,
|
||||||
|
sourceRoot: map.sourceRoot || undefined,
|
||||||
|
sources: sources.array,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: ignoreList.array,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function toEncodedMap(map) {
|
||||||
|
const decoded = toDecodedMap(map);
|
||||||
|
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||||
|
*/
|
||||||
|
function fromMap(input) {
|
||||||
|
const map = new TraceMap(input);
|
||||||
|
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||||
|
putAll(cast(gen)._names, map.names);
|
||||||
|
putAll(cast(gen)._sources, map.sources);
|
||||||
|
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||||
|
cast(gen)._mappings = decodedMappings(map);
|
||||||
|
if (map.ignoreList)
|
||||||
|
putAll(cast(gen)._ignoreList, map.ignoreList);
|
||||||
|
return gen;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
function allMappings(map) {
|
||||||
|
const out = [];
|
||||||
|
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
||||||
|
for (let i = 0; i < mappings.length; i++) {
|
||||||
|
const line = mappings[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||||
|
let source = undefined;
|
||||||
|
let original = undefined;
|
||||||
|
let name = undefined;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = sources.array[seg[SOURCES_INDEX]];
|
||||||
|
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names.array[seg[NAMES_INDEX]];
|
||||||
|
}
|
||||||
|
out.push({ generated, source, original, name });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||||
|
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
|
||||||
|
const line = getLine(mappings, genLine);
|
||||||
|
const index = getColumnIndex(line, genColumn);
|
||||||
|
if (!source) {
|
||||||
|
if (skipable && skipSourceless(line, index))
|
||||||
|
return;
|
||||||
|
return insert(line, index, [genColumn]);
|
||||||
|
}
|
||||||
|
const sourcesIndex = put(sources, source);
|
||||||
|
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||||
|
if (sourcesIndex === sourcesContent.length)
|
||||||
|
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
||||||
|
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return insert(line, index, name
|
||||||
|
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
|
}
|
||||||
|
function getLine(mappings, index) {
|
||||||
|
for (let i = mappings.length; i <= index; i++) {
|
||||||
|
mappings[i] = [];
|
||||||
|
}
|
||||||
|
return mappings[index];
|
||||||
|
}
|
||||||
|
function getColumnIndex(line, genColumn) {
|
||||||
|
let index = line.length;
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
const current = line[i];
|
||||||
|
if (genColumn >= current[COLUMN])
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
function removeEmptyFinalLines(mappings) {
|
||||||
|
const { length } = mappings;
|
||||||
|
let len = length;
|
||||||
|
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||||
|
if (mappings[i].length > 0)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (len < length)
|
||||||
|
mappings.length = len;
|
||||||
|
}
|
||||||
|
function putAll(setarr, array) {
|
||||||
|
for (let i = 0; i < array.length; i++)
|
||||||
|
put(setarr, array[i]);
|
||||||
|
}
|
||||||
|
function skipSourceless(line, index) {
|
||||||
|
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||||
|
// doesn't generate any useful information.
|
||||||
|
if (index === 0)
|
||||||
|
return true;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||||
|
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||||
|
// a sourceless position, which is useful.
|
||||||
|
return prev.length === 1;
|
||||||
|
}
|
||||||
|
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||||
|
// A source/named segment at the start of a line gives position at that genColumn
|
||||||
|
if (index === 0)
|
||||||
|
return false;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||||
|
if (prev.length === 1)
|
||||||
|
return false;
|
||||||
|
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||||
|
// provide any new position information.
|
||||||
|
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||||
|
sourceLine === prev[SOURCE_LINE] &&
|
||||||
|
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||||
|
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||||
|
}
|
||||||
|
function addMappingInternal(skipable, map, mapping) {
|
||||||
|
const { generated, source, original, name, content } = mapping;
|
||||||
|
if (!source) {
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
||||||
|
}
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setIgnore, setSourceContent, toDecodedMap, toEncodedMap };
|
||||||
|
//# sourceMappingURL=gen-mapping.mjs.map
|
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
246
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
246
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,246 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
|
||||||
|
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
|
||||||
|
const NO_NAME = -1;
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
class GenMapping {
|
||||||
|
constructor({ file, sourceRoot } = {}) {
|
||||||
|
this._names = new setArray.SetArray();
|
||||||
|
this._sources = new setArray.SetArray();
|
||||||
|
this._sourcesContent = [];
|
||||||
|
this._mappings = [];
|
||||||
|
this.file = file;
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this._ignoreList = new setArray.SetArray();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||||
|
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
}
|
||||||
|
function addMapping(map, mapping) {
|
||||||
|
return addMappingInternal(false, map, mapping);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||||
|
* not add a segment with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
const maybeAddMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(true, map, mapping);
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
function setSourceContent(map, source, content) {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
|
||||||
|
const index = setArray.put(sources, source);
|
||||||
|
sourcesContent[index] = content;
|
||||||
|
}
|
||||||
|
function setIgnore(map, source, ignore = true) {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
|
||||||
|
const index = setArray.put(sources, source);
|
||||||
|
if (index === sourcesContent.length)
|
||||||
|
sourcesContent[index] = null;
|
||||||
|
if (ignore)
|
||||||
|
setArray.put(ignoreList, index);
|
||||||
|
else
|
||||||
|
setArray.remove(ignoreList, index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function toDecodedMap(map) {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
|
||||||
|
removeEmptyFinalLines(mappings);
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: map.file || undefined,
|
||||||
|
names: names.array,
|
||||||
|
sourceRoot: map.sourceRoot || undefined,
|
||||||
|
sources: sources.array,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: ignoreList.array,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function toEncodedMap(map) {
|
||||||
|
const decoded = toDecodedMap(map);
|
||||||
|
return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||||
|
*/
|
||||||
|
function fromMap(input) {
|
||||||
|
const map = new traceMapping.TraceMap(input);
|
||||||
|
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||||
|
putAll(cast(gen)._names, map.names);
|
||||||
|
putAll(cast(gen)._sources, map.sources);
|
||||||
|
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||||
|
cast(gen)._mappings = traceMapping.decodedMappings(map);
|
||||||
|
if (map.ignoreList)
|
||||||
|
putAll(cast(gen)._ignoreList, map.ignoreList);
|
||||||
|
return gen;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
function allMappings(map) {
|
||||||
|
const out = [];
|
||||||
|
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
||||||
|
for (let i = 0; i < mappings.length; i++) {
|
||||||
|
const line = mappings[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||||
|
let source = undefined;
|
||||||
|
let original = undefined;
|
||||||
|
let name = undefined;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = sources.array[seg[SOURCES_INDEX]];
|
||||||
|
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names.array[seg[NAMES_INDEX]];
|
||||||
|
}
|
||||||
|
out.push({ generated, source, original, name });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
// This split declaration is only so that terser can eliminate the static initialization block.
// Core implementation behind both addSegment and maybeAddSegment: `skipable` toggles the
// "only add if it provides new information" behavior. Lines/columns here are 0-based.
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
    const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
    // Ensure the generated line exists, then find the sorted insertion point for this column.
    const line = getLine(mappings, genLine);
    const index = getColumnIndex(line, genColumn);
    if (!source) {
        // Sourceless segment: may be skipped when it adds nothing over its left neighbor.
        if (skipable && skipSourceless(line, index))
            return;
        return insert(line, index, [genColumn]);
    }
    const sourcesIndex = setArray.put(sources, source);
    const namesIndex = name ? setArray.put(names, name) : NO_NAME;
    if (sourcesIndex === sourcesContent.length)
        // First time this source is seen: record its content, or a null placeholder.
        sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
    if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
        return;
    }
    return insert(line, index, name
        ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
        : [genColumn, sourcesIndex, sourceLine, sourceColumn]);
}
|
||||||
|
// Returns the segment list for generated line `index`, growing `mappings`
// with empty lines as needed so the index always exists.
function getLine(mappings, index) {
    while (mappings.length <= index) {
        mappings.push([]);
    }
    return mappings[index];
}
|
||||||
|
// Returns the insertion index for `genColumn` within `line`, keeping the line sorted
// by generated column. Equal columns insert after the existing segment.
function getColumnIndex(line, genColumn) {
    // Scan from the end: new segments are usually appended, so this is typically O(1).
    for (let i = line.length - 1; i >= 0; i--) {
        if (genColumn >= line[i][COLUMN])
            return i + 1;
    }
    return 0;
}
|
||||||
|
// Inserts `value` at `index`, shifting later elements right by one (mutates in place).
function insert(array, index, value) {
    let i = array.length;
    while (i > index) {
        array[i] = array[i - 1];
        i--;
    }
    array[index] = value;
}
|
||||||
|
// Trims trailing empty lines from the mappings array (mutates in place).
// Interior empty lines are preserved, since they still affect line numbering.
function removeEmptyFinalLines(mappings) {
    let end = mappings.length;
    while (end > 0 && mappings[end - 1].length === 0) {
        end--;
    }
    if (end < mappings.length)
        mappings.length = end;
}
|
||||||
|
// Inserts every element of `array` into the set-array `setarr`, preserving order.
function putAll(setarr, array) {
    for (const item of array) {
        setArray.put(setarr, item);
    }
}
|
||||||
|
// Decides whether a sourceless segment inserted at `index` would add any information.
function skipSourceless(line, index) {
    // The start of a line is already sourceless, so a sourceless segment at position 0
    // generates no useful information.
    if (index === 0)
        return true;
    // Skippable only if the preceding segment is itself sourceless (length-1 segments carry
    // just a generated column). Otherwise this segment usefully ends a sourced/named range.
    const prev = line[index - 1];
    return prev.length === 1;
}
|
||||||
|
// Decides whether a sourced segment would duplicate the previous segment's information.
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
    // A source/named segment at the start of a line always provides new position info.
    if (index === 0)
        return false;
    const prev = line[index - 1];
    // Transitioning from a sourceless segment to a sourced one is new information.
    if (prev.length === 1)
        return false;
    // Skippable only when the previous segment points at the exact same original
    // position, including the name (or absence of one).
    const prevName = prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME;
    return (sourcesIndex === prev[SOURCES_INDEX] &&
        sourceLine === prev[SOURCE_LINE] &&
        sourceColumn === prev[SOURCE_COLUMN] &&
        namesIndex === prevName);
}
|
||||||
|
// Normalizes a high-level mapping (1-based generated/original lines) into a call to
// addSegmentInternal (0-based lines). Sourceless mappings drop all original-position fields.
function addMappingInternal(skipable, map, mapping) {
    const { generated, source, original, name, content } = mapping;
    const genLine = generated.line - 1;
    if (!source) {
        return addSegmentInternal(skipable, map, genLine, generated.column, null, null, null, null, null);
    }
    return addSegmentInternal(skipable, map, genLine, generated.column, source, original.line - 1, original.column, name, content);
}
|
||||||
|
|
||||||
|
// Public API of the UMD bundle: the GenMapping class plus its functional companions.
exports.GenMapping = GenMapping;
exports.addMapping = addMapping;
exports.addSegment = addSegment;
exports.allMappings = allMappings;
exports.fromMap = fromMap;
exports.maybeAddMapping = maybeAddMapping;
exports.maybeAddSegment = maybeAddSegment;
exports.setIgnore = setIgnore;
exports.setSourceContent = setSourceContent;
exports.toDecodedMap = toDecodedMap;
exports.toEncodedMap = toEncodedMap;

// Mark the exports object as an ES module for interop with transpiled importers.
Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=gen-mapping.umd.js.map
|
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
88
node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
88
node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export declare type Options = {
    /** Output filename recorded in the generated map's `file` field. */
    file?: string | null;
    /** Root recorded in the generated map's `sourceRoot` field. */
    sourceRoot?: string | null;
};
/**
 * Provides the state to generate a sourcemap.
 */
export declare class GenMapping {
    private _names;
    private _sources;
    private _sourcesContent;
    private _mappings;
    private _ignoreList;
    file: string | null | undefined;
    sourceRoot: string | null | undefined;
    constructor({ file, sourceRoot }?: Options);
}
/**
 * A low-level API to associate a generated position with an original source position. Line and
 * column here are 0-based, unlike `addMapping`.
 */
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
 * A high-level API to associate a generated position with an original source position. Line is
 * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
 */
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source?: null;
    original?: null;
    name?: null;
    content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name?: null;
    content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
    content?: string | null;
}): void;
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
export declare const maybeAddSegment: typeof addSegment;
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
export declare const maybeAddMapping: typeof addMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
/**
 * Marks (or unmarks, with `ignore: false`) a source as ignored in the output map's `ignoreList`.
 */
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
export declare function allMappings(map: GenMapping): Mapping[];
|
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
// Semantic aliases for the numeric fields of a SourceMapSegment tuple.
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
/**
 * A decoded mapping segment: 1 element (generated column only), 4 elements (plus the
 * original source position), or 5 elements (plus a name index).
 */
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
// Tuple-index constants for reading SourceMapSegment fields by position.
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
|
36
node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
36
node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
/** The common fields of a version-3 source map (shared by encoded and decoded forms). */
export interface SourceMapV3 {
    file?: string | null;
    names: readonly string[];
    sourceRoot?: string;
    sources: readonly (string | null)[];
    sourcesContent?: readonly (string | null)[];
    version: 3;
    ignoreList?: readonly number[];
}
/** A source map whose mappings are a VLQ-encoded string. */
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
/** A source map whose mappings are decoded segment arrays, one array per generated line. */
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: readonly SourceMapSegment[][];
}
/** A line/column position (line is 1-based, column 0-based in the public API). */
export interface Pos {
    line: number;
    column: number;
}
/**
 * A high-level mapping object as returned by `allMappings`: either fully sourceless,
 * sourced with a name, or sourced without one.
 */
export declare type Mapping = {
    generated: Pos;
    source: undefined;
    original: undefined;
    name: undefined;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: undefined;
};
|
76
node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
76
node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/gen-mapping",
|
||||||
|
"version": "0.3.5",
|
||||||
|
"description": "Generate source maps",
|
||||||
|
"keywords": [
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/gen-mapping",
|
||||||
|
"main": "dist/gen-mapping.umd.js",
|
||||||
|
"module": "dist/gen-mapping.mjs",
|
||||||
|
"types": "dist/types/gen-mapping.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/gen-mapping.d.ts",
|
||||||
|
"browser": "./dist/gen-mapping.umd.js",
|
||||||
|
"require": "./dist/gen-mapping.umd.js",
|
||||||
|
"import": "./dist/gen-mapping.mjs"
|
||||||
|
},
|
||||||
|
"./dist/gen-mapping.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node benchmark/index.mjs",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.2",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.29",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.21.0",
|
||||||
|
"@typescript-eslint/parser": "5.21.0",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "7.11.2",
|
||||||
|
"eslint": "8.14.0",
|
||||||
|
"eslint-config-prettier": "8.5.0",
|
||||||
|
"mocha": "9.2.2",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.6.2",
|
||||||
|
"rollup": "2.70.2",
|
||||||
|
"tsx": "4.7.1",
|
||||||
|
"typescript": "4.6.3"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/set-array": "^1.2.1",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.24"
|
||||||
|
}
|
||||||
|
}
|
19
node_modules/@jridgewell/resolve-uri/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/resolve-uri/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
40
node_modules/@jridgewell/resolve-uri/README.md
generated
vendored
Normal file
40
node_modules/@jridgewell/resolve-uri/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
# @jridgewell/resolve-uri
|
||||||
|
|
||||||
|
> Resolve a URI relative to an optional base URI
|
||||||
|
|
||||||
|
Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/resolve-uri
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
function resolve(input: string, base?: string): string;
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
import resolve from '@jridgewell/resolve-uri';
|
||||||
|
|
||||||
|
resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
|
||||||
|
```
|
||||||
|
|
||||||
|
| Input | Base | Resolution | Explanation |
|
||||||
|
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
|
||||||
|
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only |
|
||||||
|
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol |
|
||||||
|
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only |
|
||||||
|
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin |
|
||||||
|
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative |
|
||||||
|
| `/example` | _rest_ | `/example` | Input is normalized only |
|
||||||
|
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base |
|
||||||
|
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file |
|
||||||
|
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory |
|
||||||
|
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file |
|
||||||
|
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory |
|
||||||
|
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file |
|
||||||
|
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory |
|
||||||
|
| `example` | `base/file` | `base/example` | Input is joined with the base without its file |
|
232
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
generated
vendored
Normal file
232
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
generated
vendored
Normal file
@ -0,0 +1,232 @@
|
|||||||
|
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 * 6. Query, including "?", optional.
 * 7. Hash, including "#", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 * 3. Query, including "?", optional.
 * 4. Hash, including "#", optional.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
// Classification predicates used by parseUrl to decide how to interpret an input string.
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
// Protocol-relative URLs, eg "//example.com/path".
function isSchemeRelativeUrl(input) {
    return input.startsWith('//');
}
function isAbsolutePath(input) {
    return input.startsWith('/');
}
function isFileUrl(input) {
    return input.startsWith('file:');
}
// Explicitly relative input: starts with ".", "?", or "#".
function isRelative(input) {
    return /^[.?#]/.test(input);
}
|
||||||
|
// Parses a fully absolute URL (the caller guarantees a scheme is present) into URL
// parts, defaulting missing optional groups to '' and a missing path to '/'.
function parseAbsoluteUrl(input) {
    const match = urlRegex.exec(input);
    const [, scheme, user, host, port, path, query, hash] = match;
    return makeUrl(scheme, user || '', host, port || '', path || '/', query || '', hash || '');
}
|
||||||
|
// Parses a `file:` URL. These may omit the `//`, may lack a host, and their path may
// look relative; a non-absolute path is normalized by prefixing a '/'.
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
|
||||||
|
// Builds the mutable URL record used throughout resolution. `type` starts as
// Absolute (7); parseUrl downgrades it for non-absolute inputs.
function makeUrl(scheme, user, host, port, path, query, hash) {
    return {
        scheme,
        user,
        host,
        port,
        path,
        query,
        hash,
        type: 7 /* Absolute */,
    };
}
|
||||||
|
// Parses `input` into URL parts and classifies it via `type`. Non-absolute inputs are
// parsed by prefixing a fake scheme/host ("http://foo.com"), which is then blanked out.
// The order of checks matters: scheme-relative before absolute-path before file/absolute.
function parseUrl(input) {
    if (isSchemeRelativeUrl(input)) {
        const url = parseAbsoluteUrl('http:' + input);
        url.scheme = '';
        url.type = 6 /* SchemeRelative */;
        return url;
    }
    if (isAbsolutePath(input)) {
        const url = parseAbsoluteUrl('http://foo.com' + input);
        url.scheme = '';
        url.host = '';
        url.type = 5 /* AbsolutePath */;
        return url;
    }
    if (isFileUrl(input))
        return parseFileUrl(input);
    if (isAbsoluteUrl(input))
        return parseAbsoluteUrl(input);
    // Anything else is relative: query-only, hash-only, a relative path, or empty.
    const url = parseAbsoluteUrl('http://foo.com/' + input);
    url.scheme = '';
    url.host = '';
    url.type = input
        ? input.startsWith('?')
            ? 3 /* Query */
            : input.startsWith('#')
                ? 2 /* Hash */
                : 4 /* RelativePath */
        : 1 /* Empty */;
    return url;
}
|
||||||
|
// Strips the final filename component from `path`, keeping the trailing '/'.
function stripPathFilename(path) {
    // A trailing ".." is a parent-directory component of a relative path with excess
    // parents, not a filename — there is nothing to strip.
    if (path.endsWith('/..'))
        return path;
    // Keep everything up to and including the final '/'; with no '/', this yields ''.
    return path.slice(0, path.lastIndexOf('/') + 1);
}
|
||||||
|
// Rewrites `url.path` (a relative path — guaranteed by the caller) so it is resolved
// against `base`'s path. Mutates both arguments: `base` gets normalized first.
function mergePaths(url, base) {
    normalizePath(base, base.type);
    // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
    // path).
    if (url.path === '/') {
        url.path = base.path;
    }
    else {
        // Resolution happens relative to the base path's directory, not the file.
        url.path = stripPathFilename(base.path) + url.path;
    }
}
|
||||||
|
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation.
 *
 * Mutates `url.path` in place, compacting components into the front of the split array
 * with an insertion pointer rather than allocating a new array.
 */
function normalizePath(url, type) {
    // Only relative inputs (type <= RelativePath) may keep excess ".." components.
    const rel = type <= 4 /* RelativePath */;
    const pieces = url.path.split('/');
    // We need to preserve the first piece always, so that we output a leading slash. The item at
    // pieces[0] is an empty string.
    let pointer = 1;
    // Positive is the number of real directories we've output, used for popping a parent directory.
    // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
    let positive = 0;
    // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
    // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
    // real directory, we won't need to append, unless the other conditions happen again.
    let addTrailingSlash = false;
    for (let i = 1; i < pieces.length; i++) {
        const piece = pieces[i];
        // An empty directory, could be a trailing slash, or just a double "//" in the path.
        if (!piece) {
            addTrailingSlash = true;
            continue;
        }
        // If we encounter a real directory, then we don't need to append anymore.
        addTrailingSlash = false;
        // A current directory, which we can always drop.
        if (piece === '.')
            continue;
        // A parent directory, we need to see if there are any real directories we can pop. Else, we
        // have an excess of parents, and we'll need to keep the "..".
        if (piece === '..') {
            if (positive) {
                addTrailingSlash = true;
                positive--;
                pointer--;
            }
            else if (rel) {
                // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                pieces[pointer++] = piece;
            }
            continue;
        }
        // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
        // any popped or dropped directories.
        pieces[pointer++] = piece;
        positive++;
    }
    // Rebuild the path from the compacted components, restoring the leading '/'.
    let path = '';
    for (let i = 1; i < pointer; i++) {
        path += '/' + pieces[i];
    }
    if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
        path += '/';
    }
    url.path = path;
}
|
||||||
|
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 *
 * Parses both, copies the fields `input` lacks from `base` (the switch cases
 * intentionally fall through, so a less-complete input inherits progressively more),
 * normalizes the path, then serializes according to the result's completeness.
 */
function resolve(input, base) {
    if (!input && !base)
        return '';
    const url = parseUrl(input);
    // inputType may be promoted to the base's type below (the result is at least as
    // "complete" as the base).
    let inputType = url.type;
    if (base && inputType !== 7 /* Absolute */) {
        const baseUrl = parseUrl(base);
        const baseType = baseUrl.type;
        switch (inputType) {
            case 1 /* Empty */:
                url.hash = baseUrl.hash;
            // fall through
            case 2 /* Hash */:
                url.query = baseUrl.query;
            // fall through
            case 3 /* Query */:
            case 4 /* RelativePath */:
                mergePaths(url, baseUrl);
            // fall through
            case 5 /* AbsolutePath */:
                // The host, user, and port are joined, you can't copy one without the others.
                url.user = baseUrl.user;
                url.host = baseUrl.host;
                url.port = baseUrl.port;
            // fall through
            case 6 /* SchemeRelative */:
                // The input doesn't have a schema at least, so we need to copy at least that over.
                url.scheme = baseUrl.scheme;
        }
        if (baseType > inputType)
            inputType = baseType;
    }
    normalizePath(url, inputType);
    const queryHash = url.query + url.hash;
    switch (inputType) {
        // This is impossible, because of the empty checks at the start of the function.
        // case UrlType.Empty:
        case 2 /* Hash */:
        case 3 /* Query */:
            return queryHash;
        case 4 /* RelativePath */: {
            // The first char is always a "/", and we need it to be relative.
            const path = url.path.slice(1);
            if (!path)
                return queryHash || '.';
            if (isRelative(base || input) && !isRelative(path)) {
                // If base started with a leading ".", or there is no base and input started with a ".",
                // then we need to ensure that the relative path starts with a ".". We don't know if
                // relative starts with a "..", though, so check before prepending.
                return './' + path + queryHash;
            }
            return path + queryHash;
        }
        case 5 /* AbsolutePath */:
            return url.path + queryHash;
        default:
            return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
    }
}
|
||||||
|
|
||||||
|
export { resolve as default };
|
||||||
|
//# sourceMappingURL=resolve-uri.mjs.map
|
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
240
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js
generated
vendored
Normal file
240
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js
generated
vendored
Normal file
@ -0,0 +1,240 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||||
|
typeof define === 'function' && define.amd ? define(factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
|
||||||
|
})(this, (function () { 'use strict';
|
||||||
|
|
||||||
|
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 * 6. Query, including "?", optional.
 * 7. Hash, including "#", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 * 3. Query, including "?", optional.
 * 4. Hash, including "#", optional.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
// Classification predicates used by parseUrl to decide how to interpret an input string.
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
// Protocol-relative URLs, eg "//example.com/path".
function isSchemeRelativeUrl(input) {
    return input.startsWith('//');
}
function isAbsolutePath(input) {
    return input.startsWith('/');
}
function isFileUrl(input) {
    return input.startsWith('file:');
}
// Explicitly relative input: starts with ".", "?", or "#".
function isRelative(input) {
    return /^[.?#]/.test(input);
}
|
||||||
|
// Parses a fully absolute URL (scheme guaranteed by the caller); missing optional
// groups default to '' and a missing path defaults to '/'.
function parseAbsoluteUrl(input) {
    const match = urlRegex.exec(input);
    return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
// Parses a `file:` URL; a non-absolute path is normalized with a leading '/'.
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
// Builds the mutable URL record used throughout resolution. `type` starts as
// Absolute (7); parseUrl downgrades it for non-absolute inputs.
function makeUrl(scheme, user, host, port, path, query, hash) {
    return {
        scheme,
        user,
        host,
        port,
        path,
        query,
        hash,
        type: 7 /* Absolute */,
    };
}
|
||||||
|
function parseUrl(input) {
|
||||||
|
if (isSchemeRelativeUrl(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http:' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.type = 6 /* SchemeRelative */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isAbsolutePath(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = 5 /* AbsolutePath */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isFileUrl(input))
|
||||||
|
return parseFileUrl(input);
|
||||||
|
if (isAbsoluteUrl(input))
|
||||||
|
return parseAbsoluteUrl(input);
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = input
|
||||||
|
? input.startsWith('?')
|
||||||
|
? 3 /* Query */
|
||||||
|
: input.startsWith('#')
|
||||||
|
? 2 /* Hash */
|
||||||
|
: 4 /* RelativePath */
|
||||||
|
: 1 /* Empty */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
function stripPathFilename(path) {
|
||||||
|
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||||
|
// paths. It's not a file, so we can't strip it.
|
||||||
|
if (path.endsWith('/..'))
|
||||||
|
return path;
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
function mergePaths(url, base) {
|
||||||
|
normalizePath(base, base.type);
|
||||||
|
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||||
|
// path).
|
||||||
|
if (url.path === '/') {
|
||||||
|
url.path = base.path;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Resolution happens relative to the base path's directory, not the file.
|
||||||
|
url.path = stripPathFilename(base.path) + url.path;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||||
|
* "foo/.". We need to normalize to a standard representation.
|
||||||
|
*/
|
||||||
|
function normalizePath(url, type) {
|
||||||
|
const rel = type <= 4 /* RelativePath */;
|
||||||
|
const pieces = url.path.split('/');
|
||||||
|
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||||
|
// pieces[0] is an empty string.
|
||||||
|
let pointer = 1;
|
||||||
|
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||||
|
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||||
|
let positive = 0;
|
||||||
|
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||||
|
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||||
|
// real directory, we won't need to append, unless the other conditions happen again.
|
||||||
|
let addTrailingSlash = false;
|
||||||
|
for (let i = 1; i < pieces.length; i++) {
|
||||||
|
const piece = pieces[i];
|
||||||
|
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||||
|
if (!piece) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// If we encounter a real directory, then we don't need to append anymore.
|
||||||
|
addTrailingSlash = false;
|
||||||
|
// A current directory, which we can always drop.
|
||||||
|
if (piece === '.')
|
||||||
|
continue;
|
||||||
|
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||||
|
// have an excess of parents, and we'll need to keep the "..".
|
||||||
|
if (piece === '..') {
|
||||||
|
if (positive) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
positive--;
|
||||||
|
pointer--;
|
||||||
|
}
|
||||||
|
else if (rel) {
|
||||||
|
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||||
|
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||||
|
// any popped or dropped directories.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
positive++;
|
||||||
|
}
|
||||||
|
let path = '';
|
||||||
|
for (let i = 1; i < pointer; i++) {
|
||||||
|
path += '/' + pieces[i];
|
||||||
|
}
|
||||||
|
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||||
|
path += '/';
|
||||||
|
}
|
||||||
|
url.path = path;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Attempts to resolve `input` URL/path relative to `base`.
|
||||||
|
*/
|
||||||
|
function resolve(input, base) {
|
||||||
|
if (!input && !base)
|
||||||
|
return '';
|
||||||
|
const url = parseUrl(input);
|
||||||
|
let inputType = url.type;
|
||||||
|
if (base && inputType !== 7 /* Absolute */) {
|
||||||
|
const baseUrl = parseUrl(base);
|
||||||
|
const baseType = baseUrl.type;
|
||||||
|
switch (inputType) {
|
||||||
|
case 1 /* Empty */:
|
||||||
|
url.hash = baseUrl.hash;
|
||||||
|
// fall through
|
||||||
|
case 2 /* Hash */:
|
||||||
|
url.query = baseUrl.query;
|
||||||
|
// fall through
|
||||||
|
case 3 /* Query */:
|
||||||
|
case 4 /* RelativePath */:
|
||||||
|
mergePaths(url, baseUrl);
|
||||||
|
// fall through
|
||||||
|
case 5 /* AbsolutePath */:
|
||||||
|
// The host, user, and port are joined, you can't copy one without the others.
|
||||||
|
url.user = baseUrl.user;
|
||||||
|
url.host = baseUrl.host;
|
||||||
|
url.port = baseUrl.port;
|
||||||
|
// fall through
|
||||||
|
case 6 /* SchemeRelative */:
|
||||||
|
// The input doesn't have a schema at least, so we need to copy at least that over.
|
||||||
|
url.scheme = baseUrl.scheme;
|
||||||
|
}
|
||||||
|
if (baseType > inputType)
|
||||||
|
inputType = baseType;
|
||||||
|
}
|
||||||
|
normalizePath(url, inputType);
|
||||||
|
const queryHash = url.query + url.hash;
|
||||||
|
switch (inputType) {
|
||||||
|
// This is impossible, because of the empty checks at the start of the function.
|
||||||
|
// case UrlType.Empty:
|
||||||
|
case 2 /* Hash */:
|
||||||
|
case 3 /* Query */:
|
||||||
|
return queryHash;
|
||||||
|
case 4 /* RelativePath */: {
|
||||||
|
// The first char is always a "/", and we need it to be relative.
|
||||||
|
const path = url.path.slice(1);
|
||||||
|
if (!path)
|
||||||
|
return queryHash || '.';
|
||||||
|
if (isRelative(base || input) && !isRelative(path)) {
|
||||||
|
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||||
|
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||||
|
// relative starts with a "..", though, so check before prepending.
|
||||||
|
return './' + path + queryHash;
|
||||||
|
}
|
||||||
|
return path + queryHash;
|
||||||
|
}
|
||||||
|
case 5 /* AbsolutePath */:
|
||||||
|
return url.path + queryHash;
|
||||||
|
default:
|
||||||
|
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolve;
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=resolve-uri.umd.js.map
|
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts
generated
vendored
Normal file
4
node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
/**
|
||||||
|
* Attempts to resolve `input` URL/path relative to `base`.
|
||||||
|
*/
|
||||||
|
export default function resolve(input: string, base: string | undefined): string;
|
69
node_modules/@jridgewell/resolve-uri/package.json
generated
vendored
Normal file
69
node_modules/@jridgewell/resolve-uri/package.json
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/resolve-uri",
|
||||||
|
"version": "3.1.2",
|
||||||
|
"description": "Resolve a URI relative to an optional base URI",
|
||||||
|
"keywords": [
|
||||||
|
"resolve",
|
||||||
|
"uri",
|
||||||
|
"url",
|
||||||
|
"path"
|
||||||
|
],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/resolve-uri",
|
||||||
|
"main": "dist/resolve-uri.umd.js",
|
||||||
|
"module": "dist/resolve-uri.mjs",
|
||||||
|
"types": "dist/types/resolve-uri.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/resolve-uri.d.ts",
|
||||||
|
"browser": "./dist/resolve-uri.umd.js",
|
||||||
|
"require": "./dist/resolve-uri.umd.js",
|
||||||
|
"import": "./dist/resolve-uri.mjs"
|
||||||
|
},
|
||||||
|
"./dist/resolve-uri.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"pretest": "run-s build:rollup",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.66.0",
|
||||||
|
"typescript": "4.5.5"
|
||||||
|
}
|
||||||
|
}
|
19
node_modules/@jridgewell/set-array/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/set-array/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
37
node_modules/@jridgewell/set-array/README.md
generated
vendored
Normal file
37
node_modules/@jridgewell/set-array/README.md
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
# @jridgewell/set-array
|
||||||
|
|
||||||
|
> Like a Set, but provides the index of the `key` in the backing array
|
||||||
|
|
||||||
|
This is designed to allow synchronizing a second array with the contents of the backing array, like
|
||||||
|
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
|
||||||
|
are never duplicates.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/set-array
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { SetArray, get, put, pop } from '@jridgewell/set-array';
|
||||||
|
|
||||||
|
const sa = new SetArray();
|
||||||
|
|
||||||
|
let index = put(sa, 'first');
|
||||||
|
assert.strictEqual(index, 0);
|
||||||
|
|
||||||
|
index = put(sa, 'second');
|
||||||
|
assert.strictEqual(index, 1);
|
||||||
|
|
||||||
|
assert.deepEqual(sa.array, [ 'first', 'second' ]);
|
||||||
|
|
||||||
|
index = get(sa, 'first');
|
||||||
|
assert.strictEqual(index, 0);
|
||||||
|
|
||||||
|
pop(sa);
|
||||||
|
index = get(sa, 'second');
|
||||||
|
assert.strictEqual(index, undefined);
|
||||||
|
assert.deepEqual(sa.array, [ 'first' ]);
|
||||||
|
```
|
69
node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
Normal file
69
node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
class SetArray {
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null };
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(set) {
|
||||||
|
return set;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
function get(setarr, key) {
|
||||||
|
return cast(setarr)._indexes[key];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
function put(setarr, key) {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index !== undefined)
|
||||||
|
return index;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
const length = array.push(key);
|
||||||
|
return (indexes[key] = length - 1);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
function pop(setarr) {
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
if (array.length === 0)
|
||||||
|
return;
|
||||||
|
const last = array.pop();
|
||||||
|
indexes[last] = undefined;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Removes the key, if it exists in the set.
|
||||||
|
*/
|
||||||
|
function remove(setarr, key) {
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index === undefined)
|
||||||
|
return;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
for (let i = index + 1; i < array.length; i++) {
|
||||||
|
const k = array[i];
|
||||||
|
array[i - 1] = k;
|
||||||
|
indexes[k]--;
|
||||||
|
}
|
||||||
|
indexes[key] = undefined;
|
||||||
|
array.pop();
|
||||||
|
}
|
||||||
|
|
||||||
|
export { SetArray, get, pop, put, remove };
|
||||||
|
//# sourceMappingURL=set-array.mjs.map
|
1
node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n array.pop();\n}\n"],"names":[],"mappings":"AAEA;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CACF;AAOD;;;;AAIA,SAAS,IAAI,CAAgB,GAAgB;IAC3C,OAAO,GAAU,CAAC;AACpB,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;IAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACpC,CAAC;AAED;;;;SAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;IAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;AACrC,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB;IACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;IAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;AAC5B,CAAC;AAED;;;SAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;IAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAEh
C,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;KACf;IACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzB,KAAK,CAAC,GAAG,EAAE,CAAC;AACd;;;;"}
|
83
node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
Normal file
83
node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
|
||||||
|
})(this, (function (exports) { 'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
class SetArray {
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null };
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(set) {
|
||||||
|
return set;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
function get(setarr, key) {
|
||||||
|
return cast(setarr)._indexes[key];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
function put(setarr, key) {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index !== undefined)
|
||||||
|
return index;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
const length = array.push(key);
|
||||||
|
return (indexes[key] = length - 1);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
function pop(setarr) {
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
if (array.length === 0)
|
||||||
|
return;
|
||||||
|
const last = array.pop();
|
||||||
|
indexes[last] = undefined;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Removes the key, if it exists in the set.
|
||||||
|
*/
|
||||||
|
function remove(setarr, key) {
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index === undefined)
|
||||||
|
return;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
for (let i = index + 1; i < array.length; i++) {
|
||||||
|
const k = array[i];
|
||||||
|
array[i - 1] = k;
|
||||||
|
indexes[k]--;
|
||||||
|
}
|
||||||
|
indexes[key] = undefined;
|
||||||
|
array.pop();
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.SetArray = SetArray;
|
||||||
|
exports.get = get;
|
||||||
|
exports.pop = pop;
|
||||||
|
exports.put = put;
|
||||||
|
exports.remove = remove;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=set-array.umd.js.map
|
1
node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n array.pop();\n}\n"],"names":[],"mappings":";;;;;;IAEA;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KACF;IAOD;;;;IAIA,SAAS,IAAI,CAAgB,GAAgB;QAC3C,OAAO,GAAU,CAAC;IACpB,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACpC,CAAC;IAED;;;;aAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;QAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;IACrC,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB;QACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;IAC5B,CAAC;IAED;;;aAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;QAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAA
O;QAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;SACf;QACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;QACzB,KAAK,CAAC,GAAG,EAAE,CAAC;IACd;;;;;;;;;;;;;;"}
|
32
node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
Normal file
32
node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
declare type Key = string | number | symbol;
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
export declare class SetArray<T extends Key = Key> {
|
||||||
|
private _indexes;
|
||||||
|
array: readonly T[];
|
||||||
|
constructor();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
|
||||||
|
/**
|
||||||
|
* Removes the key, if it exists in the set.
|
||||||
|
*/
|
||||||
|
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
|
||||||
|
export {};
|
65
node_modules/@jridgewell/set-array/package.json
generated
vendored
Normal file
65
node_modules/@jridgewell/set-array/package.json
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/set-array",
|
||||||
|
"version": "1.2.1",
|
||||||
|
"description": "Like a Set, but provides the index of the `key` in the backing array",
|
||||||
|
"keywords": [],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/set-array",
|
||||||
|
"main": "dist/set-array.umd.js",
|
||||||
|
"module": "dist/set-array.mjs",
|
||||||
|
"typings": "dist/types/set-array.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/set-array.d.ts",
|
||||||
|
"browser": "./dist/set-array.umd.js",
|
||||||
|
"require": "./dist/set-array.umd.js",
|
||||||
|
"import": "./dist/set-array.mjs"
|
||||||
|
},
|
||||||
|
"./dist/set-array.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.29",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.66.0",
|
||||||
|
"tsx": "4.7.1",
|
||||||
|
"typescript": "4.5.5"
|
||||||
|
}
|
||||||
|
}
|
19
node_modules/@jridgewell/source-map/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/source-map/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
184
node_modules/@jridgewell/source-map/README.md
generated
vendored
Normal file
184
node_modules/@jridgewell/source-map/README.md
generated
vendored
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
# @jridgewell/source-map
|
||||||
|
|
||||||
|
> Packages `@jridgewell/trace-mapping` and `@jridgewell/gen-mapping` into the familiar source-map API
|
||||||
|
|
||||||
|
This isn't the full API, but it's the core functionality. This wraps
|
||||||
|
[@jridgewell/trace-mapping][trace-mapping] and [@jridgewell/gen-mapping][gen-mapping]
|
||||||
|
implementations.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/source-map
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
### SourceMapConsumer
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { SourceMapConsumer } from '@jridgewell/source-map';
|
||||||
|
const smc = new SourceMapConsumer({
|
||||||
|
version: 3,
|
||||||
|
names: ['foo'],
|
||||||
|
sources: ['input.js'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.fromSourceMap(mapGenerator[, mapUrl])
|
||||||
|
|
||||||
|
Transforms a `SourceMapGenerator` into a `SourceMapConsumer`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
|
||||||
|
const smc = SourceMapConsumer.fromSourceMap(map);
|
||||||
|
smc.originalPositionFor({ line: 1, column: 0 });
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.originalPositionFor({ line: 1, column: 0 });
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.mappings
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.mappings; // AAAA
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.allGeneratedpositionsfor({ line: 1, column: 5, source: "baz.ts" });
|
||||||
|
// [
|
||||||
|
// { line: 2, column: 8 }
|
||||||
|
// ]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.eachMapping(callback[, context[, order]])
|
||||||
|
|
||||||
|
> This implementation currently does not support the "order" parameter.
|
||||||
|
> This function can only iterate in Generated order.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.eachMapping((mapping) => {
|
||||||
|
// { source: 'baz.ts',
|
||||||
|
// generatedLine: 4,
|
||||||
|
// generatedColumn: 5,
|
||||||
|
// originalLine: 4,
|
||||||
|
// originalColumn: 5,
|
||||||
|
// name: null }
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.generatedPositionFor({ line: 1, column: 5, source: "baz.ts" });
|
||||||
|
// { line: 2, column: 8 }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.hasContentsOfAllSources()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.hasContentsOfAllSources();
|
||||||
|
// true
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.generatedPositionFor("baz.ts");
|
||||||
|
// "export default ..."
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.version
|
||||||
|
|
||||||
|
Returns the source map's version
|
||||||
|
|
||||||
|
### SourceMapGenerator
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { SourceMapGenerator } from '@jridgewell/source-map';
|
||||||
|
const smg = new SourceMapGenerator({
|
||||||
|
file: 'output.js',
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.fromSourceMap(map)
|
||||||
|
|
||||||
|
Transform a `SourceMapConsumer` into a `SourceMapGenerator`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer();
|
||||||
|
const smg = SourceMapGenerator.fromSourceMap(smc);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
|
||||||
|
|
||||||
|
> This method is not implemented yet
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.addMapping({
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.setSourceContent('input.js', 'foobar');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toJSON()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toJSON(); // { version: 3, names: [], sources: [], mappings: '' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toString()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toJSON(); // "{version:3,names:[],sources:[],mappings:''}"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toDecodedMap()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toDecodedMap(); // { version: 3, names: [], sources: [], mappings: [] }
|
||||||
|
```
|
||||||
|
|
||||||
|
## Known differences with other implementations
|
||||||
|
|
||||||
|
This implementation has some differences with `source-map` and `source-map-js`.
|
||||||
|
|
||||||
|
- `SourceMapConsumer.prototype.eachMapping()`
|
||||||
|
- Does not support the `order` argument
|
||||||
|
- `SourceMapGenerator.prototype.applySourceMap()`
|
||||||
|
- Not implemented
|
||||||
|
|
||||||
|
[trace-mapping]: https://github.com/jridgewell/trace-mapping/
|
||||||
|
[gen-mapping]: https://github.com/jridgewell/gen-mapping/
|
95
node_modules/@jridgewell/source-map/dist/source-map.cjs
generated
vendored
Normal file
95
node_modules/@jridgewell/source-map/dist/source-map.cjs
generated
vendored
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
var traceMapping = require('@jridgewell/trace-mapping');
|
||||||
|
var genMapping = require('@jridgewell/gen-mapping');
|
||||||
|
|
||||||
|
class SourceMapConsumer {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const trace = (this._map = new traceMapping.AnyMap(map, mapUrl));
|
||||||
|
this.file = trace.file;
|
||||||
|
this.names = trace.names;
|
||||||
|
this.sourceRoot = trace.sourceRoot;
|
||||||
|
this.sources = trace.resolvedSources;
|
||||||
|
this.sourcesContent = trace.sourcesContent;
|
||||||
|
this.version = trace.version;
|
||||||
|
}
|
||||||
|
static fromSourceMap(map, mapUrl) {
|
||||||
|
// This is more performant if we receive
|
||||||
|
// a @jridgewell/source-map SourceMapGenerator
|
||||||
|
if (map.toDecodedMap) {
|
||||||
|
return new SourceMapConsumer(map.toDecodedMap(), mapUrl);
|
||||||
|
}
|
||||||
|
// This is a fallback for `source-map` and `source-map-js`
|
||||||
|
return new SourceMapConsumer(map.toJSON(), mapUrl);
|
||||||
|
}
|
||||||
|
get mappings() {
|
||||||
|
return traceMapping.encodedMappings(this._map);
|
||||||
|
}
|
||||||
|
originalPositionFor(needle) {
|
||||||
|
return traceMapping.originalPositionFor(this._map, needle);
|
||||||
|
}
|
||||||
|
generatedPositionFor(originalPosition) {
|
||||||
|
return traceMapping.generatedPositionFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
allGeneratedPositionsFor(originalPosition) {
|
||||||
|
return traceMapping.allGeneratedPositionsFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
hasContentsOfAllSources() {
|
||||||
|
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
for (const content of this.sourcesContent) {
|
||||||
|
if (content == null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
sourceContentFor(source, nullOnMissing) {
|
||||||
|
const sourceContent = traceMapping.sourceContentFor(this._map, source);
|
||||||
|
if (sourceContent != null) {
|
||||||
|
return sourceContent;
|
||||||
|
}
|
||||||
|
if (nullOnMissing) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
throw new Error(`"${source}" is not in the SourceMap.`);
|
||||||
|
}
|
||||||
|
eachMapping(callback, context /*, order?: number*/) {
|
||||||
|
// order is ignored as @jridgewell/trace-map doesn't implement it
|
||||||
|
traceMapping.eachMapping(this._map, context ? callback.bind(context) : callback);
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
// noop.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
class SourceMapGenerator {
|
||||||
|
constructor(opts) {
|
||||||
|
// TODO :: should this be duck-typed ?
|
||||||
|
this._map = opts instanceof genMapping.GenMapping ? opts : new genMapping.GenMapping(opts);
|
||||||
|
}
|
||||||
|
static fromSourceMap(consumer) {
|
||||||
|
return new SourceMapGenerator(genMapping.fromMap(consumer));
|
||||||
|
}
|
||||||
|
addMapping(mapping) {
|
||||||
|
genMapping.maybeAddMapping(this._map, mapping);
|
||||||
|
}
|
||||||
|
setSourceContent(source, content) {
|
||||||
|
genMapping.setSourceContent(this._map, source, content);
|
||||||
|
}
|
||||||
|
toJSON() {
|
||||||
|
return genMapping.toEncodedMap(this._map);
|
||||||
|
}
|
||||||
|
toString() {
|
||||||
|
return JSON.stringify(this.toJSON());
|
||||||
|
}
|
||||||
|
toDecodedMap() {
|
||||||
|
return genMapping.toDecodedMap(this._map);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.SourceMapConsumer = SourceMapConsumer;
|
||||||
|
exports.SourceMapGenerator = SourceMapGenerator;
|
||||||
|
//# sourceMappingURL=source-map.cjs.map
|
1
node_modules/@jridgewell/source-map/dist/source-map.cjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/source-map/dist/source-map.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
90
node_modules/@jridgewell/source-map/dist/source-map.mjs
generated
vendored
Normal file
90
node_modules/@jridgewell/source-map/dist/source-map.mjs
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
|||||||
|
import { AnyMap, encodedMappings, originalPositionFor, generatedPositionFor, allGeneratedPositionsFor, sourceContentFor, eachMapping } from '@jridgewell/trace-mapping';
|
||||||
|
import { GenMapping, fromMap, maybeAddMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
class SourceMapConsumer {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const trace = (this._map = new AnyMap(map, mapUrl));
|
||||||
|
this.file = trace.file;
|
||||||
|
this.names = trace.names;
|
||||||
|
this.sourceRoot = trace.sourceRoot;
|
||||||
|
this.sources = trace.resolvedSources;
|
||||||
|
this.sourcesContent = trace.sourcesContent;
|
||||||
|
this.version = trace.version;
|
||||||
|
}
|
||||||
|
static fromSourceMap(map, mapUrl) {
|
||||||
|
// This is more performant if we receive
|
||||||
|
// a @jridgewell/source-map SourceMapGenerator
|
||||||
|
if (map.toDecodedMap) {
|
||||||
|
return new SourceMapConsumer(map.toDecodedMap(), mapUrl);
|
||||||
|
}
|
||||||
|
// This is a fallback for `source-map` and `source-map-js`
|
||||||
|
return new SourceMapConsumer(map.toJSON(), mapUrl);
|
||||||
|
}
|
||||||
|
get mappings() {
|
||||||
|
return encodedMappings(this._map);
|
||||||
|
}
|
||||||
|
originalPositionFor(needle) {
|
||||||
|
return originalPositionFor(this._map, needle);
|
||||||
|
}
|
||||||
|
generatedPositionFor(originalPosition) {
|
||||||
|
return generatedPositionFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
allGeneratedPositionsFor(originalPosition) {
|
||||||
|
return allGeneratedPositionsFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
hasContentsOfAllSources() {
|
||||||
|
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
for (const content of this.sourcesContent) {
|
||||||
|
if (content == null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
sourceContentFor(source, nullOnMissing) {
|
||||||
|
const sourceContent = sourceContentFor(this._map, source);
|
||||||
|
if (sourceContent != null) {
|
||||||
|
return sourceContent;
|
||||||
|
}
|
||||||
|
if (nullOnMissing) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
throw new Error(`"${source}" is not in the SourceMap.`);
|
||||||
|
}
|
||||||
|
eachMapping(callback, context /*, order?: number*/) {
|
||||||
|
// order is ignored as @jridgewell/trace-map doesn't implement it
|
||||||
|
eachMapping(this._map, context ? callback.bind(context) : callback);
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
// noop.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
class SourceMapGenerator {
|
||||||
|
constructor(opts) {
|
||||||
|
// TODO :: should this be duck-typed ?
|
||||||
|
this._map = opts instanceof GenMapping ? opts : new GenMapping(opts);
|
||||||
|
}
|
||||||
|
static fromSourceMap(consumer) {
|
||||||
|
return new SourceMapGenerator(fromMap(consumer));
|
||||||
|
}
|
||||||
|
addMapping(mapping) {
|
||||||
|
maybeAddMapping(this._map, mapping);
|
||||||
|
}
|
||||||
|
setSourceContent(source, content) {
|
||||||
|
setSourceContent(this._map, source, content);
|
||||||
|
}
|
||||||
|
toJSON() {
|
||||||
|
return toEncodedMap(this._map);
|
||||||
|
}
|
||||||
|
toString() {
|
||||||
|
return JSON.stringify(this.toJSON());
|
||||||
|
}
|
||||||
|
toDecodedMap() {
|
||||||
|
return toDecodedMap(this._map);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { SourceMapConsumer, SourceMapGenerator };
|
||||||
|
//# sourceMappingURL=source-map.mjs.map
|
1
node_modules/@jridgewell/source-map/dist/source-map.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/source-map/dist/source-map.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1242
node_modules/@jridgewell/source-map/dist/source-map.umd.js
generated
vendored
Normal file
1242
node_modules/@jridgewell/source-map/dist/source-map.umd.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
node_modules/@jridgewell/source-map/dist/source-map.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/source-map/dist/source-map.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
35
node_modules/@jridgewell/source-map/dist/types/source-map.d.ts
generated
vendored
Normal file
35
node_modules/@jridgewell/source-map/dist/types/source-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
import { AnyMap, originalPositionFor, generatedPositionFor, eachMapping } from '@jridgewell/trace-mapping';
|
||||||
|
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
|
||||||
|
import type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap } from '@jridgewell/trace-mapping';
|
||||||
|
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
|
||||||
|
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
|
||||||
|
export type { Mapping, EncodedSourceMap };
|
||||||
|
export declare class SourceMapConsumer {
|
||||||
|
private _map;
|
||||||
|
file: TraceMap['file'];
|
||||||
|
names: TraceMap['names'];
|
||||||
|
sourceRoot: TraceMap['sourceRoot'];
|
||||||
|
sources: TraceMap['sources'];
|
||||||
|
sourcesContent: TraceMap['sourcesContent'];
|
||||||
|
version: TraceMap['version'];
|
||||||
|
constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl: Parameters<typeof AnyMap>[1]);
|
||||||
|
static fromSourceMap(map: SourceMapGenerator, mapUrl: Parameters<typeof AnyMap>[1]): SourceMapConsumer;
|
||||||
|
get mappings(): string;
|
||||||
|
originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
|
||||||
|
generatedPositionFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>;
|
||||||
|
allGeneratedPositionsFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>[];
|
||||||
|
hasContentsOfAllSources(): boolean;
|
||||||
|
sourceContentFor(source: string, nullOnMissing?: boolean): string | null;
|
||||||
|
eachMapping(callback: Parameters<typeof eachMapping>[1], context?: any): void;
|
||||||
|
destroy(): void;
|
||||||
|
}
|
||||||
|
export declare class SourceMapGenerator {
|
||||||
|
private _map;
|
||||||
|
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping);
|
||||||
|
static fromSourceMap(consumer: SourceMapConsumer): SourceMapGenerator;
|
||||||
|
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
|
||||||
|
setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
|
||||||
|
toJSON(): ReturnType<typeof toEncodedMap>;
|
||||||
|
toString(): string;
|
||||||
|
toDecodedMap(): ReturnType<typeof toDecodedMap>;
|
||||||
|
}
|
71
node_modules/@jridgewell/source-map/package.json
generated
vendored
Normal file
71
node_modules/@jridgewell/source-map/package.json
generated
vendored
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/source-map",
|
||||||
|
"version": "0.3.6",
|
||||||
|
"description": "Packages @jridgewell/trace-mapping and @jridgewell/gen-mapping into the familiar source-map API",
|
||||||
|
"keywords": [
|
||||||
|
"sourcemap",
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/source-map",
|
||||||
|
"main": "dist/source-map.cjs",
|
||||||
|
"module": "dist/source-map.mjs",
|
||||||
|
"types": "dist/types/source-map.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/source-map.d.ts",
|
||||||
|
"browser": "./dist/source-map.umd.js",
|
||||||
|
"require": "./dist/source-map.cjs",
|
||||||
|
"import": "./dist/source-map.mjs"
|
||||||
|
},
|
||||||
|
"./dist/source-map.cjs"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"scripts": {
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "ts-mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "ts-mocha",
|
||||||
|
"test:coverage": "c8 --reporter text --reporter html ts-mocha",
|
||||||
|
"test:watch": "ts-mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-node-resolve": "13.2.1",
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.30",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "10.0.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.66.0",
|
||||||
|
"ts-mocha": "10.0.0",
|
||||||
|
"typescript": "4.5.5"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/gen-mapping": "^0.3.5",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.25"
|
||||||
|
}
|
||||||
|
}
|
21
node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
Normal file
21
node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2015 Rich Harris
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
200
node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
Normal file
200
node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
Normal file
@ -0,0 +1,200 @@
|
|||||||
|
# @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
|
||||||
|
|
||||||
|
|
||||||
|
## Why?
|
||||||
|
|
||||||
|
Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.
|
||||||
|
|
||||||
|
This package makes the process slightly easier.
|
||||||
|
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install @jridgewell/sourcemap-codec
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||||
|
|
||||||
|
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||||
|
|
||||||
|
assert.deepEqual( decoded, [
|
||||||
|
// the first line (of the generated code) has no mappings,
|
||||||
|
// as shown by the starting semi-colon (which separates lines)
|
||||||
|
[],
|
||||||
|
|
||||||
|
// the second line contains four (comma-separated) segments
|
||||||
|
[
|
||||||
|
// segments are encoded as you'd expect:
|
||||||
|
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
|
||||||
|
|
||||||
|
// i.e. the first segment begins at column 2, and maps back to the second column
|
||||||
|
// of the second line (both zero-based) of the 0th source, and uses the 0th
|
||||||
|
// name in the `map.names` array
|
||||||
|
[ 2, 0, 2, 2, 0 ],
|
||||||
|
|
||||||
|
// the remaining segments are 4-length rather than 5-length,
|
||||||
|
// because they don't map a name
|
||||||
|
[ 4, 0, 2, 4 ],
|
||||||
|
[ 6, 0, 2, 5 ],
|
||||||
|
[ 7, 0, 2, 7 ]
|
||||||
|
],
|
||||||
|
|
||||||
|
// the final line contains two segments
|
||||||
|
[
|
||||||
|
[ 2, 1, 10, 19 ],
|
||||||
|
[ 12, 1, 11, 20 ]
|
||||||
|
]
|
||||||
|
]);
|
||||||
|
|
||||||
|
var encoded = encode( decoded );
|
||||||
|
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 5479160 bytes
|
||||||
|
sourcemap-codec 5659336 bytes
|
||||||
|
source-map-0.6.1 17144440 bytes
|
||||||
|
source-map-0.8.0 6867424 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled)
|
||||||
|
decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 1261620 bytes
|
||||||
|
sourcemap-codec 9119248 bytes
|
||||||
|
source-map-0.6.1 8968560 bytes
|
||||||
|
source-map-0.8.0 8952952 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled)
|
||||||
|
encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 35338184 bytes
|
||||||
|
sourcemap-codec 35922736 bytes
|
||||||
|
source-map-0.6.1 62366360 bytes
|
||||||
|
source-map-0.8.0 44337416 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled)
|
||||||
|
decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled)
|
||||||
|
Fastest is decode: source-map-0.8.0
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 7212604 bytes
|
||||||
|
sourcemap-codec 21421456 bytes
|
||||||
|
source-map-0.6.1 25286888 bytes
|
||||||
|
source-map-0.8.0 25498744 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled)
|
||||||
|
encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 500272 bytes
|
||||||
|
sourcemap-codec 516864 bytes
|
||||||
|
source-map-0.6.1 1596672 bytes
|
||||||
|
source-map-0.8.0 517272 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled)
|
||||||
|
decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 321026 bytes
|
||||||
|
sourcemap-codec 830832 bytes
|
||||||
|
source-map-0.6.1 586608 bytes
|
||||||
|
source-map-0.8.0 586680 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled)
|
||||||
|
encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 734848 bytes
|
||||||
|
sourcemap-codec 954200 bytes
|
||||||
|
source-map-0.6.1 2276432 bytes
|
||||||
|
source-map-0.8.0 955488 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled)
|
||||||
|
decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 638672 bytes
|
||||||
|
sourcemap-codec 1109840 bytes
|
||||||
|
source-map-0.6.1 1321224 bytes
|
||||||
|
source-map-0.8.0 1324448 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled)
|
||||||
|
encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
```
|
||||||
|
|
||||||
|
# License
|
||||||
|
|
||||||
|
MIT
|
164
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
164
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
const comma = ','.charCodeAt(0);
|
||||||
|
const semicolon = ';'.charCodeAt(0);
|
||||||
|
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||||
|
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||||
|
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||||
|
for (let i = 0; i < chars.length; i++) {
|
||||||
|
const c = chars.charCodeAt(i);
|
||||||
|
intToChar[i] = c;
|
||||||
|
charToInt[c] = i;
|
||||||
|
}
|
||||||
|
// Provide a fallback for older environments.
|
||||||
|
const td = typeof TextDecoder !== 'undefined'
|
||||||
|
? /* #__PURE__ */ new TextDecoder()
|
||||||
|
: typeof Buffer !== 'undefined'
|
||||||
|
? {
|
||||||
|
decode(buf) {
|
||||||
|
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||||
|
return out.toString();
|
||||||
|
},
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
decode(buf) {
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < buf.length; i++) {
|
||||||
|
out += String.fromCharCode(buf[i]);
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
function decode(mappings) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const decoded = [];
|
||||||
|
let index = 0;
|
||||||
|
do {
|
||||||
|
const semi = indexOf(mappings, index);
|
||||||
|
const line = [];
|
||||||
|
let sorted = true;
|
||||||
|
let lastCol = 0;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let i = index; i < semi; i++) {
|
||||||
|
let seg;
|
||||||
|
i = decodeInteger(mappings, i, state, 0); // genColumn
|
||||||
|
const col = state[0];
|
||||||
|
if (col < lastCol)
|
||||||
|
sorted = false;
|
||||||
|
lastCol = col;
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
|
||||||
|
i = decodeInteger(mappings, i, state, 2); // sourceLine
|
||||||
|
i = decodeInteger(mappings, i, state, 3); // sourceColumn
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 4); // namesIndex
|
||||||
|
seg = [col, state[1], state[2], state[3], state[4]];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col, state[1], state[2], state[3]];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col];
|
||||||
|
}
|
||||||
|
line.push(seg);
|
||||||
|
}
|
||||||
|
if (!sorted)
|
||||||
|
sort(line);
|
||||||
|
decoded.push(line);
|
||||||
|
index = semi + 1;
|
||||||
|
} while (index <= mappings.length);
|
||||||
|
return decoded;
|
||||||
|
}
|
||||||
|
function indexOf(mappings, index) {
|
||||||
|
const idx = mappings.indexOf(';', index);
|
||||||
|
return idx === -1 ? mappings.length : idx;
|
||||||
|
}
|
||||||
|
function decodeInteger(mappings, pos, state, j) {
|
||||||
|
let value = 0;
|
||||||
|
let shift = 0;
|
||||||
|
let integer = 0;
|
||||||
|
do {
|
||||||
|
const c = mappings.charCodeAt(pos++);
|
||||||
|
integer = charToInt[c];
|
||||||
|
value |= (integer & 31) << shift;
|
||||||
|
shift += 5;
|
||||||
|
} while (integer & 32);
|
||||||
|
const shouldNegate = value & 1;
|
||||||
|
value >>>= 1;
|
||||||
|
if (shouldNegate) {
|
||||||
|
value = -0x80000000 | -value;
|
||||||
|
}
|
||||||
|
state[j] += value;
|
||||||
|
return pos;
|
||||||
|
}
|
||||||
|
function hasMoreVlq(mappings, i, length) {
|
||||||
|
if (i >= length)
|
||||||
|
return false;
|
||||||
|
return mappings.charCodeAt(i) !== comma;
|
||||||
|
}
|
||||||
|
function sort(line) {
|
||||||
|
line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[0] - b[0];
|
||||||
|
}
|
||||||
|
function encode(decoded) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const bufLength = 1024 * 16;
|
||||||
|
const subLength = bufLength - 36;
|
||||||
|
const buf = new Uint8Array(bufLength);
|
||||||
|
const sub = buf.subarray(0, subLength);
|
||||||
|
let pos = 0;
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
if (i > 0) {
|
||||||
|
if (pos === bufLength) {
|
||||||
|
out += td.decode(buf);
|
||||||
|
pos = 0;
|
||||||
|
}
|
||||||
|
buf[pos++] = semicolon;
|
||||||
|
}
|
||||||
|
if (line.length === 0)
|
||||||
|
continue;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const segment = line[j];
|
||||||
|
// We can push up to 5 ints, each int can take at most 7 chars, and we
|
||||||
|
// may push a comma.
|
||||||
|
if (pos > subLength) {
|
||||||
|
out += td.decode(sub);
|
||||||
|
buf.copyWithin(0, subLength, pos);
|
||||||
|
pos -= subLength;
|
||||||
|
}
|
||||||
|
if (j > 0)
|
||||||
|
buf[pos++] = comma;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
|
||||||
|
if (segment.length === 1)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
|
||||||
|
if (segment.length === 4)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out + td.decode(buf.subarray(0, pos));
|
||||||
|
}
|
||||||
|
function encodeInteger(buf, pos, state, segment, j) {
|
||||||
|
const next = segment[j];
|
||||||
|
let num = next - state[j];
|
||||||
|
state[j] = next;
|
||||||
|
num = num < 0 ? (-num << 1) | 1 : num << 1;
|
||||||
|
do {
|
||||||
|
let clamped = num & 0b011111;
|
||||||
|
num >>>= 5;
|
||||||
|
if (num > 0)
|
||||||
|
clamped |= 0b100000;
|
||||||
|
buf[pos++] = intToChar[clamped];
|
||||||
|
} while (num > 0);
|
||||||
|
return pos;
|
||||||
|
}
|
||||||
|
|
||||||
|
export { decode, encode };
|
||||||
|
//# sourceMappingURL=sourcemap-codec.mjs.map
|
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
175
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
175
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
@ -0,0 +1,175 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
|
||||||
|
})(this, (function (exports) { 'use strict';
|
||||||
|
|
||||||
|
const comma = ','.charCodeAt(0);
|
||||||
|
const semicolon = ';'.charCodeAt(0);
|
||||||
|
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||||
|
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||||
|
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||||
|
for (let i = 0; i < chars.length; i++) {
|
||||||
|
const c = chars.charCodeAt(i);
|
||||||
|
intToChar[i] = c;
|
||||||
|
charToInt[c] = i;
|
||||||
|
}
|
||||||
|
// Provide a fallback for older environments.
|
||||||
|
const td = typeof TextDecoder !== 'undefined'
|
||||||
|
? /* #__PURE__ */ new TextDecoder()
|
||||||
|
: typeof Buffer !== 'undefined'
|
||||||
|
? {
|
||||||
|
decode(buf) {
|
||||||
|
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||||
|
return out.toString();
|
||||||
|
},
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
decode(buf) {
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < buf.length; i++) {
|
||||||
|
out += String.fromCharCode(buf[i]);
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
function decode(mappings) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const decoded = [];
|
||||||
|
let index = 0;
|
||||||
|
do {
|
||||||
|
const semi = indexOf(mappings, index);
|
||||||
|
const line = [];
|
||||||
|
let sorted = true;
|
||||||
|
let lastCol = 0;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let i = index; i < semi; i++) {
|
||||||
|
let seg;
|
||||||
|
i = decodeInteger(mappings, i, state, 0); // genColumn
|
||||||
|
const col = state[0];
|
||||||
|
if (col < lastCol)
|
||||||
|
sorted = false;
|
||||||
|
lastCol = col;
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
|
||||||
|
i = decodeInteger(mappings, i, state, 2); // sourceLine
|
||||||
|
i = decodeInteger(mappings, i, state, 3); // sourceColumn
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 4); // namesIndex
|
||||||
|
seg = [col, state[1], state[2], state[3], state[4]];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col, state[1], state[2], state[3]];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col];
|
||||||
|
}
|
||||||
|
line.push(seg);
|
||||||
|
}
|
||||||
|
if (!sorted)
|
||||||
|
sort(line);
|
||||||
|
decoded.push(line);
|
||||||
|
index = semi + 1;
|
||||||
|
} while (index <= mappings.length);
|
||||||
|
return decoded;
|
||||||
|
}
|
||||||
|
function indexOf(mappings, index) {
|
||||||
|
const idx = mappings.indexOf(';', index);
|
||||||
|
return idx === -1 ? mappings.length : idx;
|
||||||
|
}
|
||||||
|
function decodeInteger(mappings, pos, state, j) {
|
||||||
|
let value = 0;
|
||||||
|
let shift = 0;
|
||||||
|
let integer = 0;
|
||||||
|
do {
|
||||||
|
const c = mappings.charCodeAt(pos++);
|
||||||
|
integer = charToInt[c];
|
||||||
|
value |= (integer & 31) << shift;
|
||||||
|
shift += 5;
|
||||||
|
} while (integer & 32);
|
||||||
|
const shouldNegate = value & 1;
|
||||||
|
value >>>= 1;
|
||||||
|
if (shouldNegate) {
|
||||||
|
value = -0x80000000 | -value;
|
||||||
|
}
|
||||||
|
state[j] += value;
|
||||||
|
return pos;
|
||||||
|
}
|
||||||
|
function hasMoreVlq(mappings, i, length) {
|
||||||
|
if (i >= length)
|
||||||
|
return false;
|
||||||
|
return mappings.charCodeAt(i) !== comma;
|
||||||
|
}
|
||||||
|
function sort(line) {
|
||||||
|
line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[0] - b[0];
|
||||||
|
}
|
||||||
|
function encode(decoded) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const bufLength = 1024 * 16;
|
||||||
|
const subLength = bufLength - 36;
|
||||||
|
const buf = new Uint8Array(bufLength);
|
||||||
|
const sub = buf.subarray(0, subLength);
|
||||||
|
let pos = 0;
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
if (i > 0) {
|
||||||
|
if (pos === bufLength) {
|
||||||
|
out += td.decode(buf);
|
||||||
|
pos = 0;
|
||||||
|
}
|
||||||
|
buf[pos++] = semicolon;
|
||||||
|
}
|
||||||
|
if (line.length === 0)
|
||||||
|
continue;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const segment = line[j];
|
||||||
|
// We can push up to 5 ints, each int can take at most 7 chars, and we
|
||||||
|
// may push a comma.
|
||||||
|
if (pos > subLength) {
|
||||||
|
out += td.decode(sub);
|
||||||
|
buf.copyWithin(0, subLength, pos);
|
||||||
|
pos -= subLength;
|
||||||
|
}
|
||||||
|
if (j > 0)
|
||||||
|
buf[pos++] = comma;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
|
||||||
|
if (segment.length === 1)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
|
||||||
|
if (segment.length === 4)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out + td.decode(buf.subarray(0, pos));
|
||||||
|
}
|
||||||
|
function encodeInteger(buf, pos, state, segment, j) {
|
||||||
|
const next = segment[j];
|
||||||
|
let num = next - state[j];
|
||||||
|
state[j] = next;
|
||||||
|
num = num < 0 ? (-num << 1) | 1 : num << 1;
|
||||||
|
do {
|
||||||
|
let clamped = num & 0b011111;
|
||||||
|
num >>>= 5;
|
||||||
|
if (num > 0)
|
||||||
|
clamped |= 0b100000;
|
||||||
|
buf[pos++] = intToChar[clamped];
|
||||||
|
} while (num > 0);
|
||||||
|
return pos;
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.decode = decode;
|
||||||
|
exports.encode = encode;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
6
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
|
||||||
|
export declare type SourceMapLine = SourceMapSegment[];
|
||||||
|
export declare type SourceMapMappings = SourceMapLine[];
|
||||||
|
export declare function decode(mappings: string): SourceMapMappings;
|
||||||
|
export declare function encode(decoded: SourceMapMappings): string;
|
||||||
|
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
|
74
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
Normal file
74
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/sourcemap-codec",
|
||||||
|
"version": "1.4.15",
|
||||||
|
"description": "Encode/decode sourcemap mappings",
|
||||||
|
"keywords": [
|
||||||
|
"sourcemap",
|
||||||
|
"vlq"
|
||||||
|
],
|
||||||
|
"main": "dist/sourcemap-codec.umd.js",
|
||||||
|
"module": "dist/sourcemap-codec.mjs",
|
||||||
|
"types": "dist/types/sourcemap-codec.d.ts",
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/sourcemap-codec.d.ts",
|
||||||
|
"browser": "./dist/sourcemap-codec.umd.js",
|
||||||
|
"require": "./dist/sourcemap-codec.umd.js",
|
||||||
|
"import": "./dist/sourcemap-codec.mjs"
|
||||||
|
},
|
||||||
|
"./dist/sourcemap-codec.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node --expose-gc benchmark/index.js",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build",
|
||||||
|
"pretest": "run-s build:rollup",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch"
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jridgewell/sourcemap-codec.git"
|
||||||
|
},
|
||||||
|
"author": "Rich Harris",
|
||||||
|
"license": "MIT",
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@types/node": "17.0.15",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "7.11.2",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.64.0",
|
||||||
|
"source-map": "0.6.1",
|
||||||
|
"source-map-js": "1.0.2",
|
||||||
|
"sourcemap-codec": "1.4.8",
|
||||||
|
"typescript": "4.5.4"
|
||||||
|
}
|
||||||
|
}
|
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
257
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
257
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,257 @@
|
|||||||
|
# @jridgewell/trace-mapping
|
||||||
|
|
||||||
|
> Trace the original position through a source map
|
||||||
|
|
||||||
|
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||||
|
original location in the source file through a source map.
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||||
|
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/trace-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
TraceMap,
|
||||||
|
originalPositionFor,
|
||||||
|
generatedPositionFor,
|
||||||
|
sourceContentFor,
|
||||||
|
isIgnored,
|
||||||
|
} from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
const tracer = new TraceMap({
|
||||||
|
version: 3,
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['content of input.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'KAyCIA',
|
||||||
|
ignoreList: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = sourceContentFor(tracer, traced.source);
|
||||||
|
assert.strictEqual(content, 'content for input.js');
|
||||||
|
|
||||||
|
const generated = generatedPositionFor(tracer, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
});
|
||||||
|
assert.deepEqual(generated, {
|
||||||
|
line: 1,
|
||||||
|
column: 5,
|
||||||
|
});
|
||||||
|
|
||||||
|
const ignored = isIgnored(tracer, 'input.js');
|
||||||
|
assert.equal(ignored, false);
|
||||||
|
```
|
||||||
|
|
||||||
|
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
||||||
|
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { traceSegment } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
// line is 0-base.
|
||||||
|
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
||||||
|
|
||||||
|
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
// Again, line is 0-base and so is sourceLine
|
||||||
|
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
||||||
|
```
|
||||||
|
|
||||||
|
### SectionedSourceMaps
|
||||||
|
|
||||||
|
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
|
||||||
|
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
|
||||||
|
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
|
||||||
|
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
|
||||||
|
`TraceMap` instance:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { AnyMap } from '@jridgewell/trace-mapping';
|
||||||
|
const fooOutput = 'foo';
|
||||||
|
const barOutput = 'bar';
|
||||||
|
const output = [fooOutput, barOutput].join('\n');
|
||||||
|
|
||||||
|
const sectioned = new AnyMap({
|
||||||
|
version: 3,
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
// 0-base line and column
|
||||||
|
offset: { line: 0, column: 0 },
|
||||||
|
// fooOutput's sourcemap
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['foo.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// barOutput's sourcemap will not affect the first line, only the second
|
||||||
|
offset: { line: 1, column: 0 },
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['bar.js'],
|
||||||
|
names: ['bar'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const traced = originalPositionFor(sectioned, {
|
||||||
|
line: 2,
|
||||||
|
column: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'bar.js',
|
||||||
|
line: 1,
|
||||||
|
column: 0,
|
||||||
|
name: 'bar',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 562400 bytes
|
||||||
|
trace-mapping encoded 5706544 bytes
|
||||||
|
source-map-js 10717664 bytes
|
||||||
|
source-map-0.6.1 17446384 bytes
|
||||||
|
source-map-0.8.0 9701757 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 89832 bytes
|
||||||
|
trace-mapping encoded 35474640 bytes
|
||||||
|
source-map-js 51257176 bytes
|
||||||
|
source-map-0.6.1 63515664 bytes
|
||||||
|
source-map-0.8.0 42933752 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 37128 bytes
|
||||||
|
trace-mapping encoded 247280 bytes
|
||||||
|
source-map-js 1143536 bytes
|
||||||
|
source-map-0.6.1 1290992 bytes
|
||||||
|
source-map-0.8.0 96544 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 16176 bytes
|
||||||
|
trace-mapping encoded 681552 bytes
|
||||||
|
source-map-js 2418352 bytes
|
||||||
|
source-map-0.6.1 2443672 bytes
|
||||||
|
source-map-0.8.0 111768 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
580
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
580
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,580 @@
|
|||||||
|
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import resolveUri from '@jridgewell/resolve-uri';
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri(input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
memo.lastIndex = ++index;
|
||||||
|
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = parse(map);
|
||||||
|
if (!('sections' in parsed)) {
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
}
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
const ignoreList = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList,
|
||||||
|
};
|
||||||
|
return presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function parse(map) {
|
||||||
|
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
}
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const parsed = parse(input);
|
||||||
|
if ('sections' in parsed)
|
||||||
|
return recurse(...arguments);
|
||||||
|
const map = new TraceMap(parsed, mapUrl);
|
||||||
|
const sourcesOffset = sources.length;
|
||||||
|
const namesOffset = names.length;
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||||
|
append(sources, resolvedSources);
|
||||||
|
append(names, map.names);
|
||||||
|
if (contents)
|
||||||
|
append(sourcesContent, contents);
|
||||||
|
else
|
||||||
|
for (let i = 0; i < resolvedSources.length; i++)
|
||||||
|
sourcesContent.push(null);
|
||||||
|
if (ignores)
|
||||||
|
for (let i = 0; i < ignores.length; i++)
|
||||||
|
ignoreList.push(ignores[i] + sourcesOffset);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const lineI = lineOffset + i;
|
||||||
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
|
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||||
|
// still need to check that we don't overstep lines, too.
|
||||||
|
if (lineI > stopLine)
|
||||||
|
return;
|
||||||
|
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||||
|
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||||
|
const out = getLine(mappings, lineI);
|
||||||
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
|
// map can be multiple lines), it doesn't.
|
||||||
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const column = cOffset + seg[COLUMN];
|
||||||
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
|
// to stop early.
|
||||||
|
if (lineI === stopLine && column >= stopColumn)
|
||||||
|
return;
|
||||||
|
if (seg.length === 1) {
|
||||||
|
out.push([column]);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
out.push(seg.length === 4
|
||||||
|
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||||
|
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function append(arr, other) {
|
||||||
|
for (let i = 0; i < other.length; i++)
|
||||||
|
arr.push(other[i]);
|
||||||
|
}
|
||||||
|
function getLine(arr, index) {
|
||||||
|
for (let i = arr.length; i <= index; i++)
|
||||||
|
arr[i] = [];
|
||||||
|
return arr[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
|
const LEAST_UPPER_BOUND = -1;
|
||||||
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
class TraceMap {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const isString = typeof map === 'string';
|
||||||
|
if (!isString && map._decodedMemo)
|
||||||
|
return map;
|
||||||
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
|
this.version = version;
|
||||||
|
this.file = file;
|
||||||
|
this.names = names || [];
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this.sources = sources;
|
||||||
|
this.sourcesContent = sourcesContent;
|
||||||
|
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||||
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
const { mappings } = parsed;
|
||||||
|
if (typeof mappings === 'string') {
|
||||||
|
this._encoded = mappings;
|
||||||
|
this._decoded = undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this._encoded = undefined;
|
||||||
|
this._decoded = maybeSort(mappings, isString);
|
||||||
|
}
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function encodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
var _b;
|
||||||
|
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function decodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
function traceSegment(map, line, column) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
return index === -1 ? null : segments[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(map, needle) {
|
||||||
|
let { line, column, bias } = needle;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (index === -1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
if (segment.length === 1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function generatedPositionFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function allGeneratedPositionsFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||||
|
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
function eachMapping(map, cb) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function sourceIndex(map, source) {
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let index = sources.indexOf(source);
|
||||||
|
if (index === -1)
|
||||||
|
index = resolvedSources.indexOf(source);
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
function sourceContentFor(map, source) {
|
||||||
|
const { sourcesContent } = map;
|
||||||
|
if (sourcesContent == null)
|
||||||
|
return null;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? null : sourcesContent[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
function isIgnored(map, source) {
|
||||||
|
const { ignoreList } = map;
|
||||||
|
if (ignoreList == null)
|
||||||
|
return false;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? false : ignoreList.includes(index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
function presortedDecodedMap(map, mapUrl) {
|
||||||
|
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||||
|
cast(tracer)._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function decodedMap(map) {
|
||||||
|
return clone(map, decodedMappings(map));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function encodedMap(map) {
|
||||||
|
return clone(map, encodedMappings(map));
|
||||||
|
}
|
||||||
|
function clone(map, mappings) {
|
||||||
|
return {
|
||||||
|
version: map.version,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function OMapping(source, line, column, name) {
|
||||||
|
return { source, line, column, name };
|
||||||
|
}
|
||||||
|
function GMapping(line, column) {
|
||||||
|
return { line, column };
|
||||||
|
}
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return -1;
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||||
|
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||||
|
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||||
|
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||||
|
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||||
|
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||||
|
// match LEAST_UPPER_BOUND.
|
||||||
|
if (!found && bias === LEAST_UPPER_BOUND)
|
||||||
|
min++;
|
||||||
|
if (min === -1 || min === segments.length)
|
||||||
|
return [];
|
||||||
|
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||||
|
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||||
|
// to our desired column.
|
||||||
|
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||||
|
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||||
|
if (!found)
|
||||||
|
min = lowerBound(segments, matchedColumn, min);
|
||||||
|
const max = upperBound(segments, matchedColumn, min);
|
||||||
|
const result = [];
|
||||||
|
for (; min <= max; min++) {
|
||||||
|
const segment = segments[min];
|
||||||
|
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function generatedPosition(map, source, line, column, bias, all) {
|
||||||
|
var _a;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||||
|
if (all)
|
||||||
|
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||||
|
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||||
|
if (index === -1)
|
||||||
|
return GMapping(null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||||
|
//# sourceMappingURL=trace-mapping.mjs.map
|
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
600
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
600
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,600 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||||
|
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri(input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
memo.lastIndex = ++index;
|
||||||
|
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = parse(map);
|
||||||
|
if (!('sections' in parsed)) {
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
}
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
const ignoreList = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList,
|
||||||
|
};
|
||||||
|
return presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function parse(map) {
|
||||||
|
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
}
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const parsed = parse(input);
|
||||||
|
if ('sections' in parsed)
|
||||||
|
return recurse(...arguments);
|
||||||
|
const map = new TraceMap(parsed, mapUrl);
|
||||||
|
const sourcesOffset = sources.length;
|
||||||
|
const namesOffset = names.length;
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||||
|
append(sources, resolvedSources);
|
||||||
|
append(names, map.names);
|
||||||
|
if (contents)
|
||||||
|
append(sourcesContent, contents);
|
||||||
|
else
|
||||||
|
for (let i = 0; i < resolvedSources.length; i++)
|
||||||
|
sourcesContent.push(null);
|
||||||
|
if (ignores)
|
||||||
|
for (let i = 0; i < ignores.length; i++)
|
||||||
|
ignoreList.push(ignores[i] + sourcesOffset);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const lineI = lineOffset + i;
|
||||||
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
|
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||||
|
// still need to check that we don't overstep lines, too.
|
||||||
|
if (lineI > stopLine)
|
||||||
|
return;
|
||||||
|
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||||
|
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||||
|
const out = getLine(mappings, lineI);
|
||||||
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
|
// map can be multiple lines), it doesn't.
|
||||||
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const column = cOffset + seg[COLUMN];
|
||||||
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
|
// to stop early.
|
||||||
|
if (lineI === stopLine && column >= stopColumn)
|
||||||
|
return;
|
||||||
|
if (seg.length === 1) {
|
||||||
|
out.push([column]);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
out.push(seg.length === 4
|
||||||
|
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||||
|
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function append(arr, other) {
|
||||||
|
for (let i = 0; i < other.length; i++)
|
||||||
|
arr.push(other[i]);
|
||||||
|
}
|
||||||
|
function getLine(arr, index) {
|
||||||
|
for (let i = arr.length; i <= index; i++)
|
||||||
|
arr[i] = [];
|
||||||
|
return arr[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
|
const LEAST_UPPER_BOUND = -1;
|
||||||
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
class TraceMap {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const isString = typeof map === 'string';
|
||||||
|
if (!isString && map._decodedMemo)
|
||||||
|
return map;
|
||||||
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
|
this.version = version;
|
||||||
|
this.file = file;
|
||||||
|
this.names = names || [];
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this.sources = sources;
|
||||||
|
this.sourcesContent = sourcesContent;
|
||||||
|
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||||
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
const { mappings } = parsed;
|
||||||
|
if (typeof mappings === 'string') {
|
||||||
|
this._encoded = mappings;
|
||||||
|
this._decoded = undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this._encoded = undefined;
|
||||||
|
this._decoded = maybeSort(mappings, isString);
|
||||||
|
}
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function encodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
var _b;
|
||||||
|
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function decodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
function traceSegment(map, line, column) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
return index === -1 ? null : segments[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(map, needle) {
|
||||||
|
let { line, column, bias } = needle;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (index === -1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
if (segment.length === 1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function generatedPositionFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function allGeneratedPositionsFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||||
|
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
function eachMapping(map, cb) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function sourceIndex(map, source) {
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let index = sources.indexOf(source);
|
||||||
|
if (index === -1)
|
||||||
|
index = resolvedSources.indexOf(source);
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
function sourceContentFor(map, source) {
|
||||||
|
const { sourcesContent } = map;
|
||||||
|
if (sourcesContent == null)
|
||||||
|
return null;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? null : sourcesContent[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
function isIgnored(map, source) {
|
||||||
|
const { ignoreList } = map;
|
||||||
|
if (ignoreList == null)
|
||||||
|
return false;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? false : ignoreList.includes(index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
function presortedDecodedMap(map, mapUrl) {
|
||||||
|
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||||
|
cast(tracer)._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function decodedMap(map) {
|
||||||
|
return clone(map, decodedMappings(map));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function encodedMap(map) {
|
||||||
|
return clone(map, encodedMappings(map));
|
||||||
|
}
|
||||||
|
function clone(map, mappings) {
|
||||||
|
return {
|
||||||
|
version: map.version,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function OMapping(source, line, column, name) {
|
||||||
|
return { source, line, column, name };
|
||||||
|
}
|
||||||
|
function GMapping(line, column) {
|
||||||
|
return { line, column };
|
||||||
|
}
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return -1;
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||||
|
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||||
|
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||||
|
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||||
|
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||||
|
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||||
|
// match LEAST_UPPER_BOUND.
|
||||||
|
if (!found && bias === LEAST_UPPER_BOUND)
|
||||||
|
min++;
|
||||||
|
if (min === -1 || min === segments.length)
|
||||||
|
return [];
|
||||||
|
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||||
|
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||||
|
// to our desired column.
|
||||||
|
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||||
|
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||||
|
if (!found)
|
||||||
|
min = lowerBound(segments, matchedColumn, min);
|
||||||
|
const max = upperBound(segments, matchedColumn, min);
|
||||||
|
const result = [];
|
||||||
|
for (; min <= max; min++) {
|
||||||
|
const segment = segments[min];
|
||||||
|
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function generatedPosition(map, source, line, column, bias, all) {
|
||||||
|
var _a;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||||
|
if (all)
|
||||||
|
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||||
|
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||||
|
if (index === -1)
|
||||||
|
return GMapping(null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.AnyMap = AnyMap;
|
||||||
|
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
|
||||||
|
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
|
||||||
|
exports.TraceMap = TraceMap;
|
||||||
|
exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
|
||||||
|
exports.decodedMap = decodedMap;
|
||||||
|
exports.decodedMappings = decodedMappings;
|
||||||
|
exports.eachMapping = eachMapping;
|
||||||
|
exports.encodedMap = encodedMap;
|
||||||
|
exports.encodedMappings = encodedMappings;
|
||||||
|
exports.generatedPositionFor = generatedPositionFor;
|
||||||
|
exports.isIgnored = isIgnored;
|
||||||
|
exports.originalPositionFor = originalPositionFor;
|
||||||
|
exports.presortedDecodedMap = presortedDecodedMap;
|
||||||
|
exports.sourceContentFor = sourceContentFor;
|
||||||
|
exports.traceSegment = traceSegment;
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=trace-mapping.umd.js.map
|
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
import { TraceMap } from './trace-mapping';
|
||||||
|
import type { SectionedSourceMapInput } from './types';
|
||||||
|
type AnyMap = {
|
||||||
|
new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||||
|
(map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||||
|
};
|
||||||
|
export declare const AnyMap: AnyMap;
|
||||||
|
export {};
|
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
|
||||||
|
export type MemoState = {
|
||||||
|
lastKey: number;
|
||||||
|
lastNeedle: number;
|
||||||
|
lastIndex: number;
|
||||||
|
};
|
||||||
|
export declare let found: boolean;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
|
||||||
|
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||||
|
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||||
|
export declare function memoizedState(): MemoState;
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
|
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { MemoState } from './binary-search';
|
||||||
|
export type Source = {
|
||||||
|
__proto__: null;
|
||||||
|
[line: number]: Exclude<ReverseSegment, [number]>[];
|
||||||
|
};
|
||||||
|
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
|
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
export default function resolve(input: string, base: string | undefined): string;
|
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
|
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
type GeneratedColumn = number;
|
||||||
|
type SourcesIndex = number;
|
||||||
|
type SourceLine = number;
|
||||||
|
type SourceColumn = number;
|
||||||
|
type NamesIndex = number;
|
||||||
|
type GeneratedLine = number;
|
||||||
|
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||||
|
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
|
||||||
|
export declare const COLUMN = 0;
|
||||||
|
export declare const SOURCES_INDEX = 1;
|
||||||
|
export declare const SOURCE_LINE = 2;
|
||||||
|
export declare const SOURCE_COLUMN = 3;
|
||||||
|
export declare const NAMES_INDEX = 4;
|
||||||
|
export declare const REV_GENERATED_LINE = 1;
|
||||||
|
export declare const REV_GENERATED_COLUMN = 2;
|
||||||
|
export {};
|
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
export default function stripFilename(path: string | undefined | null): string;
|
79
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
79
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
|
||||||
|
export type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
|
||||||
|
export declare const LEAST_UPPER_BOUND = -1;
|
||||||
|
export declare const GREATEST_LOWER_BOUND = 1;
|
||||||
|
export { AnyMap } from './any-map';
|
||||||
|
export declare class TraceMap implements SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
ignoreList: SourceMapV3['ignoreList'];
|
||||||
|
resolvedSources: string[];
|
||||||
|
private _encoded;
|
||||||
|
private _decoded;
|
||||||
|
private _decodedMemo;
|
||||||
|
private _bySources;
|
||||||
|
private _bySourceMemos;
|
||||||
|
constructor(map: SourceMapInput, mapUrl?: string | null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
export declare function isIgnored(map: TraceMap, source: string): boolean;
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
|
||||||
|
mappings: readonly SourceMapSegment[][];
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function encodedMap(map: TraceMap): EncodedSourceMap;
|
99
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
99
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
|
||||||
|
export interface SourceMapV3 {
|
||||||
|
file?: string | null;
|
||||||
|
names: string[];
|
||||||
|
sourceRoot?: string;
|
||||||
|
sources: (string | null)[];
|
||||||
|
sourcesContent?: (string | null)[];
|
||||||
|
version: 3;
|
||||||
|
ignoreList?: number[];
|
||||||
|
}
|
||||||
|
export interface EncodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: string;
|
||||||
|
}
|
||||||
|
export interface DecodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: SourceMapSegment[][];
|
||||||
|
}
|
||||||
|
export interface Section {
|
||||||
|
offset: {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
|
||||||
|
}
|
||||||
|
export interface SectionedSourceMap {
|
||||||
|
file?: string | null;
|
||||||
|
sections: Section[];
|
||||||
|
version: 3;
|
||||||
|
}
|
||||||
|
export type OriginalMapping = {
|
||||||
|
source: string | null;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export type InvalidOriginalMapping = {
|
||||||
|
source: null;
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
name: null;
|
||||||
|
};
|
||||||
|
export type GeneratedMapping = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
export type InvalidGeneratedMapping = {
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
};
|
||||||
|
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
|
||||||
|
export type XInput = {
|
||||||
|
x_google_ignoreList?: SourceMapV3['ignoreList'];
|
||||||
|
};
|
||||||
|
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
|
||||||
|
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
|
||||||
|
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
|
||||||
|
sections: SectionXInput[];
|
||||||
|
};
|
||||||
|
export type SectionXInput = Omit<Section, 'map'> & {
|
||||||
|
map: SectionedSourceMapInput;
|
||||||
|
};
|
||||||
|
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
|
||||||
|
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
|
||||||
|
export type Needle = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: Bias;
|
||||||
|
};
|
||||||
|
export type SourceNeedle = {
|
||||||
|
source: string;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: Bias;
|
||||||
|
};
|
||||||
|
export type EachMapping = {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: null;
|
||||||
|
originalLine: null;
|
||||||
|
originalColumn: null;
|
||||||
|
name: null;
|
||||||
|
} | {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: string | null;
|
||||||
|
originalLine: number;
|
||||||
|
originalColumn: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export declare abstract class SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
resolvedSources: SourceMapV3['sources'];
|
||||||
|
ignoreList: SourceMapV3['ignoreList'];
|
||||||
|
}
|
77
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
77
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/trace-mapping",
|
||||||
|
"version": "0.3.25",
|
||||||
|
"description": "Trace the original position through a source map",
|
||||||
|
"keywords": [
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"main": "dist/trace-mapping.umd.js",
|
||||||
|
"module": "dist/trace-mapping.mjs",
|
||||||
|
"types": "dist/types/trace-mapping.d.ts",
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/trace-mapping.d.ts",
|
||||||
|
"browser": "./dist/trace-mapping.umd.js",
|
||||||
|
"require": "./dist/trace-mapping.umd.js",
|
||||||
|
"import": "./dist/trace-mapping.mjs"
|
||||||
|
},
|
||||||
|
"./dist/trace-mapping.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jridgewell/trace-mapping.git"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node --expose-gc benchmark/index.mjs",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.mjs",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "11.1.6",
|
||||||
|
"@types/mocha": "10.0.6",
|
||||||
|
"@types/node": "20.11.20",
|
||||||
|
"@typescript-eslint/eslint-plugin": "6.18.1",
|
||||||
|
"@typescript-eslint/parser": "6.18.1",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "9.0.0",
|
||||||
|
"esbuild": "0.19.11",
|
||||||
|
"eslint": "8.56.0",
|
||||||
|
"eslint-config-prettier": "9.1.0",
|
||||||
|
"eslint-plugin-no-only-tests": "3.1.0",
|
||||||
|
"mocha": "10.3.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "3.1.1",
|
||||||
|
"rollup": "4.9.4",
|
||||||
|
"tsx": "4.7.0",
|
||||||
|
"typescript": "5.3.3"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/resolve-uri": "^3.1.0",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||||
|
}
|
||||||
|
}
|
3
node_modules/@rollup/rollup-linux-x64-gnu/README.md
generated
vendored
Normal file
3
node_modules/@rollup/rollup-linux-x64-gnu/README.md
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# `@rollup/rollup-linux-x64-gnu`
|
||||||
|
|
||||||
|
This is the **x86_64-unknown-linux-gnu** binary for `rollup`
|
22
node_modules/@rollup/rollup-linux-x64-gnu/package.json
generated
vendored
Normal file
22
node_modules/@rollup/rollup-linux-x64-gnu/package.json
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"name": "@rollup/rollup-linux-x64-gnu",
|
||||||
|
"version": "4.24.0",
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"files": [
|
||||||
|
"rollup.linux-x64-gnu.node"
|
||||||
|
],
|
||||||
|
"description": "Native bindings for Rollup",
|
||||||
|
"author": "Lukas Taegert-Atkinson",
|
||||||
|
"homepage": "https://rollupjs.org/",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "rollup/rollup",
|
||||||
|
"libc": [
|
||||||
|
"glibc"
|
||||||
|
],
|
||||||
|
"main": "./rollup.linux-x64-gnu.node"
|
||||||
|
}
|
BIN
node_modules/@rollup/rollup-linux-x64-gnu/rollup.linux-x64-gnu.node
generated
vendored
Normal file
BIN
node_modules/@rollup/rollup-linux-x64-gnu/rollup.linux-x64-gnu.node
generated
vendored
Normal file
Binary file not shown.
3
node_modules/@rollup/rollup-linux-x64-musl/README.md
generated
vendored
Normal file
3
node_modules/@rollup/rollup-linux-x64-musl/README.md
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# `@rollup/rollup-linux-x64-musl`
|
||||||
|
|
||||||
|
This is the **x86_64-unknown-linux-musl** binary for `rollup`
|
22
node_modules/@rollup/rollup-linux-x64-musl/package.json
generated
vendored
Normal file
22
node_modules/@rollup/rollup-linux-x64-musl/package.json
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"name": "@rollup/rollup-linux-x64-musl",
|
||||||
|
"version": "4.24.0",
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"files": [
|
||||||
|
"rollup.linux-x64-musl.node"
|
||||||
|
],
|
||||||
|
"description": "Native bindings for Rollup",
|
||||||
|
"author": "Lukas Taegert-Atkinson",
|
||||||
|
"homepage": "https://rollupjs.org/",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "rollup/rollup",
|
||||||
|
"libc": [
|
||||||
|
"musl"
|
||||||
|
],
|
||||||
|
"main": "./rollup.linux-x64-musl.node"
|
||||||
|
}
|
BIN
node_modules/@rollup/rollup-linux-x64-musl/rollup.linux-x64-musl.node
generated
vendored
Normal file
BIN
node_modules/@rollup/rollup-linux-x64-musl/rollup.linux-x64-musl.node
generated
vendored
Normal file
Binary file not shown.
21
node_modules/@types/estree/LICENSE
generated
vendored
Normal file
21
node_modules/@types/estree/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
15
node_modules/@types/estree/README.md
generated
vendored
Normal file
15
node_modules/@types/estree/README.md
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
# Installation
|
||||||
|
> `npm install --save @types/estree`
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
This package contains type definitions for estree (https://github.com/estree/estree).
|
||||||
|
|
||||||
|
# Details
|
||||||
|
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/estree.
|
||||||
|
|
||||||
|
### Additional Details
|
||||||
|
* Last updated: Wed, 18 Sep 2024 09:37:00 GMT
|
||||||
|
* Dependencies: none
|
||||||
|
|
||||||
|
# Credits
|
||||||
|
These definitions were written by [RReverser](https://github.com/RReverser).
|
167
node_modules/@types/estree/flow.d.ts
generated
vendored
Normal file
167
node_modules/@types/estree/flow.d.ts
generated
vendored
Normal file
@ -0,0 +1,167 @@
|
|||||||
|
declare namespace ESTree {
|
||||||
|
interface FlowTypeAnnotation extends Node {}
|
||||||
|
|
||||||
|
interface FlowBaseTypeAnnotation extends FlowTypeAnnotation {}
|
||||||
|
|
||||||
|
interface FlowLiteralTypeAnnotation extends FlowTypeAnnotation, Literal {}
|
||||||
|
|
||||||
|
interface FlowDeclaration extends Declaration {}
|
||||||
|
|
||||||
|
interface AnyTypeAnnotation extends FlowBaseTypeAnnotation {}
|
||||||
|
|
||||||
|
interface ArrayTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
elementType: FlowTypeAnnotation;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface BooleanLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {}
|
||||||
|
|
||||||
|
interface BooleanTypeAnnotation extends FlowBaseTypeAnnotation {}
|
||||||
|
|
||||||
|
interface ClassImplements extends Node {
|
||||||
|
id: Identifier;
|
||||||
|
typeParameters?: TypeParameterInstantiation | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ClassProperty {
|
||||||
|
key: Expression;
|
||||||
|
value?: Expression | null;
|
||||||
|
typeAnnotation?: TypeAnnotation | null;
|
||||||
|
computed: boolean;
|
||||||
|
static: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface DeclareClass extends FlowDeclaration {
|
||||||
|
id: Identifier;
|
||||||
|
typeParameters?: TypeParameterDeclaration | null;
|
||||||
|
body: ObjectTypeAnnotation;
|
||||||
|
extends: InterfaceExtends[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface DeclareFunction extends FlowDeclaration {
|
||||||
|
id: Identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface DeclareModule extends FlowDeclaration {
|
||||||
|
id: Literal | Identifier;
|
||||||
|
body: BlockStatement;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface DeclareVariable extends FlowDeclaration {
|
||||||
|
id: Identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FunctionTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
params: FunctionTypeParam[];
|
||||||
|
returnType: FlowTypeAnnotation;
|
||||||
|
rest?: FunctionTypeParam | null;
|
||||||
|
typeParameters?: TypeParameterDeclaration | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FunctionTypeParam {
|
||||||
|
name: Identifier;
|
||||||
|
typeAnnotation: FlowTypeAnnotation;
|
||||||
|
optional: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GenericTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
id: Identifier | QualifiedTypeIdentifier;
|
||||||
|
typeParameters?: TypeParameterInstantiation | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InterfaceExtends extends Node {
|
||||||
|
id: Identifier | QualifiedTypeIdentifier;
|
||||||
|
typeParameters?: TypeParameterInstantiation | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InterfaceDeclaration extends FlowDeclaration {
|
||||||
|
id: Identifier;
|
||||||
|
typeParameters?: TypeParameterDeclaration | null;
|
||||||
|
extends: InterfaceExtends[];
|
||||||
|
body: ObjectTypeAnnotation;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface IntersectionTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
types: FlowTypeAnnotation[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface MixedTypeAnnotation extends FlowBaseTypeAnnotation {}
|
||||||
|
|
||||||
|
interface NullableTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
typeAnnotation: TypeAnnotation;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface NumberLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {}
|
||||||
|
|
||||||
|
interface NumberTypeAnnotation extends FlowBaseTypeAnnotation {}
|
||||||
|
|
||||||
|
interface StringLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {}
|
||||||
|
|
||||||
|
interface StringTypeAnnotation extends FlowBaseTypeAnnotation {}
|
||||||
|
|
||||||
|
interface TupleTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
types: FlowTypeAnnotation[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TypeofTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
argument: FlowTypeAnnotation;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TypeAlias extends FlowDeclaration {
|
||||||
|
id: Identifier;
|
||||||
|
typeParameters?: TypeParameterDeclaration | null;
|
||||||
|
right: FlowTypeAnnotation;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TypeAnnotation extends Node {
|
||||||
|
typeAnnotation: FlowTypeAnnotation;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TypeCastExpression extends Expression {
|
||||||
|
expression: Expression;
|
||||||
|
typeAnnotation: TypeAnnotation;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TypeParameterDeclaration extends Node {
|
||||||
|
params: Identifier[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TypeParameterInstantiation extends Node {
|
||||||
|
params: FlowTypeAnnotation[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ObjectTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
properties: ObjectTypeProperty[];
|
||||||
|
indexers: ObjectTypeIndexer[];
|
||||||
|
callProperties: ObjectTypeCallProperty[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ObjectTypeCallProperty extends Node {
|
||||||
|
value: FunctionTypeAnnotation;
|
||||||
|
static: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ObjectTypeIndexer extends Node {
|
||||||
|
id: Identifier;
|
||||||
|
key: FlowTypeAnnotation;
|
||||||
|
value: FlowTypeAnnotation;
|
||||||
|
static: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ObjectTypeProperty extends Node {
|
||||||
|
key: Expression;
|
||||||
|
value: FlowTypeAnnotation;
|
||||||
|
optional: boolean;
|
||||||
|
static: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface QualifiedTypeIdentifier extends Node {
|
||||||
|
qualification: Identifier | QualifiedTypeIdentifier;
|
||||||
|
id: Identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface UnionTypeAnnotation extends FlowTypeAnnotation {
|
||||||
|
types: FlowTypeAnnotation[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface VoidTypeAnnotation extends FlowBaseTypeAnnotation {}
|
||||||
|
}
|
684
node_modules/@types/estree/index.d.ts
generated
vendored
Normal file
684
node_modules/@types/estree/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,684 @@
|
|||||||
|
// This definition file follows a somewhat unusual format. ESTree allows
|
||||||
|
// runtime type checks based on the `type` parameter. In order to explain this
|
||||||
|
// to typescript we want to use discriminated union types:
|
||||||
|
// https://github.com/Microsoft/TypeScript/pull/9163
|
||||||
|
//
|
||||||
|
// For ESTree this is a bit tricky because the high level interfaces like
|
||||||
|
// Node or Function are pulling double duty. We want to pass common fields down
|
||||||
|
// to the interfaces that extend them (like Identifier or
|
||||||
|
// ArrowFunctionExpression), but you can't extend a type union or enforce
|
||||||
|
// common fields on them. So we've split the high level interfaces into two
|
||||||
|
// types, a base type which passes down inherited fields, and a type union of
|
||||||
|
// all types which extend the base type. Only the type union is exported, and
|
||||||
|
// the union is how other types refer to the collection of inheriting types.
|
||||||
|
//
|
||||||
|
// This makes the definitions file here somewhat more difficult to maintain,
|
||||||
|
// but it has the notable advantage of making ESTree much easier to use as
|
||||||
|
// an end user.
|
||||||
|
|
||||||
|
export interface BaseNodeWithoutComments {
|
||||||
|
// Every leaf interface that extends BaseNode must specify a type property.
|
||||||
|
// The type property should be a string literal. For example, Identifier
|
||||||
|
// has: `type: "Identifier"`
|
||||||
|
type: string;
|
||||||
|
loc?: SourceLocation | null | undefined;
|
||||||
|
range?: [number, number] | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BaseNode extends BaseNodeWithoutComments {
|
||||||
|
leadingComments?: Comment[] | undefined;
|
||||||
|
trailingComments?: Comment[] | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NodeMap {
|
||||||
|
AssignmentProperty: AssignmentProperty;
|
||||||
|
CatchClause: CatchClause;
|
||||||
|
Class: Class;
|
||||||
|
ClassBody: ClassBody;
|
||||||
|
Expression: Expression;
|
||||||
|
Function: Function;
|
||||||
|
Identifier: Identifier;
|
||||||
|
Literal: Literal;
|
||||||
|
MethodDefinition: MethodDefinition;
|
||||||
|
ModuleDeclaration: ModuleDeclaration;
|
||||||
|
ModuleSpecifier: ModuleSpecifier;
|
||||||
|
Pattern: Pattern;
|
||||||
|
PrivateIdentifier: PrivateIdentifier;
|
||||||
|
Program: Program;
|
||||||
|
Property: Property;
|
||||||
|
PropertyDefinition: PropertyDefinition;
|
||||||
|
SpreadElement: SpreadElement;
|
||||||
|
Statement: Statement;
|
||||||
|
Super: Super;
|
||||||
|
SwitchCase: SwitchCase;
|
||||||
|
TemplateElement: TemplateElement;
|
||||||
|
VariableDeclarator: VariableDeclarator;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Node = NodeMap[keyof NodeMap];
|
||||||
|
|
||||||
|
export interface Comment extends BaseNodeWithoutComments {
|
||||||
|
type: "Line" | "Block";
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SourceLocation {
|
||||||
|
source?: string | null | undefined;
|
||||||
|
start: Position;
|
||||||
|
end: Position;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Position {
|
||||||
|
/** >= 1 */
|
||||||
|
line: number;
|
||||||
|
/** >= 0 */
|
||||||
|
column: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Program extends BaseNode {
|
||||||
|
type: "Program";
|
||||||
|
sourceType: "script" | "module";
|
||||||
|
body: Array<Directive | Statement | ModuleDeclaration>;
|
||||||
|
comments?: Comment[] | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Directive extends BaseNode {
|
||||||
|
type: "ExpressionStatement";
|
||||||
|
expression: Literal;
|
||||||
|
directive: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BaseFunction extends BaseNode {
|
||||||
|
params: Pattern[];
|
||||||
|
generator?: boolean | undefined;
|
||||||
|
async?: boolean | undefined;
|
||||||
|
// The body is either BlockStatement or Expression because arrow functions
|
||||||
|
// can have a body that's either. FunctionDeclarations and
|
||||||
|
// FunctionExpressions have only BlockStatement bodies.
|
||||||
|
body: BlockStatement | Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Function = FunctionDeclaration | FunctionExpression | ArrowFunctionExpression;
|
||||||
|
|
||||||
|
export type Statement =
|
||||||
|
| ExpressionStatement
|
||||||
|
| BlockStatement
|
||||||
|
| StaticBlock
|
||||||
|
| EmptyStatement
|
||||||
|
| DebuggerStatement
|
||||||
|
| WithStatement
|
||||||
|
| ReturnStatement
|
||||||
|
| LabeledStatement
|
||||||
|
| BreakStatement
|
||||||
|
| ContinueStatement
|
||||||
|
| IfStatement
|
||||||
|
| SwitchStatement
|
||||||
|
| ThrowStatement
|
||||||
|
| TryStatement
|
||||||
|
| WhileStatement
|
||||||
|
| DoWhileStatement
|
||||||
|
| ForStatement
|
||||||
|
| ForInStatement
|
||||||
|
| ForOfStatement
|
||||||
|
| Declaration;
|
||||||
|
|
||||||
|
export interface BaseStatement extends BaseNode {}
|
||||||
|
|
||||||
|
export interface EmptyStatement extends BaseStatement {
|
||||||
|
type: "EmptyStatement";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BlockStatement extends BaseStatement {
|
||||||
|
type: "BlockStatement";
|
||||||
|
body: Statement[];
|
||||||
|
innerComments?: Comment[] | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StaticBlock extends Omit<BlockStatement, "type"> {
|
||||||
|
type: "StaticBlock";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExpressionStatement extends BaseStatement {
|
||||||
|
type: "ExpressionStatement";
|
||||||
|
expression: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IfStatement extends BaseStatement {
|
||||||
|
type: "IfStatement";
|
||||||
|
test: Expression;
|
||||||
|
consequent: Statement;
|
||||||
|
alternate?: Statement | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface LabeledStatement extends BaseStatement {
|
||||||
|
type: "LabeledStatement";
|
||||||
|
label: Identifier;
|
||||||
|
body: Statement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BreakStatement extends BaseStatement {
|
||||||
|
type: "BreakStatement";
|
||||||
|
label?: Identifier | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ContinueStatement extends BaseStatement {
|
||||||
|
type: "ContinueStatement";
|
||||||
|
label?: Identifier | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WithStatement extends BaseStatement {
|
||||||
|
type: "WithStatement";
|
||||||
|
object: Expression;
|
||||||
|
body: Statement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SwitchStatement extends BaseStatement {
|
||||||
|
type: "SwitchStatement";
|
||||||
|
discriminant: Expression;
|
||||||
|
cases: SwitchCase[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ReturnStatement extends BaseStatement {
|
||||||
|
type: "ReturnStatement";
|
||||||
|
argument?: Expression | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ThrowStatement extends BaseStatement {
|
||||||
|
type: "ThrowStatement";
|
||||||
|
argument: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TryStatement extends BaseStatement {
|
||||||
|
type: "TryStatement";
|
||||||
|
block: BlockStatement;
|
||||||
|
handler?: CatchClause | null | undefined;
|
||||||
|
finalizer?: BlockStatement | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WhileStatement extends BaseStatement {
|
||||||
|
type: "WhileStatement";
|
||||||
|
test: Expression;
|
||||||
|
body: Statement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DoWhileStatement extends BaseStatement {
|
||||||
|
type: "DoWhileStatement";
|
||||||
|
body: Statement;
|
||||||
|
test: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ForStatement extends BaseStatement {
|
||||||
|
type: "ForStatement";
|
||||||
|
init?: VariableDeclaration | Expression | null | undefined;
|
||||||
|
test?: Expression | null | undefined;
|
||||||
|
update?: Expression | null | undefined;
|
||||||
|
body: Statement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BaseForXStatement extends BaseStatement {
|
||||||
|
left: VariableDeclaration | Pattern;
|
||||||
|
right: Expression;
|
||||||
|
body: Statement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ForInStatement extends BaseForXStatement {
|
||||||
|
type: "ForInStatement";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DebuggerStatement extends BaseStatement {
|
||||||
|
type: "DebuggerStatement";
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Declaration = FunctionDeclaration | VariableDeclaration | ClassDeclaration;
|
||||||
|
|
||||||
|
export interface BaseDeclaration extends BaseStatement {}
|
||||||
|
|
||||||
|
export interface MaybeNamedFunctionDeclaration extends BaseFunction, BaseDeclaration {
|
||||||
|
type: "FunctionDeclaration";
|
||||||
|
/** It is null when a function declaration is a part of the `export default function` statement */
|
||||||
|
id: Identifier | null;
|
||||||
|
body: BlockStatement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FunctionDeclaration extends MaybeNamedFunctionDeclaration {
|
||||||
|
id: Identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface VariableDeclaration extends BaseDeclaration {
|
||||||
|
type: "VariableDeclaration";
|
||||||
|
declarations: VariableDeclarator[];
|
||||||
|
kind: "var" | "let" | "const";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface VariableDeclarator extends BaseNode {
|
||||||
|
type: "VariableDeclarator";
|
||||||
|
id: Pattern;
|
||||||
|
init?: Expression | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExpressionMap {
|
||||||
|
ArrayExpression: ArrayExpression;
|
||||||
|
ArrowFunctionExpression: ArrowFunctionExpression;
|
||||||
|
AssignmentExpression: AssignmentExpression;
|
||||||
|
AwaitExpression: AwaitExpression;
|
||||||
|
BinaryExpression: BinaryExpression;
|
||||||
|
CallExpression: CallExpression;
|
||||||
|
ChainExpression: ChainExpression;
|
||||||
|
ClassExpression: ClassExpression;
|
||||||
|
ConditionalExpression: ConditionalExpression;
|
||||||
|
FunctionExpression: FunctionExpression;
|
||||||
|
Identifier: Identifier;
|
||||||
|
ImportExpression: ImportExpression;
|
||||||
|
Literal: Literal;
|
||||||
|
LogicalExpression: LogicalExpression;
|
||||||
|
MemberExpression: MemberExpression;
|
||||||
|
MetaProperty: MetaProperty;
|
||||||
|
NewExpression: NewExpression;
|
||||||
|
ObjectExpression: ObjectExpression;
|
||||||
|
SequenceExpression: SequenceExpression;
|
||||||
|
TaggedTemplateExpression: TaggedTemplateExpression;
|
||||||
|
TemplateLiteral: TemplateLiteral;
|
||||||
|
ThisExpression: ThisExpression;
|
||||||
|
UnaryExpression: UnaryExpression;
|
||||||
|
UpdateExpression: UpdateExpression;
|
||||||
|
YieldExpression: YieldExpression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Expression = ExpressionMap[keyof ExpressionMap];
|
||||||
|
|
||||||
|
export interface BaseExpression extends BaseNode {}
|
||||||
|
|
||||||
|
export type ChainElement = SimpleCallExpression | MemberExpression;
|
||||||
|
|
||||||
|
export interface ChainExpression extends BaseExpression {
|
||||||
|
type: "ChainExpression";
|
||||||
|
expression: ChainElement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ThisExpression extends BaseExpression {
|
||||||
|
type: "ThisExpression";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ArrayExpression extends BaseExpression {
|
||||||
|
type: "ArrayExpression";
|
||||||
|
elements: Array<Expression | SpreadElement | null>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ObjectExpression extends BaseExpression {
|
||||||
|
type: "ObjectExpression";
|
||||||
|
properties: Array<Property | SpreadElement>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PrivateIdentifier extends BaseNode {
|
||||||
|
type: "PrivateIdentifier";
|
||||||
|
name: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Property extends BaseNode {
|
||||||
|
type: "Property";
|
||||||
|
key: Expression | PrivateIdentifier;
|
||||||
|
value: Expression | Pattern; // Could be an AssignmentProperty
|
||||||
|
kind: "init" | "get" | "set";
|
||||||
|
method: boolean;
|
||||||
|
shorthand: boolean;
|
||||||
|
computed: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PropertyDefinition extends BaseNode {
|
||||||
|
type: "PropertyDefinition";
|
||||||
|
key: Expression | PrivateIdentifier;
|
||||||
|
value?: Expression | null | undefined;
|
||||||
|
computed: boolean;
|
||||||
|
static: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FunctionExpression extends BaseFunction, BaseExpression {
|
||||||
|
id?: Identifier | null | undefined;
|
||||||
|
type: "FunctionExpression";
|
||||||
|
body: BlockStatement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SequenceExpression extends BaseExpression {
|
||||||
|
type: "SequenceExpression";
|
||||||
|
expressions: Expression[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface UnaryExpression extends BaseExpression {
|
||||||
|
type: "UnaryExpression";
|
||||||
|
operator: UnaryOperator;
|
||||||
|
prefix: true;
|
||||||
|
argument: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BinaryExpression extends BaseExpression {
|
||||||
|
type: "BinaryExpression";
|
||||||
|
operator: BinaryOperator;
|
||||||
|
left: Expression | PrivateIdentifier;
|
||||||
|
right: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AssignmentExpression extends BaseExpression {
|
||||||
|
type: "AssignmentExpression";
|
||||||
|
operator: AssignmentOperator;
|
||||||
|
left: Pattern | MemberExpression;
|
||||||
|
right: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface UpdateExpression extends BaseExpression {
|
||||||
|
type: "UpdateExpression";
|
||||||
|
operator: UpdateOperator;
|
||||||
|
argument: Expression;
|
||||||
|
prefix: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface LogicalExpression extends BaseExpression {
|
||||||
|
type: "LogicalExpression";
|
||||||
|
operator: LogicalOperator;
|
||||||
|
left: Expression;
|
||||||
|
right: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ConditionalExpression extends BaseExpression {
|
||||||
|
type: "ConditionalExpression";
|
||||||
|
test: Expression;
|
||||||
|
alternate: Expression;
|
||||||
|
consequent: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BaseCallExpression extends BaseExpression {
|
||||||
|
callee: Expression | Super;
|
||||||
|
arguments: Array<Expression | SpreadElement>;
|
||||||
|
}
|
||||||
|
export type CallExpression = SimpleCallExpression | NewExpression;
|
||||||
|
|
||||||
|
export interface SimpleCallExpression extends BaseCallExpression {
|
||||||
|
type: "CallExpression";
|
||||||
|
optional: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NewExpression extends BaseCallExpression {
|
||||||
|
type: "NewExpression";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MemberExpression extends BaseExpression, BasePattern {
|
||||||
|
type: "MemberExpression";
|
||||||
|
object: Expression | Super;
|
||||||
|
property: Expression | PrivateIdentifier;
|
||||||
|
computed: boolean;
|
||||||
|
optional: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Pattern = Identifier | ObjectPattern | ArrayPattern | RestElement | AssignmentPattern | MemberExpression;
|
||||||
|
|
||||||
|
export interface BasePattern extends BaseNode {}
|
||||||
|
|
||||||
|
export interface SwitchCase extends BaseNode {
|
||||||
|
type: "SwitchCase";
|
||||||
|
test?: Expression | null | undefined;
|
||||||
|
consequent: Statement[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CatchClause extends BaseNode {
|
||||||
|
type: "CatchClause";
|
||||||
|
param: Pattern | null;
|
||||||
|
body: BlockStatement;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Identifier extends BaseNode, BaseExpression, BasePattern {
|
||||||
|
type: "Identifier";
|
||||||
|
name: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Literal = SimpleLiteral | RegExpLiteral | BigIntLiteral;
|
||||||
|
|
||||||
|
export interface SimpleLiteral extends BaseNode, BaseExpression {
|
||||||
|
type: "Literal";
|
||||||
|
value: string | boolean | number | null;
|
||||||
|
raw?: string | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface RegExpLiteral extends BaseNode, BaseExpression {
|
||||||
|
type: "Literal";
|
||||||
|
value?: RegExp | null | undefined;
|
||||||
|
regex: {
|
||||||
|
pattern: string;
|
||||||
|
flags: string;
|
||||||
|
};
|
||||||
|
raw?: string | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BigIntLiteral extends BaseNode, BaseExpression {
|
||||||
|
type: "Literal";
|
||||||
|
value?: bigint | null | undefined;
|
||||||
|
bigint: string;
|
||||||
|
raw?: string | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type UnaryOperator = "-" | "+" | "!" | "~" | "typeof" | "void" | "delete";
|
||||||
|
|
||||||
|
export type BinaryOperator =
|
||||||
|
| "=="
|
||||||
|
| "!="
|
||||||
|
| "==="
|
||||||
|
| "!=="
|
||||||
|
| "<"
|
||||||
|
| "<="
|
||||||
|
| ">"
|
||||||
|
| ">="
|
||||||
|
| "<<"
|
||||||
|
| ">>"
|
||||||
|
| ">>>"
|
||||||
|
| "+"
|
||||||
|
| "-"
|
||||||
|
| "*"
|
||||||
|
| "/"
|
||||||
|
| "%"
|
||||||
|
| "**"
|
||||||
|
| "|"
|
||||||
|
| "^"
|
||||||
|
| "&"
|
||||||
|
| "in"
|
||||||
|
| "instanceof";
|
||||||
|
|
||||||
|
export type LogicalOperator = "||" | "&&" | "??";
|
||||||
|
|
||||||
|
export type AssignmentOperator =
|
||||||
|
| "="
|
||||||
|
| "+="
|
||||||
|
| "-="
|
||||||
|
| "*="
|
||||||
|
| "/="
|
||||||
|
| "%="
|
||||||
|
| "**="
|
||||||
|
| "<<="
|
||||||
|
| ">>="
|
||||||
|
| ">>>="
|
||||||
|
| "|="
|
||||||
|
| "^="
|
||||||
|
| "&="
|
||||||
|
| "||="
|
||||||
|
| "&&="
|
||||||
|
| "??=";
|
||||||
|
|
||||||
|
export type UpdateOperator = "++" | "--";
|
||||||
|
|
||||||
|
export interface ForOfStatement extends BaseForXStatement {
|
||||||
|
type: "ForOfStatement";
|
||||||
|
await: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Super extends BaseNode {
|
||||||
|
type: "Super";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SpreadElement extends BaseNode {
|
||||||
|
type: "SpreadElement";
|
||||||
|
argument: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ArrowFunctionExpression extends BaseExpression, BaseFunction {
|
||||||
|
type: "ArrowFunctionExpression";
|
||||||
|
expression: boolean;
|
||||||
|
body: BlockStatement | Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface YieldExpression extends BaseExpression {
|
||||||
|
type: "YieldExpression";
|
||||||
|
argument?: Expression | null | undefined;
|
||||||
|
delegate: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TemplateLiteral extends BaseExpression {
|
||||||
|
type: "TemplateLiteral";
|
||||||
|
quasis: TemplateElement[];
|
||||||
|
expressions: Expression[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TaggedTemplateExpression extends BaseExpression {
|
||||||
|
type: "TaggedTemplateExpression";
|
||||||
|
tag: Expression;
|
||||||
|
quasi: TemplateLiteral;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TemplateElement extends BaseNode {
|
||||||
|
type: "TemplateElement";
|
||||||
|
tail: boolean;
|
||||||
|
value: {
|
||||||
|
/** It is null when the template literal is tagged and the text has an invalid escape (e.g. - tag`\unicode and \u{55}`) */
|
||||||
|
cooked?: string | null | undefined;
|
||||||
|
raw: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AssignmentProperty extends Property {
|
||||||
|
value: Pattern;
|
||||||
|
kind: "init";
|
||||||
|
method: boolean; // false
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ObjectPattern extends BasePattern {
|
||||||
|
type: "ObjectPattern";
|
||||||
|
properties: Array<AssignmentProperty | RestElement>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ArrayPattern extends BasePattern {
|
||||||
|
type: "ArrayPattern";
|
||||||
|
elements: Array<Pattern | null>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface RestElement extends BasePattern {
|
||||||
|
type: "RestElement";
|
||||||
|
argument: Pattern;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AssignmentPattern extends BasePattern {
|
||||||
|
type: "AssignmentPattern";
|
||||||
|
left: Pattern;
|
||||||
|
right: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Class = ClassDeclaration | ClassExpression;
|
||||||
|
export interface BaseClass extends BaseNode {
|
||||||
|
superClass?: Expression | null | undefined;
|
||||||
|
body: ClassBody;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ClassBody extends BaseNode {
|
||||||
|
type: "ClassBody";
|
||||||
|
body: Array<MethodDefinition | PropertyDefinition | StaticBlock>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MethodDefinition extends BaseNode {
|
||||||
|
type: "MethodDefinition";
|
||||||
|
key: Expression | PrivateIdentifier;
|
||||||
|
value: FunctionExpression;
|
||||||
|
kind: "constructor" | "method" | "get" | "set";
|
||||||
|
computed: boolean;
|
||||||
|
static: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MaybeNamedClassDeclaration extends BaseClass, BaseDeclaration {
|
||||||
|
type: "ClassDeclaration";
|
||||||
|
/** It is null when a class declaration is a part of the `export default class` statement */
|
||||||
|
id: Identifier | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ClassDeclaration extends MaybeNamedClassDeclaration {
|
||||||
|
id: Identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ClassExpression extends BaseClass, BaseExpression {
|
||||||
|
type: "ClassExpression";
|
||||||
|
id?: Identifier | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MetaProperty extends BaseExpression {
|
||||||
|
type: "MetaProperty";
|
||||||
|
meta: Identifier;
|
||||||
|
property: Identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ModuleDeclaration =
|
||||||
|
| ImportDeclaration
|
||||||
|
| ExportNamedDeclaration
|
||||||
|
| ExportDefaultDeclaration
|
||||||
|
| ExportAllDeclaration;
|
||||||
|
export interface BaseModuleDeclaration extends BaseNode {}
|
||||||
|
|
||||||
|
export type ModuleSpecifier = ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier | ExportSpecifier;
|
||||||
|
export interface BaseModuleSpecifier extends BaseNode {
|
||||||
|
local: Identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ImportDeclaration extends BaseModuleDeclaration {
|
||||||
|
type: "ImportDeclaration";
|
||||||
|
specifiers: Array<ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier>;
|
||||||
|
source: Literal;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ImportSpecifier extends BaseModuleSpecifier {
|
||||||
|
type: "ImportSpecifier";
|
||||||
|
imported: Identifier | Literal;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ImportExpression extends BaseExpression {
|
||||||
|
type: "ImportExpression";
|
||||||
|
source: Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ImportDefaultSpecifier extends BaseModuleSpecifier {
|
||||||
|
type: "ImportDefaultSpecifier";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ImportNamespaceSpecifier extends BaseModuleSpecifier {
|
||||||
|
type: "ImportNamespaceSpecifier";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExportNamedDeclaration extends BaseModuleDeclaration {
|
||||||
|
type: "ExportNamedDeclaration";
|
||||||
|
declaration?: Declaration | null | undefined;
|
||||||
|
specifiers: ExportSpecifier[];
|
||||||
|
source?: Literal | null | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExportSpecifier extends Omit<BaseModuleSpecifier, "local"> {
|
||||||
|
type: "ExportSpecifier";
|
||||||
|
local: Identifier | Literal;
|
||||||
|
exported: Identifier | Literal;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExportDefaultDeclaration extends BaseModuleDeclaration {
|
||||||
|
type: "ExportDefaultDeclaration";
|
||||||
|
declaration: MaybeNamedFunctionDeclaration | MaybeNamedClassDeclaration | Expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExportAllDeclaration extends BaseModuleDeclaration {
|
||||||
|
type: "ExportAllDeclaration";
|
||||||
|
exported: Identifier | Literal | null;
|
||||||
|
source: Literal;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AwaitExpression extends BaseExpression {
|
||||||
|
type: "AwaitExpression";
|
||||||
|
argument: Expression;
|
||||||
|
}
|
26
node_modules/@types/estree/package.json
generated
vendored
Normal file
26
node_modules/@types/estree/package.json
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
{
|
||||||
|
"name": "@types/estree",
|
||||||
|
"version": "1.0.6",
|
||||||
|
"description": "TypeScript definitions for estree",
|
||||||
|
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/estree",
|
||||||
|
"license": "MIT",
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "RReverser",
|
||||||
|
"githubUsername": "RReverser",
|
||||||
|
"url": "https://github.com/RReverser"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"main": "",
|
||||||
|
"types": "index.d.ts",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||||
|
"directory": "types/estree"
|
||||||
|
},
|
||||||
|
"scripts": {},
|
||||||
|
"dependencies": {},
|
||||||
|
"typesPublisherContentHash": "0310b41994a6f8d7530af6c53d47d8b227f32925e43718507fdb1178e05006b1",
|
||||||
|
"typeScriptVersion": "4.8",
|
||||||
|
"nonNpm": true
|
||||||
|
}
|
21
node_modules/@types/node/LICENSE
generated
vendored
Normal file
21
node_modules/@types/node/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
15
node_modules/@types/node/README.md
generated
vendored
Normal file
15
node_modules/@types/node/README.md
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
# Installation
|
||||||
|
> `npm install --save @types/node`
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
This package contains type definitions for node (https://nodejs.org/).
|
||||||
|
|
||||||
|
# Details
|
||||||
|
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node.
|
||||||
|
|
||||||
|
### Additional Details
|
||||||
|
* Last updated: Tue, 09 Apr 2024 21:07:24 GMT
|
||||||
|
* Dependencies: [undici-types](https://npmjs.com/package/undici-types)
|
||||||
|
|
||||||
|
# Credits
|
||||||
|
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky).
|
1043
node_modules/@types/node/assert.d.ts
generated
vendored
Normal file
1043
node_modules/@types/node/assert.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
8
node_modules/@types/node/assert/strict.d.ts
generated
vendored
Normal file
8
node_modules/@types/node/assert/strict.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
declare module "assert/strict" {
|
||||||
|
import { strict } from "node:assert";
|
||||||
|
export = strict;
|
||||||
|
}
|
||||||
|
declare module "node:assert/strict" {
|
||||||
|
import { strict } from "node:assert";
|
||||||
|
export = strict;
|
||||||
|
}
|
539
node_modules/@types/node/async_hooks.d.ts
generated
vendored
Normal file
539
node_modules/@types/node/async_hooks.d.ts
generated
vendored
Normal file
@ -0,0 +1,539 @@
|
|||||||
|
/**
|
||||||
|
* We strongly discourage the use of the `async_hooks` API.
|
||||||
|
* Other APIs that can cover most of its use cases include:
|
||||||
|
*
|
||||||
|
* * `AsyncLocalStorage` tracks async context
|
||||||
|
* * `process.getActiveResourcesInfo()` tracks active resources
|
||||||
|
*
|
||||||
|
* The `node:async_hooks` module provides an API to track asynchronous resources.
|
||||||
|
* It can be accessed using:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import async_hooks from 'node:async_hooks';
|
||||||
|
* ```
|
||||||
|
* @experimental
|
||||||
|
* @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/async_hooks.js)
|
||||||
|
*/
|
||||||
|
declare module "async_hooks" {
|
||||||
|
/**
|
||||||
|
* ```js
|
||||||
|
* import { executionAsyncId } from 'node:async_hooks';
|
||||||
|
* import fs from 'node:fs';
|
||||||
|
*
|
||||||
|
* console.log(executionAsyncId()); // 1 - bootstrap
|
||||||
|
* const path = '.';
|
||||||
|
* fs.open(path, 'r', (err, fd) => {
|
||||||
|
* console.log(executionAsyncId()); // 6 - open()
|
||||||
|
* });
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* The ID returned from `executionAsyncId()` is related to execution timing, not
|
||||||
|
* causality (which is covered by `triggerAsyncId()`):
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const server = net.createServer((conn) => {
|
||||||
|
* // Returns the ID of the server, not of the new connection, because the
|
||||||
|
* // callback runs in the execution scope of the server's MakeCallback().
|
||||||
|
* async_hooks.executionAsyncId();
|
||||||
|
*
|
||||||
|
* }).listen(port, () => {
|
||||||
|
* // Returns the ID of a TickObject (process.nextTick()) because all
|
||||||
|
* // callbacks passed to .listen() are wrapped in a nextTick().
|
||||||
|
* async_hooks.executionAsyncId();
|
||||||
|
* });
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Promise contexts may not get precise `executionAsyncIds` by default.
|
||||||
|
* See the section on `promise execution tracking`.
|
||||||
|
* @since v8.1.0
|
||||||
|
* @return The `asyncId` of the current execution context. Useful to track when something calls.
|
||||||
|
*/
|
||||||
|
function executionAsyncId(): number;
|
||||||
|
/**
|
||||||
|
* Resource objects returned by `executionAsyncResource()` are most often internal
|
||||||
|
* Node.js handle objects with undocumented APIs. Using any functions or properties
|
||||||
|
* on the object is likely to crash your application and should be avoided.
|
||||||
|
*
|
||||||
|
* Using `executionAsyncResource()` in the top-level execution context will
|
||||||
|
* return an empty object as there is no handle or request object to use,
|
||||||
|
* but having an object representing the top-level can be helpful.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import { open } from 'node:fs';
|
||||||
|
* import { executionAsyncId, executionAsyncResource } from 'node:async_hooks';
|
||||||
|
*
|
||||||
|
* console.log(executionAsyncId(), executionAsyncResource()); // 1 {}
|
||||||
|
* open(new URL(import.meta.url), 'r', (err, fd) => {
|
||||||
|
* console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap
|
||||||
|
* });
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* This can be used to implement continuation local storage without the
|
||||||
|
* use of a tracking `Map` to store the metadata:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import { createServer } from 'node:http';
|
||||||
|
* import {
|
||||||
|
* executionAsyncId,
|
||||||
|
* executionAsyncResource,
|
||||||
|
* createHook,
|
||||||
|
* } from 'async_hooks';
|
||||||
|
* const sym = Symbol('state'); // Private symbol to avoid pollution
|
||||||
|
*
|
||||||
|
* createHook({
|
||||||
|
* init(asyncId, type, triggerAsyncId, resource) {
|
||||||
|
* const cr = executionAsyncResource();
|
||||||
|
* if (cr) {
|
||||||
|
* resource[sym] = cr[sym];
|
||||||
|
* }
|
||||||
|
* },
|
||||||
|
* }).enable();
|
||||||
|
*
|
||||||
|
* const server = createServer((req, res) => {
|
||||||
|
* executionAsyncResource()[sym] = { state: req.url };
|
||||||
|
* setTimeout(function() {
|
||||||
|
* res.end(JSON.stringify(executionAsyncResource()[sym]));
|
||||||
|
* }, 100);
|
||||||
|
* }).listen(3000);
|
||||||
|
* ```
|
||||||
|
* @since v13.9.0, v12.17.0
|
||||||
|
* @return The resource representing the current execution. Useful to store data within the resource.
|
||||||
|
*/
|
||||||
|
function executionAsyncResource(): object;
|
||||||
|
/**
|
||||||
|
* ```js
|
||||||
|
* const server = net.createServer((conn) => {
|
||||||
|
* // The resource that caused (or triggered) this callback to be called
|
||||||
|
* // was that of the new connection. Thus the return value of triggerAsyncId()
|
||||||
|
* // is the asyncId of "conn".
|
||||||
|
* async_hooks.triggerAsyncId();
|
||||||
|
*
|
||||||
|
* }).listen(port, () => {
|
||||||
|
* // Even though all callbacks passed to .listen() are wrapped in a nextTick()
|
||||||
|
* // the callback itself exists because the call to the server's .listen()
|
||||||
|
* // was made. So the return value would be the ID of the server.
|
||||||
|
* async_hooks.triggerAsyncId();
|
||||||
|
* });
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Promise contexts may not get valid `triggerAsyncId`s by default. See
|
||||||
|
* the section on `promise execution tracking`.
|
||||||
|
* @return The ID of the resource responsible for calling the callback that is currently being executed.
|
||||||
|
*/
|
||||||
|
function triggerAsyncId(): number;
|
||||||
|
interface HookCallbacks {
|
||||||
|
/**
|
||||||
|
* Called when a class is constructed that has the possibility to emit an asynchronous event.
|
||||||
|
* @param asyncId a unique ID for the async resource
|
||||||
|
* @param type the type of the async resource
|
||||||
|
* @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created
|
||||||
|
* @param resource reference to the resource representing the async operation, needs to be released during destroy
|
||||||
|
*/
|
||||||
|
init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void;
|
||||||
|
/**
|
||||||
|
* When an asynchronous operation is initiated or completes a callback is called to notify the user.
|
||||||
|
* The before callback is called just before said callback is executed.
|
||||||
|
* @param asyncId the unique identifier assigned to the resource about to execute the callback.
|
||||||
|
*/
|
||||||
|
before?(asyncId: number): void;
|
||||||
|
/**
|
||||||
|
* Called immediately after the callback specified in before is completed.
|
||||||
|
* @param asyncId the unique identifier assigned to the resource which has executed the callback.
|
||||||
|
*/
|
||||||
|
after?(asyncId: number): void;
|
||||||
|
/**
|
||||||
|
* Called when a promise has resolve() called. This may not be in the same execution id
|
||||||
|
* as the promise itself.
|
||||||
|
* @param asyncId the unique id for the promise that was resolve()d.
|
||||||
|
*/
|
||||||
|
promiseResolve?(asyncId: number): void;
|
||||||
|
/**
|
||||||
|
* Called after the resource corresponding to asyncId is destroyed
|
||||||
|
* @param asyncId a unique ID for the async resource
|
||||||
|
*/
|
||||||
|
destroy?(asyncId: number): void;
|
||||||
|
}
|
||||||
|
interface AsyncHook {
|
||||||
|
/**
|
||||||
|
* Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop.
|
||||||
|
*/
|
||||||
|
enable(): this;
|
||||||
|
/**
|
||||||
|
* Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled.
|
||||||
|
*/
|
||||||
|
disable(): this;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Registers functions to be called for different lifetime events of each async
|
||||||
|
* operation.
|
||||||
|
*
|
||||||
|
* The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the
|
||||||
|
* respective asynchronous event during a resource's lifetime.
|
||||||
|
*
|
||||||
|
* All callbacks are optional. For example, if only resource cleanup needs to
|
||||||
|
* be tracked, then only the `destroy` callback needs to be passed. The
|
||||||
|
* specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import { createHook } from 'node:async_hooks';
|
||||||
|
*
|
||||||
|
* const asyncHook = createHook({
|
||||||
|
* init(asyncId, type, triggerAsyncId, resource) { },
|
||||||
|
* destroy(asyncId) { },
|
||||||
|
* });
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* The callbacks will be inherited via the prototype chain:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* class MyAsyncCallbacks {
|
||||||
|
* init(asyncId, type, triggerAsyncId, resource) { }
|
||||||
|
* destroy(asyncId) {}
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* class MyAddedCallbacks extends MyAsyncCallbacks {
|
||||||
|
* before(asyncId) { }
|
||||||
|
* after(asyncId) { }
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* const asyncHook = async_hooks.createHook(new MyAddedCallbacks());
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Because promises are asynchronous resources whose lifecycle is tracked
|
||||||
|
* via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises.
|
||||||
|
* @since v8.1.0
|
||||||
|
* @param callbacks The `Hook Callbacks` to register
|
||||||
|
* @return Instance used for disabling and enabling hooks
|
||||||
|
*/
|
||||||
|
function createHook(callbacks: HookCallbacks): AsyncHook;
|
||||||
|
interface AsyncResourceOptions {
|
||||||
|
/**
|
||||||
|
* The ID of the execution context that created this async event.
|
||||||
|
* @default executionAsyncId()
|
||||||
|
*/
|
||||||
|
triggerAsyncId?: number | undefined;
|
||||||
|
/**
|
||||||
|
* Disables automatic `emitDestroy` when the object is garbage collected.
|
||||||
|
* This usually does not need to be set (even if `emitDestroy` is called
|
||||||
|
* manually), unless the resource's `asyncId` is retrieved and the
|
||||||
|
* sensitive API's `emitDestroy` is called with it.
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
requireManualDestroy?: boolean | undefined;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The class `AsyncResource` is designed to be extended by the embedder's async
|
||||||
|
* resources. Using this, users can easily trigger the lifetime events of their
|
||||||
|
* own resources.
|
||||||
|
*
|
||||||
|
* The `init` hook will trigger when an `AsyncResource` is instantiated.
|
||||||
|
*
|
||||||
|
* The following is an overview of the `AsyncResource` API.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import { AsyncResource, executionAsyncId } from 'node:async_hooks';
|
||||||
|
*
|
||||||
|
* // AsyncResource() is meant to be extended. Instantiating a
|
||||||
|
* // new AsyncResource() also triggers init. If triggerAsyncId is omitted then
|
||||||
|
* // async_hook.executionAsyncId() is used.
|
||||||
|
* const asyncResource = new AsyncResource(
|
||||||
|
* type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false },
|
||||||
|
* );
|
||||||
|
*
|
||||||
|
* // Run a function in the execution context of the resource. This will
|
||||||
|
* // * establish the context of the resource
|
||||||
|
* // * trigger the AsyncHooks before callbacks
|
||||||
|
* // * call the provided function `fn` with the supplied arguments
|
||||||
|
* // * trigger the AsyncHooks after callbacks
|
||||||
|
* // * restore the original execution context
|
||||||
|
* asyncResource.runInAsyncScope(fn, thisArg, ...args);
|
||||||
|
*
|
||||||
|
* // Call AsyncHooks destroy callbacks.
|
||||||
|
* asyncResource.emitDestroy();
|
||||||
|
*
|
||||||
|
* // Return the unique ID assigned to the AsyncResource instance.
|
||||||
|
* asyncResource.asyncId();
|
||||||
|
*
|
||||||
|
* // Return the trigger ID for the AsyncResource instance.
|
||||||
|
* asyncResource.triggerAsyncId();
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
class AsyncResource {
|
||||||
|
/**
|
||||||
|
* AsyncResource() is meant to be extended. Instantiating a
|
||||||
|
* new AsyncResource() also triggers init. If triggerAsyncId is omitted then
|
||||||
|
* async_hook.executionAsyncId() is used.
|
||||||
|
* @param type The type of async event.
|
||||||
|
* @param triggerAsyncId The ID of the execution context that created
|
||||||
|
* this async event (default: `executionAsyncId()`), or an
|
||||||
|
* AsyncResourceOptions object (since v9.3.0)
|
||||||
|
*/
|
||||||
|
constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions);
|
||||||
|
/**
|
||||||
|
* Binds the given function to the current execution context.
|
||||||
|
* @since v14.8.0, v12.19.0
|
||||||
|
* @param fn The function to bind to the current execution context.
|
||||||
|
* @param type An optional name to associate with the underlying `AsyncResource`.
|
||||||
|
*/
|
||||||
|
static bind<Func extends (this: ThisArg, ...args: any[]) => any, ThisArg>(
|
||||||
|
fn: Func,
|
||||||
|
type?: string,
|
||||||
|
thisArg?: ThisArg,
|
||||||
|
): Func;
|
||||||
|
/**
|
||||||
|
* Binds the given function to execute to this `AsyncResource`'s scope.
|
||||||
|
* @since v14.8.0, v12.19.0
|
||||||
|
* @param fn The function to bind to the current `AsyncResource`.
|
||||||
|
*/
|
||||||
|
bind<Func extends (...args: any[]) => any>(fn: Func): Func;
|
||||||
|
/**
|
||||||
|
* Call the provided function with the provided arguments in the execution context
|
||||||
|
* of the async resource. This will establish the context, trigger the AsyncHooks
|
||||||
|
* before callbacks, call the function, trigger the AsyncHooks after callbacks, and
|
||||||
|
* then restore the original execution context.
|
||||||
|
* @since v9.6.0
|
||||||
|
* @param fn The function to call in the execution context of this async resource.
|
||||||
|
* @param thisArg The receiver to be used for the function call.
|
||||||
|
* @param args Optional arguments to pass to the function.
|
||||||
|
*/
|
||||||
|
runInAsyncScope<This, Result>(
|
||||||
|
fn: (this: This, ...args: any[]) => Result,
|
||||||
|
thisArg?: This,
|
||||||
|
...args: any[]
|
||||||
|
): Result;
|
||||||
|
/**
|
||||||
|
* Call all `destroy` hooks. This should only ever be called once. An error will
|
||||||
|
* be thrown if it is called more than once. This **must** be manually called. If
|
||||||
|
* the resource is left to be collected by the GC then the `destroy` hooks will
|
||||||
|
* never be called.
|
||||||
|
* @return A reference to `asyncResource`.
|
||||||
|
*/
|
||||||
|
emitDestroy(): this;
|
||||||
|
/**
|
||||||
|
* @return The unique `asyncId` assigned to the resource.
|
||||||
|
*/
|
||||||
|
asyncId(): number;
|
||||||
|
/**
|
||||||
|
* @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor.
|
||||||
|
*/
|
||||||
|
triggerAsyncId(): number;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This class creates stores that stay coherent through asynchronous operations.
|
||||||
|
*
|
||||||
|
* While you can create your own implementation on top of the `node:async_hooks`module, `AsyncLocalStorage` should be preferred as it is a performant and memory
|
||||||
|
* safe implementation that involves significant optimizations that are non-obvious
|
||||||
|
* to implement.
|
||||||
|
*
|
||||||
|
* The following example uses `AsyncLocalStorage` to build a simple logger
|
||||||
|
* that assigns IDs to incoming HTTP requests and includes them in messages
|
||||||
|
* logged within each request.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import http from 'node:http';
|
||||||
|
* import { AsyncLocalStorage } from 'node:async_hooks';
|
||||||
|
*
|
||||||
|
* const asyncLocalStorage = new AsyncLocalStorage();
|
||||||
|
*
|
||||||
|
* function logWithId(msg) {
|
||||||
|
* const id = asyncLocalStorage.getStore();
|
||||||
|
* console.log(`${id !== undefined ? id : '-'}:`, msg);
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* let idSeq = 0;
|
||||||
|
* http.createServer((req, res) => {
|
||||||
|
* asyncLocalStorage.run(idSeq++, () => {
|
||||||
|
* logWithId('start');
|
||||||
|
* // Imagine any chain of async operations here
|
||||||
|
* setImmediate(() => {
|
||||||
|
* logWithId('finish');
|
||||||
|
* res.end();
|
||||||
|
* });
|
||||||
|
* });
|
||||||
|
* }).listen(8080);
|
||||||
|
*
|
||||||
|
* http.get('http://localhost:8080');
|
||||||
|
* http.get('http://localhost:8080');
|
||||||
|
* // Prints:
|
||||||
|
* // 0: start
|
||||||
|
* // 1: start
|
||||||
|
* // 0: finish
|
||||||
|
* // 1: finish
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Each instance of `AsyncLocalStorage` maintains an independent storage context.
|
||||||
|
* Multiple instances can safely exist simultaneously without risk of interfering
|
||||||
|
* with each other's data.
|
||||||
|
* @since v13.10.0, v12.17.0
|
||||||
|
*/
|
||||||
|
class AsyncLocalStorage<T> {
|
||||||
|
/**
|
||||||
|
* Binds the given function to the current execution context.
|
||||||
|
* @since v19.8.0
|
||||||
|
* @experimental
|
||||||
|
* @param fn The function to bind to the current execution context.
|
||||||
|
* @return A new function that calls `fn` within the captured execution context.
|
||||||
|
*/
|
||||||
|
static bind<Func extends (...args: any[]) => any>(fn: Func): Func;
|
||||||
|
/**
|
||||||
|
* Captures the current execution context and returns a function that accepts a
|
||||||
|
* function as an argument. Whenever the returned function is called, it
|
||||||
|
* calls the function passed to it within the captured context.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const asyncLocalStorage = new AsyncLocalStorage();
|
||||||
|
* const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot());
|
||||||
|
* const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore()));
|
||||||
|
* console.log(result); // returns 123
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple
|
||||||
|
* async context tracking purposes, for example:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* class Foo {
|
||||||
|
* #runInAsyncScope = AsyncLocalStorage.snapshot();
|
||||||
|
*
|
||||||
|
* get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); }
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* const foo = asyncLocalStorage.run(123, () => new Foo());
|
||||||
|
* console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123
|
||||||
|
* ```
|
||||||
|
* @since v19.8.0
|
||||||
|
* @experimental
|
||||||
|
* @return A new function with the signature `(fn: (...args) : R, ...args) : R`.
|
||||||
|
*/
|
||||||
|
static snapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;
|
||||||
|
/**
|
||||||
|
* Disables the instance of `AsyncLocalStorage`. All subsequent calls
|
||||||
|
* to `asyncLocalStorage.getStore()` will return `undefined` until`asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again.
|
||||||
|
*
|
||||||
|
* When calling `asyncLocalStorage.disable()`, all current contexts linked to the
|
||||||
|
* instance will be exited.
|
||||||
|
*
|
||||||
|
* Calling `asyncLocalStorage.disable()` is required before the`asyncLocalStorage` can be garbage collected. This does not apply to stores
|
||||||
|
* provided by the `asyncLocalStorage`, as those objects are garbage collected
|
||||||
|
* along with the corresponding async resources.
|
||||||
|
*
|
||||||
|
* Use this method when the `asyncLocalStorage` is not in use anymore
|
||||||
|
* in the current process.
|
||||||
|
* @since v13.10.0, v12.17.0
|
||||||
|
* @experimental
|
||||||
|
*/
|
||||||
|
disable(): void;
|
||||||
|
/**
|
||||||
|
* Returns the current store.
|
||||||
|
* If called outside of an asynchronous context initialized by
|
||||||
|
* calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it
|
||||||
|
* returns `undefined`.
|
||||||
|
* @since v13.10.0, v12.17.0
|
||||||
|
*/
|
||||||
|
getStore(): T | undefined;
|
||||||
|
/**
|
||||||
|
* Runs a function synchronously within a context and returns its
|
||||||
|
* return value. The store is not accessible outside of the callback function.
|
||||||
|
* The store is accessible to any asynchronous operations created within the
|
||||||
|
* callback.
|
||||||
|
*
|
||||||
|
* The optional `args` are passed to the callback function.
|
||||||
|
*
|
||||||
|
* If the callback function throws an error, the error is thrown by `run()` too.
|
||||||
|
* The stacktrace is not impacted by this call and the context is exited.
|
||||||
|
*
|
||||||
|
* Example:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const store = { id: 2 };
|
||||||
|
* try {
|
||||||
|
* asyncLocalStorage.run(store, () => {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the store object
|
||||||
|
* setTimeout(() => {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the store object
|
||||||
|
* }, 200);
|
||||||
|
* throw new Error();
|
||||||
|
* });
|
||||||
|
* } catch (e) {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns undefined
|
||||||
|
* // The error will be caught here
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
* @since v13.10.0, v12.17.0
|
||||||
|
*/
|
||||||
|
run<R>(store: T, callback: () => R): R;
|
||||||
|
run<R, TArgs extends any[]>(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R;
|
||||||
|
/**
|
||||||
|
* Runs a function synchronously outside of a context and returns its
|
||||||
|
* return value. The store is not accessible within the callback function or
|
||||||
|
* the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`.
|
||||||
|
*
|
||||||
|
* The optional `args` are passed to the callback function.
|
||||||
|
*
|
||||||
|
* If the callback function throws an error, the error is thrown by `exit()` too.
|
||||||
|
* The stacktrace is not impacted by this call and the context is re-entered.
|
||||||
|
*
|
||||||
|
* Example:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* // Within a call to run
|
||||||
|
* try {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the store object or value
|
||||||
|
* asyncLocalStorage.exit(() => {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns undefined
|
||||||
|
* throw new Error();
|
||||||
|
* });
|
||||||
|
* } catch (e) {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the same object or value
|
||||||
|
* // The error will be caught here
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
* @since v13.10.0, v12.17.0
|
||||||
|
* @experimental
|
||||||
|
*/
|
||||||
|
exit<R, TArgs extends any[]>(callback: (...args: TArgs) => R, ...args: TArgs): R;
|
||||||
|
/**
|
||||||
|
* Transitions into the context for the remainder of the current
|
||||||
|
* synchronous execution and then persists the store through any following
|
||||||
|
* asynchronous calls.
|
||||||
|
*
|
||||||
|
* Example:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const store = { id: 1 };
|
||||||
|
* // Replaces previous store with the given store object
|
||||||
|
* asyncLocalStorage.enterWith(store);
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the store object
|
||||||
|
* someAsyncOperation(() => {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the same object
|
||||||
|
* });
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* This transition will continue for the _entire_ synchronous execution.
|
||||||
|
* This means that if, for example, the context is entered within an event
|
||||||
|
* handler subsequent event handlers will also run within that context unless
|
||||||
|
* specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons
|
||||||
|
* to use the latter method.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const store = { id: 1 };
|
||||||
|
*
|
||||||
|
* emitter.on('my-event', () => {
|
||||||
|
* asyncLocalStorage.enterWith(store);
|
||||||
|
* });
|
||||||
|
* emitter.on('my-event', () => {
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the same object
|
||||||
|
* });
|
||||||
|
*
|
||||||
|
* asyncLocalStorage.getStore(); // Returns undefined
|
||||||
|
* emitter.emit('my-event');
|
||||||
|
* asyncLocalStorage.getStore(); // Returns the same object
|
||||||
|
* ```
|
||||||
|
* @since v13.11.0, v12.17.0
|
||||||
|
* @experimental
|
||||||
|
*/
|
||||||
|
enterWith(store: T): void;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
declare module "node:async_hooks" {
|
||||||
|
export * from "async_hooks";
|
||||||
|
}
|
2363
node_modules/@types/node/buffer.d.ts
generated
vendored
Normal file
2363
node_modules/@types/node/buffer.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1542
node_modules/@types/node/child_process.d.ts
generated
vendored
Normal file
1542
node_modules/@types/node/child_process.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
578
node_modules/@types/node/cluster.d.ts
generated
vendored
Normal file
578
node_modules/@types/node/cluster.d.ts
generated
vendored
Normal file
@ -0,0 +1,578 @@
|
|||||||
|
/**
|
||||||
|
* Clusters of Node.js processes can be used to run multiple instances of Node.js
|
||||||
|
* that can distribute workloads among their application threads. When process isolation
|
||||||
|
* is not needed, use the [`worker_threads`](https://nodejs.org/docs/latest-v20.x/api/worker_threads.html)
|
||||||
|
* module instead, which allows running multiple application threads within a single Node.js instance.
|
||||||
|
*
|
||||||
|
* The cluster module allows easy creation of child processes that all share
|
||||||
|
* server ports.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import cluster from 'node:cluster';
|
||||||
|
* import http from 'node:http';
|
||||||
|
* import { availableParallelism } from 'node:os';
|
||||||
|
* import process from 'node:process';
|
||||||
|
*
|
||||||
|
* const numCPUs = availableParallelism();
|
||||||
|
*
|
||||||
|
* if (cluster.isPrimary) {
|
||||||
|
* console.log(`Primary ${process.pid} is running`);
|
||||||
|
*
|
||||||
|
* // Fork workers.
|
||||||
|
* for (let i = 0; i < numCPUs; i++) {
|
||||||
|
* cluster.fork();
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* cluster.on('exit', (worker, code, signal) => {
|
||||||
|
* console.log(`worker ${worker.process.pid} died`);
|
||||||
|
* });
|
||||||
|
* } else {
|
||||||
|
* // Workers can share any TCP connection
|
||||||
|
* // In this case it is an HTTP server
|
||||||
|
* http.createServer((req, res) => {
|
||||||
|
* res.writeHead(200);
|
||||||
|
* res.end('hello world\n');
|
||||||
|
* }).listen(8000);
|
||||||
|
*
|
||||||
|
* console.log(`Worker ${process.pid} started`);
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Running Node.js will now share port 8000 between the workers:
|
||||||
|
*
|
||||||
|
* ```console
|
||||||
|
* $ node server.js
|
||||||
|
* Primary 3596 is running
|
||||||
|
* Worker 4324 started
|
||||||
|
* Worker 4520 started
|
||||||
|
* Worker 6056 started
|
||||||
|
* Worker 5644 started
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* On Windows, it is not yet possible to set up a named pipe server in a worker.
|
||||||
|
* @see [source](https://github.com/nodejs/node/blob/v20.11.1/lib/cluster.js)
|
||||||
|
*/
|
||||||
|
declare module "cluster" {
|
||||||
|
import * as child from "node:child_process";
|
||||||
|
import EventEmitter = require("node:events");
|
||||||
|
import * as net from "node:net";
|
||||||
|
type SerializationType = "json" | "advanced";
|
||||||
|
export interface ClusterSettings {
|
||||||
|
/**
|
||||||
|
* List of string arguments passed to the Node.js executable.
|
||||||
|
* @default process.execArgv
|
||||||
|
*/
|
||||||
|
execArgv?: string[] | undefined;
|
||||||
|
/**
|
||||||
|
* File path to worker file.
|
||||||
|
* @default process.argv[1]
|
||||||
|
*/
|
||||||
|
exec?: string | undefined;
|
||||||
|
/**
|
||||||
|
* String arguments passed to worker.
|
||||||
|
* @default process.argv.slice(2)
|
||||||
|
*/
|
||||||
|
args?: string[] | undefined;
|
||||||
|
/**
|
||||||
|
* Whether or not to send output to parent's stdio.
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
silent?: boolean | undefined;
|
||||||
|
/**
|
||||||
|
* Configures the stdio of forked processes. Because the cluster module relies on IPC to function, this configuration must
|
||||||
|
* contain an `'ipc'` entry. When this option is provided, it overrides `silent`. See [`child_prcess.spawn()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processspawncommand-args-options)'s
|
||||||
|
* [`stdio`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#optionsstdio).
|
||||||
|
*/
|
||||||
|
stdio?: any[] | undefined;
|
||||||
|
/**
|
||||||
|
* Sets the user identity of the process. (See [`setuid(2)`](https://man7.org/linux/man-pages/man2/setuid.2.html).)
|
||||||
|
*/
|
||||||
|
uid?: number | undefined;
|
||||||
|
/**
|
||||||
|
* Sets the group identity of the process. (See [`setgid(2)`](https://man7.org/linux/man-pages/man2/setgid.2.html).)
|
||||||
|
*/
|
||||||
|
gid?: number | undefined;
|
||||||
|
/**
|
||||||
|
* Sets inspector port of worker. This can be a number, or a function that takes no arguments and returns a number.
|
||||||
|
* By default each worker gets its own port, incremented from the primary's `process.debugPort`.
|
||||||
|
*/
|
||||||
|
inspectPort?: number | (() => number) | undefined;
|
||||||
|
/**
|
||||||
|
* Specify the kind of serialization used for sending messages between processes. Possible values are `'json'` and `'advanced'`.
|
||||||
|
* See [Advanced serialization for `child_process`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#advanced-serialization) for more details.
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
serialization?: SerializationType | undefined;
|
||||||
|
/**
|
||||||
|
* Current working directory of the worker process.
|
||||||
|
* @default undefined (inherits from parent process)
|
||||||
|
*/
|
||||||
|
cwd?: string | undefined;
|
||||||
|
/**
|
||||||
|
* Hide the forked processes console window that would normally be created on Windows systems.
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
windowsHide?: boolean | undefined;
|
||||||
|
}
|
||||||
|
export interface Address {
|
||||||
|
address: string;
|
||||||
|
port: number;
|
||||||
|
/**
|
||||||
|
* The `addressType` is one of:
|
||||||
|
*
|
||||||
|
* * `4` (TCPv4)
|
||||||
|
* * `6` (TCPv6)
|
||||||
|
* * `-1` (Unix domain socket)
|
||||||
|
* * `'udp4'` or `'udp6'` (UDPv4 or UDPv6)
|
||||||
|
*/
|
||||||
|
addressType: 4 | 6 | -1 | "udp4" | "udp6";
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A `Worker` object contains all public information and method about a worker.
|
||||||
|
* In the primary it can be obtained using `cluster.workers`. In a worker
|
||||||
|
* it can be obtained using `cluster.worker`.
|
||||||
|
* @since v0.7.0
|
||||||
|
*/
|
||||||
|
export class Worker extends EventEmitter {
|
||||||
|
/**
|
||||||
|
* Each new worker is given its own unique id, this id is stored in the `id`.
|
||||||
|
*
|
||||||
|
* While a worker is alive, this is the key that indexes it in `cluster.workers`.
|
||||||
|
* @since v0.8.0
|
||||||
|
*/
|
||||||
|
id: number;
|
||||||
|
/**
|
||||||
|
* All workers are created using [`child_process.fork()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processforkmodulepath-args-options), the returned object
|
||||||
|
* from this function is stored as `.process`. In a worker, the global `process` is stored.
|
||||||
|
*
|
||||||
|
* See: [Child Process module](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processforkmodulepath-args-options).
|
||||||
|
*
|
||||||
|
* Workers will call `process.exit(0)` if the `'disconnect'` event occurs
|
||||||
|
* on `process` and `.exitedAfterDisconnect` is not `true`. This protects against
|
||||||
|
* accidental disconnection.
|
||||||
|
* @since v0.7.0
|
||||||
|
*/
|
||||||
|
process: child.ChildProcess;
|
||||||
|
/**
|
||||||
|
* Send a message to a worker or primary, optionally with a handle.
|
||||||
|
*
|
||||||
|
* In the primary, this sends a message to a specific worker. It is identical to [`ChildProcess.send()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#subprocesssendmessage-sendhandle-options-callback).
|
||||||
|
*
|
||||||
|
* In a worker, this sends a message to the primary. It is identical to `process.send()`.
|
||||||
|
*
|
||||||
|
* This example will echo back all messages from the primary:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* if (cluster.isPrimary) {
|
||||||
|
* const worker = cluster.fork();
|
||||||
|
* worker.send('hi there');
|
||||||
|
*
|
||||||
|
* } else if (cluster.isWorker) {
|
||||||
|
* process.on('message', (msg) => {
|
||||||
|
* process.send(msg);
|
||||||
|
* });
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
* @since v0.7.0
|
||||||
|
* @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles.
|
||||||
|
*/
|
||||||
|
send(message: child.Serializable, callback?: (error: Error | null) => void): boolean;
|
||||||
|
send(
|
||||||
|
message: child.Serializable,
|
||||||
|
sendHandle: child.SendHandle,
|
||||||
|
callback?: (error: Error | null) => void,
|
||||||
|
): boolean;
|
||||||
|
send(
|
||||||
|
message: child.Serializable,
|
||||||
|
sendHandle: child.SendHandle,
|
||||||
|
options?: child.MessageOptions,
|
||||||
|
callback?: (error: Error | null) => void,
|
||||||
|
): boolean;
|
||||||
|
/**
|
||||||
|
* This function will kill the worker. In the primary worker, it does this by
|
||||||
|
* disconnecting the `worker.process`, and once disconnected, killing with `signal`. In the worker, it does it by killing the process with `signal`.
|
||||||
|
*
|
||||||
|
* The `kill()` function kills the worker process without waiting for a graceful
|
||||||
|
* disconnect, it has the same behavior as `worker.process.kill()`.
|
||||||
|
*
|
||||||
|
* This method is aliased as `worker.destroy()` for backwards compatibility.
|
||||||
|
*
|
||||||
|
* In a worker, `process.kill()` exists, but it is not this function;
|
||||||
|
* it is [`kill()`](https://nodejs.org/docs/latest-v20.x/api/process.html#processkillpid-signal).
|
||||||
|
* @since v0.9.12
|
||||||
|
* @param [signal='SIGTERM'] Name of the kill signal to send to the worker process.
|
||||||
|
*/
|
||||||
|
kill(signal?: string): void;
|
||||||
|
destroy(signal?: string): void;
|
||||||
|
/**
|
||||||
|
* In a worker, this function will close all servers, wait for the `'close'` event
|
||||||
|
* on those servers, and then disconnect the IPC channel.
|
||||||
|
*
|
||||||
|
* In the primary, an internal message is sent to the worker causing it to call `.disconnect()` on itself.
|
||||||
|
*
|
||||||
|
* Causes `.exitedAfterDisconnect` to be set.
|
||||||
|
*
|
||||||
|
* After a server is closed, it will no longer accept new connections,
|
||||||
|
* but connections may be accepted by any other listening worker. Existing
|
||||||
|
* connections will be allowed to close as usual. When no more connections exist,
|
||||||
|
* see `server.close()`, the IPC channel to the worker will close allowing it
|
||||||
|
* to die gracefully.
|
||||||
|
*
|
||||||
|
* The above applies _only_ to server connections, client connections are not
|
||||||
|
* automatically closed by workers, and disconnect does not wait for them to close
|
||||||
|
* before exiting.
|
||||||
|
*
|
||||||
|
* In a worker, `process.disconnect` exists, but it is not this function;
|
||||||
|
* it is `disconnect()`.
|
||||||
|
*
|
||||||
|
* Because long living server connections may block workers from disconnecting, it
|
||||||
|
* may be useful to send a message, so application specific actions may be taken to
|
||||||
|
* close them. It also may be useful to implement a timeout, killing a worker if
|
||||||
|
* the `'disconnect'` event has not been emitted after some time.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* if (cluster.isPrimary) {
|
||||||
|
* const worker = cluster.fork();
|
||||||
|
* let timeout;
|
||||||
|
*
|
||||||
|
* worker.on('listening', (address) => {
|
||||||
|
* worker.send('shutdown');
|
||||||
|
* worker.disconnect();
|
||||||
|
* timeout = setTimeout(() => {
|
||||||
|
* worker.kill();
|
||||||
|
* }, 2000);
|
||||||
|
* });
|
||||||
|
*
|
||||||
|
* worker.on('disconnect', () => {
|
||||||
|
* clearTimeout(timeout);
|
||||||
|
* });
|
||||||
|
*
|
||||||
|
* } else if (cluster.isWorker) {
|
||||||
|
* const net = require('node:net');
|
||||||
|
* const server = net.createServer((socket) => {
|
||||||
|
* // Connections never end
|
||||||
|
* });
|
||||||
|
*
|
||||||
|
* server.listen(8000);
|
||||||
|
*
|
||||||
|
* process.on('message', (msg) => {
|
||||||
|
* if (msg === 'shutdown') {
|
||||||
|
* // Initiate graceful close of any connections to server
|
||||||
|
* }
|
||||||
|
* });
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
* @since v0.7.7
|
||||||
|
* @return A reference to `worker`.
|
||||||
|
*/
|
||||||
|
disconnect(): void;
|
||||||
|
/**
|
||||||
|
* This function returns `true` if the worker is connected to its primary via its
|
||||||
|
* IPC channel, `false` otherwise. A worker is connected to its primary after it
|
||||||
|
* has been created. It is disconnected after the `'disconnect'` event is emitted.
|
||||||
|
* @since v0.11.14
|
||||||
|
*/
|
||||||
|
isConnected(): boolean;
|
||||||
|
/**
|
||||||
|
* This function returns `true` if the worker's process has terminated (either
|
||||||
|
* because of exiting or being signaled). Otherwise, it returns `false`.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import cluster from 'node:cluster';
|
||||||
|
* import http from 'node:http';
|
||||||
|
* import { availableParallelism } from 'node:os';
|
||||||
|
* import process from 'node:process';
|
||||||
|
*
|
||||||
|
* const numCPUs = availableParallelism();
|
||||||
|
*
|
||||||
|
* if (cluster.isPrimary) {
|
||||||
|
* console.log(`Primary ${process.pid} is running`);
|
||||||
|
*
|
||||||
|
* // Fork workers.
|
||||||
|
* for (let i = 0; i < numCPUs; i++) {
|
||||||
|
* cluster.fork();
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* cluster.on('fork', (worker) => {
|
||||||
|
* console.log('worker is dead:', worker.isDead());
|
||||||
|
* });
|
||||||
|
*
|
||||||
|
* cluster.on('exit', (worker, code, signal) => {
|
||||||
|
* console.log('worker is dead:', worker.isDead());
|
||||||
|
* });
|
||||||
|
* } else {
|
||||||
|
* // Workers can share any TCP connection. In this case, it is an HTTP server.
|
||||||
|
* http.createServer((req, res) => {
|
||||||
|
* res.writeHead(200);
|
||||||
|
* res.end(`Current process\n ${process.pid}`);
|
||||||
|
* process.kill(process.pid);
|
||||||
|
* }).listen(8000);
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
* @since v0.11.14
|
||||||
|
*/
|
||||||
|
isDead(): boolean;
|
||||||
|
/**
|
||||||
|
* This property is `true` if the worker exited due to `.disconnect()`.
|
||||||
|
* If the worker exited any other way, it is `false`. If the
|
||||||
|
* worker has not exited, it is `undefined`.
|
||||||
|
*
|
||||||
|
* The boolean `worker.exitedAfterDisconnect` allows distinguishing between
|
||||||
|
* voluntary and accidental exit, the primary may choose not to respawn a worker
|
||||||
|
* based on this value.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* cluster.on('exit', (worker, code, signal) => {
|
||||||
|
* if (worker.exitedAfterDisconnect === true) {
|
||||||
|
* console.log('Oh, it was just voluntary – no need to worry');
|
||||||
|
* }
|
||||||
|
* });
|
||||||
|
*
|
||||||
|
* // kill worker
|
||||||
|
* worker.kill();
|
||||||
|
* ```
|
||||||
|
* @since v6.0.0
|
||||||
|
*/
|
||||||
|
exitedAfterDisconnect: boolean;
|
||||||
|
/**
|
||||||
|
* events.EventEmitter
|
||||||
|
* 1. disconnect
|
||||||
|
* 2. error
|
||||||
|
* 3. exit
|
||||||
|
* 4. listening
|
||||||
|
* 5. message
|
||||||
|
* 6. online
|
||||||
|
*/
|
||||||
|
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
addListener(event: "disconnect", listener: () => void): this;
|
||||||
|
addListener(event: "error", listener: (error: Error) => void): this;
|
||||||
|
addListener(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||||
|
addListener(event: "listening", listener: (address: Address) => void): this;
|
||||||
|
addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
addListener(event: "online", listener: () => void): this;
|
||||||
|
emit(event: string | symbol, ...args: any[]): boolean;
|
||||||
|
emit(event: "disconnect"): boolean;
|
||||||
|
emit(event: "error", error: Error): boolean;
|
||||||
|
emit(event: "exit", code: number, signal: string): boolean;
|
||||||
|
emit(event: "listening", address: Address): boolean;
|
||||||
|
emit(event: "message", message: any, handle: net.Socket | net.Server): boolean;
|
||||||
|
emit(event: "online"): boolean;
|
||||||
|
on(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
on(event: "disconnect", listener: () => void): this;
|
||||||
|
on(event: "error", listener: (error: Error) => void): this;
|
||||||
|
on(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||||
|
on(event: "listening", listener: (address: Address) => void): this;
|
||||||
|
on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
on(event: "online", listener: () => void): this;
|
||||||
|
once(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
once(event: "disconnect", listener: () => void): this;
|
||||||
|
once(event: "error", listener: (error: Error) => void): this;
|
||||||
|
once(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||||
|
once(event: "listening", listener: (address: Address) => void): this;
|
||||||
|
once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
once(event: "online", listener: () => void): this;
|
||||||
|
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependListener(event: "disconnect", listener: () => void): this;
|
||||||
|
prependListener(event: "error", listener: (error: Error) => void): this;
|
||||||
|
prependListener(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||||
|
prependListener(event: "listening", listener: (address: Address) => void): this;
|
||||||
|
prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
prependListener(event: "online", listener: () => void): this;
|
||||||
|
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependOnceListener(event: "disconnect", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "error", listener: (error: Error) => void): this;
|
||||||
|
prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||||
|
prependOnceListener(event: "listening", listener: (address: Address) => void): this;
|
||||||
|
prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
prependOnceListener(event: "online", listener: () => void): this;
|
||||||
|
}
|
||||||
|
export interface Cluster extends EventEmitter {
|
||||||
|
disconnect(callback?: () => void): void;
|
||||||
|
/**
|
||||||
|
* Spawn a new worker process.
|
||||||
|
*
|
||||||
|
* This can only be called from the primary process.
|
||||||
|
* @param env Key/value pairs to add to worker process environment.
|
||||||
|
* @since v0.6.0
|
||||||
|
*/
|
||||||
|
fork(env?: any): Worker;
|
||||||
|
/** @deprecated since v16.0.0 - use isPrimary. */
|
||||||
|
readonly isMaster: boolean;
|
||||||
|
/**
|
||||||
|
* True if the process is a primary. This is determined by the `process.env.NODE_UNIQUE_ID`. If `process.env.NODE_UNIQUE_ID`
|
||||||
|
* is undefined, then `isPrimary` is `true`.
|
||||||
|
* @since v16.0.0
|
||||||
|
*/
|
||||||
|
readonly isPrimary: boolean;
|
||||||
|
/**
|
||||||
|
* True if the process is not a primary (it is the negation of `cluster.isPrimary`).
|
||||||
|
* @since v0.6.0
|
||||||
|
*/
|
||||||
|
readonly isWorker: boolean;
|
||||||
|
/**
|
||||||
|
* The scheduling policy, either `cluster.SCHED_RR` for round-robin or `cluster.SCHED_NONE` to leave it to the operating system. This is a
|
||||||
|
* global setting and effectively frozen once either the first worker is spawned, or [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings)
|
||||||
|
* is called, whichever comes first.
|
||||||
|
*
|
||||||
|
* `SCHED_RR` is the default on all operating systems except Windows. Windows will change to `SCHED_RR` once libuv is able to effectively distribute
|
||||||
|
* IOCP handles without incurring a large performance hit.
|
||||||
|
*
|
||||||
|
* `cluster.schedulingPolicy` can also be set through the `NODE_CLUSTER_SCHED_POLICY` environment variable. Valid values are `'rr'` and `'none'`.
|
||||||
|
* @since v0.11.2
|
||||||
|
*/
|
||||||
|
schedulingPolicy: number;
|
||||||
|
/**
|
||||||
|
* After calling [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings)
|
||||||
|
* (or [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv)) this settings object will contain
|
||||||
|
* the settings, including the default values.
|
||||||
|
*
|
||||||
|
* This object is not intended to be changed or set manually.
|
||||||
|
* @since v0.7.1
|
||||||
|
*/
|
||||||
|
readonly settings: ClusterSettings;
|
||||||
|
/** @deprecated since v16.0.0 - use [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings) instead. */
|
||||||
|
setupMaster(settings?: ClusterSettings): void;
|
||||||
|
/**
|
||||||
|
* `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in `cluster.settings`.
|
||||||
|
*
|
||||||
|
* Any settings changes only affect future calls to [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv)
|
||||||
|
* and have no effect on workers that are already running.
|
||||||
|
*
|
||||||
|
* The only attribute of a worker that cannot be set via `.setupPrimary()` is the `env` passed to
|
||||||
|
* [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv).
|
||||||
|
*
|
||||||
|
* The defaults above apply to the first call only; the defaults for later calls are the current values at the time of
|
||||||
|
* `cluster.setupPrimary()` is called.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import cluster from 'node:cluster';
|
||||||
|
*
|
||||||
|
* cluster.setupPrimary({
|
||||||
|
* exec: 'worker.js',
|
||||||
|
* args: ['--use', 'https'],
|
||||||
|
* silent: true,
|
||||||
|
* });
|
||||||
|
* cluster.fork(); // https worker
|
||||||
|
* cluster.setupPrimary({
|
||||||
|
* exec: 'worker.js',
|
||||||
|
* args: ['--use', 'http'],
|
||||||
|
* });
|
||||||
|
* cluster.fork(); // http worker
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* This can only be called from the primary process.
|
||||||
|
* @since v16.0.0
|
||||||
|
*/
|
||||||
|
setupPrimary(settings?: ClusterSettings): void;
|
||||||
|
/**
|
||||||
|
* A reference to the current worker object. Not available in the primary process.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import cluster from 'node:cluster';
|
||||||
|
*
|
||||||
|
* if (cluster.isPrimary) {
|
||||||
|
* console.log('I am primary');
|
||||||
|
* cluster.fork();
|
||||||
|
* cluster.fork();
|
||||||
|
* } else if (cluster.isWorker) {
|
||||||
|
* console.log(`I am worker #${cluster.worker.id}`);
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
* @since v0.7.0
|
||||||
|
*/
|
||||||
|
readonly worker?: Worker | undefined;
|
||||||
|
/**
|
||||||
|
* A hash that stores the active worker objects, keyed by `id` field. This makes it easy to loop through all the workers. It is only available in the primary process.
|
||||||
|
*
|
||||||
|
* A worker is removed from `cluster.workers` after the worker has disconnected _and_ exited. The order between these two events cannot be determined in advance. However, it
|
||||||
|
* is guaranteed that the removal from the `cluster.workers` list happens before the last `'disconnect'` or `'exit'` event is emitted.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* import cluster from 'node:cluster';
|
||||||
|
*
|
||||||
|
* for (const worker of Object.values(cluster.workers)) {
|
||||||
|
* worker.send('big announcement to all workers');
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
* @since v0.7.0
|
||||||
|
*/
|
||||||
|
readonly workers?: NodeJS.Dict<Worker> | undefined;
|
||||||
|
readonly SCHED_NONE: number;
|
||||||
|
readonly SCHED_RR: number;
|
||||||
|
/**
|
||||||
|
* events.EventEmitter
|
||||||
|
* 1. disconnect
|
||||||
|
* 2. exit
|
||||||
|
* 3. fork
|
||||||
|
* 4. listening
|
||||||
|
* 5. message
|
||||||
|
* 6. online
|
||||||
|
* 7. setup
|
||||||
|
*/
|
||||||
|
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
addListener(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||||
|
addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||||
|
addListener(event: "fork", listener: (worker: Worker) => void): this;
|
||||||
|
addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||||
|
addListener(
|
||||||
|
event: "message",
|
||||||
|
listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
|
||||||
|
): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
addListener(event: "online", listener: (worker: Worker) => void): this;
|
||||||
|
addListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||||
|
emit(event: string | symbol, ...args: any[]): boolean;
|
||||||
|
emit(event: "disconnect", worker: Worker): boolean;
|
||||||
|
emit(event: "exit", worker: Worker, code: number, signal: string): boolean;
|
||||||
|
emit(event: "fork", worker: Worker): boolean;
|
||||||
|
emit(event: "listening", worker: Worker, address: Address): boolean;
|
||||||
|
emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
|
||||||
|
emit(event: "online", worker: Worker): boolean;
|
||||||
|
emit(event: "setup", settings: ClusterSettings): boolean;
|
||||||
|
on(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
on(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||||
|
on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||||
|
on(event: "fork", listener: (worker: Worker) => void): this;
|
||||||
|
on(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||||
|
on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
on(event: "online", listener: (worker: Worker) => void): this;
|
||||||
|
on(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||||
|
once(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
once(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||||
|
once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||||
|
once(event: "fork", listener: (worker: Worker) => void): this;
|
||||||
|
once(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||||
|
once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
once(event: "online", listener: (worker: Worker) => void): this;
|
||||||
|
once(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||||
|
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependListener(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||||
|
prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||||
|
prependListener(event: "fork", listener: (worker: Worker) => void): this;
|
||||||
|
prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||||
|
// the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
prependListener(
|
||||||
|
event: "message",
|
||||||
|
listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void,
|
||||||
|
): this;
|
||||||
|
prependListener(event: "online", listener: (worker: Worker) => void): this;
|
||||||
|
prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||||
|
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||||
|
prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||||
|
prependOnceListener(event: "fork", listener: (worker: Worker) => void): this;
|
||||||
|
prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||||
|
// the handle is a net.Socket or net.Server object, or undefined.
|
||||||
|
prependOnceListener(
|
||||||
|
event: "message",
|
||||||
|
listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
|
||||||
|
): this;
|
||||||
|
prependOnceListener(event: "online", listener: (worker: Worker) => void): this;
|
||||||
|
prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||||
|
}
|
||||||
|
const cluster: Cluster;
|
||||||
|
export default cluster;
|
||||||
|
}
|
||||||
|
declare module "node:cluster" {
|
||||||
|
export * from "cluster";
|
||||||
|
export { default as default } from "cluster";
|
||||||
|
}
|
452
node_modules/@types/node/console.d.ts
generated
vendored
Normal file
452
node_modules/@types/node/console.d.ts
generated
vendored
Normal file
@ -0,0 +1,452 @@
|
|||||||
|
/**
|
||||||
|
* The `node:console` module provides a simple debugging console that is similar to
|
||||||
|
* the JavaScript console mechanism provided by web browsers.
|
||||||
|
*
|
||||||
|
* The module exports two specific components:
|
||||||
|
*
|
||||||
|
* * A `Console` class with methods such as `console.log()`, `console.error()`, and `console.warn()` that can be used to write to any Node.js stream.
|
||||||
|
* * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstdout) and
|
||||||
|
* [`process.stderr`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstderr). The global `console` can be used without calling `require('node:console')`.
|
||||||
|
*
|
||||||
|
* _**Warning**_: The global console object's methods are neither consistently
|
||||||
|
* synchronous like the browser APIs they resemble, nor are they consistently
|
||||||
|
* asynchronous like all other Node.js streams. See the [`note on process I/O`](https://nodejs.org/docs/latest-v20.x/api/process.html#a-note-on-process-io) for
|
||||||
|
* more information.
|
||||||
|
*
|
||||||
|
* Example using the global `console`:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* console.log('hello world');
|
||||||
|
* // Prints: hello world, to stdout
|
||||||
|
* console.log('hello %s', 'world');
|
||||||
|
* // Prints: hello world, to stdout
|
||||||
|
* console.error(new Error('Whoops, something bad happened'));
|
||||||
|
* // Prints error message and stack trace to stderr:
|
||||||
|
* // Error: Whoops, something bad happened
|
||||||
|
* // at [eval]:5:15
|
||||||
|
* // at Script.runInThisContext (node:vm:132:18)
|
||||||
|
* // at Object.runInThisContext (node:vm:309:38)
|
||||||
|
* // at node:internal/process/execution:77:19
|
||||||
|
* // at [eval]-wrapper:6:22
|
||||||
|
* // at evalScript (node:internal/process/execution:76:60)
|
||||||
|
* // at node:internal/main/eval_string:23:3
|
||||||
|
*
|
||||||
|
* const name = 'Will Robinson';
|
||||||
|
* console.warn(`Danger ${name}! Danger!`);
|
||||||
|
* // Prints: Danger Will Robinson! Danger!, to stderr
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Example using the `Console` class:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const out = getStreamSomehow();
|
||||||
|
* const err = getStreamSomehow();
|
||||||
|
* const myConsole = new console.Console(out, err);
|
||||||
|
*
|
||||||
|
* myConsole.log('hello world');
|
||||||
|
* // Prints: hello world, to out
|
||||||
|
* myConsole.log('hello %s', 'world');
|
||||||
|
* // Prints: hello world, to out
|
||||||
|
* myConsole.error(new Error('Whoops, something bad happened'));
|
||||||
|
* // Prints: [Error: Whoops, something bad happened], to err
|
||||||
|
*
|
||||||
|
* const name = 'Will Robinson';
|
||||||
|
* myConsole.warn(`Danger ${name}! Danger!`);
|
||||||
|
* // Prints: Danger Will Robinson! Danger!, to err
|
||||||
|
* ```
|
||||||
|
* @see [source](https://github.com/nodejs/node/blob/v20.12.1/lib/console.js)
|
||||||
|
*/
|
||||||
|
declare module "console" {
|
||||||
|
import console = require("node:console");
|
||||||
|
export = console;
|
||||||
|
}
|
||||||
|
declare module "node:console" {
|
||||||
|
import { InspectOptions } from "node:util";
|
||||||
|
global {
|
||||||
|
// This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build
|
||||||
|
interface Console {
|
||||||
|
Console: console.ConsoleConstructor;
|
||||||
|
/**
|
||||||
|
* `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only
|
||||||
|
* writes a message and does not otherwise affect execution. The output always
|
||||||
|
* starts with `"Assertion failed"`. If provided, `message` is formatted using
|
||||||
|
* [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args).
|
||||||
|
*
|
||||||
|
* If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* console.assert(true, 'does nothing');
|
||||||
|
*
|
||||||
|
* console.assert(false, 'Whoops %s work', 'didn\'t');
|
||||||
|
* // Assertion failed: Whoops didn't work
|
||||||
|
*
|
||||||
|
* console.assert();
|
||||||
|
* // Assertion failed
|
||||||
|
* ```
|
||||||
|
* @since v0.1.101
|
||||||
|
* @param value The value tested for being truthy.
|
||||||
|
* @param message All arguments besides `value` are used as error message.
|
||||||
|
*/
|
||||||
|
assert(value: any, message?: string, ...optionalParams: any[]): void;
|
||||||
|
/**
|
||||||
|
* When `stdout` is a TTY, calling `console.clear()` will attempt to clear the
|
||||||
|
* TTY. When `stdout` is not a TTY, this method does nothing.
|
||||||
|
*
|
||||||
|
* The specific operation of `console.clear()` can vary across operating systems
|
||||||
|
* and terminal types. For most Linux operating systems, `console.clear()` operates similarly to the `clear` shell command. On Windows, `console.clear()` will clear only the output in the
|
||||||
|
* current terminal viewport for the Node.js
|
||||||
|
* binary.
|
||||||
|
* @since v8.3.0
|
||||||
|
*/
|
||||||
|
clear(): void;
|
||||||
|
/**
|
||||||
|
* Maintains an internal counter specific to `label` and outputs to `stdout` the
|
||||||
|
* number of times `console.count()` has been called with the given `label`.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* > console.count()
|
||||||
|
* default: 1
|
||||||
|
* undefined
|
||||||
|
* > console.count('default')
|
||||||
|
* default: 2
|
||||||
|
* undefined
|
||||||
|
* > console.count('abc')
|
||||||
|
* abc: 1
|
||||||
|
* undefined
|
||||||
|
* > console.count('xyz')
|
||||||
|
* xyz: 1
|
||||||
|
* undefined
|
||||||
|
* > console.count('abc')
|
||||||
|
* abc: 2
|
||||||
|
* undefined
|
||||||
|
* > console.count()
|
||||||
|
* default: 3
|
||||||
|
* undefined
|
||||||
|
* >
|
||||||
|
* ```
|
||||||
|
* @since v8.3.0
|
||||||
|
* @param [label='default'] The display label for the counter.
|
||||||
|
*/
|
||||||
|
count(label?: string): void;
|
||||||
|
/**
|
||||||
|
* Resets the internal counter specific to `label`.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* > console.count('abc');
|
||||||
|
* abc: 1
|
||||||
|
* undefined
|
||||||
|
* > console.countReset('abc');
|
||||||
|
* undefined
|
||||||
|
* > console.count('abc');
|
||||||
|
* abc: 1
|
||||||
|
* undefined
|
||||||
|
* >
|
||||||
|
* ```
|
||||||
|
* @since v8.3.0
|
||||||
|
* @param [label='default'] The display label for the counter.
|
||||||
|
*/
|
||||||
|
countReset(label?: string): void;
|
||||||
|
/**
|
||||||
|
* The `console.debug()` function is an alias for {@link log}.
|
||||||
|
* @since v8.0.0
|
||||||
|
*/
|
||||||
|
debug(message?: any, ...optionalParams: any[]): void;
|
||||||
|
/**
|
||||||
|
* Uses [`util.inspect()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilinspectobject-options) on `obj` and prints the resulting string to `stdout`.
|
||||||
|
* This function bypasses any custom `inspect()` function defined on `obj`.
|
||||||
|
* @since v0.1.101
|
||||||
|
*/
|
||||||
|
dir(obj: any, options?: InspectOptions): void;
|
||||||
|
/**
|
||||||
|
* This method calls `console.log()` passing it the arguments received.
|
||||||
|
* This method does not produce any XML formatting.
|
||||||
|
* @since v8.0.0
|
||||||
|
*/
|
||||||
|
dirxml(...data: any[]): void;
|
||||||
|
/**
|
||||||
|
* Prints to `stderr` with newline. Multiple arguments can be passed, with the
|
||||||
|
* first used as the primary message and all additional used as substitution
|
||||||
|
* values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html)
|
||||||
|
* (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)).
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const code = 5;
|
||||||
|
* console.error('error #%d', code);
|
||||||
|
* // Prints: error #5, to stderr
|
||||||
|
* console.error('error', code);
|
||||||
|
* // Prints: error 5, to stderr
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* If formatting elements (e.g. `%d`) are not found in the first string then
|
||||||
|
* [`util.inspect()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilinspectobject-options) is called on each argument and the
|
||||||
|
* resulting string values are concatenated. See [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)
|
||||||
|
* for more information.
|
||||||
|
* @since v0.1.100
|
||||||
|
*/
|
||||||
|
error(message?: any, ...optionalParams: any[]): void;
|
||||||
|
/**
|
||||||
|
* Increases indentation of subsequent lines by spaces for `groupIndentation` length.
|
||||||
|
*
|
||||||
|
* If one or more `label`s are provided, those are printed first without the
|
||||||
|
* additional indentation.
|
||||||
|
* @since v8.5.0
|
||||||
|
*/
|
||||||
|
group(...label: any[]): void;
|
||||||
|
/**
|
||||||
|
* An alias for {@link group}.
|
||||||
|
* @since v8.5.0
|
||||||
|
*/
|
||||||
|
groupCollapsed(...label: any[]): void;
|
||||||
|
/**
|
||||||
|
* Decreases indentation of subsequent lines by spaces for `groupIndentation` length.
|
||||||
|
* @since v8.5.0
|
||||||
|
*/
|
||||||
|
groupEnd(): void;
|
||||||
|
/**
|
||||||
|
* The `console.info()` function is an alias for {@link log}.
|
||||||
|
* @since v0.1.100
|
||||||
|
*/
|
||||||
|
info(message?: any, ...optionalParams: any[]): void;
|
||||||
|
/**
|
||||||
|
* Prints to `stdout` with newline. Multiple arguments can be passed, with the
|
||||||
|
* first used as the primary message and all additional used as substitution
|
||||||
|
* values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html)
|
||||||
|
* (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)).
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const count = 5;
|
||||||
|
* console.log('count: %d', count);
|
||||||
|
* // Prints: count: 5, to stdout
|
||||||
|
* console.log('count:', count);
|
||||||
|
* // Prints: count: 5, to stdout
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* See [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args) for more information.
|
||||||
|
* @since v0.1.100
|
||||||
|
*/
|
||||||
|
log(message?: any, ...optionalParams: any[]): void;
|
||||||
|
/**
|
||||||
|
* Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just
|
||||||
|
* logging the argument if it can't be parsed as tabular.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* // These can't be parsed as tabular data
|
||||||
|
* console.table(Symbol());
|
||||||
|
* // Symbol()
|
||||||
|
*
|
||||||
|
* console.table(undefined);
|
||||||
|
* // undefined
|
||||||
|
*
|
||||||
|
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
|
||||||
|
* // ┌─────────┬─────┬─────┐
|
||||||
|
* // │ (index) │ a │ b │
|
||||||
|
* // ├─────────┼─────┼─────┤
|
||||||
|
* // │ 0 │ 1 │ 'Y' │
|
||||||
|
* // │ 1 │ 'Z' │ 2 │
|
||||||
|
* // └─────────┴─────┴─────┘
|
||||||
|
*
|
||||||
|
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
|
||||||
|
* // ┌─────────┬─────┐
|
||||||
|
* // │ (index) │ a │
|
||||||
|
* // ├─────────┼─────┤
|
||||||
|
* // │ 0 │ 1 │
|
||||||
|
* // │ 1 │ 'Z' │
|
||||||
|
* // └─────────┴─────┘
|
||||||
|
* ```
|
||||||
|
* @since v10.0.0
|
||||||
|
* @param properties Alternate properties for constructing the table.
|
||||||
|
*/
|
||||||
|
table(tabularData: any, properties?: readonly string[]): void;
|
||||||
|
/**
|
||||||
|
* Starts a timer that can be used to compute the duration of an operation. Timers
|
||||||
|
* are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in
|
||||||
|
* suitable time units to `stdout`. For example, if the elapsed
|
||||||
|
* time is 3869ms, `console.timeEnd()` displays "3.869s".
|
||||||
|
* @since v0.1.104
|
||||||
|
* @param [label='default']
|
||||||
|
*/
|
||||||
|
time(label?: string): void;
|
||||||
|
/**
|
||||||
|
* Stops a timer that was previously started by calling {@link time} and
|
||||||
|
* prints the result to `stdout`:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* console.time('bunch-of-stuff');
|
||||||
|
* // Do a bunch of stuff.
|
||||||
|
* console.timeEnd('bunch-of-stuff');
|
||||||
|
* // Prints: bunch-of-stuff: 225.438ms
|
||||||
|
* ```
|
||||||
|
* @since v0.1.104
|
||||||
|
* @param [label='default']
|
||||||
|
*/
|
||||||
|
timeEnd(label?: string): void;
|
||||||
|
/**
|
||||||
|
* For a timer that was previously started by calling {@link time}, prints
|
||||||
|
* the elapsed time and other `data` arguments to `stdout`:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* console.time('process');
|
||||||
|
* const value = expensiveProcess1(); // Returns 42
|
||||||
|
* console.timeLog('process', value);
|
||||||
|
* // Prints "process: 365.227ms 42".
|
||||||
|
* doExpensiveProcess2(value);
|
||||||
|
* console.timeEnd('process');
|
||||||
|
* ```
|
||||||
|
* @since v10.7.0
|
||||||
|
* @param [label='default']
|
||||||
|
*/
|
||||||
|
timeLog(label?: string, ...data: any[]): void;
|
||||||
|
/**
|
||||||
|
* Prints to `stderr` the string `'Trace: '`, followed by the [`util.format()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilformatformat-args)
|
||||||
|
* formatted message and stack trace to the current position in the code.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* console.trace('Show me');
|
||||||
|
* // Prints: (stack trace will vary based on where trace is called)
|
||||||
|
* // Trace: Show me
|
||||||
|
* // at repl:2:9
|
||||||
|
* // at REPLServer.defaultEval (repl.js:248:27)
|
||||||
|
* // at bound (domain.js:287:14)
|
||||||
|
* // at REPLServer.runBound [as eval] (domain.js:300:12)
|
||||||
|
* // at REPLServer.<anonymous> (repl.js:412:12)
|
||||||
|
* // at emitOne (events.js:82:20)
|
||||||
|
* // at REPLServer.emit (events.js:169:7)
|
||||||
|
* // at REPLServer.Interface._onLine (readline.js:210:10)
|
||||||
|
* // at REPLServer.Interface._line (readline.js:549:8)
|
||||||
|
* // at REPLServer.Interface._ttyWrite (readline.js:826:14)
|
||||||
|
* ```
|
||||||
|
* @since v0.1.104
|
||||||
|
*/
|
||||||
|
trace(message?: any, ...optionalParams: any[]): void;
|
||||||
|
/**
|
||||||
|
* The `console.warn()` function is an alias for {@link error}.
|
||||||
|
* @since v0.1.100
|
||||||
|
*/
|
||||||
|
warn(message?: any, ...optionalParams: any[]): void;
|
||||||
|
// --- Inspector mode only ---
|
||||||
|
/**
|
||||||
|
* This method does not display anything unless used in the inspector. The `console.profile()`
|
||||||
|
* method starts a JavaScript CPU profile with an optional label until {@link profileEnd}
|
||||||
|
* is called. The profile is then added to the Profile panel of the inspector.
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* console.profile('MyLabel');
|
||||||
|
* // Some code
|
||||||
|
* console.profileEnd('MyLabel');
|
||||||
|
* // Adds the profile 'MyLabel' to the Profiles panel of the inspector.
|
||||||
|
* ```
|
||||||
|
* @since v8.0.0
|
||||||
|
*/
|
||||||
|
profile(label?: string): void;
|
||||||
|
/**
|
||||||
|
* This method does not display anything unless used in the inspector. Stops the current
|
||||||
|
* JavaScript CPU profiling session if one has been started and prints the report to the
|
||||||
|
* Profiles panel of the inspector. See {@link profile} for an example.
|
||||||
|
*
|
||||||
|
* If this method is called without a label, the most recently started profile is stopped.
|
||||||
|
* @since v8.0.0
|
||||||
|
*/
|
||||||
|
profileEnd(label?: string): void;
|
||||||
|
/**
|
||||||
|
* This method does not display anything unless used in the inspector. The `console.timeStamp()`
|
||||||
|
* method adds an event with the label `'label'` to the Timeline panel of the inspector.
|
||||||
|
* @since v8.0.0
|
||||||
|
*/
|
||||||
|
timeStamp(label?: string): void;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The `console` module provides a simple debugging console that is similar to the
|
||||||
|
* JavaScript console mechanism provided by web browsers.
|
||||||
|
*
|
||||||
|
* The module exports two specific components:
|
||||||
|
*
|
||||||
|
* * A `Console` class with methods such as `console.log()`, `console.error()` and `console.warn()` that can be used to write to any Node.js stream.
|
||||||
|
* * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstdout) and
|
||||||
|
* [`process.stderr`](https://nodejs.org/docs/latest-v20.x/api/process.html#processstderr). The global `console` can be used without calling `require('console')`.
|
||||||
|
*
|
||||||
|
* _**Warning**_: The global console object's methods are neither consistently
|
||||||
|
* synchronous like the browser APIs they resemble, nor are they consistently
|
||||||
|
* asynchronous like all other Node.js streams. See the [`note on process I/O`](https://nodejs.org/docs/latest-v20.x/api/process.html#a-note-on-process-io) for
|
||||||
|
* more information.
|
||||||
|
*
|
||||||
|
* Example using the global `console`:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* console.log('hello world');
|
||||||
|
* // Prints: hello world, to stdout
|
||||||
|
* console.log('hello %s', 'world');
|
||||||
|
* // Prints: hello world, to stdout
|
||||||
|
* console.error(new Error('Whoops, something bad happened'));
|
||||||
|
* // Prints error message and stack trace to stderr:
|
||||||
|
* // Error: Whoops, something bad happened
|
||||||
|
* // at [eval]:5:15
|
||||||
|
* // at Script.runInThisContext (node:vm:132:18)
|
||||||
|
* // at Object.runInThisContext (node:vm:309:38)
|
||||||
|
* // at node:internal/process/execution:77:19
|
||||||
|
* // at [eval]-wrapper:6:22
|
||||||
|
* // at evalScript (node:internal/process/execution:76:60)
|
||||||
|
* // at node:internal/main/eval_string:23:3
|
||||||
|
*
|
||||||
|
* const name = 'Will Robinson';
|
||||||
|
* console.warn(`Danger ${name}! Danger!`);
|
||||||
|
* // Prints: Danger Will Robinson! Danger!, to stderr
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Example using the `Console` class:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const out = getStreamSomehow();
|
||||||
|
* const err = getStreamSomehow();
|
||||||
|
* const myConsole = new console.Console(out, err);
|
||||||
|
*
|
||||||
|
* myConsole.log('hello world');
|
||||||
|
* // Prints: hello world, to out
|
||||||
|
* myConsole.log('hello %s', 'world');
|
||||||
|
* // Prints: hello world, to out
|
||||||
|
* myConsole.error(new Error('Whoops, something bad happened'));
|
||||||
|
* // Prints: [Error: Whoops, something bad happened], to err
|
||||||
|
*
|
||||||
|
* const name = 'Will Robinson';
|
||||||
|
* myConsole.warn(`Danger ${name}! Danger!`);
|
||||||
|
* // Prints: Danger Will Robinson! Danger!, to err
|
||||||
|
* ```
|
||||||
|
* @see [source](https://github.com/nodejs/node/blob/v20.11.1/lib/console.js)
|
||||||
|
*/
|
||||||
|
namespace console {
|
||||||
|
interface ConsoleConstructorOptions {
|
||||||
|
stdout: NodeJS.WritableStream;
|
||||||
|
stderr?: NodeJS.WritableStream | undefined;
|
||||||
|
/**
|
||||||
|
* Ignore errors when writing to the underlying streams.
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
ignoreErrors?: boolean | undefined;
|
||||||
|
/**
|
||||||
|
* Set color support for this `Console` instance. Setting to true enables coloring while inspecting
|
||||||
|
* values. Setting to `false` disables coloring while inspecting values. Setting to `'auto'` makes color
|
||||||
|
* support depend on the value of the `isTTY` property and the value returned by `getColorDepth()` on the
|
||||||
|
* respective stream. This option can not be used, if `inspectOptions.colors` is set as well.
|
||||||
|
* @default auto
|
||||||
|
*/
|
||||||
|
colorMode?: boolean | "auto" | undefined;
|
||||||
|
/**
|
||||||
|
* Specifies options that are passed along to
|
||||||
|
* [`util.inspect()`](https://nodejs.org/docs/latest-v20.x/api/util.html#utilinspectobject-options).
|
||||||
|
*/
|
||||||
|
inspectOptions?: InspectOptions | undefined;
|
||||||
|
/**
|
||||||
|
* Set group indentation.
|
||||||
|
* @default 2
|
||||||
|
*/
|
||||||
|
groupIndentation?: number | undefined;
|
||||||
|
}
|
||||||
|
interface ConsoleConstructor {
|
||||||
|
prototype: Console;
|
||||||
|
new(stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console;
|
||||||
|
new(options: ConsoleConstructorOptions): Console;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var console: Console;
|
||||||
|
}
|
||||||
|
export = globalThis.console;
|
||||||
|
}
|
19
node_modules/@types/node/constants.d.ts
generated
vendored
Normal file
19
node_modules/@types/node/constants.d.ts
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */
|
||||||
|
declare module "constants" {
|
||||||
|
import { constants as osConstants, SignalConstants } from "node:os";
|
||||||
|
import { constants as cryptoConstants } from "node:crypto";
|
||||||
|
import { constants as fsConstants } from "node:fs";
|
||||||
|
|
||||||
|
const exp:
|
||||||
|
& typeof osConstants.errno
|
||||||
|
& typeof osConstants.priority
|
||||||
|
& SignalConstants
|
||||||
|
& typeof cryptoConstants
|
||||||
|
& typeof fsConstants;
|
||||||
|
export = exp;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare module "node:constants" {
|
||||||
|
import constants = require("constants");
|
||||||
|
export = constants;
|
||||||
|
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user