
Merge branch 'main' into update-imagebitmap

Hajime-san authored 2024-10-02 21:34:05 +09:00
commit f3aa376ef3
1069 changed files with 14685 additions and 7464 deletions


@ -39,10 +39,14 @@
"tests/node_compat/runner/TODO.md",
"tests/node_compat/test",
"tests/registry/",
"tests/specs/bench/default_ts",
"tests/specs/fmt",
"tests/specs/lint/bom",
"tests/specs/lint/default_ts",
"tests/specs/lint/syntax_error_reporting",
"tests/specs/publish/no_check_surfaces_syntax_error",
"tests/specs/run/default_ts",
"tests/specs/test/default_ts",
"tests/testdata/byte_order_mark.ts",
"tests/testdata/encoding",
"tests/testdata/file_extensions/ts_with_js_extension.js",
@ -64,10 +68,10 @@
"third_party"
],
"plugins": [
"https://plugins.dprint.dev/typescript-0.91.7.wasm",
"https://plugins.dprint.dev/typescript-0.93.0.wasm",
"https://plugins.dprint.dev/json-0.19.3.wasm",
"https://plugins.dprint.dev/markdown-0.17.8.wasm",
"https://plugins.dprint.dev/toml-0.6.2.wasm",
"https://plugins.dprint.dev/toml-0.6.3.wasm",
"https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
"https://plugins.dprint.dev/g-plane/pretty_yaml-v0.5.0.wasm"
]


@ -354,7 +354,7 @@ const ci = {
needs: ["pre_build"],
if: "${{ needs.pre_build.outputs.skip_build != 'true' }}",
"runs-on": "${{ matrix.runner }}",
"timeout-minutes": 150,
"timeout-minutes": 180,
defaults: {
run: {
// GH actions does not fail fast by default on
@ -757,8 +757,10 @@ const ci = {
].join("\n"),
run: [
"cd target/release",
"shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum",
"zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno",
"strip denort",
"shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum",
"zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
"./deno types > lib.deno.d.ts",
].join("\n"),
@ -783,8 +785,10 @@ const ci = {
"--p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) " +
"--entitlements-xml-file=cli/entitlements.plist",
"cd target/release",
"shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum",
"zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno",
"strip denort",
"shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum",
"zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort",
]
.join("\n"),
@ -799,7 +803,9 @@ const ci = {
].join("\n"),
shell: "pwsh",
run: [
"Get-FileHash target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum",
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip",
"Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum",
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip",
].join("\n"),
},
@ -813,6 +819,7 @@ const ci = {
].join("\n"),
run: [
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
"echo ${{ github.sha }} > canary-latest.txt",
'gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt',
].join("\n"),
@ -994,8 +1001,10 @@ const ci = {
"github.repository == 'denoland/deno' &&",
"startsWith(github.ref, 'refs/tags/')",
].join("\n"),
run:
run: [
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
].join("\n"),
},
{
name: "Upload release to dl.deno.land (windows)",
@ -1009,8 +1018,10 @@ const ci = {
env: {
CLOUDSDK_PYTHON: "${{env.pythonLocation}}\\python.exe",
},
run:
run: [
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
].join("\n"),
},
{
name: "Create release notes",
@ -1040,15 +1051,25 @@ const ci = {
with: {
files: [
"target/release/deno-x86_64-pc-windows-msvc.zip",
"target/release/deno-x86_64-pc-windows-msvc.sha256sum",
"target/release/denort-x86_64-pc-windows-msvc.zip",
"target/release/denort-x86_64-pc-windows-msvc.sha256sum",
"target/release/deno-x86_64-unknown-linux-gnu.zip",
"target/release/deno-x86_64-unknown-linux-gnu.sha256sum",
"target/release/denort-x86_64-unknown-linux-gnu.zip",
"target/release/denort-x86_64-unknown-linux-gnu.sha256sum",
"target/release/deno-x86_64-apple-darwin.zip",
"target/release/deno-x86_64-apple-darwin.sha256sum",
"target/release/denort-x86_64-apple-darwin.zip",
"target/release/denort-x86_64-apple-darwin.sha256sum",
"target/release/deno-aarch64-unknown-linux-gnu.zip",
"target/release/deno-aarch64-unknown-linux-gnu.sha256sum",
"target/release/denort-aarch64-unknown-linux-gnu.zip",
"target/release/denort-aarch64-unknown-linux-gnu.sha256sum",
"target/release/deno-aarch64-apple-darwin.zip",
"target/release/deno-aarch64-apple-darwin.sha256sum",
"target/release/denort-aarch64-apple-darwin.zip",
"target/release/denort-aarch64-apple-darwin.sha256sum",
"target/release/deno_src.tar.gz",
"target/release/lib.deno.d.ts",
].join("\n"),
@ -1067,6 +1088,7 @@ const ci = {
"./target",
"!./target/*/gn_out",
"!./target/*/*.zip",
"!./target/*/*.sha256sum",
"!./target/*/*.tar.gz",
].join("\n"),
key: prCacheKeyPrefix + "${{ github.sha }}",
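
One detail worth noting in the release steps above: each `.sha256sum` is produced with `shasum -a 256` over the extracted binary, not over the zip archive that ships it. A minimal consumer-side sketch of verifying a download (not part of this commit; it assumes the `sha2` and `hex` crates):

    // Hypothetical verifier: `shasum` lines look like "<hex digest>  <filename>",
    // and the digest covers the unzipped binary.
    use sha2::{Digest, Sha256};

    fn checksum_matches(binary_bytes: &[u8], sha256sum_line: &str) -> bool {
        let expected = sha256sum_line.split_whitespace().next().unwrap_or("");
        let actual = hex::encode(Sha256::digest(binary_bytes));
        actual.eq_ignore_ascii_case(expected)
    }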


@ -48,7 +48,7 @@ jobs:
- pre_build
if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
runs-on: '${{ matrix.runner }}'
timeout-minutes: 150
timeout-minutes: 180
defaults:
run:
shell: bash
@ -448,8 +448,10 @@ jobs:
github.repository == 'denoland/deno')
run: |-
cd target/release
shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum
zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno
strip denort
shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum
zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
./deno types > lib.deno.d.ts
- name: Pre-release (mac)
@ -465,8 +467,10 @@ jobs:
echo "Key is $(echo $APPLE_CODESIGN_KEY | base64 -d | wc -c) bytes"
rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist
cd target/release
shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum
zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno
strip denort
shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum
zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort
- name: Pre-release (windows)
if: |-
@ -476,7 +480,9 @@ jobs:
github.repository == 'denoland/deno')
shell: pwsh
run: |-
Get-FileHash target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip
Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
- name: Upload canary to dl.deno.land
if: |-
@ -486,6 +492,7 @@ jobs:
github.ref == 'refs/heads/main')
run: |-
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/
echo ${{ github.sha }} > canary-latest.txt
gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt
- name: Autobahn testsuite
@ -615,7 +622,9 @@ jobs:
matrix.profile == 'release' &&
github.repository == 'denoland/deno' &&
startsWith(github.ref, 'refs/tags/'))
run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
run: |-
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
- name: Upload release to dl.deno.land (windows)
if: |-
!(matrix.skip) && (matrix.os == 'windows' &&
@ -625,7 +634,9 @@ jobs:
startsWith(github.ref, 'refs/tags/'))
env:
CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
run: |-
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
- name: Create release notes
if: |-
!(matrix.skip) && (matrix.job == 'test' &&
@ -647,15 +658,25 @@ jobs:
with:
files: |-
target/release/deno-x86_64-pc-windows-msvc.zip
target/release/deno-x86_64-pc-windows-msvc.sha256sum
target/release/denort-x86_64-pc-windows-msvc.zip
target/release/denort-x86_64-pc-windows-msvc.sha256sum
target/release/deno-x86_64-unknown-linux-gnu.zip
target/release/deno-x86_64-unknown-linux-gnu.sha256sum
target/release/denort-x86_64-unknown-linux-gnu.zip
target/release/denort-x86_64-unknown-linux-gnu.sha256sum
target/release/deno-x86_64-apple-darwin.zip
target/release/deno-x86_64-apple-darwin.sha256sum
target/release/denort-x86_64-apple-darwin.zip
target/release/denort-x86_64-apple-darwin.sha256sum
target/release/deno-aarch64-unknown-linux-gnu.zip
target/release/deno-aarch64-unknown-linux-gnu.sha256sum
target/release/denort-aarch64-unknown-linux-gnu.zip
target/release/denort-aarch64-unknown-linux-gnu.sha256sum
target/release/deno-aarch64-apple-darwin.zip
target/release/deno-aarch64-apple-darwin.sha256sum
target/release/denort-aarch64-apple-darwin.zip
target/release/denort-aarch64-apple-darwin.sha256sum
target/release/deno_src.tar.gz
target/release/lib.deno.d.ts
body_path: target/release/release-notes.md
@ -668,6 +689,7 @@ jobs:
./target
!./target/*/gn_out
!./target/*/*.zip
!./target/*/*.sha256sum
!./target/*/*.tar.gz
key: '15-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:


@ -58,5 +58,5 @@ jobs:
- name: Upload archives to dl.deno.land
run: |
gsutil -h "Cache-Control: public, max-age=3600" cp ./*.zip gs://dl.deno.land/release/$(cat release-${{github.event.inputs.commitHash}}-latest.txt)/
gsutil -h "Cache-Control: no-cache" cp release-${{github.event.inputs.commitHash}}-latest.txt gs://dl.deno.land/release-${{github.event.inputs.commitHash}}-latest.txt
gsutil -h "Cache-Control: public, max-age=3600" cp ./*.zip gs://dl.deno.land/release/$(cat release-${{github.event.inputs.releaseKind}}-latest.txt)/
gsutil -h "Cache-Control: no-cache" cp release-${{github.event.inputs.releaseKind}}-latest.txt gs://dl.deno.land/release-${{github.event.inputs.releaseKind}}-latest.txt

Cargo.lock

@ -1152,11 +1152,10 @@ dependencies = [
[[package]]
name = "deno"
version = "2.0.0-rc.2"
version = "2.0.0-rc.9"
dependencies = [
"anstream",
"async-trait",
"base32",
"base64 0.21.7",
"bincode",
"bytes",
@ -1180,6 +1179,8 @@ dependencies = [
"deno_lockfile",
"deno_npm",
"deno_package_json",
"deno_path_util",
"deno_resolver",
"deno_runtime",
"deno_semver",
"deno_task_shell",
@ -1278,9 +1279,9 @@ dependencies = [
[[package]]
name = "deno_ast"
version = "0.42.0"
version = "0.42.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b08d11d9e4086b00d3428650e31153cf5896586411763cb88a6423ce5b18791"
checksum = "89ea2fd038c9c7e3e87e624fd708303cd33f39c33707f6c48fa9a65d65fefc47"
dependencies = [
"base64 0.21.7",
"deno_media_type",
@ -1353,11 +1354,13 @@ dependencies = [
[[package]]
name = "deno_cache_dir"
version = "0.11.1"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6df43311cb7703fa3242c282823a850e4c8d0c06b9527d8209b55bd695452ea5"
checksum = "186a102b13b4512841f5f40784cd25822042d22954afe3b5b070d406d15eb4f2"
dependencies = [
"base32",
"deno_media_type",
"deno_path_util",
"indexmap",
"log",
"once_cell",
@ -1385,9 +1388,9 @@ dependencies = [
[[package]]
name = "deno_config"
version = "0.34.2"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "764368ba1424ea74f9a28d8e2e7633f57d008e73f76b12bc272f6d5903ca1e08"
checksum = "105864a9e0a7fbc22f1106784b2d263f402f157be1c3e1a9905f53d182700c9f"
dependencies = [
"anyhow",
"deno_package_json",
@ -1415,9 +1418,9 @@ dependencies = [
[[package]]
name = "deno_core"
version = "0.309.0"
version = "0.311.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaecc78e8903d1b5d95c7fb01a14eb342b9e63484763a304fd30a8f48861f9df"
checksum = "5e09bd55da542fa1fde753aff617c355b5d782e763ab2a19e4371a56d7844cac"
dependencies = [
"anyhow",
"bincode",
@ -1486,6 +1489,7 @@ dependencies = [
"rand",
"ring",
"rsa",
"sec1",
"serde",
"serde_bytes",
"sha1",
@ -1498,9 +1502,9 @@ dependencies = [
[[package]]
name = "deno_doc"
version = "0.148.0"
version = "0.150.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "144fa07977ba9eeeb98bcd267b7f0a6f8033f0f1f20fd210e669b3c4f30cefa2"
checksum = "0841188bc852535b76e53be6c3d13c61cfc6751a731969b8959fe31fa696c73f"
dependencies = [
"ammonia",
"anyhow",
@ -1579,6 +1583,7 @@ dependencies = [
"base32",
"deno_core",
"deno_io",
"deno_path_util",
"deno_permissions",
"filetime",
"junction",
@ -1593,9 +1598,9 @@ dependencies = [
[[package]]
name = "deno_graph"
version = "0.82.1"
version = "0.83.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78b63015c73aa203da206b5d35b4c1eaa23bc7fed37ab325da62d525a5524a04"
checksum = "20088a4497b1a212482883dc7b0365e99f703d575fb512d4a793531cdc92ea76"
dependencies = [
"anyhow",
"async-trait",
@ -1691,6 +1696,7 @@ dependencies = [
"chrono",
"deno_core",
"deno_fetch",
"deno_path_util",
"deno_permissions",
"deno_tls",
"denokv_proto",
@ -1711,9 +1717,9 @@ dependencies = [
[[package]]
name = "deno_lint"
version = "0.65.0"
version = "0.67.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90a58ed695d2a04c43ff6f5fb03d1597f86e3748861c872ea5a9443da7512fc7"
checksum = "871b60e32bfb6c110cbb9b0688dbf048f81e5d347fe4ce5a42239263de9dd938"
dependencies = [
"anyhow",
"deno_ast",
@ -1809,6 +1815,7 @@ dependencies = [
"deno_media_type",
"deno_net",
"deno_package_json",
"deno_path_util",
"deno_permissions",
"deno_whoami",
"der",
@ -1869,6 +1876,7 @@ dependencies = [
"thiserror",
"tokio",
"url",
"webpki-root-certs",
"winapi",
"windows-sys 0.52.0",
"x25519-dalek",
@ -1878,9 +1886,9 @@ dependencies = [
[[package]]
name = "deno_npm"
version = "0.25.1"
version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e61b112e9bb332e8e6b0d82fcea7664423933de476e1726dd23a924a2d94f4ef"
checksum = "1809e2d77d8a06bc2800dc10c1d4acb664197e518e289a86e336411c1feba785"
dependencies = [
"anyhow",
"async-trait",
@ -1897,9 +1905,9 @@ dependencies = [
[[package]]
name = "deno_ops"
version = "0.185.0"
version = "0.187.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d817c00b3f30bef495c84080b5ed327ed68d6d2636b5ed8b730d00a06221dc1"
checksum = "e040fd4def8a67538fe38c9955fd970efc9f44284bd69d44f8992a456afd665d"
dependencies = [
"proc-macro-rules",
"proc-macro2",
@ -1924,21 +1932,49 @@ dependencies = [
"url",
]
[[package]]
name = "deno_path_util"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4889646c1ce8437a6fde3acb057fd7e2d039e62c61f5063fc125ed1ede114dc6"
dependencies = [
"percent-encoding",
"thiserror",
"url",
]
[[package]]
name = "deno_permissions"
version = "0.28.0"
dependencies = [
"deno_core",
"deno_path_util",
"deno_terminal 0.2.0",
"fqdn",
"libc",
"log",
"once_cell",
"percent-encoding",
"serde",
"which 4.4.2",
"winapi",
]
[[package]]
name = "deno_resolver"
version = "0.0.1"
dependencies = [
"anyhow",
"base32",
"deno_media_type",
"deno_package_json",
"deno_path_util",
"deno_semver",
"node_resolver",
"test_server",
"url",
]
[[package]]
name = "deno_runtime"
version = "0.177.0"
@ -1960,6 +1996,7 @@ dependencies = [
"deno_napi",
"deno_net",
"deno_node",
"deno_path_util",
"deno_permissions",
"deno_terminal 0.2.0",
"deno_tls",
@ -1992,6 +2029,7 @@ dependencies = [
"serde",
"signal-hook",
"signal-hook-registry",
"tempfile",
"test_server",
"tokio",
"tokio-metrics",
@ -2004,9 +2042,9 @@ dependencies = [
[[package]]
name = "deno_semver"
version = "0.5.13"
version = "0.5.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6657fecb9ac6a7a71f552c95e8cc492466a75f5660224577e2226bcf30db9768"
checksum = "670fec7ef309384e23c2a90ac5d2d9d91a776d225306c75f5cdd28cf6cc8a59f"
dependencies = [
"monch",
"once_cell",
@ -2528,9 +2566,9 @@ dependencies = [
[[package]]
name = "dprint-plugin-typescript"
version = "0.91.7"
version = "0.93.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "243879526ffc78aeb111719c3180f95820847c28eeeffb8d2585d1ec15a44fe4"
checksum = "e9308d98b923b7c0335c2ee1560199e3f2321b1be82803107b4ba4ed5dac46cc"
dependencies = [
"anyhow",
"deno_ast",
@ -2810,9 +2848,9 @@ checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8"
[[package]]
name = "eszip"
version = "0.78.0"
version = "0.79.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0546f00d41dbc6e90b50e922759c02559a897e59b683369c3a13519cd5108b6"
checksum = "8eb55c89bdde75a3826a79d49c9d847623ae7fbdb2695b542982982da990d33e"
dependencies = [
"anyhow",
"async-trait",
@ -4131,70 +4169,6 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "lexical-core"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2cde5de06e8d4c2faabc400238f9ae1c74d5412d03a7bd067645ccbc47070e46"
dependencies = [
"lexical-parse-float",
"lexical-parse-integer",
"lexical-util",
"lexical-write-float",
"lexical-write-integer",
]
[[package]]
name = "lexical-parse-float"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f"
dependencies = [
"lexical-parse-integer",
"lexical-util",
"static_assertions",
]
[[package]]
name = "lexical-parse-integer"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9"
dependencies = [
"lexical-util",
"static_assertions",
]
[[package]]
name = "lexical-util"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc"
dependencies = [
"static_assertions",
]
[[package]]
name = "lexical-write-float"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accabaa1c4581f05a3923d1b4cfd124c329352288b7b9da09e766b0668116862"
dependencies = [
"lexical-util",
"lexical-write-integer",
"static_assertions",
]
[[package]]
name = "lexical-write-integer"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1b6f3d1f4422866b68192d62f77bc5c700bee84f3069f2469d7bc8c77852446"
dependencies = [
"lexical-util",
"static_assertions",
]
[[package]]
name = "libc"
version = "0.2.153"
@ -4269,9 +4243,9 @@ dependencies = [
[[package]]
name = "libsui"
version = "0.3.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d0f34a58599483cd44a31ff3622dbcca0c50679af60f98b705069dc729e70cf"
checksum = "205eca4e7beaad637dcd38fe41292065894ee7f498077cf3c135d5f7252b9f27"
dependencies = [
"editpe",
"libc",
@ -4402,9 +4376,9 @@ dependencies = [
[[package]]
name = "markup_fmt"
version = "0.12.0"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33edf0c364d53d5518cf663824e7d5d5339c844daa54bb0208bee5202dde88f1"
checksum = "9dab5ae899659fbe5c8835b2c8ca8d3e357974a3e454138925b404004973361f"
dependencies = [
"aho-corasick",
"css_dataset",
@ -4652,6 +4626,7 @@ dependencies = [
"async-trait",
"deno_media_type",
"deno_package_json",
"deno_path_util",
"futures",
"lazy-regex",
"once_cell",
@ -6042,9 +6017,9 @@ dependencies = [
[[package]]
name = "rustls-pki-types"
version = "1.7.0"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d"
checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0"
[[package]]
name = "rustls-tokio-stream"
@ -6326,9 +6301,9 @@ dependencies = [
[[package]]
name = "serde_v8"
version = "0.218.0"
version = "0.220.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134add6f9dc7a226912468f7c427a476583ab362e094f04ff3a9fa79f2df97c7"
checksum = "6e7a65d91d79acc82aa229aeb084f4a39bda269069bc1520df40f679495388e4"
dependencies = [
"num-bigint",
"serde",
@ -6452,13 +6427,12 @@ checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
[[package]]
name = "simd-json"
version = "0.13.9"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0b84c23a1066e1d650ebc99aa8fb9f8ed0ab96fd36e2e836173c92fc9fb29bc"
checksum = "05f0b376aada35f30a0012f5790e50aed62f91804a0682669aefdbe81c7fcb91"
dependencies = [
"getrandom",
"halfbrown",
"lexical-core",
"ref-cast",
"serde",
"serde_json",
@ -7607,9 +7581,9 @@ dependencies = [
[[package]]
name = "tower-http"
version = "0.5.2"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97"
dependencies = [
"async-compression",
"bitflags 2.6.0",
@ -7627,9 +7601,9 @@ dependencies = [
[[package]]
name = "tower-layer"
version = "0.3.2"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-service"
@ -8013,9 +7987,9 @@ dependencies = [
[[package]]
name = "value-trait"
version = "0.8.1"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dad8db98c1e677797df21ba03fca7d3bf9bec3ca38db930954e4fe6e1ea27eb4"
checksum = "bcaa56177466248ba59d693a048c0959ddb67f1151b963f904306312548cf392"
dependencies = [
"float-cmp",
"halfbrown",
@ -8183,6 +8157,15 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "webpki-root-certs"
version = "0.26.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c6dfa3ac045bc517de14c7b1384298de1dbd229d38e08e169d9ae8c170937c"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "webpki-roots"
version = "0.26.1"


@ -21,13 +21,14 @@ members = [
"ext/napi",
"ext/net",
"ext/node",
"ext/node_resolver",
"ext/url",
"ext/web",
"ext/webgpu",
"ext/webidl",
"ext/websocket",
"ext/webstorage",
"resolvers/deno",
"resolvers/node",
"runtime",
"runtime/permissions",
"tests",
@ -44,15 +45,17 @@ license = "MIT"
repository = "https://github.com/denoland/deno"
[workspace.dependencies]
deno_ast = { version = "=0.42.0", features = ["transpiling"] }
deno_core = { version = "0.309.0" }
deno_ast = { version = "=0.42.1", features = ["transpiling"] }
deno_core = { version = "0.311.0" }
deno_bench_util = { version = "0.162.0", path = "./bench_util" }
deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
deno_npm = "=0.25.2"
deno_path_util = "=0.2.0"
deno_permissions = { version = "0.28.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.177.0", path = "./runtime" }
deno_semver = "=0.5.13"
deno_semver = "=0.5.14"
deno_terminal = "0.2.0"
napi_sym = { version = "0.98.0", path = "./cli/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
@ -85,7 +88,10 @@ deno_webgpu = { version = "0.135.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.168.0", path = "./ext/webidl" }
deno_websocket = { version = "0.173.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.163.0", path = "./ext/webstorage" }
node_resolver = { version = "0.7.0", path = "./ext/node_resolver" }
# resolvers
deno_resolver = { version = "0.0.1", path = "./resolvers/deno" }
node_resolver = { version = "0.7.0", path = "./resolvers/node" }
aes = "=0.8.3"
anyhow = "1.0.57"
@ -101,10 +107,11 @@ cbc = { version = "=0.1.2", features = ["alloc"] }
# Instead use util::time::utc_now()
chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
console_static_text = "=0.8.1"
dashmap = "5.5.3"
data-encoding = "2.3.3"
data-url = "=0.3.0"
deno_cache_dir = "=0.11.1"
deno_package_json = { version = "=0.1.1", default-features = false }
deno_cache_dir = "=0.13.0"
deno_package_json = { version = "0.1.1", default-features = false }
dlopen2 = "0.6.1"
ecb = "=0.1.2"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
@ -163,6 +170,7 @@ rustls-webpki = "0.102"
rustyline = "=13.0.0"
saffron = "=0.1.0"
scopeguard = "1.2.0"
sec1 = "0.7"
serde = { version = "1.0.149", features = ["derive"] }
serde_bytes = "0.11"
serde_json = "1.0.85"
@ -184,13 +192,14 @@ tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring
tokio-socks = "0.5.1"
tokio-util = "0.7.4"
tower = { version = "0.4.13", default-features = false, features = ["util"] }
tower-http = { version = "0.5.2", features = ["decompression-br", "decompression-gzip"] }
tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] }
tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
tower-service = "0.3.2"
twox-hash = "=1.6.3"
# Upgrading past 2.4.1 may cause WPT failures
url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] }
webpki-root-certs = "0.26.5"
webpki-roots = "0.26"
which = "4.2.5"
yoke = { version = "0.7.4", features = ["derive"] }


@ -2,7 +2,7 @@
[package]
name = "deno"
version = "2.0.0-rc.2"
version = "2.0.0-rc.9"
authors.workspace = true
default-run = "deno"
edition.workspace = true
@ -65,26 +65,27 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
deno_config = { version = "=0.34.2", features = ["workspace", "sync"] }
deno_config = { version = "=0.35.0", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "0.148.0", features = ["html", "syntect"] }
deno_graph = { version = "=0.82.1" }
deno_lint = { version = "=0.65.0", features = ["docs"] }
deno_doc = { version = "0.150.1", features = ["html", "syntect"] }
deno_graph = { version = "=0.83.0" }
deno_lint = { version = "=0.67.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm = "=0.25.1"
deno_npm.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver.workspace = true
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_task_shell = "=0.17.0"
deno_terminal.workspace = true
eszip = "=0.78.0"
libsui = "0.3.0"
eszip = "=0.79.1"
libsui = "0.4.0"
napi_sym.workspace = true
node_resolver.workspace = true
anstream = "0.6.14"
async-trait.workspace = true
base32.workspace = true
base64.workspace = true
bincode = "=1.3.3"
bytes.workspace = true
@ -95,14 +96,14 @@ clap_complete = "=4.5.24"
clap_complete_fig = "=4.5.2"
color-print = "0.3.5"
console_static_text.workspace = true
dashmap = "5.5.3"
dashmap.workspace = true
data-encoding.workspace = true
dissimilar = "=1.0.4"
dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.3"
dprint-plugin-jupyter = "=0.1.3"
dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.91.7"
dprint-plugin-typescript = "=0.93.0"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0"
faster-hex.workspace = true
@ -124,7 +125,7 @@ libz-sys.workspace = true
log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true
malva = "=0.10.1"
markup_fmt = "=0.12.0"
markup_fmt = "=0.13.1"
memmem.workspace = true
monch.workspace = true
notify.workspace = true

File diff suppressed because it is too large.


@ -246,7 +246,7 @@ impl CliLockfile {
// has an extra newline at the end
let diff = diff.trim_end();
Err(deno_core::anyhow::anyhow!(
"The lockfile is out of date. Run `deno cache --frozen=false`, `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
"The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
))
} else {
Ok(())


@ -20,13 +20,13 @@ use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_config::workspace::WorkspaceDiscoverStart;
use deno_config::workspace::WorkspaceLintConfig;
use deno_config::workspace::WorkspaceResolver;
use deno_core::normalize_path;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_npm::npm_rc::NpmRc;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
use deno_path_util::normalize_path;
use deno_semver::npm::NpmPackageReqReference;
use import_map::resolve_import_map_value_from_specifier;
@ -69,6 +69,8 @@ use std::collections::HashMap;
use std::env;
use std::io::BufReader;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::net::SocketAddr;
use std::num::NonZeroUsize;
use std::path::Path;
@ -281,10 +283,7 @@ impl BenchOptions {
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct UnstableFmtOptions {
pub css: bool,
pub html: bool,
pub component: bool,
pub yaml: bool,
}
#[derive(Clone, Debug)]
@ -317,10 +316,7 @@ impl FmtOptions {
Self {
options: resolve_fmt_options(fmt_flags, fmt_config.options),
unstable: UnstableFmtOptions {
css: unstable.css || fmt_flags.unstable_css,
html: unstable.html || fmt_flags.unstable_html,
component: unstable.component || fmt_flags.unstable_component,
yaml: unstable.yaml || fmt_flags.unstable_yaml,
},
files: fmt_config.files,
}
@ -748,15 +744,33 @@ pub enum NpmProcessStateKind {
Byonm,
}
pub(crate) const NPM_RESOLUTION_STATE_ENV_VAR_NAME: &str =
"DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE";
static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| {
let state = std::env::var(NPM_RESOLUTION_STATE_ENV_VAR_NAME).ok()?;
let state: NpmProcessState = serde_json::from_str(&state).ok()?;
// remove the environment variable so that sub processes
// that are spawned do not also use this.
std::env::remove_var(NPM_RESOLUTION_STATE_ENV_VAR_NAME);
use deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
let fd = fd.parse::<usize>().ok()?;
let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// Seek to the beginning: after the file is written, the offset is inherited by this subprocess,
// and the file might also have been read before.
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state)
});
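
The rewritten initializer above switches the npm process state hand-off from an environment-variable payload to a file descriptor inherited from the parent process. A stripped-down, Unix-only sketch of the child-side pattern (illustrative, not Deno's actual code; the function name and env var are hypothetical):

    use std::io::{Read, Seek, SeekFrom};

    #[cfg(unix)]
    fn read_inherited_state(env_var: &str) -> Option<Vec<u8>> {
        use std::os::unix::io::FromRawFd;
        let fd: i32 = std::env::var(env_var).ok()?.parse().ok()?;
        // Drop the variable so spawned subprocesses don't pick up a stale fd number.
        std::env::remove_var(env_var);
        // SAFETY: assumes `fd` is a live descriptor inherited from the parent and
        // exclusively owned from this point on.
        let mut file = unsafe { std::fs::File::from_raw_fd(fd) };
        // The offset is shared with the parent's writes, so rewind before reading.
        file.seek(SeekFrom::Start(0)).ok()?;
        let mut buf = Vec::new();
        file.read_to_end(&mut buf).ok()?;
        Some(buf)
    }
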
@ -775,6 +789,7 @@ pub struct CliOptions {
// application need not concern itself with, so keep these private
flags: Arc<Flags>,
initial_cwd: PathBuf,
main_module_cell: std::sync::OnceLock<Result<ModuleSpecifier, AnyError>>,
maybe_node_modules_folder: Option<PathBuf>,
npmrc: Arc<ResolvedNpmRc>,
maybe_lockfile: Option<Arc<CliLockfile>>,
@ -831,6 +846,7 @@ impl CliOptions {
npmrc,
maybe_node_modules_folder,
overrides: Default::default(),
main_module_cell: std::sync::OnceLock::new(),
start_dir,
deno_dir_provider,
})
@ -1071,27 +1087,13 @@ impl CliOptions {
None => None,
}
};
Ok(
self
.workspace()
.create_resolver(
CreateResolverOptions {
pkg_json_dep_resolution,
specified_import_map: cli_arg_specified_import_map,
},
|specifier| {
let specifier = specifier.clone();
async move {
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await?
.into_text_decoded()?;
Ok(file.source.to_string())
}
},
)
.await?,
)
Ok(self.workspace().create_resolver(
CreateResolverOptions {
pkg_json_dep_resolution,
specified_import_map: cli_arg_specified_import_map,
},
|path| Ok(std::fs::read_to_string(path)?),
)?)
}
pub fn node_ipc_fd(&self) -> Option<i64> {
@ -1125,40 +1127,39 @@ impl CliOptions {
self.flags.env_file.as_ref()
}
pub fn resolve_main_module(&self) -> Result<ModuleSpecifier, AnyError> {
let main_module = match &self.flags.subcommand {
DenoSubcommand::Compile(compile_flags) => {
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
}
DenoSubcommand::Eval(_) => {
resolve_url_or_path("./$deno$eval", self.initial_cwd())?
}
DenoSubcommand::Repl(_) => {
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
}
DenoSubcommand::Run(run_flags) => {
if run_flags.is_stdin() {
std::env::current_dir()
.context("Unable to get CWD")
.and_then(|cwd| {
resolve_url_or_path("./$deno$stdin.ts", &cwd)
.map_err(AnyError::from)
})?
} else if NpmPackageReqReference::from_str(&run_flags.script).is_ok() {
ModuleSpecifier::parse(&run_flags.script)?
} else {
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
}
}
DenoSubcommand::Serve(run_flags) => {
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
}
_ => {
bail!("No main module.")
}
};
pub fn resolve_main_module(&self) -> Result<&ModuleSpecifier, AnyError> {
self
.main_module_cell
.get_or_init(|| {
let main_module = match &self.flags.subcommand {
DenoSubcommand::Compile(compile_flags) => {
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
}
DenoSubcommand::Eval(_) => {
resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())?
}
DenoSubcommand::Repl(_) => {
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
}
DenoSubcommand::Run(run_flags) => {
if run_flags.is_stdin() {
resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())?
} else {
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
}
}
DenoSubcommand::Serve(run_flags) => {
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
}
_ => {
bail!("No main module.")
}
};
Ok(main_module)
Ok(main_module)
})
.as_ref()
.map_err(|err| deno_core::anyhow::anyhow!("{}", err))
}
pub fn resolve_file_header_overrides(
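
`resolve_main_module` above now computes the specifier once, caches it, and hands out a borrowed `&ModuleSpecifier` on every later call. The same memoization shape in a self-contained sketch (types shrunk to `String` for illustration):

    use std::sync::OnceLock;

    struct Options {
        main_module: OnceLock<Result<String, String>>,
    }

    impl Options {
        fn resolve_main_module(&self) -> Result<&String, String> {
            self.main_module
                .get_or_init(|| Ok("file:///main.ts".to_string())) // runs once
                .as_ref()
                .map_err(|e| e.clone()) // the cached error is re-surfaced on every call
        }
    }
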
@ -1179,7 +1180,7 @@ impl CliOptions {
(maybe_main_specifier, maybe_content_type)
{
HashMap::from([(
main_specifier,
main_specifier.clone(),
HashMap::from([("content-type".to_string(), content_type.to_string())]),
)])
} else {
@ -1301,10 +1302,7 @@ impl CliOptions {
pub fn resolve_config_unstable_fmt_options(&self) -> UnstableFmtOptions {
let workspace = self.workspace();
UnstableFmtOptions {
css: workspace.has_unstable("fmt-css"),
html: workspace.has_unstable("fmt-html"),
component: workspace.has_unstable("fmt-component"),
yaml: workspace.has_unstable("fmt-yaml"),
}
}
@ -1345,11 +1343,9 @@ impl CliOptions {
)?;
Ok(deno_lint::linter::LintConfig {
default_jsx_factory: transpile_options
.jsx_automatic
default_jsx_factory: (!transpile_options.jsx_automatic)
.then(|| transpile_options.jsx_factory.clone()),
default_jsx_fragment_factory: transpile_options
.jsx_automatic
default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
.then(|| transpile_options.jsx_fragment_factory.clone()),
})
}
@ -1503,7 +1499,34 @@ impl CliOptions {
}
pub fn permissions_options(&self) -> PermissionsOptions {
self.flags.permissions.to_options()
fn files_to_urls(files: &[String]) -> Vec<Cow<'_, Url>> {
files
.iter()
.filter_map(|f| Url::parse(f).ok().map(Cow::Owned))
.collect()
}
// get a list of urls to imply for --allow-import
let cli_arg_urls = self
.resolve_main_module()
.ok()
.map(|url| vec![Cow::Borrowed(url)])
.or_else(|| match &self.flags.subcommand {
DenoSubcommand::Cache(cache_flags) => {
Some(files_to_urls(&cache_flags.files))
}
DenoSubcommand::Check(check_flags) => {
Some(files_to_urls(&check_flags.files))
}
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(flags),
}) => Url::parse(&flags.module_url)
.ok()
.map(|url| vec![Cow::Owned(url)]),
_ => None,
})
.unwrap_or_default();
self.flags.permissions.to_options(&cli_arg_urls)
}
pub fn reload_flag(&self) -> bool {
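
The `permissions_options` rewrite above builds the list of URLs that imply `--allow-import`: it borrows the already-cached main module when there is one and otherwise owns URLs parsed from CLI arguments, which is why the vector holds `Cow<'_, Url>`. A minimal illustration of that borrowed/owned mix (`String` standing in for `Url`; `implied_urls` is a hypothetical name):

    use std::borrow::Cow;

    fn implied_urls<'a>(main: Option<&'a String>, files: &'a [String]) -> Vec<Cow<'a, String>> {
        match main {
            // Borrow the cached value: no clone needed.
            Some(m) => vec![Cow::Borrowed(m)],
            // Fall back to owned values built from other flags.
            None => files.iter().map(|f| Cow::Owned(f.clone())).collect(),
        }
    }
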
@ -1609,10 +1632,7 @@ impl CliOptions {
"sloppy-imports",
"byonm",
"bare-node-builtins",
"fmt-css",
"fmt-html",
"fmt-component",
"fmt-yaml",
]);
// add more unstable flags to the same vector holding granular flags
all_valid_unstable_flags.append(&mut another_unstable_flags);
@ -1678,14 +1698,14 @@ impl CliOptions {
pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig {
LifecycleScriptsConfig {
allowed: self.flags.allow_scripts.clone(),
initial_cwd: if matches!(
self.flags.allow_scripts,
PackagesAllowedScripts::None
) {
None
} else {
Some(self.initial_cwd.clone())
},
initial_cwd: self.initial_cwd.clone(),
root_dir: self.workspace().root_dir_path(),
explicit_install: matches!(
self.sub_command(),
DenoSubcommand::Install(_)
| DenoSubcommand::Cache(_)
| DenoSubcommand::Add(_)
),
}
}
}
@ -1705,7 +1725,7 @@ fn warn_insecure_allow_run_flags(flags: &Flags) {
// discourage using --allow-run without an allow list
if allow_run_list.is_empty() {
log::warn!(
"{} --allow-run can be trivially exploited. Prefer specifying an allow list (https://docs.deno.com/runtime/fundamentals/security/#running-subprocesses)",
"{} --allow-run without an allow list is susceptible to exploits. Prefer specifying an allow list (https://docs.deno.com/runtime/fundamentals/security/#running-subprocesses)",
colors::yellow("Warning")
);
}


@ -126,9 +126,9 @@ impl DenoDir {
self.root.join("registries")
}
/// Path to the dependencies cache folder.
pub fn deps_folder_path(&self) -> PathBuf {
self.root.join("deps")
/// Path to the remote cache folder.
pub fn remote_folder_path(&self) -> PathBuf {
self.root.join("remote")
}
/// Path to the origin data cache folder.

cli/cache/mod.rs

@ -1,14 +1,17 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::jsr_url;
use crate::args::CacheSetting;
use crate::errors::get_error_class_name;
use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOption;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect;
use crate::npm::CliNpmResolver;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use crate::util::path::specifier_has_extension;
use deno_ast::MediaType;
@ -19,6 +22,7 @@ use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse;
use deno_graph::source::Loader;
use deno_runtime::deno_permissions::PermissionsContainer;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
@ -75,8 +79,12 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
atomic_write_file_with_retries(path, bytes, CACHE_PERM)
}
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
std::fs::remove_file(path)
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
crate::util::fs::canonicalize_path(path)
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
std::fs::create_dir_all(path)
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
@ -98,40 +106,108 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
}
}
#[derive(Debug, Clone)]
pub struct DenoCacheEnvFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
self
.0
.read_file_sync(path, None)
.map_err(|err| err.into_io_error())
}
fn atomic_write_file(
&self,
path: &Path,
bytes: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries_and_fs(
&AtomicWriteFileFsAdapter {
fs: self.0,
write_mode: CACHE_PERM,
},
path,
bytes,
)
}
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
self
.0
.mkdir_sync(path, true, None)
.map_err(|e| e.into_io_error())
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
self
.0
.stat_sync(path)
.map(|stat| {
stat
.mtime
.map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
})
.map_err(|e| e.into_io_error())
}
fn is_file(&self, path: &Path) -> bool {
self.0.is_file_sync(path)
}
fn time_now(&self) -> SystemTime {
SystemTime::now()
}
}
pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
pub type LocalLspHttpCache =
deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
pub use deno_cache_dir::HttpCache;
pub struct FetchCacherOptions {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
pub permissions: PermissionsContainer,
/// If we're publishing for `deno publish`.
pub is_deno_publish: bool,
}
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher {
file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
permissions: FetchPermissionsOption,
permissions: PermissionsContainer,
cache_info_enabled: bool,
is_deno_publish: bool,
}
impl FetchCacher {
pub fn new(
file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
permissions: FetchPermissionsOption,
options: FetchCacherOptions,
) -> Self {
Self {
file_fetcher,
file_header_overrides,
global_http_cache,
npm_resolver,
module_info_cache,
permissions,
file_header_overrides: options.file_header_overrides,
permissions: options.permissions,
is_deno_publish: options.is_deno_publish,
cache_info_enabled: false,
}
}
@ -208,10 +284,24 @@ impl Loader for FetchCacher {
}
}
if self.is_deno_publish
&& matches!(specifier.scheme(), "http" | "https")
&& !specifier.as_str().starts_with(jsr_url().as_str())
{
// mark non-JSR remote modules as external so we don't need --allow-import
// permissions as these will error out later when publishing
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
}
let file_fetcher = self.file_fetcher.clone();
let file_header_overrides = self.file_header_overrides.clone();
let permissions = self.permissions.clone();
let specifier = specifier.clone();
let is_statically_analyzable = !options.was_dynamic_root;
async move {
let maybe_cache_setting = match options.cache_setting {
@ -230,7 +320,11 @@ impl Loader for FetchCacher {
.fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions {
specifier: &specifier,
permissions: permissions.as_ref(),
permissions: if is_statically_analyzable {
FetchPermissionsOptionRef::StaticContainer(&permissions)
} else {
FetchPermissionsOptionRef::DynamicContainer(&permissions)
},
maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(),
},
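
Much of this file's new surface hangs off `DenoCacheEnvFsAdapter`, which wraps a borrowed `dyn FileSystem` in a local newtype so the foreign `DenoCacheEnv` trait can be implemented for it. The shape of that pattern, reduced to a sketch with stand-in traits:

    // Stand-ins: the real traits are deno_runtime::deno_fs::FileSystem and
    // deno_cache_dir::DenoCacheEnv.
    trait Fs {
        fn read(&self, path: &str) -> std::io::Result<Vec<u8>>;
    }
    trait CacheEnv {
        fn read_file_bytes(&self, path: &str) -> std::io::Result<Vec<u8>>;
    }

    struct FsAdapter<'a>(pub &'a dyn Fs);

    impl<'a> CacheEnv for FsAdapter<'a> {
        fn read_file_bytes(&self, path: &str) -> std::io::Result<Vec<u8>> {
            self.0.read(path) // pure delegation; real code also maps error types
        }
    }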


@ -32,17 +32,19 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::npm::create_cli_npm_resolver;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::NpmModuleLoader;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::standalone::DenoCompileBinaryWriter;
use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector;
@ -185,7 +187,8 @@ struct CliFactoryServices {
node_resolver: Deferred<Arc<NodeResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
sloppy_imports_resolver: Deferred<Option<Arc<SloppyImportsResolver>>>,
root_permissions_container: Deferred<PermissionsContainer>,
sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,
text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
@ -298,7 +301,7 @@ impl CliFactory {
pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
self.services.global_http_cache.get_or_try_init(|| {
Ok(Arc::new(GlobalHttpCache::new(
self.deno_dir()?.deps_folder_path(),
self.deno_dir()?.remote_folder_path(),
crate::cache::RealDenoCacheEnv,
)))
})
@ -359,8 +362,8 @@ impl CliFactory {
let cli_options = self.cli_options()?;
// For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory.
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: fs.clone(),
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(fs.clone()),
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() {
Some(node_modules_path) => node_modules_path.to_path_buf(),
// path needs to be canonicalized for node resolution
@ -403,17 +406,16 @@ impl CliFactory {
pub fn sloppy_imports_resolver(
&self,
) -> Result<Option<&Arc<SloppyImportsResolver>>, AnyError> {
) -> Result<Option<&Arc<CliSloppyImportsResolver>>, AnyError> {
self
.services
.sloppy_imports_resolver
.get_or_try_init(|| {
Ok(
self
.cli_options()?
.unstable_sloppy_imports()
.then(|| Arc::new(SloppyImportsResolver::new(self.fs().clone()))),
)
Ok(self.cli_options()?.unstable_sloppy_imports().then(|| {
Arc::new(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
self.fs().clone(),
)))
}))
})
.map(|maybe| maybe.as_ref())
}
@ -572,8 +574,12 @@ impl CliFactory {
let caches = self.caches()?;
let node_analysis_cache =
NodeAnalysisCache::new(caches.node_analysis_db());
let cjs_esm_analyzer =
CliCjsCodeAnalyzer::new(node_analysis_cache, self.fs().clone());
let node_resolver = self.cli_node_resolver().await?.clone();
let cjs_esm_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache,
self.fs().clone(),
node_resolver,
);
Ok(Arc::new(NodeCodeTranslator::new(
cjs_esm_analyzer,
@ -622,6 +628,7 @@ impl CliFactory {
self.maybe_file_watcher_reporter().clone(),
self.file_fetcher()?.clone(),
self.global_http_cache()?.clone(),
self.root_permissions_container()?.clone(),
)))
})
.await
@ -655,6 +662,7 @@ impl CliFactory {
Ok(Arc::new(MainModuleGraphContainer::new(
self.cli_options()?.clone(),
self.module_load_preparer().await?.clone(),
self.root_permissions_container()?.clone(),
)))
})
.await
@ -751,15 +759,20 @@ impl CliFactory {
))
}
pub fn create_permissions_container(
pub fn root_permissions_container(
&self,
) -> Result<PermissionsContainer, AnyError> {
let desc_parser = self.permission_desc_parser()?.clone();
let permissions = Permissions::from_options(
desc_parser.as_ref(),
&self.cli_options()?.permissions_options(),
)?;
Ok(PermissionsContainer::new(desc_parser, permissions))
) -> Result<&PermissionsContainer, AnyError> {
self
.services
.root_permissions_container
.get_or_try_init(|| {
let desc_parser = self.permission_desc_parser()?.clone();
let permissions = Permissions::from_options(
desc_parser.as_ref(),
&self.cli_options()?.permissions_options(),
)?;
Ok(PermissionsContainer::new(desc_parser, permissions))
})
}
pub async fn create_cli_main_worker_factory(
@ -770,6 +783,7 @@ impl CliFactory {
let npm_resolver = self.npm_resolver().await?;
let fs = self.fs();
let cli_node_resolver = self.cli_node_resolver().await?;
let cli_npm_resolver = self.npm_resolver().await?.clone();
let maybe_file_watcher_communicator = if cli_options.has_hmr() {
Some(self.watcher_communicator.clone().unwrap())
} else {
@ -799,6 +813,7 @@ impl CliFactory {
self.main_module_graph_container().await?.clone(),
self.module_load_preparer().await?.clone(),
cli_node_resolver.clone(),
cli_npm_resolver.clone(),
NpmModuleLoader::new(
self.cjs_resolutions().clone(),
self.node_code_translator().await?.clone(),
@ -810,8 +825,8 @@ impl CliFactory {
)),
node_resolver.clone(),
npm_resolver.clone(),
self.permission_desc_parser()?.clone(),
self.root_cert_store_provider().clone(),
self.root_permissions_container()?.clone(),
StorageKeyResolver::from_options(cli_options),
cli_options.sub_command().clone(),
self.create_cli_main_worker_options()?,


@ -21,6 +21,7 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::source::LoaderChecksum;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::BlobStore;
use log::debug;
@ -135,14 +136,23 @@ impl MemoryFiles {
/// Fetch a source file from the local file system.
fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
let local = specifier.to_file_path().map_err(|_| {
let local = url_to_file_path(specifier).map_err(|_| {
uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
})?;
// If it doesn't have an extension, we want to treat it as TypeScript by default
let headers = if local.extension().is_none() {
Some(HashMap::from([(
"content-type".to_string(),
"application/typescript".to_string(),
)]))
} else {
None
};
let bytes = fs::read(local)?;
Ok(File {
specifier: specifier.clone(),
maybe_headers: None,
maybe_headers: headers,
source: bytes.into(),
})
}
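
The rule added to `fetch_local` above, restated as a runnable sketch: extension-less local files get a synthetic TypeScript content-type header, while everything else keeps none.

    use std::collections::HashMap;
    use std::path::Path;

    fn default_headers(path: &Path) -> Option<HashMap<String, String>> {
        path.extension().is_none().then(|| {
            HashMap::from([(
                "content-type".to_string(),
                "application/typescript".to_string(),
            )])
        })
    }
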
@ -164,30 +174,8 @@ fn get_validated_scheme(
#[derive(Debug, Copy, Clone)]
pub enum FetchPermissionsOptionRef<'a> {
AllowAll,
Container(&'a PermissionsContainer),
}
#[derive(Debug, Clone)]
pub enum FetchPermissionsOption {
AllowAll,
Container(PermissionsContainer),
}
impl FetchPermissionsOption {
pub fn as_ref(&self) -> FetchPermissionsOptionRef {
match self {
FetchPermissionsOption::AllowAll => FetchPermissionsOptionRef::AllowAll,
FetchPermissionsOption::Container(container) => {
FetchPermissionsOptionRef::Container(container)
}
}
}
}
impl From<PermissionsContainer> for FetchPermissionsOption {
fn from(value: PermissionsContainer) -> Self {
Self::Container(value)
}
DynamicContainer(&'a PermissionsContainer),
StaticContainer(&'a PermissionsContainer),
}
pub struct FetchOptions<'a> {
@ -555,7 +543,6 @@ impl FileFetcher {
}
/// Fetch a source file and asynchronously return it.
#[allow(dead_code)] // todo(25469): undo when merging
#[inline(always)]
pub async fn fetch(
&self,
@ -563,7 +550,10 @@ impl FileFetcher {
permissions: &PermissionsContainer,
) -> Result<File, AnyError> {
self
.fetch_inner(specifier, FetchPermissionsOptionRef::Container(permissions))
.fetch_inner(
specifier,
FetchPermissionsOptionRef::StaticContainer(permissions),
)
.await
}
@ -638,8 +628,17 @@ impl FileFetcher {
FetchPermissionsOptionRef::AllowAll => {
// allow
}
FetchPermissionsOptionRef::Container(permissions) => {
permissions.check_specifier(specifier)?;
FetchPermissionsOptionRef::StaticContainer(permissions) => {
permissions.check_specifier(
specifier,
deno_runtime::deno_permissions::CheckSpecifierKind::Static,
)?;
}
FetchPermissionsOptionRef::DynamicContainer(permissions) => {
permissions.check_specifier(
specifier,
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic,
)?;
}
}
if let Some(file) = self.memory_files.get(specifier) {
@ -727,7 +726,7 @@ mod tests {
maybe_temp_dir: Option<TempDir>,
) -> (FileFetcher, TempDir, Arc<BlobStore>) {
let temp_dir = maybe_temp_dir.unwrap_or_default();
let location = temp_dir.path().join("deps").to_path_buf();
let location = temp_dir.path().join("remote").to_path_buf();
let blob_store: Arc<BlobStore> = Default::default();
let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)),
@ -965,7 +964,7 @@ mod tests {
// This creates a totally new instance, simulating another Deno process
// invocation and indicates to "cache bust".
let location = temp_dir.path().join("deps").to_path_buf();
let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new(
location,
@ -991,7 +990,7 @@ mod tests {
async fn test_fetch_uses_cache() {
let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf();
let location = temp_dir.path().join("remote").to_path_buf();
let specifier =
resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap();
@ -1157,7 +1156,7 @@ mod tests {
async fn test_fetch_uses_cache_with_redirects() {
let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf();
let location = temp_dir.path().join("remote").to_path_buf();
let specifier =
resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap();
let redirected_specifier =
@ -1325,7 +1324,7 @@ mod tests {
async fn test_fetch_no_remote() {
let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf();
let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new(
location,
@ -1351,7 +1350,7 @@ mod tests {
async fn test_fetch_cache_only() {
let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf();
let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher_01 = FileFetcher::new(
Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)),
CacheSetting::Only,


@ -9,9 +9,9 @@ use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock;
use deno_graph::ModuleGraph;
use deno_runtime::colors;
use deno_runtime::deno_permissions::PermissionsContainer;
use crate::args::CliOptions;
use crate::file_fetcher::FetchPermissionsOption;
use crate::module_loader::ModuleLoadPreparer;
use crate::util::fs::collect_specifiers;
use crate::util::path::is_script_ext;
@ -45,12 +45,14 @@ pub struct MainModuleGraphContainer {
inner: Arc<RwLock<Arc<ModuleGraph>>>,
cli_options: Arc<CliOptions>,
module_load_preparer: Arc<ModuleLoadPreparer>,
root_permissions: PermissionsContainer,
}
impl MainModuleGraphContainer {
pub fn new(
cli_options: Arc<CliOptions>,
module_load_preparer: Arc<ModuleLoadPreparer>,
root_permissions: PermissionsContainer,
) -> Self {
Self {
update_queue: Default::default(),
@ -59,12 +61,14 @@ impl MainModuleGraphContainer {
)))),
cli_options,
module_load_preparer,
root_permissions,
}
}
pub async fn check_specifiers(
&self,
specifiers: &[ModuleSpecifier],
ext_overwrite: Option<&String>,
) -> Result<(), AnyError> {
let mut graph_permit = self.acquire_update_permit().await;
let graph = graph_permit.graph_mut();
@ -75,7 +79,8 @@ impl MainModuleGraphContainer {
specifiers,
false,
self.cli_options.ts_type_lib_window(),
FetchPermissionsOption::AllowAll,
self.root_permissions.clone(),
ext_overwrite,
)
.await?;
graph_permit.commit();
@ -94,7 +99,7 @@ impl MainModuleGraphContainer {
log::warn!("{} No matching files found.", colors::yellow("Warning"));
}
self.check_specifiers(&specifiers).await
self.check_specifiers(&specifiers, None).await
}
pub fn collect_specifiers(

View file

@ -11,28 +11,28 @@ use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::errors::get_error_class_name;
use crate::file_fetcher::FetchPermissionsOption;
use crate::file_fetcher::FileFetcher;
use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check;
use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_graph::source::LoaderChecksum;
use deno_graph::FillFromLockfileOptions;
use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError;
use deno_graph::WorkspaceFastCheckOption;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier;
use deno_graph::source::Loader;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::GraphKind;
use deno_graph::ModuleError;
@ -40,11 +40,13 @@ use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_path_util::url_to_file_path;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use import_map::ImportMapError;
use std::collections::HashSet;
use std::error::Error;
@ -52,14 +54,14 @@ use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
#[derive(Clone, Copy)]
#[derive(Clone)]
pub struct GraphValidOptions {
pub check_js: bool,
pub follow_type_only: bool,
pub is_vendoring: bool,
/// Whether to exit the process for lockfile errors.
/// Otherwise, surfaces lockfile errors as errors.
pub exit_lockfile_errors: bool,
pub kind: GraphKind,
/// Whether to exit the process for integrity check errors such as
/// lockfile checksum mismatches and JSR integrity failures.
/// Otherwise, surfaces integrity errors as errors.
pub exit_integrity_errors: bool,
}
/// Check if `roots` and their deps are available. Returns `Ok(())` if
@ -75,17 +77,54 @@ pub fn graph_valid(
roots: &[ModuleSpecifier],
options: GraphValidOptions,
) -> Result<(), AnyError> {
if options.exit_lockfile_errors {
graph_exit_lock_errors(graph);
if options.exit_integrity_errors {
graph_exit_integrity_errors(graph);
}
let mut errors = graph
let mut errors = graph_walk_errors(
graph,
fs,
roots,
GraphWalkErrorsOptions {
check_js: options.check_js,
kind: options.kind,
},
);
if let Some(error) = errors.next() {
Err(error)
} else {
// finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error(
get_error_class_name(err),
format_deno_graph_error(err.as_ref().deref()),
));
}
Ok(())
}
}
#[derive(Clone)]
pub struct GraphWalkErrorsOptions {
pub check_js: bool,
pub kind: GraphKind,
}
/// Walks the errors found in the module graph that should be surfaced to users
/// and enhances them with CLI information.
pub fn graph_walk_errors<'a>(
graph: &'a ModuleGraph,
fs: &'a Arc<dyn FileSystem>,
roots: &'a [ModuleSpecifier],
options: GraphWalkErrorsOptions,
) -> impl Iterator<Item = AnyError> + 'a {
graph
.walk(
roots.iter(),
deno_graph::WalkOptions {
check_js: options.check_js,
follow_type_only: options.follow_type_only,
follow_dynamic: options.is_vendoring,
kind: options.kind,
follow_dynamic: false,
prefer_fast_check_graph: false,
},
)
@ -109,7 +148,7 @@ pub fn graph_valid(
)
}
ModuleGraphError::ModuleError(error) => {
enhanced_lockfile_error_message(error)
enhanced_integrity_error_message(error)
.or_else(|| enhanced_sloppy_imports_error_message(fs, error))
.unwrap_or_else(|| format_deno_graph_error(error))
}
@ -132,56 +171,18 @@ pub fn graph_valid(
return None;
}
if options.is_vendoring {
// warn about failing dynamic imports when vendoring, but don't fail completely
if matches!(
error,
ModuleGraphError::ModuleError(ModuleError::MissingDynamic(_, _))
) {
log::warn!("Ignoring: {}", message);
return None;
}
// ignore invalid downgrades and invalid local imports when vendoring
match &error {
ModuleGraphError::ResolutionError(err)
| ModuleGraphError::TypesResolutionError(err) => {
if matches!(
err,
ResolutionError::InvalidDowngrade { .. }
| ResolutionError::InvalidLocalImport { .. }
) {
return None;
}
}
ModuleGraphError::ModuleError(_) => {}
}
}
Some(custom_error(get_error_class_name(&error.into()), message))
});
if let Some(error) = errors.next() {
Err(error)
} else {
// finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error(
get_error_class_name(err),
format_deno_graph_error(err.as_ref().deref()),
));
}
Ok(())
}
})
}
pub fn graph_exit_lock_errors(graph: &ModuleGraph) {
pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
for error in graph.module_errors() {
exit_for_lockfile_error(error);
exit_for_integrity_error(error);
}
}
fn exit_for_lockfile_error(err: &ModuleError) {
if let Some(err_message) = enhanced_lockfile_error_message(err) {
fn exit_for_integrity_error(err: &ModuleError) {
if let Some(err_message) = enhanced_integrity_error_message(err) {
log::error!("{} {}", colors::red("error:"), err_message);
std::process::exit(10);
}
@ -249,6 +250,19 @@ impl ModuleGraphCreator {
package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> {
fn graph_has_external_remote(graph: &ModuleGraph) -> bool {
// Earlier on, we marked external non-JSR modules as external.
// If the graph contains any of those, it would cause type checking
// to crash, so since publishing is going to fail anyway, skip type
// checking.
graph.modules().any(|module| match module {
deno_graph::Module::External(external_module) => {
matches!(external_module.specifier.scheme(), "http" | "https")
}
_ => false,
})
}
let mut roots = Vec::new();
for package_config in package_configs {
roots.extend(package_config.config_file.resolve_export_value_urls()?);
@ -262,9 +276,12 @@ impl ModuleGraphCreator {
})
.await?;
self.graph_valid(&graph)?;
if self.options.type_check_mode().is_true() {
if self.options.type_check_mode().is_true()
&& !graph_has_external_remote(&graph)
{
self.type_check_graph(graph.clone()).await?;
}
if build_fast_check_graph {
let fast_check_workspace_members = package_configs
.iter()
@ -279,6 +296,7 @@ impl ModuleGraphCreator {
},
)?;
}
Ok(graph)
}
@ -370,6 +388,7 @@ pub struct ModuleGraphBuilder {
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
root_permissions_container: PermissionsContainer,
}
impl ModuleGraphBuilder {
@ -386,6 +405,7 @@ impl ModuleGraphBuilder {
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
root_permissions_container: PermissionsContainer,
) -> Self {
Self {
options,
@ -399,6 +419,7 @@ impl ModuleGraphBuilder {
maybe_file_watcher_reporter,
file_fetcher,
global_http_cache,
root_permissions_container,
}
}
@ -553,33 +574,19 @@ impl ModuleGraphBuilder {
// populate the information from the lockfile
if let Some(lockfile) = &self.lockfile {
let lockfile = lockfile.lock();
for (from, to) in &lockfile.content.redirects {
if let Ok(from) = ModuleSpecifier::parse(from) {
if let Ok(to) = ModuleSpecifier::parse(to) {
if !matches!(from.scheme(), "file" | "npm" | "jsr") {
graph.redirects.insert(from, to);
}
}
}
}
for (req_dep, value) in &lockfile.content.packages.specifiers {
match req_dep.kind {
deno_semver::package::PackageKind::Jsr => {
if let Ok(version) = Version::parse_standard(value) {
graph.packages.add_nv(
req_dep.req.clone(),
PackageNv {
name: req_dep.req.name.clone(),
version,
},
);
}
}
deno_semver::package::PackageKind::Npm => {
// ignore
}
}
}
graph.fill_from_lockfile(FillFromLockfileOptions {
redirects: lockfile
.content
.redirects
.iter()
.map(|(from, to)| (from.as_str(), to.as_str())),
package_specifiers: lockfile
.content
.packages
.specifiers
.iter()
.map(|(dep, id)| (dep, id.as_str())),
});
}
}
@ -587,6 +594,12 @@ impl ModuleGraphBuilder {
let initial_package_deps_len = graph.packages.package_deps_sum();
let initial_package_mappings_len = graph.packages.mappings().len();
if roots.iter().any(|r| r.scheme() == "npm")
&& self.npm_resolver.as_byonm().is_some()
{
bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead");
}
graph.build(roots, loader, options).await;
let has_redirects_changed = graph.redirects.len() != initial_redirects_len;
@ -670,20 +683,26 @@ impl ModuleGraphBuilder {
/// Creates the default loader used for creating a graph.
pub fn create_graph_loader(&self) -> cache::FetchCacher {
self.create_fetch_cacher(FetchPermissionsOption::AllowAll)
self.create_fetch_cacher(self.root_permissions_container.clone())
}
pub fn create_fetch_cacher(
&self,
permissions: FetchPermissionsOption,
permissions: PermissionsContainer,
) -> cache::FetchCacher {
cache::FetchCacher::new(
self.file_fetcher.clone(),
self.options.resolve_file_header_overrides(),
self.global_http_cache.clone(),
self.npm_resolver.clone(),
self.module_info_cache.clone(),
permissions,
cache::FetchCacherOptions {
file_header_overrides: self.options.resolve_file_header_overrides(),
permissions,
is_deno_publish: matches!(
self.options.sub_command(),
crate::args::DenoSubcommand::Publish { .. }
),
},
)
}
@ -707,10 +726,13 @@ impl ModuleGraphBuilder {
&self.fs,
roots,
GraphValidOptions {
is_vendoring: false,
follow_type_only: self.options.type_check_mode().is_true(),
kind: if self.options.type_check_mode().is_true() {
GraphKind::All
} else {
GraphKind::CodeOnly
},
check_js: self.options.check_js(),
exit_lockfile_errors: true,
exit_integrity_errors: true,
},
)
}
@ -751,8 +773,8 @@ fn enhanced_sloppy_imports_error_message(
match error {
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => {
let additional_message = SloppyImportsResolver::new(fs.clone())
.resolve(specifier, ResolutionMode::Execution)?
let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
.resolve(specifier, SloppyImportsResolutionMode::Execution)?
.as_suggestion_message();
Some(format!(
"{} {} or run with --unstable-sloppy-imports",
@ -764,7 +786,7 @@ fn enhanced_sloppy_imports_error_message(
}
}
fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> {
fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
match err {
ModuleError::LoadingErr(
specifier,
@ -928,13 +950,13 @@ pub fn has_graph_root_local_dependent_changed(
std::iter::once(root),
deno_graph::WalkOptions {
follow_dynamic: true,
follow_type_only: true,
kind: GraphKind::All,
prefer_fast_check_graph: true,
check_js: true,
},
);
while let Some((s, _)) = dependent_specifiers.next() {
if let Ok(path) = specifier_to_file_path(s) {
if let Ok(path) = url_to_file_path(s) {
if let Ok(path) = canonicalize_path(&path) {
if canonicalized_changed_paths.contains(&path) {
return true;
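
The refactor above splits validation in two: graph_walk_errors produces the enhanced error iterator, and graph_valid surfaces the first error or, failing that, the npm resolution result. A minimal sketch of that control flow, using simplified stand-in types rather than the real deno_graph API:

struct Graph {
    npm_dep_graph_result: Result<(), String>,
}

fn graph_walk_errors(_graph: &Graph) -> impl Iterator<Item = String> {
    // The real function walks the module graph and enhances each error with
    // CLI context; this stub reports no errors.
    std::iter::empty()
}

fn graph_valid(graph: &Graph) -> Result<(), String> {
    let mut errors = graph_walk_errors(graph);
    if let Some(error) = errors.next() {
        Err(error)
    } else {
        // Finally surface the npm resolution result, as in the diff.
        graph.npm_dep_graph_result.clone()
    }
}

fn main() {
    let graph = Graph { npm_dep_graph_result: Ok(()) };
    assert!(graph_valid(&graph).is_ok());
}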

View file

@ -2,6 +2,7 @@
use super::diagnostics::DenoDiagnostic;
use super::diagnostics::DiagnosticSource;
use super::documents::Document;
use super::documents::Documents;
use super::language_server;
use super::resolver::LspResolver;
@ -9,9 +10,10 @@ use super::tsc;
use super::urls::url_to_uri;
use crate::args::jsr_url;
use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter;
use deno_config::workspace::MappedResolution;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
@ -24,6 +26,7 @@ use deno_core::serde::Serialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PathClean;
use deno_semver::jsr::JsrPackageNvReference;
use deno_semver::jsr::JsrPackageReqReference;
@ -401,7 +404,7 @@ impl<'a> TsResponseImportMapper<'a> {
.flatten()?;
let root_folder = package_json.path.parent()?;
let specifier_path = specifier_to_file_path(specifier).ok()?;
let specifier_path = url_to_file_path(specifier).ok()?;
let mut search_paths = vec![specifier_path.clone()];
// TypeScript will provide a .js extension for quick fixes, so do
// a search for the .d.ts file instead
@ -1151,6 +1154,162 @@ impl CodeActionCollection {
..Default::default()
}));
}
pub async fn add_source_actions(
&mut self,
document: &Document,
range: &lsp::Range,
language_server: &language_server::Inner,
) {
async fn deno_types_for_npm_action(
document: &Document,
range: &lsp::Range,
language_server: &language_server::Inner,
) -> Option<lsp::CodeAction> {
let (dep_key, dependency, _) =
document.get_maybe_dependency(&range.end)?;
if dependency.maybe_deno_types_specifier.is_some() {
return None;
}
if dependency.maybe_code.maybe_specifier().is_none()
&& dependency.maybe_type.maybe_specifier().is_none()
{
// We're using byonm and the package is not cached.
return None;
}
let position = deno_graph::Position::new(
range.end.line as usize,
range.end.character as usize,
);
let import_range = dependency.imports.iter().find_map(|i| {
if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") {
return None;
}
if !i.specifier_range.includes(&position) {
return None;
}
i.full_range.as_ref()
})?;
let referrer = document.specifier();
let file_referrer = document.file_referrer();
let config_data = language_server
.config
.tree
.data_for_specifier(file_referrer?)?;
let workspace_resolver = config_data.resolver.clone();
let npm_ref = if let Ok(resolution) =
workspace_resolver.resolve(&dep_key, document.specifier())
{
let specifier = match resolution {
MappedResolution::Normal { specifier, .. }
| MappedResolution::ImportMap { specifier, .. } => specifier,
_ => {
return None;
}
};
NpmPackageReqReference::from_specifier(&specifier).ok()?
} else {
// Only resolve bare package.json deps for byonm.
if !config_data.byonm {
return None;
}
if !language_server
.resolver
.is_bare_package_json_dep(&dep_key, referrer)
{
return None;
}
NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()?
};
let package_name = &npm_ref.req().name;
if package_name.starts_with("@types/") {
return None;
}
let managed_npm_resolver = language_server
.resolver
.maybe_managed_npm_resolver(file_referrer);
if let Some(npm_resolver) = managed_npm_resolver {
if !npm_resolver.is_pkg_req_folder_cached(npm_ref.req()) {
return None;
}
}
if language_server
.resolver
.npm_to_file_url(&npm_ref, document.specifier(), file_referrer)
.is_some()
{
// The package import has types.
return None;
}
let types_package_name = format!("@types/{package_name}");
let types_package_version = language_server
.npm_search_api
.versions(&types_package_name)
.await
.ok()
.and_then(|versions| versions.first().cloned())?;
let types_specifier_text =
if let Some(npm_resolver) = managed_npm_resolver {
let mut specifier_text = if let Some(req) =
npm_resolver.top_package_req_for_name(&types_package_name)
{
format!("npm:{req}")
} else {
format!("npm:{}@^{}", &types_package_name, types_package_version)
};
let specifier = ModuleSpecifier::parse(&specifier_text).ok()?;
if let Some(file_referrer) = file_referrer {
if let Some(text) = language_server
.get_ts_response_import_mapper(file_referrer)
.check_specifier(&specifier, referrer)
{
specifier_text = text;
}
}
specifier_text
} else {
types_package_name.clone()
};
let uri = language_server
.url_map
.specifier_to_uri(referrer, file_referrer)
.ok()?;
let position = lsp::Position {
line: import_range.start.line as u32,
character: import_range.start.character as u32,
};
let new_text = format!(
"{}// @deno-types=\"{}\"\n",
if position.character == 0 { "" } else { "\n" },
&types_specifier_text
);
let text_edit = lsp::TextEdit {
range: lsp::Range {
start: position,
end: position,
},
new_text,
};
Some(lsp::CodeAction {
title: format!(
"Add @deno-types directive for \"{}\"",
&types_specifier_text
),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: None,
edit: Some(lsp::WorkspaceEdit {
changes: Some([(uri, vec![text_edit])].into_iter().collect()),
..Default::default()
}),
..Default::default()
})
}
if let Some(action) =
deno_types_for_npm_action(document, range, language_server).await
{
self.actions.push(CodeActionKind::Deno(action));
}
}
}
/// Prepend the whitespace characters found at the start of line_content to content.
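
The add_source_actions quick fix above inserts a @deno-types directive line directly above the import, prefixing a newline unless the import starts at column zero. A small sketch of that text construction; the package specifier is a hypothetical example:

fn deno_types_edit_text(types_specifier: &str, at_line_start: bool) -> String {
    // Prefix a newline unless the import already starts the line, matching
    // the `position.character == 0` check above.
    format!(
        "{}// @deno-types=\"{}\"\n",
        if at_line_start { "" } else { "\n" },
        types_specifier
    )
}

fn main() {
    let text = deno_types_edit_text("npm:@types/express@^4.17.0", true);
    assert_eq!(text, "// @deno-types=\"npm:@types/express@^4.17.0\"\n");
}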

View file

@ -7,10 +7,10 @@ use crate::cache::LocalLspHttpCache;
use crate::lsp::config::Config;
use crate::lsp::logging::lsp_log;
use crate::lsp::logging::lsp_warn;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use std::collections::BTreeMap;
use std::fs;
use std::path::Path;
@ -24,7 +24,7 @@ pub fn calculate_fs_version(
) -> Option<String> {
match specifier.scheme() {
"npm" | "node" | "data" | "blob" => None,
"file" => specifier_to_file_path(specifier)
"file" => url_to_file_path(specifier)
.ok()
.and_then(|path| calculate_fs_version_at_path(&path)),
_ => calculate_fs_version_in_cache(cache, specifier, file_referrer),
@ -82,7 +82,7 @@ impl Default for LspCache {
impl LspCache {
pub fn new(global_cache_url: Option<Url>) -> Self {
let global_cache_path = global_cache_url.and_then(|s| {
specifier_to_file_path(&s)
url_to_file_path(&s)
.inspect(|p| {
lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy());
})
@ -94,7 +94,7 @@ impl LspCache {
let deno_dir = DenoDir::new(global_cache_path)
.expect("should be infallible with absolute custom root");
let global = Arc::new(GlobalHttpCache::new(
deno_dir.deps_folder_path(),
deno_dir.remote_folder_path(),
crate::cache::RealDenoCacheEnv,
));
Self {
@ -165,7 +165,7 @@ impl LspCache {
&self,
specifier: &ModuleSpecifier,
) -> Option<ModuleSpecifier> {
let path = specifier_to_file_path(specifier).ok()?;
let path = url_to_file_path(specifier).ok()?;
let vendor = self
.vendors_by_scope
.iter()
@ -176,7 +176,7 @@ impl LspCache {
}
pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool {
if let Ok(path) = specifier_to_file_path(specifier) {
if let Ok(path) = url_to_file_path(specifier) {
if !path.starts_with(&self.deno_dir().root) {
return true;
}
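
This file is part of the mechanical migration from deno_runtime's specifier_to_file_path to deno_path_util::url_to_file_path. The underlying conversion is the standard file-URL-to-path mapping; sketched here with the url crate (url = "2"), which exposes the same idea as Url::to_file_path:

use url::Url;

fn main() {
    let specifier = Url::parse("file:///tmp/project/deno.json").unwrap();
    match specifier.to_file_path() {
        // Only file: URLs convert; other schemes fall through, which is why
        // the callers above handle the Err case with `.ok()?` and friends.
        Ok(path) => println!("local path: {}", path.display()),
        Err(()) => println!("not a file: URL"),
    }
}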

View file

@ -19,7 +19,6 @@ use crate::util::path::relative_specifier;
use deno_graph::source::ResolutionMode;
use deno_graph::Range;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_ast::LineAndColumnIndex;
use deno_ast::SourceTextInfo;
@ -30,6 +29,7 @@ use deno_core::serde::Serialize;
use deno_core::serde_json::json;
use deno_core::url::Position;
use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv;
use import_map::ImportMap;
@ -380,7 +380,7 @@ fn get_local_completions(
ResolutionMode::Execution,
)
.ok()?;
let resolved_parent_path = specifier_to_file_path(&resolved_parent).ok()?;
let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
let raw_parent =
&text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
if resolved_parent_path.is_dir() {
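
For context on the completion logic above: raw_parent keeps everything up to and including the last '/' of the typed specifier, which is then paired with the resolved parent path. A sketch of that slice:

fn raw_parent(text: &str) -> Option<&str> {
    // Keep everything up to and including the last '/'.
    let idx = text.char_indices().rfind(|(_, c)| *c == '/')?.0;
    Some(&text[..idx + 1])
}

fn main() {
    assert_eq!(raw_parent("./components/But"), Some("./components/"));
    assert_eq!(raw_parent("nofolder"), None);
}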

View file

@ -36,8 +36,8 @@ use deno_core::ModuleSpecifier;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path;
use indexmap::IndexSet;
use lsp_types::ClientCapabilities;
use std::collections::BTreeMap;
@ -59,7 +59,8 @@ use crate::args::LintOptions;
use crate::cache::FastInsecureHasher;
use crate::file_fetcher::FileFetcher;
use crate::lsp::logging::lsp_warn;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider;
@ -801,7 +802,7 @@ impl Settings {
/// Returns `None` if the value should be deferred to the presence of a
/// `deno.json` file.
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option<bool> {
let Ok(path) = specifier_to_file_path(specifier) else {
let Ok(path) = url_to_file_path(specifier) else {
// Non-file URLs are not disabled by these settings.
return Some(true);
};
@ -810,7 +811,7 @@ impl Settings {
let mut disable_paths = vec![];
let mut enable_paths = None;
if let Some(folder_uri) = folder_uri {
if let Ok(folder_path) = specifier_to_file_path(folder_uri) {
if let Ok(folder_path) = url_to_file_path(folder_uri) {
disable_paths = settings
.disable_paths
.iter()
@ -847,12 +848,12 @@ impl Settings {
&self,
specifier: &ModuleSpecifier,
) -> (&WorkspaceSettings, Option<&ModuleSpecifier>) {
let Ok(path) = specifier_to_file_path(specifier) else {
let Ok(path) = url_to_file_path(specifier) else {
return (&self.unscoped, self.first_folder.as_ref());
};
for (folder_uri, settings) in self.by_workspace_folder.iter().rev() {
if let Some(settings) = settings {
let Ok(folder_path) = specifier_to_file_path(folder_uri) else {
let Ok(folder_path) = url_to_file_path(folder_uri) else {
continue;
};
if path.starts_with(folder_path) {
@ -1129,6 +1130,7 @@ impl Default for LspTsConfig {
"module": "esnext",
"moduleDetection": "force",
"noEmit": true,
"noImplicitOverride": true,
"resolveJsonModule": true,
"strict": true,
"target": "esnext",
@ -1180,7 +1182,7 @@ pub struct ConfigData {
pub lockfile: Option<Arc<CliLockfile>>,
pub npmrc: Option<Arc<ResolvedNpmRc>>,
pub resolver: Arc<WorkspaceResolver>,
pub sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub import_map_from_settings: Option<ModuleSpecifier>,
watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>,
}
@ -1450,9 +1452,10 @@ impl ConfigData {
// Mark the import map as a watched file
if let Some(import_map_specifier) = member_dir
.workspace
.to_import_map_specifier()
.to_import_map_path()
.ok()
.flatten()
.and_then(|path| Url::from_file_path(path).ok())
{
add_watched_file(
import_map_specifier.clone(),
@ -1541,50 +1544,32 @@ impl ConfigData {
None
}
};
let resolver = deno_core::unsync::spawn({
let workspace = member_dir.workspace.clone();
let file_fetcher = file_fetcher.cloned();
async move {
workspace
.create_resolver(
CreateResolverOptions {
pkg_json_dep_resolution,
specified_import_map,
},
move |specifier| {
let specifier = specifier.clone();
let file_fetcher = file_fetcher.clone().unwrap();
async move {
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await?
.into_text_decoded()?;
Ok(file.source.to_string())
}
},
)
.await
.inspect_err(|err| {
lsp_warn!(
" Failed to load resolver: {}",
err // will contain the specifier
);
})
.ok()
}
})
.await
.unwrap()
.unwrap_or_else(|| {
// create a dummy resolver
WorkspaceResolver::new_raw(
scope.clone(),
None,
member_dir.workspace.resolver_jsr_pkgs().collect(),
member_dir.workspace.package_jsons().cloned().collect(),
pkg_json_dep_resolution,
let resolver = member_dir
.workspace
.create_resolver(
CreateResolverOptions {
pkg_json_dep_resolution,
specified_import_map,
},
|path| Ok(std::fs::read_to_string(path)?),
)
});
.inspect_err(|err| {
lsp_warn!(
" Failed to load resolver: {}",
err // will contain the specifier
);
})
.ok()
.unwrap_or_else(|| {
// create a dummy resolver
WorkspaceResolver::new_raw(
scope.clone(),
None,
member_dir.workspace.resolver_jsr_pkgs().collect(),
member_dir.workspace.package_jsons().cloned().collect(),
pkg_json_dep_resolution,
)
});
if !resolver.diagnostics().is_empty() {
lsp_warn!(
" Import map diagnostics:\n{}",
@ -1600,9 +1585,11 @@ impl ConfigData {
.is_ok()
|| member_dir.workspace.has_unstable("sloppy-imports");
let sloppy_imports_resolver = unstable_sloppy_imports.then(|| {
Arc::new(SloppyImportsResolver::new_without_stat_cache(Arc::new(
deno_runtime::deno_fs::RealFs,
)))
Arc::new(CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new_without_stat_cache(Arc::new(
deno_runtime::deno_fs::RealFs,
)),
))
});
let resolver = Arc::new(resolver);
let lint_rule_provider = LintRuleProvider::new(
@ -1783,7 +1770,7 @@ impl ConfigTree {
let config_file_path = (|| {
let config_setting = ws_settings.config.as_ref()?;
let config_uri = folder_uri.join(config_setting).ok()?;
specifier_to_file_path(&config_uri).ok()
url_to_file_path(&config_uri).ok()
})();
if config_file_path.is_some() || ws_settings.import_map.is_some() {
scopes.insert(
@ -1860,7 +1847,7 @@ impl ConfigTree {
let scope = config_file.specifier.join(".").unwrap();
let json_text = serde_json::to_string(&config_file.json).unwrap();
let test_fs = deno_runtime::deno_fs::InMemoryFs::default();
let config_path = specifier_to_file_path(&config_file.specifier).unwrap();
let config_path = url_to_file_path(&config_file.specifier).unwrap();
test_fs.setup_text_files(vec![(
config_path.to_string_lossy().to_string(),
json_text,
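
The resolver creation above becomes synchronous: instead of spawning a task that fetches the import map through the file fetcher, the workspace resolver now takes a plain closure that reads the file from disk, with the same dummy-resolver fallback on error. A minimal sketch of that loader shape (the function name here is a stand-in, not the deno_config API):

use std::path::Path;

fn create_resolver_with<F>(path: &Path, load: F) -> Result<String, std::io::Error>
where
    F: Fn(&Path) -> Result<String, std::io::Error>,
{
    load(path)
}

fn main() {
    // The loader is now a plain synchronous read...
    let load = |path: &Path| std::fs::read_to_string(path);
    // ...and errors fall back to a dummy value, mirroring the
    // `.ok().unwrap_or_else(...)` fallback in the diff.
    let import_map = create_resolver_with(Path::new("import_map.json"), load)
        .ok()
        .unwrap_or_else(|| "{}".to_string());
    println!("{import_map}");
}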

View file

@ -19,8 +19,8 @@ use super::urls::LspUrlMap;
use crate::graph_util;
use crate::graph_util::enhanced_resolution_error_message;
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
use crate::resolver::SloppyImportsResolution;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider;
@ -40,11 +40,12 @@ use deno_core::unsync::spawn_blocking;
use deno_core::unsync::JoinHandle;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_runtime::deno_fs;
use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime;
@ -1263,7 +1264,9 @@ impl DenoDiagnostic {
Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => {
let maybe_sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution);
let maybe_sloppy_resolution = CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs))
).resolve(specifier, SloppyImportsResolutionMode::Execution);
let data = maybe_sloppy_resolution.as_ref().map(|res| {
json!({
"specifier": specifier,
@ -1514,17 +1517,19 @@ fn diagnose_dependency(
let import_ranges: Vec<_> = dependency
.imports
.iter()
.map(|i| documents::to_lsp_range(&i.range))
.map(|i| documents::to_lsp_range(&i.specifier_range))
.collect();
// TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has
// a different specifier and therefore needs a separate call to
// `diagnose_resolution()`. It would be much cleaner if that were modelled as
// a separate dependency: https://github.com/denoland/deno_graph/issues/247.
let is_types_deno_types = !dependency.maybe_type.is_none()
&& !dependency
.imports
.iter()
.any(|i| dependency.maybe_type.includes(&i.range.start).is_some());
&& !dependency.imports.iter().any(|i| {
dependency
.maybe_type
.includes(&i.specifier_range.start)
.is_some()
});
diagnostics.extend(
diagnose_resolution(
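
The NoLocal diagnostic above asks the sloppy-imports resolver for a suggestion when a specifier has no file on disk. The gist is probing likely extensions; a sketch under that assumption (the probe order is illustrative, not the resolver's exact behavior):

use std::path::{Path, PathBuf};

fn probe_sloppy(path: &Path) -> Option<PathBuf> {
    for ext in ["ts", "tsx", "js", "jsx"] {
        let candidate = path.with_extension(ext);
        if candidate.is_file() {
            // Suggest the first candidate that exists on disk.
            return Some(candidate);
        }
    }
    None
}

fn main() {
    if let Some(found) = probe_sloppy(Path::new("src/util")) {
        println!("did you mean {}?", found.display());
    }
}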

View file

@ -11,7 +11,6 @@ use super::tsc;
use super::tsc::AssetDocument;
use crate::graph_util::CliJsrUrlProvider;
use deno_runtime::fs_util::specifier_to_file_path;
use dashmap::DashMap;
use deno_ast::swc::visit::VisitWith;
@ -27,6 +26,7 @@ use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::Resolution;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
@ -849,7 +849,7 @@ impl FileSystemDocuments {
file_referrer: Option<&ModuleSpecifier>,
) -> Option<Arc<Document>> {
let doc = if specifier.scheme() == "file" {
let path = specifier_to_file_path(specifier).ok()?;
let path = url_to_file_path(specifier).ok()?;
let bytes = fs::read(path).ok()?;
let content =
deno_graph::source::decode_owned_source(specifier, bytes, None).ok()?;
@ -1136,7 +1136,7 @@ impl Documents {
return true;
}
if specifier.scheme() == "file" {
return specifier_to_file_path(&specifier)
return url_to_file_path(&specifier)
.map(|p| p.is_file())
.unwrap_or(false);
}
@ -1325,7 +1325,7 @@ impl Documents {
let fs_docs = &self.file_system_docs;
// Clean up non-existent documents.
fs_docs.docs.retain(|specifier, _| {
let Ok(path) = specifier_to_file_path(specifier) else {
let Ok(path) = url_to_file_path(specifier) else {
// Remove non-file schemed docs (deps). They may not be dependencies
// anymore after updating resolvers.
return false;

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use base64::Engine;
use deno_ast::MediaType;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDiscoverOptions;
@ -16,6 +15,7 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_semver::jsr::JsrPackageReqReference;
@ -113,7 +113,6 @@ use crate::util::fs::remove_dir_all_if_exists;
use crate::util::path::is_importable_ext;
use crate::util::path::to_percent_decoded_str;
use crate::util::sync::AsyncFlag;
use deno_runtime::fs_util::specifier_to_file_path;
struct LspRootCertStoreProvider(RootCertStore);
@ -208,11 +207,11 @@ pub struct Inner {
module_registry: ModuleRegistry,
/// A lazily create "server" for handling test run requests.
maybe_testing_server: Option<testing::TestServer>,
npm_search_api: CliNpmSearchApi,
pub npm_search_api: CliNpmSearchApi,
project_version: usize,
/// A collection of measurements which instrument that performance of the LSP.
performance: Arc<Performance>,
resolver: Arc<LspResolver>,
pub resolver: Arc<LspResolver>,
task_queue: LanguageServerTaskQueue,
/// A memoized version of fixable diagnostic codes retrieved from TypeScript.
ts_fixable_diagnostics: Vec<String>,
@ -241,7 +240,7 @@ impl LanguageServer {
}
}
/// Similar to `deno cache` on the command line, where modules will be cached
/// Similar to `deno install --entrypoint` on the command line, where modules will be cached
/// in the Deno cache, including any of their dependencies.
pub async fn cache(
&self,
@ -275,10 +274,9 @@ impl LanguageServer {
factory.fs(),
&roots,
graph_util::GraphValidOptions {
is_vendoring: false,
follow_type_only: true,
kind: GraphKind::All,
check_js: false,
exit_lockfile_errors: false,
exit_integrity_errors: false,
},
)?;
@ -628,7 +626,7 @@ impl Inner {
let maybe_root_path = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let root_cert_store = get_root_cert_store(
maybe_root_path,
workspace_settings.certificate_stores.clone(),
@ -804,7 +802,7 @@ impl Inner {
let mut roots = config
.workspace_folders
.iter()
.filter_map(|p| specifier_to_file_path(&p.0).ok())
.filter_map(|p| url_to_file_path(&p.0).ok())
.collect::<Vec<_>>();
roots.sort();
let roots = roots
@ -968,16 +966,27 @@ impl Inner {
(|| {
let compiler_options = config_file.to_compiler_options().ok()?.options;
let jsx_import_source = compiler_options.get("jsxImportSource")?;
let jsx_import_source = jsx_import_source.as_str()?;
let jsx_import_source = jsx_import_source.as_str()?.to_string();
let referrer = config_file.specifier.clone();
let specifier = Url::parse(&format!(
"data:application/typescript;base64,{}",
base64::engine::general_purpose::STANDARD
.encode(format!("import '{jsx_import_source}/jsx-runtime';"))
))
.unwrap();
let specifier = format!("{jsx_import_source}/jsx-runtime");
self.task_queue.queue_task(Box::new(|ls: LanguageServer| {
spawn(async move {
let specifier = {
let inner = ls.inner.read().await;
let resolver = inner.resolver.as_graph_resolver(Some(&referrer));
let Ok(specifier) = resolver.resolve(
&specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
deno_graph::source::ResolutionMode::Types,
) else {
return;
};
specifier
};
if let Err(err) = ls.cache(vec![specifier], referrer, false).await {
lsp_warn!("{:#}", err);
}
@ -1115,7 +1124,7 @@ impl Inner {
{
return;
}
match specifier_to_file_path(&specifier) {
match url_to_file_path(&specifier) {
Ok(path) if is_importable_ext(&path) => {}
_ => return,
}
@ -1353,7 +1362,7 @@ impl Inner {
{
specifier = uri_to_url(&params.text_document.uri);
}
let file_path = specifier_to_file_path(&specifier).map_err(|err| {
let file_path = url_to_file_path(&specifier).map_err(|err| {
error!("{:#}", err);
LspError::invalid_request()
})?;
@ -1375,18 +1384,9 @@ impl Inner {
.data_for_specifier(&specifier)
.map(|d| &d.member_dir.workspace);
let unstable_options = UnstableFmtOptions {
css: maybe_workspace
.map(|w| w.has_unstable("fmt-css"))
.unwrap_or(false),
html: maybe_workspace
.map(|w| w.has_unstable("fmt-html"))
.unwrap_or(false),
component: maybe_workspace
.map(|w| w.has_unstable("fmt-component"))
.unwrap_or(false),
yaml: maybe_workspace
.map(|w| w.has_unstable("fmt-yaml"))
.unwrap_or(false),
};
let document = document.clone();
move || {
@ -1410,6 +1410,7 @@ impl Inner {
document.content(),
&fmt_options,
&unstable_options,
None,
)
}
};
@ -1611,8 +1612,8 @@ impl Inner {
None => false,
})
.collect();
let mut code_actions = CodeActionCollection::default();
if !fixable_diagnostics.is_empty() {
let mut code_actions = CodeActionCollection::default();
let file_diagnostics = self
.diagnostics_server
.get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version());
@ -1720,9 +1721,14 @@ impl Inner {
.add_cache_all_action(&specifier, no_cache_diagnostics.to_owned());
}
}
code_actions.set_preferred_fixes();
all_actions.extend(code_actions.get_response());
}
if let Some(document) = asset_or_doc.document() {
code_actions
.add_source_actions(document, &params.range, self)
.await;
}
code_actions.set_preferred_fixes();
all_actions.extend(code_actions.get_response());
// Refactor
let only = params
@ -2507,7 +2513,7 @@ impl Inner {
let maybe_root_path_owned = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyIncomingCall>::new();
for item in incoming_calls.iter() {
if let Some(resolved) = item.try_resolve_call_hierarchy_incoming_call(
@ -2553,7 +2559,7 @@ impl Inner {
let maybe_root_path_owned = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyOutgoingCall>::new();
for item in outgoing_calls.iter() {
if let Some(resolved) = item.try_resolve_call_hierarchy_outgoing_call(
@ -2602,7 +2608,7 @@ impl Inner {
let maybe_root_path_owned = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyItem>::new();
match one_or_many {
tsc::OneOrMany::One(item) => {
@ -3612,6 +3618,11 @@ impl Inner {
}),
// bit of a hack to force the lsp to cache the @types/node package
type_check_mode: crate::args::TypeCheckMode::Local,
permissions: crate::args::PermissionFlags {
// allow remote import permissions in the lsp for now
allow_import: Some(vec![]),
..Default::default()
},
..Default::default()
}),
initial_cwd,
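
For reference, the removed branch above cached the jsx-runtime module by synthesizing a base64 data: URL import, which the new code replaces with a proper resolver lookup of `<jsxImportSource>/jsx-runtime`. A sketch of the old construction ("react" is a hypothetical jsxImportSource value):

use base64::Engine;

fn jsx_runtime_data_url(jsx_import_source: &str) -> String {
    // Wrap a synthetic import in a typescript data: URL so that caching it
    // pulls in the jsx-runtime module.
    format!(
        "data:application/typescript;base64,{}",
        base64::engine::general_purpose::STANDARD
            .encode(format!("import '{jsx_import_source}/jsx-runtime';"))
    )
}

fn main() {
    println!("{}", jsx_runtime_data_url("react"));
}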

View file

@ -1,28 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::HttpCache;
@ -33,10 +10,10 @@ use deno_graph::source::Resolver;
use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
@ -55,6 +32,30 @@ use std::sync::Arc;
use super::cache::LspCache;
use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)]
struct LspScopeResolver {
@ -327,11 +328,11 @@ impl LspResolver {
) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?;
Some(NodeResolution::into_specifier_and_media_type(
Some(NodeResolution::into_specifier_and_media_type(Some(
node_resolver
.resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types)
.ok(),
))
.ok()?,
)))
}
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
@ -372,6 +373,26 @@ impl LspResolver {
Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1)
}
pub fn is_bare_package_json_dep(
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
) -> bool {
let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else {
return false;
};
node_resolver
.resolve_if_for_npm_pkg(
specifier_text,
referrer,
NodeResolutionMode::Types,
)
.ok()
.flatten()
.is_some()
}
pub fn get_closest_package_json(
&self,
referrer: &ModuleSpecifier,
@ -439,11 +460,11 @@ async fn create_npm_resolver(
) -> Option<Arc<dyn CliNpmResolver>> {
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false);
let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: Arc::new(deno_fs::RealFs),
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)),
root_node_modules_dir: config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| {
specifier_to_file_path(&config_data.scope)
url_to_file_path(&config_data.scope)
.ok()
.map(|p| p.join("node_modules/"))
})

View file

@ -234,16 +234,9 @@ impl TestRun {
&cli_options.permissions_options(),
)?;
let main_graph_container = factory.main_module_graph_container().await?;
test::check_specifiers(
factory.file_fetcher()?,
main_graph_container,
self
.queue
.iter()
.map(|s| (s.clone(), test::TestMode::Executable))
.collect(),
)
.await?;
main_graph_container
.check_specifiers(&self.queue.iter().cloned().collect::<Vec<_>>(), None)
.await?;
let (concurrent_jobs, fail_fast) =
if let DenoSubcommand::Test(test_flags) = cli_options.sub_command() {

View file

@ -39,7 +39,6 @@ use deno_core::convert::ToV8;
use deno_core::error::StdAnyError;
use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::StreamExt;
use deno_runtime::fs_util::specifier_to_file_path;
use dashmap::DashMap;
use deno_ast::MediaType;
@ -63,6 +62,7 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;
use deno_path_util::url_to_file_path;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::tokio_util::create_basic_runtime;
use indexmap::IndexMap;
@ -3191,7 +3191,7 @@ impl CallHierarchyItem {
let use_file_name = self.is_source_file_item();
let maybe_file_path = if uri.scheme().is_some_and(|s| s.as_str() == "file")
{
specifier_to_file_path(&uri_to_url(&uri)).ok()
url_to_file_path(&uri_to_url(&uri)).ok()
} else {
None
};
@ -6154,7 +6154,7 @@ mod tests {
let change = changes.text_changes.first().unwrap();
assert_eq!(
change.new_text,
"import { someLongVariable } from './b.ts'\n"
"import type { someLongVariable } from './b.ts'\n"
);
}

View file

@ -121,12 +121,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::installer::install_from_entrypoints(flags, &cache_flags.files).await
}),
DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags);
let main_graph_container =
factory.main_module_graph_container().await?;
main_graph_container
.load_and_type_check_files(&check_flags.files)
.await
tools::check::check(flags, check_flags).await
}),
DenoSubcommand::Clean => spawn_subcommand(async move {
tools::clean::clean()
@ -385,6 +380,43 @@ fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> {
"Run again with `--unstable-broadcast-channel` flag to enable this API.",
),
];
} else if msg.contains("window is not defined") {
return vec![
FixSuggestion::info("window global is not available in Deno 2."),
FixSuggestion::hint("Replace `window` with `globalThis`."),
];
} else if msg.contains("UnsafeWindowSurface is not a constructor") {
return vec![
FixSuggestion::info("Deno.UnsafeWindowSurface is an unstable API."),
FixSuggestion::hint(
"Run again with `--unstable-webgpu` flag to enable this API.",
),
];
// Try to capture errors like:
// ```
// Uncaught Error: Cannot find module '../build/Release/canvas.node'
// Require stack:
// - /.../deno/npm/registry.npmjs.org/canvas/2.11.2/lib/bindings.js
// - /.../.cache/deno/npm/registry.npmjs.org/canvas/2.11.2/lib/canvas.js
// ```
} else if msg.contains("Cannot find module")
&& msg.contains("Require stack")
&& msg.contains(".node'")
{
return vec![
FixSuggestion::info_multiline(
&[
"Trying to execute an npm package using Node-API addons,",
"these packages require local `node_modules` directory to be present."
]
),
FixSuggestion::hint_multiline(
&[
"Add `\"nodeModulesDir\": \"auto\" option to `deno.json`, and then run",
"`deno install --allow-scripts=npm:<package> --entrypoint <script>` to setup `node_modules` directory."
]
)
];
}
}
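
The new branches above extend the terminal-error suggestion table, which is plain substring matching on the error message. A reduced sketch with strings standing in for the FixSuggestion type:

fn suggestions_for(msg: &str) -> Vec<String> {
    if msg.contains("window is not defined") {
        return vec![
            "window global is not available in Deno 2.".to_string(),
            "Replace `window` with `globalThis`.".to_string(),
        ];
    }
    if msg.contains("Cannot find module")
        && msg.contains("Require stack")
        && msg.contains(".node'")
    {
        return vec![
            "This npm package uses Node-API addons and needs a local node_modules directory.".to_string(),
        ];
    }
    Vec::new()
}

fn main() {
    assert!(!suggestions_for("ReferenceError: window is not defined").is_empty());
}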

View file

@ -23,6 +23,7 @@ use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions;
use crate::graph_util::ModuleGraphBuilder;
use crate::node;
use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliNodeResolver;
use crate::resolver::ModuleCodeStringSource;
@ -104,12 +105,33 @@ impl ModuleLoadPreparer {
roots: &[ModuleSpecifier],
is_dynamic: bool,
lib: TsTypeLib,
permissions: crate::file_fetcher::FetchPermissionsOption,
permissions: PermissionsContainer,
ext_overwrite: Option<&String>,
) -> Result<(), AnyError> {
log::debug!("Preparing module load.");
let _pb_clear_guard = self.progress_bar.clear_guard();
let mut cache = self.module_graph_builder.create_fetch_cacher(permissions);
if let Some(ext) = ext_overwrite {
let maybe_content_type = match ext.as_str() {
"ts" => Some("text/typescript"),
"tsx" => Some("text/tsx"),
"js" => Some("text/javascript"),
"jsx" => Some("text/jsx"),
_ => None,
};
if let Some(content_type) = maybe_content_type {
for root in roots {
cache.file_header_overrides.insert(
root.clone(),
std::collections::HashMap::from([(
"content-type".to_string(),
content_type.to_string(),
)]),
);
}
}
}
log::debug!("Building module graph.");
let has_type_checked = !graph.roots.is_empty();
@ -182,6 +204,7 @@ struct SharedCliModuleLoaderState {
main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
@ -200,6 +223,7 @@ impl CliModuleLoaderFactory {
main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
@ -220,6 +244,7 @@ impl CliModuleLoaderFactory {
main_module_graph_container,
module_load_preparer,
node_resolver,
npm_resolver,
npm_module_loader,
parsed_source_cache,
resolver,
@ -231,13 +256,15 @@ impl CliModuleLoaderFactory {
&self,
graph_container: TGraphContainer,
lib: TsTypeLib,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
is_worker: bool,
parent_permissions: PermissionsContainer,
permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter {
let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
lib,
root_permissions,
dynamic_permissions,
is_worker,
parent_permissions,
permissions,
graph_container,
emitter: self.shared.emitter.clone(),
parsed_source_cache: self.shared.parsed_source_cache.clone(),
@ -253,20 +280,20 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
fn create_for_main(
&self,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter {
self.create_with_lib(
(*self.shared.main_module_graph_container).clone(),
self.shared.lib_window,
/* is worker */ false,
root_permissions.clone(),
root_permissions,
dynamic_permissions,
)
}
fn create_for_worker(
&self,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
parent_permissions: PermissionsContainer,
permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter {
self.create_with_lib(
// create a fresh module graph for the worker
@ -274,21 +301,21 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
self.shared.graph_kind,
))),
self.shared.lib_worker,
root_permissions,
dynamic_permissions,
/* is worker */ true,
parent_permissions,
permissions,
)
}
}
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
lib: TsTypeLib,
is_worker: bool,
/// The initial set of permissions used to resolve the static imports in the
/// worker. These are "allow all" for main worker, and parent thread
/// permissions for Web Worker.
root_permissions: PermissionsContainer,
/// Permissions used to resolve dynamic imports, these get passed as
/// "root permissions" for Web Worker.
dynamic_permissions: PermissionsContainer,
parent_permissions: PermissionsContainer,
permissions: PermissionsContainer,
shared: Arc<SharedCliModuleLoaderState>,
emitter: Arc<Emitter>,
parsed_source_cache: Arc<ParsedSourceCache>,
@ -455,7 +482,6 @@ impl<TGraphContainer: ModuleGraphContainer>
Some(Module::Npm(module)) => {
let package_folder = self
.shared
.node_resolver
.npm_resolver
.as_managed()
.unwrap() // byonm won't create a Module::Npm
@ -748,11 +774,12 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
}
}
let root_permissions = if is_dynamic {
inner.dynamic_permissions.clone()
let permissions = if is_dynamic {
inner.permissions.clone()
} else {
inner.root_permissions.clone()
inner.parent_permissions.clone()
};
let is_dynamic = is_dynamic || inner.is_worker; // consider workers as dynamic for permissions
let lib = inner.lib;
let mut update_permit = graph_container.acquire_update_permit().await;
let graph = update_permit.graph_mut();
@ -762,7 +789,8 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
&[specifier],
is_dynamic,
lib,
root_permissions.into(),
permissions,
None,
)
.await?;
update_permit.commit();
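
The ext_overwrite handling above translates a CLI-supplied extension into a content type and records it as a per-root header override, so the fetcher treats each root as that media type. A self-contained sketch of the mapping:

use std::collections::HashMap;

fn content_type_for_ext(ext: &str) -> Option<&'static str> {
    match ext {
        "ts" => Some("text/typescript"),
        "tsx" => Some("text/tsx"),
        "js" => Some("text/javascript"),
        "jsx" => Some("text/jsx"),
        _ => None,
    }
}

fn main() {
    let roots = ["file:///app/main", "file:///app/helper"];
    let mut overrides: HashMap<String, HashMap<String, String>> = HashMap::new();
    if let Some(content_type) = content_type_for_ext("ts") {
        for root in roots {
            // One override map per root, keyed by header name.
            overrides.insert(
                root.to_string(),
                HashMap::from([("content-type".to_string(), content_type.to_string())]),
            );
        }
    }
    assert_eq!(overrides.len(), 2);
}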

View file

@ -16,6 +16,7 @@ use serde::Serialize;
use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache;
use crate::resolver::CliNodeResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists;
pub type CliNodeCodeTranslator =
@ -54,11 +55,20 @@ pub enum CliCjsAnalysis {
pub struct CliCjsCodeAnalyzer {
cache: NodeAnalysisCache,
fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
}
impl CliCjsCodeAnalyzer {
pub fn new(cache: NodeAnalysisCache, fs: deno_fs::FileSystemRc) -> Self {
Self { cache, fs }
pub fn new(
cache: NodeAnalysisCache,
fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
) -> Self {
Self {
cache,
fs,
node_resolver,
}
}
async fn inner_cjs_analysis(
@ -73,7 +83,7 @@ impl CliCjsCodeAnalyzer {
return Ok(analysis);
}
let media_type = MediaType::from_specifier(specifier);
let mut media_type = MediaType::from_specifier(specifier);
if media_type == MediaType::Json {
return Ok(CliCjsAnalysis::Cjs {
exports: vec![],
@ -81,6 +91,22 @@ impl CliCjsCodeAnalyzer {
});
}
if media_type == MediaType::JavaScript {
if let Some(package_json) =
self.node_resolver.get_closest_package_json(specifier)?
{
match package_json.typ.as_str() {
"commonjs" => {
media_type = MediaType::Cjs;
}
"module" => {
media_type = MediaType::Mjs;
}
_ => {}
}
}
}
let analysis = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source: Arc<str> = source.into();
@ -99,6 +125,13 @@ impl CliCjsCodeAnalyzer {
exports: analysis.exports,
reexports: analysis.reexports,
})
} else if media_type == MediaType::Cjs {
// FIXME: `deno_ast` should internally handle MediaType::Cjs implying that
// the result must never be Esm
Ok(CliCjsAnalysis::Cjs {
exports: vec![],
reexports: vec![],
})
} else {
Ok(CliCjsAnalysis::Esm)
}
@ -125,10 +158,23 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
let source = match source {
Some(source) => source,
None => {
self
.fs
.read_text_file_lossy_async(specifier.to_file_path().unwrap(), None)
.await?
if let Ok(path) = specifier.to_file_path() {
if let Ok(source_from_file) =
self.fs.read_text_file_lossy_async(path, None).await
{
source_from_file
} else {
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],
reexports: vec![],
}));
}
} else {
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],
reexports: vec![],
}));
}
}
};
let analysis = self.inner_cjs_analysis(specifier, &source).await?;
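
The analysis change above reinterprets a .js file as CJS or ESM based on the closest package.json's "type" field before running CJS analysis. A sketch of that adjustment with a simplified stand-in for deno_ast's MediaType:

#[derive(Debug, PartialEq)]
enum MediaType {
    JavaScript,
    Cjs,
    Mjs,
}

fn adjust_for_package_type(media_type: MediaType, pkg_type: Option<&str>) -> MediaType {
    if media_type != MediaType::JavaScript {
        return media_type;
    }
    match pkg_type {
        // "type": "commonjs" makes plain .js files CJS...
        Some("commonjs") => MediaType::Cjs,
        // ...and "type": "module" makes them ESM.
        Some("module") => MediaType::Mjs,
        _ => media_type,
    }
}

fn main() {
    assert_eq!(
        adjust_for_package_type(MediaType::JavaScript, Some("commonjs")),
        MediaType::Cjs
    );
}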

View file

@ -1,276 +1,36 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::DenoPkgJsonFsAdapter;
use deno_core::url::Url;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageJsonLoadError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::load_pkg_json;
use node_resolver::NpmResolver;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use deno_runtime::fs_util::specifier_to_file_path;
use crate::resolver::CliDenoResolverFs;
use super::managed::normalize_pkg_name_for_node_modules_deno_folder;
use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
pub struct CliNpmResolverByonmCreateOptions {
pub fs: Arc<dyn FileSystem>,
// todo(dsherret): investigate removing this
pub root_node_modules_dir: Option<PathBuf>,
}
pub fn create_byonm_npm_resolver(
options: CliNpmResolverByonmCreateOptions,
) -> Arc<dyn CliNpmResolver> {
Arc::new(ByonmCliNpmResolver {
fs: options.fs,
root_node_modules_dir: options.root_node_modules_dir,
})
}
pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliDenoResolverFs>;
pub type CliByonmNpmResolver = ByonmNpmResolver<CliDenoResolverFs>;
// todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple.
#[derive(Debug)]
pub struct ByonmCliNpmResolver {
fs: Arc<dyn FileSystem>,
root_node_modules_dir: Option<PathBuf>,
}
struct CliByonmWrapper(Arc<CliByonmNpmResolver>);
impl ByonmCliNpmResolver {
fn load_pkg_json(
&self,
path: &Path,
) -> Result<Option<Arc<PackageJson>>, PackageJsonLoadError> {
load_pkg_json(&DenoPkgJsonFsAdapter(self.fs.as_ref()), path)
}
/// Finds the ancestor package.json that contains the specified dependency.
pub fn find_ancestor_package_json_with_dep(
&self,
dep_name: &str,
referrer: &ModuleSpecifier,
) -> Option<Arc<PackageJson>> {
let referrer_path = referrer.to_file_path().ok()?;
let mut current_folder = referrer_path.parent()?;
loop {
let pkg_json_path = current_folder.join("package.json");
if let Ok(Some(pkg_json)) = self.load_pkg_json(&pkg_json_path) {
if let Some(deps) = &pkg_json.dependencies {
if deps.contains_key(dep_name) {
return Some(pkg_json);
}
}
if let Some(deps) = &pkg_json.dev_dependencies {
if deps.contains_key(dep_name) {
return Some(pkg_json);
}
}
}
if let Some(parent) = current_folder.parent() {
current_folder = parent;
} else {
return None;
}
}
}
fn resolve_pkg_json_and_alias_for_req(
&self,
req: &PackageReq,
referrer: &ModuleSpecifier,
) -> Result<Option<(Arc<PackageJson>, String)>, AnyError> {
fn resolve_alias_from_pkg_json(
req: &PackageReq,
pkg_json: &PackageJson,
) -> Option<String> {
let deps = pkg_json.resolve_local_package_json_deps();
for (key, value) in deps {
if let Ok(value) = value {
match value {
PackageJsonDepValue::Req(dep_req) => {
if dep_req.name == req.name
&& dep_req.version_req.intersects(&req.version_req)
{
return Some(key);
}
}
PackageJsonDepValue::Workspace(_workspace) => {
if key == req.name && req.version_req.tag() == Some("workspace") {
return Some(key);
}
}
}
}
}
None
}
// attempt to resolve the npm specifier from the referrer's package.json.
if let Ok(file_path) = specifier_to_file_path(referrer) {
let mut current_path = file_path.as_path();
while let Some(dir_path) = current_path.parent() {
let package_json_path = dir_path.join("package.json");
if let Some(pkg_json) = self.load_pkg_json(&package_json_path)? {
if let Some(alias) =
resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
return Ok(Some((pkg_json, alias)));
}
}
current_path = dir_path;
}
}
// otherwise, fall back to the project's package.json
if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
let root_pkg_json_path =
root_node_modules_dir.parent().unwrap().join("package.json");
if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? {
if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
return Ok(Some((pkg_json, alias)));
}
}
}
Ok(None)
}
fn resolve_folder_in_root_node_modules(
&self,
req: &PackageReq,
) -> Option<PathBuf> {
// now check if node_modules/.deno/ matches this constraint
let root_node_modules_dir = self.root_node_modules_dir.as_ref()?;
let node_modules_deno_dir = root_node_modules_dir.join(".deno");
let Ok(entries) = self.fs.read_dir_sync(&node_modules_deno_dir) else {
return None;
};
let search_prefix = format!(
"{}@",
normalize_pkg_name_for_node_modules_deno_folder(&req.name)
);
let mut best_version = None;
// example entries:
// - @denotest+add@1.0.0
// - @denotest+add@1.0.0_1
for entry in entries {
if !entry.is_directory {
continue;
}
let Some(version_and_copy_idx) = entry.name.strip_prefix(&search_prefix)
else {
continue;
};
let version = version_and_copy_idx
.rsplit_once('_')
.map(|(v, _)| v)
.unwrap_or(version_and_copy_idx);
let Ok(version) = Version::parse_from_npm(version) else {
continue;
};
if req.version_req.matches(&version) {
if let Some((best_version_version, _)) = &best_version {
if version > *best_version_version {
best_version = Some((version, entry.name));
}
} else {
best_version = Some((version, entry.name));
}
}
}
best_version.map(|(_version, entry_name)| {
join_package_name(
&node_modules_deno_dir.join(entry_name).join("node_modules"),
&req.name,
)
})
}
}
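The "<version>[_<copy_index>]" folder-name convention above can be parsed in isolation. A hedged sketch using deno_semver (the code above discards the copy index; it is kept here for illustration):

use deno_semver::Version;

fn parse_version_and_copy_idx(entry: &str) -> Option<(Version, usize)> {
  // "1.0.0_1" -> (1.0.0, 1); "1.0.0" -> (1.0.0, 0)
  let (version, copy_idx) = match entry.rsplit_once('_') {
    Some((v, idx)) => (v, idx.parse().ok()?),
    None => (entry, 0),
  };
  Some((Version::parse_from_npm(version).ok()?, copy_idx))
}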
impl NpmResolver for ByonmCliNpmResolver {
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError> {
fn inner(
fs: &dyn FileSystem,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError> {
let maybe_referrer_file = specifier_to_file_path(referrer).ok();
let maybe_start_folder =
maybe_referrer_file.as_ref().and_then(|f| f.parent());
if let Some(start_folder) = maybe_start_folder {
for current_folder in start_folder.ancestors() {
let node_modules_folder = if current_folder.ends_with("node_modules")
{
Cow::Borrowed(current_folder)
} else {
Cow::Owned(current_folder.join("node_modules"))
};
let sub_dir = join_package_name(&node_modules_folder, name);
if fs.is_dir_sync(&sub_dir) {
return Ok(sub_dir);
}
}
}
Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
)
}
let path = inner(&*self.fs, name, referrer)?;
self.fs.realpath_sync(&path).map_err(|err| {
PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err.into_io_error(),
}
.into()
})
}
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
specifier.scheme() == "file"
&& specifier
.path()
.to_ascii_lowercase()
.contains("/node_modules/")
}
}
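A quick check of this path heuristic (illustrative specifiers; ModuleSpecifier is an alias for deno_core's Url):

fn main() {
  let in_pkg = |s: &str| {
    let url = deno_core::url::Url::parse(s).unwrap();
    url.scheme() == "file"
      && url.path().to_ascii_lowercase().contains("/node_modules/")
  };
  assert!(in_pkg("file:///proj/node_modules/chalk/index.js"));
  assert!(!in_pkg("https://deno.land/x/mod.ts"));
}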
impl NodeRequireResolver for ByonmCliNpmResolver {
impl NodeRequireResolver for CliByonmWrapper {
fn ensure_read_permission(
&self,
permissions: &mut dyn NodePermissions,
@ -286,110 +46,54 @@ impl NodeRequireResolver for ByonmCliNpmResolver {
}
}
impl NpmProcessStateProvider for ByonmCliNpmResolver {
impl NpmProcessStateProvider for CliByonmWrapper {
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self
.root_node_modules_dir
.as_ref()
.0
.root_node_modules_dir()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
}
impl CliNpmResolver for ByonmCliNpmResolver {
impl CliNpmResolver for CliByonmNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
self
}
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
self
Arc::new(CliByonmWrapper(self))
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
self
Arc::new(CliByonmWrapper(self))
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
Arc::new(Self {
fs: self.fs.clone(),
root_node_modules_dir: self.root_node_modules_dir.clone(),
})
Arc::new(self.clone())
}
fn as_inner(&self) -> InnerCliNpmResolverRef {
InnerCliNpmResolverRef::Byonm(self)
}
fn root_node_modules_path(&self) -> Option<&PathBuf> {
self.root_node_modules_dir.as_ref()
fn root_node_modules_path(&self) -> Option<&Path> {
self.root_node_modules_dir()
}
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &ModuleSpecifier,
referrer: &Url,
) -> Result<PathBuf, AnyError> {
fn node_resolve_dir(
fs: &dyn FileSystem,
alias: &str,
start_dir: &Path,
) -> Result<Option<PathBuf>, AnyError> {
for ancestor in start_dir.ancestors() {
let node_modules_folder = ancestor.join("node_modules");
let sub_dir = join_package_name(&node_modules_folder, alias);
if fs.is_dir_sync(&sub_dir) {
return Ok(Some(canonicalize_path_maybe_not_exists_with_fs(
&sub_dir, fs,
)?));
}
}
Ok(None)
}
// now attempt to resolve if it's found in any package.json
let maybe_pkg_json_and_alias =
self.resolve_pkg_json_and_alias_for_req(req, referrer)?;
match maybe_pkg_json_and_alias {
Some((pkg_json, alias)) => {
// now try node resolution
if let Some(resolved) =
node_resolve_dir(self.fs.as_ref(), &alias, pkg_json.dir_path())?
{
return Ok(resolved);
}
bail!(
concat!(
"Could not find \"{}\" in a node_modules folder. ",
"Deno expects the node_modules/ directory to be up to date. ",
"Did you forget to run `deno install`?"
),
alias,
);
}
None => {
// now check if node_modules/.deno/ matches this constraint
if let Some(folder) = self.resolve_folder_in_root_node_modules(req) {
return Ok(folder);
}
bail!(
concat!(
"Could not find a matching package for 'npm:{}' in the node_modules ",
"directory. Ensure you have all your JSR and npm dependencies listed ",
"in your deno.json or package.json, then run `deno install`. Alternatively, ",
r#"turn on auto-install by specifying `"nodeModulesDir": "auto"` in your "#,
"deno.json file."
),
req,
);
}
}
ByonmNpmResolver::resolve_pkg_folder_from_deno_module_req(
self, req, referrer,
)
}
fn check_state_hash(&self) -> Option<u64> {
@ -398,12 +102,3 @@ impl CliNpmResolver for ByonmCliNpmResolver {
None
}
}
fn join_package_name(path: &Path, package_name: &str) -> PathBuf {
let mut path = path.to_path_buf();
// ensure backslashes are used on windows
for part in package_name.split('/') {
path = path.join(part);
}
path
}
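Usage sketch for the helper above: a scoped name splits into separate components, so the platform's separator is used between them:

fn main() {
  let dir = std::path::Path::new("node_modules");
  assert_eq!(
    join_package_name(dir, "@types/node"),
    dir.join("@types").join("node")
  );
}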

View file

@ -1,295 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::Path;
use std::path::PathBuf;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use crate::util::fs::canonicalize_path;
use crate::util::path::root_url_to_safe_local_dirname;
/// The global cache directory of npm packages.
#[derive(Clone, Debug)]
pub struct NpmCacheDir {
root_dir: PathBuf,
// cached url representation of the root directory
root_dir_url: Url,
// A list of all registries that were discovered via `.npmrc` files,
// turned into safe directory names.
known_registries_dirnames: Vec<String>,
}
impl NpmCacheDir {
pub fn new(root_dir: PathBuf, known_registries_urls: Vec<Url>) -> Self {
fn try_get_canonicalized_root_dir(
root_dir: &Path,
) -> Result<PathBuf, AnyError> {
if !root_dir.exists() {
std::fs::create_dir_all(root_dir)
.with_context(|| format!("Error creating {}", root_dir.display()))?;
}
Ok(canonicalize_path(root_dir)?)
}
// this may fail on readonly file systems, so just ignore if so
let root_dir =
try_get_canonicalized_root_dir(&root_dir).unwrap_or(root_dir);
let root_dir_url = Url::from_directory_path(&root_dir).unwrap();
let known_registries_dirnames: Vec<_> = known_registries_urls
.into_iter()
.map(|url| {
root_url_to_safe_local_dirname(&url)
.to_string_lossy()
.replace('\\', "/")
})
.collect();
Self {
root_dir,
root_dir_url,
known_registries_dirnames,
}
}
pub fn root_dir(&self) -> &Path {
&self.root_dir
}
pub fn root_dir_url(&self) -> &Url {
&self.root_dir_url
}
pub fn package_folder_for_id(
&self,
folder_id: &NpmPackageCacheFolderId,
registry_url: &Url,
) -> PathBuf {
if folder_id.copy_index == 0 {
self.package_folder_for_nv(&folder_id.nv, registry_url)
} else {
self
.package_name_folder(&folder_id.nv.name, registry_url)
.join(format!("{}_{}", folder_id.nv.version, folder_id.copy_index))
}
}
pub fn package_folder_for_nv(
&self,
package: &PackageNv,
registry_url: &Url,
) -> PathBuf {
self
.package_name_folder(&package.name, registry_url)
.join(package.version.to_string())
}
pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
let mut dir = self.registry_folder(registry_url);
if name.to_lowercase() != name {
let encoded_name = mixed_case_package_name_encode(name);
// Using the encoded directory name may collide with an actual package name,
// so prefix it with an underscore since npm package names can't start with one
dir.join(format!("_{encoded_name}"))
} else {
// ensure backslashes are used on windows
for part in name.split('/') {
dir = dir.join(part);
}
dir
}
}
fn registry_folder(&self, registry_url: &Url) -> PathBuf {
self
.root_dir
.join(root_url_to_safe_local_dirname(registry_url))
}
pub fn resolve_package_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Option<NpmPackageCacheFolderId> {
let mut maybe_relative_url = None;
// Iterate through known registries and try to get a match.
for registry_dirname in &self.known_registries_dirnames {
let registry_root_dir = self
.root_dir_url
.join(&format!("{}/", registry_dirname))
// this not succeeding indicates a fatal issue, so unwrap
.unwrap();
let Some(relative_url) = registry_root_dir.make_relative(specifier)
else {
continue;
};
if relative_url.starts_with("../") {
continue;
}
maybe_relative_url = Some(relative_url);
break;
}
let mut relative_url = maybe_relative_url?;
// base32 decode the url if it starts with an underscore
// * Ex. _{base32(package_name)}/
if let Some(end_url) = relative_url.strip_prefix('_') {
let mut parts = end_url
.split('/')
.map(ToOwned::to_owned)
.collect::<Vec<_>>();
match mixed_case_package_name_decode(&parts[0]) {
Some(part) => {
parts[0] = part;
}
None => return None,
}
relative_url = parts.join("/");
}
// examples:
// * chalk/5.0.1/
// * @types/chalk/5.0.1/
// * some-package/5.0.1_1/ -- where the `_1` (/_\d+/) is a copy of the folder for peer deps
let is_scoped_package = relative_url.starts_with('@');
let mut parts = relative_url
.split('/')
.enumerate()
.take(if is_scoped_package { 3 } else { 2 })
.map(|(_, part)| part)
.collect::<Vec<_>>();
if parts.len() < 2 {
return None;
}
let version_part = parts.pop().unwrap();
let name = parts.join("/");
let (version, copy_index) =
if let Some((version, copy_count)) = version_part.split_once('_') {
(version, copy_count.parse::<u8>().ok()?)
} else {
(version_part, 0)
};
Some(NpmPackageCacheFolderId {
nv: PackageNv {
name,
version: Version::parse_from_npm(version).ok()?,
},
copy_index,
})
}
pub fn get_cache_location(&self) -> PathBuf {
self.root_dir.clone()
}
}
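A test-style sketch of the specifier-to-folder-id mapping, written against this pre-refactor API (the cache root and specifier are made up for illustration; unix-style paths assumed):

#[test]
fn resolve_folder_id_sketch() {
  let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
  let cache =
    NpmCacheDir::new(PathBuf::from("/deno_dir/npm"), vec![registry_url]);
  let specifier = ModuleSpecifier::parse(
    "file:///deno_dir/npm/registry.npmjs.org/some-package/5.0.1_1/lib.js",
  )
  .unwrap();
  let id = cache
    .resolve_package_folder_id_from_specifier(&specifier)
    .unwrap();
  assert_eq!(id.nv.name, "some-package");
  assert_eq!(id.nv.version, Version::parse_from_npm("5.0.1").unwrap());
  assert_eq!(id.copy_index, 1);
}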
pub fn mixed_case_package_name_encode(name: &str) -> String {
// use base32 encoding because it's reversible and the character set
// only includes the characters within 0-9 and A-Z so it can be lower cased
base32::encode(
base32::Alphabet::Rfc4648Lower { padding: false },
name.as_bytes(),
)
.to_lowercase()
}
pub fn mixed_case_package_name_decode(name: &str) -> Option<String> {
base32::decode(base32::Alphabet::Rfc4648Lower { padding: false }, name)
.and_then(|b| String::from_utf8(b).ok())
}
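Roundtrip sketch for this encoding: "JSON" encodes to "jjju6tq", matching the `_jjju6tq` folder asserted in the test below:

fn main() {
  let encoded = mixed_case_package_name_encode("JSON");
  assert_eq!(encoded, "jjju6tq");
  assert_eq!(
    mixed_case_package_name_decode(&encoded).as_deref(),
    Some("JSON")
  );
}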
#[cfg(test)]
mod test {
use deno_core::url::Url;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use super::NpmCacheDir;
use crate::npm::cache_dir::NpmPackageCacheFolderId;
#[test]
fn should_get_package_folder() {
let deno_dir = crate::cache::DenoDir::new(None).unwrap();
let root_dir = deno_dir.npm_folder_path();
let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
let cache = NpmCacheDir::new(root_dir.clone(), vec![registry_url.clone()]);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("json")
.join("1.2.5"),
);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
},
copy_index: 1,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("json")
.join("1.2.5_1"),
);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("_jjju6tq")
.join("2.1.5"),
);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "@types/JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("_ib2hs4dfomxuuu2pjy")
.join("2.1.5"),
);
}
}

View file

@ -8,6 +8,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@ -18,10 +19,10 @@ use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::NpmPackageCacheFolderId;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use crate::args::CacheSetting;
use crate::cache::CACHE_PERM;
use crate::npm::NpmCacheDir;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::hard_link_dir_recursive;
@ -87,9 +88,12 @@ impl NpmCache {
) -> Result<(), AnyError> {
let registry_url = self.npmrc.get_registry_url(&folder_id.nv.name);
assert_ne!(folder_id.copy_index, 0);
let package_folder = self
.cache_dir
.package_folder_for_id(folder_id, registry_url);
let package_folder = self.cache_dir.package_folder_for_id(
&folder_id.nv.name,
&folder_id.nv.version.to_string(),
folder_id.copy_index,
registry_url,
);
if package_folder.exists()
// if this file exists, then the package didn't successfully initialize
@ -100,9 +104,12 @@ impl NpmCache {
return Ok(());
}
let original_package_folder = self
.cache_dir
.package_folder_for_nv(&folder_id.nv, registry_url);
let original_package_folder = self.cache_dir.package_folder_for_id(
&folder_id.nv.name,
&folder_id.nv.version.to_string(),
0, // original copy index
registry_url,
);
// it seems Windows does an "AccessDenied" error when moving a
// directory with hard links, so that's why this solution is done
@ -114,7 +121,12 @@ impl NpmCache {
pub fn package_folder_for_id(&self, id: &NpmPackageCacheFolderId) -> PathBuf {
let registry_url = self.npmrc.get_registry_url(&id.nv.name);
self.cache_dir.package_folder_for_id(id, registry_url)
self.cache_dir.package_folder_for_id(
&id.nv.name,
&id.nv.version.to_string(),
id.copy_index,
registry_url,
)
}
pub fn package_folder_for_nv(&self, package: &PackageNv) -> PathBuf {
@ -127,7 +139,12 @@ impl NpmCache {
package: &PackageNv,
registry_url: &Url,
) -> PathBuf {
self.cache_dir.package_folder_for_nv(package, registry_url)
self.cache_dir.package_folder_for_id(
&package.name,
&package.version.to_string(),
0, // original copy_index
registry_url,
)
}
pub fn package_name_folder(&self, name: &str) -> PathBuf {
@ -146,6 +163,15 @@ impl NpmCache {
self
.cache_dir
.resolve_package_folder_id_from_specifier(specifier)
.and_then(|cache_id| {
Some(NpmPackageCacheFolderId {
nv: PackageNv {
name: cache_id.name,
version: Version::parse_from_npm(&cache_id.version).ok()?,
},
copy_index: cache_id.copy_index,
})
})
}
pub fn load_package_info(

View file

@ -7,6 +7,7 @@ use std::sync::Arc;
use cache::RegistryInfoDownloader;
use cache::TarballCache;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
@ -22,7 +23,7 @@ use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError;
@ -35,6 +36,7 @@ use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::FastInsecureHasher;
use crate::http_util::HttpClientProvider;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
@ -45,12 +47,10 @@ use self::cache::NpmCache;
use self::registry::CliNpmRegistryApi;
use self::resolution::NpmResolution;
use self::resolvers::create_npm_fs_resolver;
pub use self::resolvers::normalize_pkg_name_for_node_modules_deno_folder;
use self::resolvers::NpmPackageFsResolver;
use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
use super::NpmCacheDir;
mod cache;
mod registry;
@ -188,6 +188,7 @@ fn create_inner(
fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc<NpmCache> {
Arc::new(NpmCache::new(
NpmCacheDir::new(
&DenoCacheEnvFsAdapter(options.fs.as_ref()),
options.npm_global_cache_dir.clone(),
options.npmrc.get_all_known_registries_urls(),
),
@ -427,6 +428,16 @@ impl ManagedCliNpmResolver {
self.resolution.snapshot()
}
pub fn top_package_req_for_name(&self, name: &str) -> Option<PackageReq> {
let package_reqs = self.resolution.package_reqs();
let mut entries = package_reqs
.iter()
.filter(|(_, nv)| nv.name == name)
.collect::<Vec<_>>();
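// ascending sort by version, so `last()` below picks the newest match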
entries.sort_by_key(|(_, nv)| &nv.version);
Some(entries.last()?.0.clone())
}
pub fn serialized_valid_snapshot_for_system(
&self,
system_info: &NpmSystemInfo,
@ -573,7 +584,7 @@ impl NpmProcessStateProvider for ManagedCliNpmResolver {
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.resolution.serialized_valid_snapshot(),
self.fs_resolver.node_modules_path().map(|p| p.as_path()),
self.fs_resolver.node_modules_path(),
)
}
}
@ -630,7 +641,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
InnerCliNpmResolverRef::Managed(self)
}
fn root_node_modules_path(&self) -> Option<&PathBuf> {
fn root_node_modules_path(&self) -> Option<&Path> {
self.fs_resolver.node_modules_path()
}

View file

@ -1,5 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
pub mod bin_entries;
pub mod lifecycle_scripts;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
@ -30,7 +33,7 @@ pub trait NpmPackageFsResolver: Send + Sync {
fn root_dir_url(&self) -> &Url;
/// The local node_modules folder if it is applicable to the implementation.
fn node_modules_path(&self) -> Option<&PathBuf>;
fn node_modules_path(&self) -> Option<&Path>;
fn maybe_package_folder(&self, package_id: &NpmPackageId) -> Option<PathBuf>;
@ -134,7 +137,7 @@ impl RegistryReadPermissionChecker {
/// Caches all the packages in parallel.
pub async fn cache_packages(
packages: Vec<NpmResolutionPackage>,
packages: &[NpmResolutionPackage],
tarball_cache: &Arc<TarballCache>,
) -> Result<(), AnyError> {
let mut futures_unordered = futures::stream::FuturesUnordered::new();

View file

@ -12,12 +12,12 @@ use std::path::Path;
use std::path::PathBuf;
#[derive(Default)]
pub(super) struct BinEntries {
pub struct BinEntries<'a> {
/// Packages that have colliding bin names
collisions: HashSet<NpmPackageId>,
seen_names: HashMap<String, NpmPackageId>,
collisions: HashSet<&'a NpmPackageId>,
seen_names: HashMap<&'a str, &'a NpmPackageId>,
/// The bin entries
entries: Vec<(NpmResolutionPackage, PathBuf)>,
entries: Vec<(&'a NpmResolutionPackage, PathBuf)>,
}
/// Returns the name of the default binary for the given package.
@ -31,37 +31,32 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
.map_or(package.id.nv.name.as_str(), |(_, name)| name)
}
impl BinEntries {
pub(super) fn new() -> Self {
impl<'a> BinEntries<'a> {
pub fn new() -> Self {
Self::default()
}
/// Add a new bin entry (package with a bin field)
pub(super) fn add(
pub fn add(
&mut self,
package: NpmResolutionPackage,
package: &'a NpmResolutionPackage,
package_path: PathBuf,
) {
// check for a new collision, if we haven't already
// found one
match package.bin.as_ref().unwrap() {
deno_npm::registry::NpmPackageVersionBinEntry::String(_) => {
let bin_name = default_bin_name(&package);
let bin_name = default_bin_name(package);
if let Some(other) = self
.seen_names
.insert(bin_name.to_string(), package.id.clone())
{
self.collisions.insert(package.id.clone());
if let Some(other) = self.seen_names.insert(bin_name, &package.id) {
self.collisions.insert(&package.id);
self.collisions.insert(other);
}
}
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
for name in entries.keys() {
if let Some(other) =
self.seen_names.insert(name.to_string(), package.id.clone())
{
self.collisions.insert(package.id.clone());
if let Some(other) = self.seen_names.insert(name, &package.id) {
self.collisions.insert(&package.id);
self.collisions.insert(other);
}
}
@ -74,7 +69,11 @@ impl BinEntries {
fn for_each_entry(
&mut self,
snapshot: &NpmResolutionSnapshot,
mut f: impl FnMut(
mut already_seen: impl FnMut(
&Path,
&str, // bin script
) -> Result<(), AnyError>,
mut new: impl FnMut(
&NpmResolutionPackage,
&Path,
&str, // bin name
@ -95,18 +94,20 @@ impl BinEntries {
deno_npm::registry::NpmPackageVersionBinEntry::String(script) => {
let name = default_bin_name(package);
if !seen.insert(name) {
already_seen(package_path, script)?;
// we already set up a bin entry with this name
continue;
}
f(package, package_path, name, script)?;
new(package, package_path, name, script)?;
}
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
for (name, script) in entries {
if !seen.insert(name) {
already_seen(package_path, script)?;
// we already set up a bin entry with this name
continue;
}
f(package, package_path, name, script)?;
new(package, package_path, name, script)?;
}
}
}
@ -117,23 +118,27 @@ impl BinEntries {
}
/// Collect the bin entries into a vec of (name, script path)
pub(super) fn into_bin_files(
pub fn into_bin_files(
mut self,
snapshot: &NpmResolutionSnapshot,
) -> Vec<(String, PathBuf)> {
let mut bins = Vec::new();
self
.for_each_entry(snapshot, |_, package_path, name, script| {
bins.push((name.to_string(), package_path.join(script)));
Ok(())
})
.for_each_entry(
snapshot,
|_, _| Ok(()),
|_, package_path, name, script| {
bins.push((name.to_string(), package_path.join(script)));
Ok(())
},
)
.unwrap();
bins
}
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub(super) fn finish(
pub fn finish(
mut self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
@ -144,15 +149,26 @@ impl BinEntries {
)?;
}
self.for_each_entry(snapshot, |package, package_path, name, script| {
set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)
})?;
self.for_each_entry(
snapshot,
|_package_path, _script| {
#[cfg(unix)]
{
let path = _package_path.join(_script);
make_executable_if_exists(&path)?;
}
Ok(())
},
|package, package_path, name, script| {
set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)
},
)?;
Ok(())
}
@ -162,8 +178,8 @@ impl BinEntries {
// that has a bin entry, then sort them by depth
fn sort_by_depth(
snapshot: &NpmResolutionSnapshot,
bin_entries: &mut [(NpmResolutionPackage, PathBuf)],
collisions: &mut HashSet<NpmPackageId>,
bin_entries: &mut [(&NpmResolutionPackage, PathBuf)],
collisions: &mut HashSet<&NpmPackageId>,
) {
enum Entry<'a> {
Pkg(&'a NpmPackageId),
@ -217,7 +233,7 @@ fn sort_by_depth(
});
}
pub(super) fn set_up_bin_entry(
pub fn set_up_bin_entry(
package: &NpmResolutionPackage,
bin_name: &str,
#[allow(unused_variables)] bin_script: &str,
@ -259,6 +275,32 @@ fn set_up_bin_shim(
Ok(())
}
#[cfg(unix)]
/// Make the file at `path` executable if it exists.
/// Returns `true` if the file exists, `false` otherwise.
fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
use std::io;
use std::os::unix::fs::PermissionsExt;
let mut perms = match std::fs::metadata(path) {
Ok(metadata) => metadata.permissions(),
Err(err) => {
if err.kind() == io::ErrorKind::NotFound {
return Ok(false);
}
return Err(err.into());
}
};
if perms.mode() & 0o111 == 0 {
// if the original file is not executable, make it executable
perms.set_mode(perms.mode() | 0o111);
std::fs::set_permissions(path, perms).with_context(|| {
format!("Setting permissions on '{}'", path.display())
})?;
}
Ok(true)
}
#[cfg(unix)]
fn symlink_bin_entry(
_package: &NpmResolutionPackage,
@ -272,32 +314,20 @@ fn symlink_bin_entry(
let link = bin_node_modules_dir_path.join(bin_name);
let original = package_path.join(bin_script);
use std::os::unix::fs::PermissionsExt;
let mut perms = match std::fs::metadata(&original) {
Ok(metadata) => metadata.permissions(),
Err(err) => {
if err.kind() == io::ErrorKind::NotFound {
log::warn!(
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"),
bin_name,
package_path.display(),
original.display()
);
return Ok(());
}
return Err(err).with_context(|| {
format!("Can't set up '{}' bin at {}", bin_name, original.display())
});
}
};
if perms.mode() & 0o111 == 0 {
// if the original file is not executable, make it executable
perms.set_mode(perms.mode() | 0o111);
std::fs::set_permissions(&original, perms).with_context(|| {
format!("Setting permissions on '{}'", original.display())
})?;
let found = make_executable_if_exists(&original).with_context(|| {
format!("Can't set up '{}' bin at {}", bin_name, original.display())
})?;
if !found {
log::warn!(
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"),
bin_name,
package_path.display(),
original.display()
);
return Ok(());
}
let original_relative =
crate::util::path::relative_path(bin_node_modules_dir_path, &original)
.unwrap_or(original);

View file

@ -0,0 +1,335 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use super::bin_entries::BinEntries;
use crate::args::LifecycleScriptsConfig;
use deno_core::anyhow::Context;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_runtime::deno_io::FromRawIoHandle;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use std::borrow::Cow;
use std::rc::Rc;
use std::path::Path;
use std::path::PathBuf;
use deno_core::error::AnyError;
use deno_npm::NpmResolutionPackage;
pub trait LifecycleScriptsStrategy {
fn can_run_scripts(&self) -> bool {
true
}
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf;
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, PathBuf)],
) -> Result<(), AnyError>;
fn has_warned(&self, package: &NpmResolutionPackage) -> bool;
fn has_run(&self, package: &NpmResolutionPackage) -> bool;
fn did_run_scripts(
&self,
package: &NpmResolutionPackage,
) -> Result<(), AnyError>;
}
pub struct LifecycleScripts<'a> {
packages_with_scripts: Vec<(&'a NpmResolutionPackage, PathBuf)>,
packages_with_scripts_not_run: Vec<(&'a NpmResolutionPackage, PathBuf)>,
config: &'a LifecycleScriptsConfig,
strategy: Box<dyn LifecycleScriptsStrategy + 'a>,
}
impl<'a> LifecycleScripts<'a> {
pub fn new<T: LifecycleScriptsStrategy + 'a>(
config: &'a LifecycleScriptsConfig,
strategy: T,
) -> Self {
Self {
config,
packages_with_scripts: Vec::new(),
packages_with_scripts_not_run: Vec::new(),
strategy: Box::new(strategy),
}
}
}
fn has_lifecycle_scripts(
package: &NpmResolutionPackage,
package_path: &Path,
) -> bool {
if let Some(install) = package.scripts.get("install") {
// default script
if !is_broken_default_install_script(install, package_path) {
return true;
}
}
package.scripts.contains_key("preinstall")
|| package.scripts.contains_key("postinstall")
}
// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
// but it always fails if the package excludes the `binding.gyp` file when they publish.
// (for example, `fsevents` hits this)
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
}
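For instance, fsevents publishes without its binding.gyp, so its implied install script is treated as broken (sketch; assumes no binding.gyp exists under the made-up path):

fn main() {
  assert!(is_broken_default_install_script(
    "node-gyp rebuild",
    std::path::Path::new("node_modules/fsevents")
  ));
}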
impl<'a> LifecycleScripts<'a> {
fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
if !self.strategy.can_run_scripts() {
return false;
}
use crate::args::PackagesAllowedScripts;
match &self.config.allowed {
PackagesAllowedScripts::All => true,
// TODO: make this more correct
PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
let s = s.strip_prefix("npm:").unwrap_or(s);
s == package_nv.name || s == package_nv.to_string()
}),
PackagesAllowedScripts::None => false,
}
}
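A standalone sketch of that allow-list matching (flag values made up for illustration): entries may be bare names or name@version, optionally prefixed with npm::

fn allow_list_matches(allow_list: &[String], name: &str, nv: &str) -> bool {
  allow_list.iter().any(|s| {
    let s = s.strip_prefix("npm:").unwrap_or(s);
    s == name || s == nv
  })
}

fn main() {
  let allowed = vec!["npm:esbuild".to_string(), "sqlite3@5.1.7".to_string()];
  assert!(allow_list_matches(&allowed, "esbuild", "esbuild@0.20.0"));
  assert!(allow_list_matches(&allowed, "sqlite3", "sqlite3@5.1.7"));
  assert!(!allow_list_matches(&allowed, "sqlite3", "sqlite3@5.1.8"));
}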
/// Register a package for running lifecycle scripts, if applicable.
///
/// `package_path` is the path containing the package's code (its root dir).
/// Metadata about lifecycle scripts (e.g. whether they have already been run)
/// is tracked by the configured strategy.
pub fn add(
&mut self,
package: &'a NpmResolutionPackage,
package_path: Cow<Path>,
) {
if has_lifecycle_scripts(package, &package_path) {
if self.can_run_scripts(&package.id.nv) {
if !self.strategy.has_run(package) {
self
.packages_with_scripts
.push((package, package_path.into_owned()));
}
} else if !self.strategy.has_run(package)
&& (self.config.explicit_install || !self.strategy.has_warned(package))
{
// Skip adding `esbuild` as it is known to work properly without its lifecycle
// script being run, and it's also very popular - any project using Vite would raise warnings.
{
let nv = &package.id.nv;
if nv.name == "esbuild"
&& nv.version >= Version::parse_standard("0.18.0").unwrap()
{
return;
}
}
self
.packages_with_scripts_not_run
.push((package, package_path.into_owned()));
}
}
}
pub fn warn_not_run_scripts(&self) -> Result<(), AnyError> {
if !self.packages_with_scripts_not_run.is_empty() {
self
.strategy
.warn_on_scripts_not_run(&self.packages_with_scripts_not_run)?;
}
Ok(())
}
pub async fn finish(
self,
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: Option<&Path>,
) -> Result<(), AnyError> {
self.warn_not_run_scripts()?;
let get_package_path =
|p: &NpmResolutionPackage| self.strategy.package_path(p);
let mut failed_packages = Vec::new();
if !self.packages_with_scripts.is_empty() {
// get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`)
let base =
resolve_baseline_custom_commands(snapshot, packages, get_package_path)?;
let init_cwd = &self.config.initial_cwd;
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
root_node_modules_dir_path,
);
let mut env_vars = crate::task_runner::real_env_vars();
// we want to pass the current state of npm resolution down to the deno subprocess
// (that may be running as part of the script). we do this with an inherited temp file
//
// SAFETY: we are sharing a single temp file across all of the scripts. the file position
// will be shared among these, which is okay since we run only one script at a time.
// However, if we concurrently run scripts in the future we will
// have to have multiple temp files.
let temp_file_fd =
deno_runtime::ops::process::npm_process_state_tempfile(
process_state.as_bytes(),
).context("failed to create npm process state tempfile for running lifecycle scripts")?;
// SAFETY: fd/handle is valid
let _temp_file =
unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
env_vars.insert(
deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME
.to_string(),
(temp_file_fd as usize).to_string(),
);
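A hypothetical child-side counterpart (unix-only sketch; the real reader lives in deno_runtime and may differ): recover the state from the inherited fd number passed in the env var above:

#[cfg(unix)]
fn read_inherited_npm_state(env_var: &str) -> std::io::Result<String> {
  use std::io::{Read, Seek};
  use std::os::unix::io::FromRawFd;

  let fd: i32 = std::env::var(env_var)
    .expect("set by the parent process")
    .parse()
    .expect("fd is numeric");
  // SAFETY: the parent intentionally leaked this fd to the child
  let mut file = unsafe { std::fs::File::from_raw_fd(fd) };
  // the parent's write leaves the shared offset at EOF, so rewind first
  file.rewind()?;
  let mut state = String::new();
  file.read_to_string(&mut state)?;
  Ok(state)
}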
for (package, package_path) in self.packages_with_scripts {
// add custom commands for binaries from the package's dependencies. this will take precedence over the
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
// correct bin will be used.
let custom_commands = resolve_custom_commands_from_deps(
base.clone(),
package,
snapshot,
get_package_path,
)?;
for script_name in ["preinstall", "install", "postinstall"] {
if let Some(script) = package.scripts.get(script_name) {
if script_name == "install"
&& is_broken_default_install_script(script, &package_path)
{
continue;
}
let exit_code = crate::task_runner::run_task(
crate::task_runner::RunTaskOptions {
task_name: script_name,
script,
cwd: &package_path,
env_vars: env_vars.clone(),
custom_commands: custom_commands.clone(),
init_cwd,
argv: &[],
root_node_modules_dir: root_node_modules_dir_path,
},
)
.await?;
if exit_code != 0 {
log::warn!(
"error: script '{}' in '{}' failed with exit code {}",
script_name,
package.id.nv,
exit_code,
);
failed_packages.push(&package.id.nv);
// assume if earlier script fails, later ones will fail too
break;
}
}
}
self.strategy.did_run_scripts(package)?;
}
}
if failed_packages.is_empty() {
Ok(())
} else {
Err(AnyError::msg(format!(
"failed to run scripts for packages: {}",
failed_packages
.iter()
.map(|p| p.to_string())
.collect::<Vec<_>>()
.join(", ")
)))
}
}
}
// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
fn resolve_baseline_custom_commands(
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
custom_commands
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
custom_commands
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
custom_commands
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
custom_commands.insert(
"node-gyp".to_string(),
Rc::new(crate::task_runner::NodeGypCommand),
);
// TODO: this recreates the bin entries which could be redoing some work, but the ones
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
// doing it for packages that are set up already).
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
resolve_custom_commands_from_packages(
custom_commands,
snapshot,
packages,
get_package_path,
)
}
// resolves the custom commands from an iterator of packages
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands
fn resolve_custom_commands_from_packages<
'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>,
>(
mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot,
packages: P,
get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = BinEntries::new();
for package in packages {
let package_path = get_package_path(package);
if package.bin.is_some() {
bin_entries.add(package, package_path);
}
}
let bins = bin_entries.into_bin_files(snapshot);
for (bin_name, script_path) in bins {
commands.insert(
bin_name.clone(),
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
command_name: bin_name,
path: script_path,
}),
);
}
Ok(commands)
}
// resolves the custom commands from the dependencies of a package
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands.
fn resolve_custom_commands_from_deps(
baseline: crate::task_runner::TaskCustomCommands,
package: &NpmResolutionPackage,
snapshot: &NpmResolutionSnapshot,
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
resolve_custom_commands_from_packages(
baseline,
snapshot,
package
.dependencies
.values()
.map(|id| snapshot.package_from_id(id).unwrap()),
get_package_path,
)
}

View file

@ -2,16 +2,19 @@
//! Code for global npm cache resolution.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use crate::colors;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
@ -19,10 +22,14 @@ use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;
use super::super::cache::NpmCache;
use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution;
use super::common::cache_packages;
use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
use super::common::NpmPackageFsResolver;
use super::common::RegistryReadPermissionChecker;
@ -34,6 +41,7 @@ pub struct GlobalNpmPackageResolver {
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker,
lifecycle_scripts: LifecycleScriptsConfig,
}
impl GlobalNpmPackageResolver {
@ -43,6 +51,7 @@ impl GlobalNpmPackageResolver {
tarball_cache: Arc<TarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self {
Self {
registry_read_permission_checker: RegistryReadPermissionChecker::new(
@ -53,6 +62,7 @@ impl GlobalNpmPackageResolver {
tarball_cache,
resolution,
system_info,
lifecycle_scripts,
}
}
}
@ -63,7 +73,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
self.cache.root_dir_url()
}
fn node_modules_path(&self) -> Option<&PathBuf> {
fn node_modules_path(&self) -> Option<&Path> {
None
}
@ -149,8 +159,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
let package_partitions = self
.resolution
.all_system_packages_partitioned(&self.system_info);
cache_packages(package_partitions.packages, &self.tarball_cache).await?;
cache_packages(&package_partitions.packages, &self.tarball_cache).await?;
// create the copy package folders
for copy in package_partitions.copy_packages {
@ -159,6 +168,18 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
.ensure_copy_package(&copy.get_package_cache_folder_id())?;
}
let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new(
&self.lifecycle_scripts,
GlobalLifecycleScripts::new(self, &self.lifecycle_scripts.root_dir),
);
for package in &package_partitions.packages {
let package_folder = self.cache.package_folder_for_nv(&package.id.nv);
lifecycle_scripts.add(package, Cow::Borrowed(&package_folder));
}
lifecycle_scripts.warn_not_run_scripts()?;
Ok(())
}
@ -172,3 +193,78 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
.ensure_registry_read_permission(permissions, path)
}
}
struct GlobalLifecycleScripts<'a> {
resolver: &'a GlobalNpmPackageResolver,
path_hash: u64,
}
impl<'a> GlobalLifecycleScripts<'a> {
fn new(resolver: &'a GlobalNpmPackageResolver, root_dir: &Path) -> Self {
let mut hasher = FastInsecureHasher::new_without_deno_version();
hasher.write(root_dir.to_string_lossy().as_bytes());
let path_hash = hasher.finish();
Self {
resolver,
path_hash,
}
}
fn warned_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
self
.package_path(package)
.join(format!(".scripts-warned-{}", self.path_hash))
}
}
impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
for GlobalLifecycleScripts<'a>
{
fn can_run_scripts(&self) -> bool {
false
}
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf {
self.resolver.cache.package_folder_for_nv(&package.id.nv)
}
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, PathBuf)],
) -> std::result::Result<(), deno_core::anyhow::Error> {
log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
for (package, _) in packages {
log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv)));
}
log::warn!("");
log::warn!(
"┠─ {}",
colors::italic("This may cause the packages to not work correctly.")
);
log::warn!("┠─ {}", colors::italic("Lifecycle scripts are only supported when using a `node_modules` directory."));
log::warn!(
"┠─ {}",
colors::italic("Enable it in your deno config file:")
);
log::warn!("┖─ {}", colors::bold("\"nodeModulesDir\": \"auto\""));
for (package, _) in packages {
std::fs::write(self.warned_scripts_file(package), "")?;
}
Ok(())
}
fn did_run_scripts(
&self,
_package: &NpmResolutionPackage,
) -> std::result::Result<(), deno_core::anyhow::Error> {
Ok(())
}
fn has_warned(&self, package: &NpmResolutionPackage) -> bool {
self.warned_scripts_file(package).exists()
}
fn has_run(&self, _package: &NpmResolutionPackage) -> bool {
false
}
}

View file

@ -2,8 +2,6 @@
//! Code for local node_modules resolution.
mod bin_entries;
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
@ -18,20 +16,22 @@ use std::rc::Rc;
use std::sync::Arc;
use crate::args::LifecycleScriptsConfig;
use crate::args::PackagesAllowedScripts;
use crate::colors;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow;
use deno_cache_dir::npm::mixed_case_package_name_decode;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodePermissions;
use deno_semver::package::PackageNv;
@ -44,8 +44,6 @@ use serde::Serialize;
use crate::args::NpmInstallDepsProvider;
use crate::cache::CACHE_PERM;
use crate::npm::cache_dir::mixed_case_package_name_decode;
use crate::npm::cache_dir::mixed_case_package_name_encode;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use crate::util::fs::clone_dir_recursive;
@ -161,8 +159,8 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
&self.root_node_modules_url
}
fn node_modules_path(&self) -> Option<&PathBuf> {
Some(&self.root_node_modules_path)
fn node_modules_path(&self) -> Option<&Path> {
Some(self.root_node_modules_path.as_ref())
}
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
@ -270,77 +268,10 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
}
}
// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
fn resolve_baseline_custom_commands(
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
custom_commands
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
custom_commands
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
custom_commands
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
custom_commands.insert(
"node-gyp".to_string(),
Rc::new(crate::task_runner::NodeGypCommand),
);
// TODO: this recreates the bin entries which could be redoing some work, but the ones
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
// doing it for packages that are set up already).
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
resolve_custom_commands_from_packages(
custom_commands,
snapshot,
packages,
local_registry_dir,
)
}
// resolves the custom commands from an iterator of packages
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands
fn resolve_custom_commands_from_packages<
'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>,
>(
mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot,
packages: P,
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = bin_entries::BinEntries::new();
for package in packages {
let package_path =
local_node_modules_package_path(local_registry_dir, package);
if package.bin.is_some() {
bin_entries.add(package.clone(), package_path);
}
}
let bins = bin_entries.into_bin_files(snapshot);
for (bin_name, script_path) in bins {
commands.insert(
bin_name.clone(),
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
command_name: bin_name,
path: script_path,
}),
);
}
Ok(commands)
}
fn local_node_modules_package_path(
/// `node_modules/.deno/<package>/node_modules/<package_name>`
///
/// Where the actual package is stored.
fn local_node_modules_package_contents_path(
local_registry_dir: &Path,
package: &NpmResolutionPackage,
) -> PathBuf {
@ -352,62 +283,6 @@ fn local_node_modules_package_path(
.join(&package.id.nv.name)
}
// resolves the custom commands from the dependencies of a package
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands.
fn resolve_custom_commands_from_deps(
baseline: crate::task_runner::TaskCustomCommands,
package: &NpmResolutionPackage,
snapshot: &NpmResolutionSnapshot,
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
resolve_custom_commands_from_packages(
baseline,
snapshot,
package
.dependencies
.values()
.map(|id| snapshot.package_from_id(id).unwrap()),
local_registry_dir,
)
}
fn can_run_scripts(
allow_scripts: &PackagesAllowedScripts,
package_nv: &PackageNv,
) -> bool {
match allow_scripts {
PackagesAllowedScripts::All => true,
// TODO: make this more correct
PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
let s = s.strip_prefix("npm:").unwrap_or(s);
s == package_nv.name || s == package_nv.to_string()
}),
PackagesAllowedScripts::None => false,
}
}
// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
// but it always fails if the package excludes the `binding.gyp` file when they publish.
// (for example, `fsevents` hits this)
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
}
fn has_lifecycle_scripts(
package: &NpmResolutionPackage,
package_path: &Path,
) -> bool {
if let Some(install) = package.scripts.get("install") {
// default script
if !is_broken_default_install_script(install, package_path) {
return true;
}
}
package.scripts.contains_key("preinstall")
|| package.scripts.contains_key("postinstall")
}
/// Creates a pnpm style folder structure.
#[allow(clippy::too_many_arguments)]
async fn sync_resolution_with_fs(
@ -458,9 +333,16 @@ async fn sync_resolution_with_fs(
let mut cache_futures = FuturesUnordered::new();
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
HashMap::with_capacity(package_partitions.packages.len());
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
let mut packages_with_scripts = Vec::with_capacity(2);
let mut packages_with_scripts_not_run = Vec::new();
let bin_entries =
Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new()));
let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new(
lifecycle_scripts,
LocalLifecycleScripts {
deno_local_registry_dir: &deno_local_registry_dir,
},
);
let packages_with_deprecation_warnings = Arc::new(Mutex::new(Vec::new()));
for package in &package_partitions.packages {
if let Some(current_pkg) =
newest_packages_by_name.get_mut(&package.id.nv.name)
@ -487,6 +369,8 @@ async fn sync_resolution_with_fs(
let folder_path = folder_path.clone();
let bin_entries_to_setup = bin_entries.clone();
let packages_with_deprecation_warnings =
packages_with_deprecation_warnings.clone();
cache_futures.push(async move {
tarball_cache
.ensure_package(&package.id.nv, &package.dist)
@ -513,18 +397,13 @@ async fn sync_resolution_with_fs(
.await??;
if package.bin.is_some() {
bin_entries_to_setup
.borrow_mut()
.add(package.clone(), package_path);
bin_entries_to_setup.borrow_mut().add(package, package_path);
}
if let Some(deprecated) = &package.deprecated {
log::info!(
"{} {:?} is deprecated: {}",
crate::colors::yellow("Warning"),
package.id,
crate::colors::gray(deprecated),
);
packages_with_deprecation_warnings
.lock()
.push((package.id.clone(), deprecated.clone()));
}
// finally stop showing the progress bar
@ -536,21 +415,7 @@ async fn sync_resolution_with_fs(
let sub_node_modules = folder_path.join("node_modules");
let package_path =
join_package_name(&sub_node_modules, &package.id.nv.name);
if has_lifecycle_scripts(package, &package_path) {
let scripts_run = folder_path.join(".scripts-run");
let has_warned = folder_path.join(".scripts-warned");
if can_run_scripts(&lifecycle_scripts.allowed, &package.id.nv) {
if !scripts_run.exists() {
packages_with_scripts.push((
package.clone(),
package_path,
scripts_run,
));
}
} else if !scripts_run.exists() && !has_warned.exists() {
packages_with_scripts_not_run.push((has_warned, package.id.nv.clone()));
}
}
lifecycle_scripts.add(package, package_path.into());
}
while let Some(result) = cache_futures.next().await {
@ -787,81 +652,40 @@ async fn sync_resolution_with_fs(
}
}
if !packages_with_scripts.is_empty() {
// get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`)
let base = resolve_baseline_custom_commands(
snapshot,
&package_partitions.packages,
&deno_local_registry_dir,
)?;
let init_cwd = lifecycle_scripts.initial_cwd.as_deref().unwrap();
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
Some(root_node_modules_dir_path),
);
let mut env_vars = crate::task_runner::real_env_vars();
env_vars.insert(
crate::args::NPM_RESOLUTION_STATE_ENV_VAR_NAME.to_string(),
process_state,
);
for (package, package_path, scripts_run_path) in packages_with_scripts {
// add custom commands for binaries from the package's dependencies. this will take precedence over the
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
// correct bin will be used.
let custom_commands = resolve_custom_commands_from_deps(
base.clone(),
&package,
snapshot,
&deno_local_registry_dir,
)?;
for script_name in ["preinstall", "install", "postinstall"] {
if let Some(script) = package.scripts.get(script_name) {
if script_name == "install"
&& is_broken_default_install_script(script, &package_path)
{
continue;
}
let exit_code =
crate::task_runner::run_task(crate::task_runner::RunTaskOptions {
task_name: script_name,
script,
cwd: &package_path,
env_vars: env_vars.clone(),
custom_commands: custom_commands.clone(),
init_cwd,
argv: &[],
root_node_modules_dir: Some(root_node_modules_dir_path),
})
.await?;
if exit_code != 0 {
anyhow::bail!(
"script '{}' in '{}' failed with exit code {}",
script_name,
package.id.nv,
exit_code,
);
}
{
let packages_with_deprecation_warnings =
packages_with_deprecation_warnings.lock();
if !packages_with_deprecation_warnings.is_empty() {
log::warn!(
"{} The following packages are deprecated:",
colors::yellow("Warning")
);
let len = packages_with_deprecation_warnings.len();
for (idx, (package_id, msg)) in
packages_with_deprecation_warnings.iter().enumerate()
{
if idx != len - 1 {
log::warn!(
"┠─ {}",
colors::gray(format!("npm:{:?} ({})", package_id, msg))
);
} else {
log::warn!(
"┖─ {}",
colors::gray(format!("npm:{:?} ({})", package_id, msg))
);
}
}
fs::write(scripts_run_path, "")?;
}
}
if !packages_with_scripts_not_run.is_empty() {
let packages = packages_with_scripts_not_run
.iter()
.map(|(_, p)| format!("npm:{p}"))
.collect::<Vec<_>>()
.join(", ");
log::warn!("{} Packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed.
This may cause the packages to not work correctly. To run them, use the `--allow-scripts` flag with `deno cache` or `deno install`
(e.g. `deno cache --allow-scripts=pkg1,pkg2 <entrypoint>` or `deno install --allow-scripts=pkg1,pkg2`):\n {packages}", crate::colors::yellow("Warning"));
for (scripts_warned_path, _) in packages_with_scripts_not_run {
let _ignore_err = fs::write(scripts_warned_path, "");
}
}
lifecycle_scripts
.finish(
snapshot,
&package_partitions.packages,
Some(root_node_modules_dir_path),
)
.await?;
setup_cache.save();
drop(single_process_lock);
@ -870,6 +694,98 @@ async fn sync_resolution_with_fs(
Ok(())
}
/// `node_modules/.deno/<package>/`
fn local_node_modules_package_folder(
local_registry_dir: &Path,
package: &NpmResolutionPackage,
) -> PathBuf {
local_registry_dir.join(get_package_folder_id_folder_name(
&package.get_package_cache_folder_id(),
))
}
struct LocalLifecycleScripts<'a> {
deno_local_registry_dir: &'a Path,
}
impl<'a> LocalLifecycleScripts<'a> {
/// `node_modules/.deno/<package>/.scripts-run`
fn ran_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
local_node_modules_package_folder(self.deno_local_registry_dir, package)
.join(".scripts-run")
}
/// `node_modules/.deno/<package>/.scripts-warned`
fn warned_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
local_node_modules_package_folder(self.deno_local_registry_dir, package)
.join(".scripts-warned")
}
}
impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
for LocalLifecycleScripts<'a>
{
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf {
local_node_modules_package_contents_path(
self.deno_local_registry_dir,
package,
)
}
fn did_run_scripts(
&self,
package: &NpmResolutionPackage,
) -> std::result::Result<(), deno_core::anyhow::Error> {
std::fs::write(self.ran_scripts_file(package), "")?;
Ok(())
}
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, std::path::PathBuf)],
) -> Result<(), AnyError> {
if !packages.is_empty() {
log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
for (package, _) in packages {
log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv)));
}
log::warn!("");
log::warn!(
"┠─ {}",
colors::italic("This may cause the packages to not work correctly.")
);
log::warn!("┖─ {}", colors::italic("To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`:"));
let packages_comma_separated = packages
.iter()
.map(|(p, _)| format!("npm:{}", p.id.nv))
.collect::<Vec<_>>()
.join(",");
log::warn!(
" {}",
colors::bold(format!(
"deno install --allow-scripts={}",
packages_comma_separated
))
);
for (package, _) in packages {
let _ignore_err = fs::write(self.warned_scripts_file(package), "");
}
}
Ok(())
}
fn has_warned(&self, package: &NpmResolutionPackage) -> bool {
self.warned_scripts_file(package).exists()
}
fn has_run(&self, package: &NpmResolutionPackage) -> bool {
self.ran_scripts_file(package).exists()
}
}
// Uses BTreeMap to preserve the ordering of the elements in memory, to ensure
// the file generated from this data structure is deterministic.
// See: https://github.com/denoland/deno/issues/24479
@ -1004,20 +920,6 @@ impl SetupCache {
}
}
/// Normalizes a package name for use at `node_modules/.deno/<pkg-name>@<version>[_<copy_index>]`
pub fn normalize_pkg_name_for_node_modules_deno_folder(name: &str) -> Cow<str> {
let name = if name.to_lowercase() == name {
Cow::Borrowed(name)
} else {
Cow::Owned(format!("_{}", mixed_case_package_name_encode(name)))
};
if name.starts_with('@') {
name.replace('/', "+").into()
} else {
name
}
}
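
Illustrative input/output pairs for this normalization, hedged because the mixed-case encoding comes from `mixed_case_package_name_encode`, which is not shown here:

// Already-lowercase names are borrowed unchanged:
//   normalize_pkg_name_for_node_modules_deno_folder("left-pad") == "left-pad"
// Scoped packages swap '/' for '+' so the result is a single folder name:
//   normalize_pkg_name_for_node_modules_deno_folder("@std/path") == "@std+path"
// Mixed-case names become "_" + mixed_case_package_name_encode(name),
// presumably to avoid collisions on case-insensitive filesystems.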
fn get_package_folder_id_folder_name(
folder_id: &NpmPackageCacheFolderId,
) -> String {

View file

@ -15,7 +15,6 @@ use crate::args::NpmInstallDepsProvider;
use crate::util::progress_bar::ProgressBar;
pub use self::common::NpmPackageFsResolver;
pub use self::local::normalize_pkg_name_for_node_modules_deno_folder;
use self::global::GlobalNpmPackageResolver;
use self::local::LocalNpmPackageResolver;
@ -54,6 +53,7 @@ pub fn create_npm_fs_resolver(
tarball_cache,
resolution,
system_info,
lifecycle_scripts,
)),
}
}

View file

@ -1,10 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
mod byonm;
mod cache_dir;
mod common;
mod managed;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
@ -13,8 +13,9 @@ use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmNpmResolver;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::NpmResolver;
@ -22,16 +23,15 @@ use node_resolver::NpmResolver;
use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher;
pub use self::byonm::ByonmCliNpmResolver;
pub use self::byonm::CliNpmResolverByonmCreateOptions;
pub use self::cache_dir::NpmCacheDir;
pub use self::byonm::CliByonmNpmResolver;
pub use self::byonm::CliByonmNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver;
pub enum CliNpmResolverCreateOptions {
Managed(CliNpmResolverManagedCreateOptions),
Byonm(CliNpmResolverByonmCreateOptions),
Byonm(CliByonmNpmResolverCreateOptions),
}
pub async fn create_cli_npm_resolver_for_lsp(
@ -42,7 +42,7 @@ pub async fn create_cli_npm_resolver_for_lsp(
Managed(options) => {
managed::create_managed_npm_resolver_for_lsp(options).await
}
Byonm(options) => byonm::create_byonm_npm_resolver(options),
Byonm(options) => Arc::new(ByonmNpmResolver::new(options)),
}
}
@ -52,14 +52,14 @@ pub async fn create_cli_npm_resolver(
use CliNpmResolverCreateOptions::*;
match options {
Managed(options) => managed::create_managed_npm_resolver(options).await,
Byonm(options) => Ok(byonm::create_byonm_npm_resolver(options)),
Byonm(options) => Ok(Arc::new(ByonmNpmResolver::new(options))),
}
}
pub enum InnerCliNpmResolverRef<'a> {
Managed(&'a ManagedCliNpmResolver),
#[allow(dead_code)]
Byonm(&'a ByonmCliNpmResolver),
Byonm(&'a CliByonmNpmResolver),
}
pub trait CliNpmResolver: NpmResolver {
@ -80,14 +80,14 @@ pub trait CliNpmResolver: NpmResolver {
}
}
fn as_byonm(&self) -> Option<&ByonmCliNpmResolver> {
fn as_byonm(&self) -> Option<&CliByonmNpmResolver> {
match self.as_inner() {
InnerCliNpmResolverRef::Managed(_) => None,
InnerCliNpmResolverRef::Byonm(inner) => Some(inner),
}
}
fn root_node_modules_path(&self) -> Option<&PathBuf>;
fn root_node_modules_path(&self) -> Option<&Path>;
fn resolve_pkg_folder_from_deno_module_req(
&self,

View file

@ -2,7 +2,6 @@
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::time;
use deno_core::error::generic_error;
@ -12,9 +11,7 @@ use deno_core::op2;
use deno_core::v8;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_runtime::deno_permissions::create_child_permissions;
use deno_runtime::deno_permissions::ChildPermissionsArg;
use deno_runtime::deno_permissions::PermissionDescriptorParser;
use deno_runtime::deno_permissions::PermissionsContainer;
use tokio::sync::mpsc::UnboundedSender;
use uuid::Uuid;
@ -61,19 +58,8 @@ pub fn op_pledge_test_permissions(
#[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> {
let token = Uuid::new_v4();
let permission_desc_parser = state
.borrow::<Arc<dyn PermissionDescriptorParser>>()
.clone();
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = {
let mut parent_permissions = parent_permissions.inner.lock();
let perms = create_child_permissions(
permission_desc_parser.as_ref(),
&mut parent_permissions,
args,
)?;
PermissionsContainer::new(permission_desc_parser, perms)
};
let worker_permissions = parent_permissions.create_child_permissions(args)?;
let parent_permissions = parent_permissions.clone();
if state.try_take::<PermissionsHolder>().is_some() {
@ -83,7 +69,6 @@ pub fn op_pledge_test_permissions(
state.put::<PermissionsHolder>(PermissionsHolder(token, parent_permissions));
// NOTE: This call overrides current permission set for the worker
state.put(worker_permissions.inner.clone());
state.put::<PermissionsContainer>(worker_permissions);
Ok(token)
@ -100,7 +85,6 @@ pub fn op_restore_test_permissions(
}
let permissions = permissions_holder.1;
state.put(permissions.inner.clone());
state.put::<PermissionsContainer>(permissions);
Ok(())
} else {

View file

@ -16,13 +16,10 @@ use deno_core::op2;
use deno_core::v8;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_runtime::deno_permissions::create_child_permissions;
use deno_runtime::deno_permissions::ChildPermissionsArg;
use deno_runtime::deno_permissions::PermissionDescriptorParser;
use deno_runtime::deno_permissions::PermissionsContainer;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use uuid::Uuid;
deno_core::extension!(deno_test,
@ -56,19 +53,8 @@ pub fn op_pledge_test_permissions(
#[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> {
let token = Uuid::new_v4();
let permission_desc_parser = state
.borrow::<Arc<dyn PermissionDescriptorParser>>()
.clone();
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = {
let mut parent_permissions = parent_permissions.inner.lock();
let perms = create_child_permissions(
permission_desc_parser.as_ref(),
&mut parent_permissions,
args,
)?;
PermissionsContainer::new(permission_desc_parser, perms)
};
let worker_permissions = parent_permissions.create_child_permissions(args)?;
let parent_permissions = parent_permissions.clone();
if state.try_take::<PermissionsHolder>().is_some() {
@ -77,7 +63,6 @@ pub fn op_pledge_test_permissions(
state.put::<PermissionsHolder>(PermissionsHolder(token, parent_permissions));
// NOTE: This call overrides current permission set for the worker
state.put(worker_permissions.inner.clone());
state.put::<PermissionsContainer>(worker_permissions);
Ok(token)
@ -94,7 +79,6 @@ pub fn op_restore_test_permissions(
}
let permissions = permissions_holder.1;
state.put(permissions.inner.clone());
state.put::<PermissionsContainer>(permissions);
Ok(())
} else {

View file

@ -22,12 +22,13 @@ use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_resolver::sloppy_imports::SloppyImportsResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use node_resolver::errors::ClosestPkgJsonError;
@ -60,13 +61,52 @@ pub struct ModuleCodeStringSource {
pub media_type: MediaType,
}
#[derive(Debug, Clone)]
pub struct CliDenoResolverFs(pub Arc<dyn FileSystem>);
impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String> {
self
.0
.read_text_file_lossy_sync(path, None)
.map_err(|e| e.into_io_error())
}
fn realpath_sync(&self, path: &Path) -> std::io::Result<PathBuf> {
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
}
fn is_dir_sync(&self, path: &Path) -> bool {
self.0.is_dir_sync(path)
}
fn read_dir_sync(
&self,
dir_path: &Path,
) -> std::io::Result<Vec<deno_resolver::fs::DirEntry>> {
self
.0
.read_dir_sync(dir_path)
.map(|entries| {
entries
.into_iter()
.map(|e| deno_resolver::fs::DirEntry {
name: e.name,
is_file: e.is_file,
is_directory: e.is_directory,
})
.collect::<Vec<_>>()
})
.map_err(|err| err.into_io_error())
}
}
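
`CliDenoResolverFs` is a newtype that adapts the CLI's `Arc<dyn FileSystem>` to the `deno_resolver::fs::DenoResolverFs` trait, converting `FsError` into `std::io::Error` at the boundary. A hedged usage sketch, assuming `deno_fs::RealFs` as used elsewhere in this diff:

use std::path::Path;
use std::sync::Arc;

use deno_resolver::fs::DenoResolverFs; // bring the trait methods into scope
use deno_runtime::deno_fs;

fn example() -> std::io::Result<()> {
    let fs = CliDenoResolverFs(Arc::new(deno_fs::RealFs));
    // Trait methods surface plain std::io errors, not deno_fs::FsError:
    let text = fs.read_to_string_lossy(Path::new("deno.json"))?;
    println!("read {} bytes; src is dir: {}", text.len(), fs.is_dir_sync(Path::new("src")));
    Ok(())
}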
#[derive(Debug)]
pub struct CliNodeResolver {
cjs_resolutions: Arc<CjsResolutionStore>,
fs: Arc<dyn deno_fs::FileSystem>,
node_resolver: Arc<NodeResolver>,
// todo(dsherret): remove this pub(crate)
pub(crate) npm_resolver: Arc<dyn CliNpmResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
}
impl CliNodeResolver {
@ -421,13 +461,16 @@ impl CjsResolutionStore {
}
}
pub type CliSloppyImportsResolver =
SloppyImportsResolver<SloppyImportsCachedFs>;
/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug)]
pub struct CliGraphResolver {
node_resolver: Option<Arc<CliNodeResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Arc<WorkspaceResolver>,
maybe_default_jsx_import_source: Option<String>,
maybe_default_jsx_import_source_types: Option<String>,
@ -441,7 +484,7 @@ pub struct CliGraphResolver {
pub struct CliGraphResolverOptions<'a> {
pub node_resolver: Option<Arc<CliNodeResolver>>,
pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
pub sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub workspace_resolver: Arc<WorkspaceResolver>,
pub bare_node_builtins_enabled: bool,
pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
@ -565,7 +608,15 @@ impl Resolver for CliGraphResolver {
if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
Ok(
sloppy_imports_resolver
.resolve(&specifier, mode)
.resolve(
&specifier,
match mode {
ResolutionMode::Execution => {
SloppyImportsResolutionMode::Execution
}
ResolutionMode::Types => SloppyImportsResolutionMode::Types,
},
)
.map(|s| s.into_specifier())
.unwrap_or(specifier),
)
@ -847,96 +898,18 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SloppyImportsFsEntry {
File,
Dir,
}
impl SloppyImportsFsEntry {
pub fn from_fs_stat(
stat: &deno_runtime::deno_io::fs::FsStat,
) -> Option<SloppyImportsFsEntry> {
if stat.is_file {
Some(SloppyImportsFsEntry::File)
} else if stat.is_directory {
Some(SloppyImportsFsEntry::Dir)
} else {
None
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SloppyImportsResolution {
/// Ex. `./file.js` to `./file.ts`
JsToTs(ModuleSpecifier),
/// Ex. `./file` to `./file.ts`
NoExtension(ModuleSpecifier),
/// Ex. `./dir` to `./dir/index.ts`
Directory(ModuleSpecifier),
}
impl SloppyImportsResolution {
pub fn as_specifier(&self) -> &ModuleSpecifier {
match self {
Self::JsToTs(specifier) => specifier,
Self::NoExtension(specifier) => specifier,
Self::Directory(specifier) => specifier,
}
}
pub fn into_specifier(self) -> ModuleSpecifier {
match self {
Self::JsToTs(specifier) => specifier,
Self::NoExtension(specifier) => specifier,
Self::Directory(specifier) => specifier,
}
}
pub fn as_suggestion_message(&self) -> String {
format!("Maybe {}", self.as_base_message())
}
pub fn as_quick_fix_message(&self) -> String {
let message = self.as_base_message();
let mut chars = message.chars();
format!(
"{}{}.",
chars.next().unwrap().to_uppercase(),
chars.as_str()
)
}
fn as_base_message(&self) -> String {
match self {
SloppyImportsResolution::JsToTs(specifier) => {
let media_type = MediaType::from_specifier(specifier);
format!("change the extension to '{}'", media_type.as_ts_extension())
}
SloppyImportsResolution::NoExtension(specifier) => {
let media_type = MediaType::from_specifier(specifier);
format!("add a '{}' extension", media_type.as_ts_extension())
}
SloppyImportsResolution::Directory(specifier) => {
let file_name = specifier
.path()
.rsplit_once('/')
.map(|(_, file_name)| file_name)
.unwrap_or(specifier.path());
format!("specify path to '{}' file in directory instead", file_name)
}
}
}
}
#[derive(Debug)]
pub struct SloppyImportsResolver {
fs: Arc<dyn FileSystem>,
cache: Option<DashMap<PathBuf, Option<SloppyImportsFsEntry>>>,
pub struct SloppyImportsCachedFs {
fs: Arc<dyn deno_fs::FileSystem>,
cache: Option<
DashMap<
PathBuf,
Option<deno_resolver::sloppy_imports::SloppyImportsFsEntry>,
>,
>,
}
impl SloppyImportsResolver {
impl SloppyImportsCachedFs {
pub fn new(fs: Arc<dyn FileSystem>) -> Self {
Self {
fs,
@ -947,409 +920,34 @@ impl SloppyImportsResolver {
pub fn new_without_stat_cache(fs: Arc<dyn FileSystem>) -> Self {
Self { fs, cache: None }
}
}
pub fn resolve(
impl deno_resolver::sloppy_imports::SloppyImportResolverFs
for SloppyImportsCachedFs
{
fn stat_sync(
&self,
specifier: &ModuleSpecifier,
mode: ResolutionMode,
) -> Option<SloppyImportsResolution> {
fn path_without_ext(
path: &Path,
media_type: MediaType,
) -> Option<Cow<str>> {
let old_path_str = path.to_string_lossy();
match media_type {
MediaType::Unknown => Some(old_path_str),
_ => old_path_str
.strip_suffix(media_type.as_ts_extension())
.map(|s| Cow::Owned(s.to_string())),
}
}
fn media_types_to_paths(
path_no_ext: &str,
original_media_type: MediaType,
probe_media_type_types: Vec<MediaType>,
reason: SloppyImportsResolutionReason,
) -> Vec<(PathBuf, SloppyImportsResolutionReason)> {
probe_media_type_types
.into_iter()
.filter(|media_type| *media_type != original_media_type)
.map(|media_type| {
(
PathBuf::from(format!(
"{}{}",
path_no_ext,
media_type.as_ts_extension()
)),
reason,
)
})
.collect::<Vec<_>>()
}
if specifier.scheme() != "file" {
return None;
}
let path = specifier_to_file_path(specifier).ok()?;
#[derive(Clone, Copy)]
enum SloppyImportsResolutionReason {
JsToTs,
NoExtension,
Directory,
}
let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> =
match self.stat_sync(&path) {
Some(SloppyImportsFsEntry::File) => {
if mode.is_types() {
let media_type = MediaType::from_specifier(specifier);
// attempt to resolve the .d.ts file before the .js file
let probe_media_type_types = match media_type {
MediaType::JavaScript => {
vec![(MediaType::Dts), MediaType::JavaScript]
}
MediaType::Mjs => {
vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs]
}
MediaType::Cjs => {
vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs]
}
_ => return None,
};
let path_no_ext = path_without_ext(&path, media_type)?;
media_types_to_paths(
&path_no_ext,
media_type,
probe_media_type_types,
SloppyImportsResolutionReason::JsToTs,
)
} else {
return None;
}
}
entry @ None | entry @ Some(SloppyImportsFsEntry::Dir) => {
let media_type = MediaType::from_specifier(specifier);
let probe_media_type_types = match media_type {
MediaType::JavaScript => (
if mode.is_types() {
vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts]
} else {
vec![MediaType::TypeScript, MediaType::Tsx]
},
SloppyImportsResolutionReason::JsToTs,
),
MediaType::Jsx => {
(vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs)
}
MediaType::Mjs => (
if mode.is_types() {
vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts]
} else {
vec![MediaType::Mts]
},
SloppyImportsResolutionReason::JsToTs,
),
MediaType::Cjs => (
if mode.is_types() {
vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts]
} else {
vec![MediaType::Cts]
},
SloppyImportsResolutionReason::JsToTs,
),
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Tsx
| MediaType::Json
| MediaType::Wasm
| MediaType::TsBuildInfo
| MediaType::SourceMap => {
return None;
}
MediaType::Unknown => (
if mode.is_types() {
vec![
MediaType::TypeScript,
MediaType::Tsx,
MediaType::Mts,
MediaType::Dts,
MediaType::Dmts,
MediaType::Dcts,
MediaType::JavaScript,
MediaType::Jsx,
MediaType::Mjs,
]
} else {
vec![
MediaType::TypeScript,
MediaType::JavaScript,
MediaType::Tsx,
MediaType::Jsx,
MediaType::Mts,
MediaType::Mjs,
]
},
SloppyImportsResolutionReason::NoExtension,
),
};
let mut probe_paths = match path_without_ext(&path, media_type) {
Some(path_no_ext) => media_types_to_paths(
&path_no_ext,
media_type,
probe_media_type_types.0,
probe_media_type_types.1,
),
None => vec![],
};
if matches!(entry, Some(SloppyImportsFsEntry::Dir)) {
// try to resolve at the index file
if mode.is_types() {
probe_paths.push((
path.join("index.ts"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.mts"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.d.ts"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.d.mts"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.js"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.mjs"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.tsx"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.jsx"),
SloppyImportsResolutionReason::Directory,
));
} else {
probe_paths.push((
path.join("index.ts"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.mts"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.tsx"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.js"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.mjs"),
SloppyImportsResolutionReason::Directory,
));
probe_paths.push((
path.join("index.jsx"),
SloppyImportsResolutionReason::Directory,
));
}
}
if probe_paths.is_empty() {
return None;
}
probe_paths
}
};
for (probe_path, reason) in probe_paths {
if self.stat_sync(&probe_path) == Some(SloppyImportsFsEntry::File) {
if let Ok(specifier) = ModuleSpecifier::from_file_path(probe_path) {
match reason {
SloppyImportsResolutionReason::JsToTs => {
return Some(SloppyImportsResolution::JsToTs(specifier));
}
SloppyImportsResolutionReason::NoExtension => {
return Some(SloppyImportsResolution::NoExtension(specifier));
}
SloppyImportsResolutionReason::Directory => {
return Some(SloppyImportsResolution::Directory(specifier));
}
}
}
}
}
None
}
fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry> {
path: &Path,
) -> Option<deno_resolver::sloppy_imports::SloppyImportsFsEntry> {
if let Some(cache) = &self.cache {
if let Some(entry) = cache.get(path) {
return *entry;
}
}
let entry = self
.fs
.stat_sync(path)
.ok()
.and_then(|stat| SloppyImportsFsEntry::from_fs_stat(&stat));
let entry = self.fs.stat_sync(path).ok().and_then(|stat| {
if stat.is_file {
Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::File)
} else if stat.is_directory {
Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::Dir)
} else {
None
}
});
if let Some(cache) = &self.cache {
cache.insert(path.to_owned(), entry);
}
entry
}
}
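
With filesystem probing behind the `SloppyImportResolverFs` trait, the CLI resolver is just the generic `SloppyImportsResolver` instantiated with this cached fs (the `CliSloppyImportsResolver` alias earlier in this diff). A construction sketch, assuming the generic resolver's `new` takes the fs implementation:

use std::sync::Arc;

use deno_runtime::deno_fs;

fn build_resolver() -> CliSloppyImportsResolver {
    // The DashMap stat cache makes repeated probes cheap; callers that
    // resolve only once can use new_without_stat_cache instead.
    CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(Arc::new(
        deno_fs::RealFs,
    )))
}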
#[cfg(test)]
mod test {
use test_util::TestContext;
use super::*;
#[test]
fn test_unstable_sloppy_imports() {
fn resolve(specifier: &ModuleSpecifier) -> Option<SloppyImportsResolution> {
resolve_with_mode(specifier, ResolutionMode::Execution)
}
fn resolve_types(
specifier: &ModuleSpecifier,
) -> Option<SloppyImportsResolution> {
resolve_with_mode(specifier, ResolutionMode::Types)
}
fn resolve_with_mode(
specifier: &ModuleSpecifier,
mode: ResolutionMode,
) -> Option<SloppyImportsResolution> {
SloppyImportsResolver::new(Arc::new(deno_fs::RealFs))
.resolve(specifier, mode)
}
let context = TestContext::default();
let temp_dir = context.temp_dir().path();
// scenarios like resolving ./example.js to ./example.ts
for (ext_from, ext_to) in [("js", "ts"), ("js", "tsx"), ("mjs", "mts")] {
let ts_file = temp_dir.join(format!("file.{}", ext_to));
ts_file.write("");
assert_eq!(resolve(&ts_file.url_file()), None);
assert_eq!(
resolve(
&temp_dir
.url_dir()
.join(&format!("file.{}", ext_from))
.unwrap()
),
Some(SloppyImportsResolution::JsToTs(ts_file.url_file())),
);
ts_file.remove_file();
}
// no extension scenarios
for ext in ["js", "ts", "js", "tsx", "jsx", "mjs", "mts"] {
let file = temp_dir.join(format!("file.{}", ext));
file.write("");
assert_eq!(
resolve(
&temp_dir
.url_dir()
.join("file") // no ext
.unwrap()
),
Some(SloppyImportsResolution::NoExtension(file.url_file()))
);
file.remove_file();
}
// .ts and .js exists, .js specified (goes to specified)
{
let ts_file = temp_dir.join("file.ts");
ts_file.write("");
let js_file = temp_dir.join("file.js");
js_file.write("");
assert_eq!(resolve(&js_file.url_file()), None);
}
// only js exists, .js specified
{
let js_only_file = temp_dir.join("js_only.js");
js_only_file.write("");
assert_eq!(resolve(&js_only_file.url_file()), None);
assert_eq!(resolve_types(&js_only_file.url_file()), None);
}
// resolving a directory to an index file
{
let routes_dir = temp_dir.join("routes");
routes_dir.create_dir_all();
let index_file = routes_dir.join("index.ts");
index_file.write("");
assert_eq!(
resolve(&routes_dir.url_file()),
Some(SloppyImportsResolution::Directory(index_file.url_file())),
);
}
// both a directory and a file with specifier is present
{
let api_dir = temp_dir.join("api");
api_dir.create_dir_all();
let bar_file = api_dir.join("bar.ts");
bar_file.write("");
let api_file = temp_dir.join("api.ts");
api_file.write("");
assert_eq!(
resolve(&api_dir.url_file()),
Some(SloppyImportsResolution::NoExtension(api_file.url_file())),
);
}
}
#[test]
fn test_sloppy_import_resolution_suggestion_message() {
// directory
assert_eq!(
SloppyImportsResolution::Directory(
ModuleSpecifier::parse("file:///dir/index.js").unwrap()
)
.as_suggestion_message(),
"Maybe specify path to 'index.js' file in directory instead"
);
// no ext
assert_eq!(
SloppyImportsResolution::NoExtension(
ModuleSpecifier::parse("file:///dir/index.mjs").unwrap()
)
.as_suggestion_message(),
"Maybe add a '.mjs' extension"
);
// js to ts
assert_eq!(
SloppyImportsResolution::JsToTs(
ModuleSpecifier::parse("file:///dir/index.mts").unwrap()
)
.as_suggestion_message(),
"Maybe change the extension to '.mts'"
);
}
}

View file

@ -1,6 +1,6 @@
{
"$id": "https://deno.land/x/deno/cli/schemas/config-file.v1.json",
"$schema": "http://json-schema.org/draft-07/schema",
"$schema": "https://json-schema.org/draft/2020-12/schema",
"description": "A JSON representation of a Deno configuration file.",
"required": [],
"title": "Deno configuration file Schema",
@ -9,6 +9,7 @@
"compilerOptions": {
"type": "object",
"description": "Instructs the TypeScript compiler how to compile .ts files.",
"additionalProperties": false,
"properties": {
"allowJs": {
"description": "Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files.",
@ -34,18 +35,32 @@
"default": false,
"markdownDescription": "Enable error reporting in type-checked JavaScript files.\n\nSee more: https://www.typescriptlang.org/tsconfig#checkJs"
},
"exactOptionalPropertyTypes": {
"description": "Differentiate between undefined and not present when type checking",
"emitDecoratorMetadata": {
"description": "Emit design-type metadata for decorated declarations in source files.",
"type": "boolean",
"default": false,
"markdownDescription": "Differentiate between undefined and not present when type checking\n\nSee more: https://www.typescriptlang.org/tsconfig#exactOptionalPropertyTypes"
"deprecated": true,
"markdownDescription": "Emit design-type metadata for decorated declarations in source files.\n\nSee more: https://www.typescriptlang.org/tsconfig/#emitDecoratorMetadata"
},
"exactOptionalPropertyTypes": {
"description": "Interpret optional property types as written, rather than adding 'undefined'.",
"type": "boolean",
"default": false,
"markdownDescription": "Interpret optional property types as written, rather than adding 'undefined'.\n\nSee more: https://www.typescriptlang.org/tsconfig#exactOptionalPropertyTypes"
},
"experimentalDecorators": {
"description": "Enable experimental support for legacy experimental decorators.",
"type": "boolean",
"default": false,
"deprecated": true,
"markdownDescription": "Enable experimental support for legacy experimental decorators.\n\nSee more: https://www.typescriptlang.org/tsconfig#experimentalDecorators"
},
"isolatedDeclarations": {
"description": "Require sufficient annotation on exports so other tools can trivially generate declaration files.",
"type": "boolean",
"default": false,
"markdownDescription": "Require sufficient annotation on exports so other tools can trivially generate declaration files.\n\nSee more: https://www.typescriptlang.org/tsconfig/#isolatedDeclarations"
},
"jsx": {
"description": "Specify what JSX code is generated.",
"default": "react",
@ -90,12 +105,6 @@
},
"markdownDescription": "Specify list of elements that should be exempt from being precompiled when the jsx `precompile` transform is used."
},
"keyofStringsOnly": {
"description": "Make keyof only return strings instead of string, numbers or symbols. Legacy option.",
"type": "boolean",
"default": false,
"markdownDescription": "Make keyof only return strings instead of string, numbers or symbols. Legacy option.\n\nSee more: https://www.typescriptlang.org/tsconfig#keyofStringsOnly"
},
"lib": {
"description": "Specify a set of bundled library declaration files that describe the target runtime environment.",
"type": "array",
@ -127,7 +136,7 @@
"noImplicitOverride": {
"description": "Ensure overriding members in derived classes are marked with an override modifier.",
"type": "boolean",
"default": false,
"default": true,
"markdownDescription": "Ensure overriding members in derived classes are marked with an override modifier.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitOverride"
},
"noImplicitReturns": {
@ -142,23 +151,17 @@
"default": true,
"markdownDescription": "Enable error reporting when `this` is given the type `any`.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitThis"
},
"noImplicitUseStrict": {
"description": "Disable adding 'use strict' directives in emitted JavaScript files.",
"type": "boolean",
"default": true,
"markdownDescription": "Disable adding 'use strict' directives in emitted JavaScript files.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitUseStrict"
},
"noPropertyAccessFromIndexSignature": {
"description": "Enforces using indexed accessors for keys declared using an indexed type.",
"type": "boolean",
"default": false,
"markdownDescription": "Enforces using indexed accessors for keys declared using an indexed type.\n\nSee more: https://www.typescriptlang.org/tsconfig#noPropertyAccessFromIndexSignature"
},
"noStrictGenericChecks": {
"description": "Disable strict checking of generic signatures in function types.",
"noUncheckedIndexedAccess": {
"description": "Add `undefined` to a type when accessed using an index.",
"type": "boolean",
"default": false,
"markdownDescription": "Disable strict checking of generic signatures in function types.\n\nSee more: https://www.typescriptlang.org/tsconfig#noStrictGenericChecks"
"markdownDescription": "Add `undefined` to a type when accessed using an index.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUncheckedIndexedAccess"
},
"noUnusedLocals": {
"description": "Enable error reporting when a local variables aren't read.",
@ -172,12 +175,6 @@
"default": false,
"markdownDescription": "Raise an error when a function parameter isn't read\n\nSee more: https://www.typescriptlang.org/tsconfig#noUnusedParameters"
},
"noUncheckedIndexedAccess": {
"description": "Add `undefined` to a type when accessed using an index.",
"type": "boolean",
"default": false,
"markdownDescription": "Add `undefined` to a type when accessed using an index.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUncheckedIndexedAccess"
},
"strict": {
"description": "Enable all strict type checking options.",
"type": "boolean",
@ -190,41 +187,49 @@
"default": true,
"markdownDescription": "Check that the arguments for `bind`, `call`, and `apply` methods match the original function.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictBindCallApply"
},
"strictBuiltinIteratorReturn": {
"description": "Built-in iterators are instantiated with a `TReturn` type of undefined instead of `any`.",
"type": "boolean",
"default": true,
"markdownDescription": "Built-in iterators are instantiated with a `TReturn` type of undefined instead of `any`.\n\nSee more: https://www.typescriptlang.org/tsconfig/#strictBuiltinIteratorReturn"
},
"strictFunctionTypes": {
"description": "When assigning functions, check to ensure parameters and the return values are subtype-compatible.",
"type": "boolean",
"default": true,
"markdownDescription": "When assigning functions, check to ensure parameters and the return values are subtype-compatible.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictFunctionTypes"
},
"strictPropertyInitialization": {
"description": "Check for class properties that are declared but not set in the constructor.",
"type": "boolean",
"default": true,
"markdownDescription": "Check for class properties that are declared but not set in the constructor.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictPropertyInitialization"
},
"strictNullChecks": {
"description": "When type checking, take into account `null` and `undefined`.",
"type": "boolean",
"default": true,
"markdownDescription": "When type checking, take into account `null` and `undefined`.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictNullChecks"
},
"suppressExcessPropertyErrors": {
"description": "Disable reporting of excess property errors during the creation of object literals.",
"strictPropertyInitialization": {
"description": "Check for class properties that are declared but not set in the constructor.",
"type": "boolean",
"default": false,
"markdownDescription": "Disable reporting of excess property errors during the creation of object literals.\n\nSee more: https://www.typescriptlang.org/tsconfig#suppressExcessPropertyErrors"
"default": true,
"markdownDescription": "Check for class properties that are declared but not set in the constructor.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictPropertyInitialization"
},
"suppressImplicitAnyIndexErrors": {
"description": "Suppress `noImplicitAny` errors when indexing objects that lack index signatures.",
"type": "boolean",
"default": false,
"markdownDescription": "Suppress `noImplicitAny` errors when indexing objects that lack index signatures.\n\nSee more: https://www.typescriptlang.org/tsconfig#suppressImplicitAnyIndexErrors"
"types": {
"description": "Specify type package names to be included without being referenced in a source file.",
"type": "array",
"items": {
"type": "string"
},
"markdownDescription": "Specify type package names to be included without being referenced in a source file.\n\nSee more: https://www.typescriptlang.org/tsconfig/#types"
},
"useUnknownInCatchVariables": {
"description": "Default catch clause variables as `unknown` instead of `any`.",
"type": "boolean",
"default": true,
"markdownDescription": "Default catch clause variables as `unknown` instead of `any`.\n\nSee more: https://www.typescriptlang.org/tsconfig#useUnknownInCatchVariables"
},
"verbatimModuleSyntax": {
"description": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.",
"type": "boolean",
"default": false,
"markdownDescription": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.\n\nSee more: https://www.typescriptlang.org/tsconfig/#verbatimModuleSyntax"
}
}
},
@ -404,8 +409,18 @@
}
},
"nodeModulesDir": {
"description": "Enables or disables the use of a local node_modules folder for npm packages. Alternatively, use the `--node-modules-dir` flag or override the config via `--node-modules-dir=false`. Requires Deno 1.34 or later.",
"type": "boolean"
"oneOf": [
{
"description": "Sets the node_modules management mode for npm packages. Alternatively, use the `--node-modules-dir=<MODE>` flag. Requires Deno 2.0-rc.1 or later.",
"default": "none",
"enum": ["auto", "manual", "none"]
},
{
"description": "Enables or disables the use of a local node_modules folder for npm packages. Alternatively, use the `--node-modules-dir` flag or override the config via `--node-modules-dir=false`. Requires Deno 1.34 or later.",
"type": "boolean",
"deprecated": false
}
]
},
"vendor": {
"description": "Enables or disables the use of a local vendor folder as a local cache for remote modules and node_modules folder for npm packages. Alternatively, use the `--vendor` flag or override the config via `--vendor=false`. Requires Deno 1.36.1 or later.",

View file

@ -427,13 +427,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
binary_name
)
}
ReleaseChannel::Stable => {
_ => {
format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name)
}
_ => bail!(
"`deno compile` current doesn't support {} release channel",
crate::version::DENO_VERSION_INFO.release_channel.name()
),
};
let download_directory = self.deno_dir.dl_folder_path();

View file

@ -102,7 +102,7 @@ impl FileSystem for DenoCompileFileSystem {
&self,
path: &Path,
recursive: bool,
mode: u32,
mode: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.mkdir_sync(path, recursive, mode)
@ -111,7 +111,7 @@ impl FileSystem for DenoCompileFileSystem {
&self,
path: PathBuf,
recursive: bool,
mode: u32,
mode: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs.mkdir_async(path, recursive, mode).await

View file

@ -6,6 +6,7 @@
#![allow(unused_imports)]
use deno_ast::MediaType;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionError;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
@ -55,15 +56,16 @@ use crate::args::StorageKeyResolver;
use crate::cache::Caches;
use crate::cache::DenoDirProvider;
use crate::cache::NodeAnalysisCache;
use crate::cache::RealDenoCacheEnv;
use crate::http_util::HttpClientProvider;
use crate::node::CliCjsCodeAnalyzer;
use crate::npm::create_cli_npm_resolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::NpmCacheDir;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNodeResolver;
use crate::resolver::NpmModuleLoader;
use crate::util::progress_bar::ProgressBar;
@ -130,8 +132,6 @@ struct SharedModuleLoaderState {
#[derive(Clone)]
struct EmbeddedModuleLoader {
shared: Arc<SharedModuleLoaderState>,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
}
pub const MODULE_NOT_FOUND: &str = "Module not found";
@ -402,28 +402,23 @@ struct StandaloneModuleLoaderFactory {
impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
fn create_for_main(
&self,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
_root_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter {
ModuleLoaderAndSourceMapGetter {
module_loader: Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
root_permissions,
dynamic_permissions,
}),
}
}
fn create_for_worker(
&self,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
_parent_permissions: PermissionsContainer,
_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter {
ModuleLoaderAndSourceMapGetter {
module_loader: Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
root_permissions,
dynamic_permissions,
}),
}
}
@ -471,6 +466,7 @@ pub async fn run(
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
let root_node_modules_path = root_path.join("node_modules");
let npm_cache_dir = NpmCacheDir::new(
&RealDenoCacheEnv,
root_node_modules_path.clone(),
vec![npm_registry_url.clone()],
);
@ -535,8 +531,8 @@ pub async fn run(
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
as Arc<dyn deno_fs::FileSystem>;
let npm_resolver = create_cli_npm_resolver(
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: fs.clone(),
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(fs.clone()),
root_node_modules_dir,
}),
)
@ -580,8 +576,17 @@ pub async fn run(
let cjs_resolutions = Arc::new(CjsResolutionStore::default());
let cache_db = Caches::new(deno_dir_provider.clone());
let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
let cjs_esm_code_analyzer =
CliCjsCodeAnalyzer::new(node_analysis_cache, fs.clone());
let cli_node_resolver = Arc::new(CliNodeResolver::new(
cjs_resolutions.clone(),
fs.clone(),
node_resolver.clone(),
npm_resolver.clone(),
));
let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache,
fs.clone(),
cli_node_resolver.clone(),
);
let node_code_translator = Arc::new(NodeCodeTranslator::new(
cjs_esm_code_analyzer,
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
@ -637,12 +642,6 @@ pub async fn run(
metadata.workspace_resolver.pkg_json_resolution,
)
};
let cli_node_resolver = Arc::new(CliNodeResolver::new(
cjs_resolutions.clone(),
fs.clone(),
node_resolver.clone(),
npm_resolver.clone(),
));
let module_loader_factory = StandaloneModuleLoaderFactory {
shared: Arc::new(SharedModuleLoaderState {
eszip: WorkspaceEszip {
@ -661,7 +660,8 @@ pub async fn run(
};
let permissions = {
let mut permissions = metadata.permissions.to_options();
let mut permissions =
metadata.permissions.to_options(/* cli_arg_urls */ &[]);
// if running with an npm vfs, grant read access to it
if let Some(vfs_root) = maybe_vfs_root {
match &mut permissions.allow_read {
@ -694,8 +694,6 @@ pub async fn run(
}
checker
});
let permission_desc_parser =
Arc::new(RuntimePermissionDescriptorParser::new(fs.clone()));
let worker_factory = CliMainWorkerFactory::new(
Arc::new(BlobStore::default()),
// Code cache is not supported for standalone binary yet.
@ -708,8 +706,8 @@ pub async fn run(
Box::new(module_loader_factory),
node_resolver,
npm_resolver,
permission_desc_parser,
root_cert_store_provider,
permissions,
StorageKeyResolver::empty(),
crate::args::DenoSubcommand::Run(Default::default()),
CliMainWorkerOptions {
@ -749,7 +747,7 @@ pub async fn run(
deno_core::JsRuntime::init_platform(None, true);
let mut worker = worker_factory
.create_main_worker(WorkerExecutionMode::Run, main_module, permissions)
.create_main_worker(WorkerExecutionMode::Run, main_module)
.await?;
let exit_code = worker.run().await?;

View file

@ -442,7 +442,9 @@ pub async fn run_benchmarks(
}
let main_graph_container = factory.main_module_graph_container().await?;
main_graph_container.check_specifiers(&specifiers).await?;
main_graph_container
.check_specifiers(&specifiers, cli_options.ext_flag().as_ref())
.await?;
if workspace_bench_options.no_run {
return Ok(());
@ -569,7 +571,7 @@ pub async fn run_benchmarks_with_watch(
factory
.main_module_graph_container()
.await?
.check_specifiers(&specifiers)
.check_specifiers(&specifiers, cli_options.ext_flag().as_ref())
.await?;
if workspace_bench_options.no_run {

View file

@ -15,7 +15,9 @@ use once_cell::sync::Lazy;
use regex::Regex;
use crate::args::check_warn_tsconfig;
use crate::args::CheckFlags;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::TsConfig;
use crate::args::TsConfigType;
use crate::args::TsTypeLib;
@ -24,13 +26,58 @@ use crate::cache::CacheDBHash;
use crate::cache::Caches;
use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache;
use crate::factory::CliFactory;
use crate::graph_util::BuildFastCheckGraphOptions;
use crate::graph_util::ModuleGraphBuilder;
use crate::npm::CliNpmResolver;
use crate::tsc;
use crate::tsc::Diagnostics;
use crate::util::extract;
use crate::util::path::to_percent_decoded_str;
pub async fn check(
flags: Arc<Flags>,
check_flags: CheckFlags,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags);
let main_graph_container = factory.main_module_graph_container().await?;
let specifiers =
main_graph_container.collect_specifiers(&check_flags.files)?;
if specifiers.is_empty() {
log::warn!("{} No matching files found.", colors::yellow("Warning"));
}
let specifiers_for_typecheck = if check_flags.doc || check_flags.doc_only {
let file_fetcher = factory.file_fetcher()?;
let root_permissions = factory.root_permissions_container()?;
let mut specifiers_for_typecheck = if check_flags.doc {
specifiers.clone()
} else {
vec![]
};
for s in specifiers {
let file = file_fetcher.fetch(&s, root_permissions).await?;
let snippet_files = extract::extract_snippet_files(file)?;
for snippet_file in snippet_files {
specifiers_for_typecheck.push(snippet_file.specifier.clone());
file_fetcher.insert_memory_files(snippet_file);
}
}
specifiers_for_typecheck
} else {
specifiers
};
main_graph_container
.check_specifiers(&specifiers_for_typecheck, None)
.await
}
/// Options for performing a check of a module graph. Note that the decision to
/// emit or not is determined by the `ts_config` settings.
pub struct CheckOptions {

View file

@ -135,7 +135,7 @@ pub async fn compile(
file,
eszip,
root_dir_url,
&module_specifier,
module_specifier,
&compile_flags,
cli_options,
)

View file

@ -452,6 +452,11 @@ fn filter_coverages(
let exclude: Vec<Regex> =
exclude.iter().map(|e| Regex::new(e).unwrap()).collect();
// Matches virtual file paths for doc testing
// e.g. file:///path/to/mod.ts$23-29.ts
let doc_test_re =
Regex::new(r"\$\d+-\d+\.(js|mjs|cjs|jsx|ts|mts|cts|tsx)$").unwrap();
coverages
.into_iter()
.filter(|e| {
@ -460,6 +465,7 @@ fn filter_coverages(
|| e.url.ends_with("$deno$test.js")
|| e.url.ends_with(".snap")
|| is_supported_test_path(Path::new(e.url.as_str()))
|| doc_test_re.is_match(e.url.as_str())
|| Url::parse(&e.url)
.ok()
.map(|url| npm_resolver.in_npm_package(&url))
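
The `$<line>-<line>.<ext>` suffix is how the virtual modules extracted from doc tests are named (per the comment above), so their coverage entries get filtered out. A quick check of the pattern against the example URL from that comment:

use regex::Regex;

fn main() {
    let doc_test_re =
        Regex::new(r"\$\d+-\d+\.(js|mjs|cjs|jsx|ts|mts|cts|tsx)$").unwrap();
    // The virtual doc-test path from the comment matches...
    assert!(doc_test_re.is_match("file:///path/to/mod.ts$23-29.ts"));
    // ...while an ordinary module path does not.
    assert!(!doc_test_re.is_match("file:///path/to/mod.ts"));
}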

View file

@ -7,7 +7,9 @@ use crate::args::Flags;
use crate::colors;
use crate::display;
use crate::factory::CliFactory;
use crate::graph_util::graph_exit_lock_errors;
use crate::graph_util::graph_exit_integrity_errors;
use crate::graph_util::graph_walk_errors;
use crate::graph_util::GraphWalkErrorsOptions;
use crate::tsc::get_types_declaration_file_text;
use crate::util::fs::collect_specifiers;
use deno_ast::diagnostics::Diagnostic;
@ -107,7 +109,7 @@ pub async fn doc(
}
DocSourceFileFlag::Paths(ref source_files) => {
let module_graph_creator = factory.module_graph_creator().await?;
let maybe_lockfile = cli_options.maybe_lockfile();
let fs = factory.fs();
let module_specifiers = collect_specifiers(
FilePatterns {
@ -127,8 +129,18 @@ pub async fn doc(
.create_graph(GraphKind::TypesOnly, module_specifiers.clone())
.await?;
if maybe_lockfile.is_some() {
graph_exit_lock_errors(&graph);
graph_exit_integrity_errors(&graph);
let errors = graph_walk_errors(
&graph,
fs,
&module_specifiers,
GraphWalkErrorsOptions {
check_js: false,
kind: GraphKind::TypesOnly,
},
);
for error in errors {
log::warn!("{} {}", colors::yellow("Warning"), error);
}
let doc_parser = doc::DocParser::new(

View file

@ -33,6 +33,7 @@ use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::spawn_blocking;
use deno_core::url::Url;
use log::debug;
use log::info;
use log::warn;
@ -120,7 +121,13 @@ pub async fn format(
};
}
format_files(caches, &fmt_flags, paths_with_options_batches).await?;
format_files(
caches,
cli_options,
&fmt_flags,
paths_with_options_batches,
)
.await?;
Ok(())
})
@ -133,7 +140,8 @@ pub async fn format(
let caches = factory.caches()?;
let paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
format_files(caches, &fmt_flags, paths_with_options_batches).await?;
format_files(caches, cli_options, &fmt_flags, paths_with_options_batches)
.await?;
}
Ok(())
@ -172,6 +180,7 @@ fn resolve_paths_with_options_batches(
async fn format_files(
caches: &Arc<Caches>,
cli_options: &Arc<CliOptions>,
fmt_flags: &FmtFlags,
paths_with_options_batches: Vec<PathsWithOptions>,
) -> Result<(), AnyError> {
@ -199,6 +208,7 @@ async fn format_files(
fmt_options.options,
fmt_options.unstable,
incremental_cache.clone(),
cli_options.ext_flag().clone(),
)
.await?;
incremental_cache.wait_completion().await;
@ -211,11 +221,14 @@ fn collect_fmt_files(
cli_options: &CliOptions,
files: FilePatterns,
) -> Result<Vec<PathBuf>, AnyError> {
FileCollector::new(|e| is_supported_ext_fmt(e.path))
.ignore_git_folder()
.ignore_node_modules()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
FileCollector::new(|e| {
is_supported_ext_fmt(e.path)
|| (e.path.extension().is_none() && cli_options.ext_flag().is_some())
})
.ignore_git_folder()
.ignore_node_modules()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
}
/// Formats markdown (using <https://github.com/dprint/dprint-plugin-markdown>) and its code blocks
@ -253,6 +266,8 @@ fn format_markdown(
| "svelte"
| "vue"
| "astro"
| "vto"
| "njk"
| "yml"
| "yaml"
) {
@ -273,44 +288,24 @@ fn format_markdown(
dprint_plugin_json::format_text(&fake_filename, text, &json_config)
}
"css" | "scss" | "sass" | "less" => {
if unstable_options.css {
format_css(&fake_filename, text, fmt_options)
} else {
Ok(None)
}
format_css(&fake_filename, text, fmt_options)
}
"html" => {
if unstable_options.html {
format_html(&fake_filename, text, fmt_options)
} else {
Ok(None)
}
}
"svelte" | "vue" | "astro" => {
"html" => format_html(&fake_filename, text, fmt_options),
"svelte" | "vue" | "astro" | "vto" | "njk" => {
if unstable_options.component {
format_html(&fake_filename, text, fmt_options)
} else {
Ok(None)
}
}
"yml" | "yaml" => {
if unstable_options.yaml {
pretty_yaml::format_text(
text,
&get_resolved_yaml_config(fmt_options),
)
.map(Some)
.map_err(AnyError::from)
} else {
Ok(None)
}
}
"yml" | "yaml" => format_yaml(text, fmt_options),
_ => {
let mut codeblock_config =
get_resolved_typescript_config(fmt_options);
codeblock_config.line_width = line_width;
dprint_plugin_typescript::format_text(
&fake_filename,
None,
text.to_string(),
&codeblock_config,
)
@ -340,13 +335,33 @@ pub fn format_css(
file_text: &str,
fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
malva::format_text(
let formatted_str = malva::format_text(
file_text,
malva::detect_syntax(file_path).unwrap_or(malva::Syntax::Css),
&get_resolved_malva_config(fmt_options),
)
.map(Some)
.map_err(AnyError::from)
.map_err(AnyError::from)?;
Ok(if formatted_str == file_text {
None
} else {
Some(formatted_str)
})
}
fn format_yaml(
file_text: &str,
fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
let formatted_str =
pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options))
.map_err(AnyError::from)?;
Ok(if formatted_str == file_text {
None
} else {
Some(formatted_str)
})
}
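
Both helpers now follow the convention used by the other formatters in this file: `Ok(None)` means the text is already formatted, and `Ok(Some(_))` carries new text only when something changed, so callers can count and write only files that actually differ. A hedged caller sketch, assuming `FmtOptionsConfig` implements `Default`:

fn write_if_changed(
    path: &std::path::Path,
    text: &str,
) -> Result<(), deno_core::error::AnyError> {
    let fmt_options = FmtOptionsConfig::default();
    match format_yaml(text, &fmt_options)? {
        Some(formatted) => std::fs::write(path, formatted)?, // content changed
        None => {} // already formatted; skip the write
    }
    Ok(())
}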
pub fn format_html(
@ -354,7 +369,7 @@ pub fn format_html(
file_text: &str,
fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
markup_fmt::format_text(
let format_result = markup_fmt::format_text(
file_text,
markup_fmt::detect_language(file_path)
.unwrap_or(markup_fmt::Language::Html),
@ -405,6 +420,7 @@ pub fn format_html(
typescript_config.line_width = hints.print_width as u32;
dprint_plugin_typescript::format_text(
&path,
None,
text.to_string(),
&typescript_config,
)
@ -419,9 +435,30 @@ pub fn format_html(
}
},
)
.map(Some)
.map_err(|error| match error {
markup_fmt::FormatError::Syntax(error) => AnyError::from(error),
markup_fmt::FormatError::Syntax(error) => {
fn inner(
error: &markup_fmt::SyntaxError,
file_path: &Path,
) -> Option<String> {
let url = Url::from_file_path(file_path).ok()?;
let error_msg = format!(
"Syntax error ({}) at {}:{}:{}\n",
error.kind,
url.as_str(),
error.line,
error.column
);
Some(error_msg)
}
if let Some(error_msg) = inner(&error, file_path) {
AnyError::from(generic_error(error_msg))
} else {
AnyError::from(error)
}
}
markup_fmt::FormatError::External(errors) => {
let last = errors.len() - 1;
AnyError::msg(
@ -438,6 +475,14 @@ pub fn format_html(
.collect::<String>(),
)
}
});
let formatted_str = format_result?;
Ok(if formatted_str == file_text {
None
} else {
Some(formatted_str)
})
}
@ -447,8 +492,11 @@ pub fn format_file(
file_text: &str,
fmt_options: &FmtOptionsConfig,
unstable_options: &UnstableFmtOptions,
ext: Option<String>,
) -> Result<Option<String>, AnyError> {
let ext = get_extension(file_path).unwrap_or_default();
let ext = ext
.or_else(|| get_extension(file_path))
.unwrap_or("ts".to_string());
match ext.as_str() {
"md" | "mkd" | "mkdn" | "mdwn" | "mdown" | "markdown" => {
@ -456,48 +504,28 @@ pub fn format_file(
}
"json" | "jsonc" => format_json(file_path, file_text, fmt_options),
"css" | "scss" | "sass" | "less" => {
if unstable_options.css {
format_css(file_path, file_text, fmt_options)
} else {
Ok(None)
}
format_css(file_path, file_text, fmt_options)
}
"html" => {
if unstable_options.html {
format_html(file_path, file_text, fmt_options)
} else {
Ok(None)
}
}
"svelte" | "vue" | "astro" => {
"html" => format_html(file_path, file_text, fmt_options),
"svelte" | "vue" | "astro" | "vto" | "njk" => {
if unstable_options.component {
format_html(file_path, file_text, fmt_options)
} else {
Ok(None)
}
}
"yml" | "yaml" => {
if unstable_options.yaml {
pretty_yaml::format_text(
file_text,
&get_resolved_yaml_config(fmt_options),
)
.map(Some)
.map_err(AnyError::from)
} else {
Ok(None)
}
}
"yml" | "yaml" => format_yaml(file_text, fmt_options),
"ipynb" => dprint_plugin_jupyter::format_text(
file_text,
|file_path: &Path, file_text: String| {
format_file(file_path, &file_text, fmt_options, unstable_options)
format_file(file_path, &file_text, fmt_options, unstable_options, None)
},
),
_ => {
let config = get_resolved_typescript_config(fmt_options);
dprint_plugin_typescript::format_text(
file_path,
Some(&ext),
file_text.to_string(),
&config,
)
@ -523,6 +551,7 @@ trait Formatter {
fmt_options: FmtOptionsConfig,
unstable_options: UnstableFmtOptions,
incremental_cache: Arc<IncrementalCache>,
ext: Option<String>,
) -> Result<(), AnyError>;
fn finish(&self) -> Result<(), AnyError>;
@ -542,6 +571,7 @@ impl Formatter for CheckFormatter {
fmt_options: FmtOptionsConfig,
unstable_options: UnstableFmtOptions,
incremental_cache: Arc<IncrementalCache>,
ext: Option<String>,
) -> Result<(), AnyError> {
// prevent threads outputting at the same time
let output_lock = Arc::new(Mutex::new(0));
@ -563,6 +593,7 @@ impl Formatter for CheckFormatter {
&file_text,
&fmt_options,
&unstable_options,
ext.clone(),
) {
Ok(Some(formatted_text)) => {
not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
@ -640,6 +671,7 @@ impl Formatter for RealFormatter {
fmt_options: FmtOptionsConfig,
unstable_options: UnstableFmtOptions,
incremental_cache: Arc<IncrementalCache>,
ext: Option<String>,
) -> Result<(), AnyError> {
let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
@ -659,7 +691,13 @@ impl Formatter for RealFormatter {
&file_path,
&file_contents.text,
|file_path, file_text| {
format_file(file_path, file_text, &fmt_options, &unstable_options)
format_file(
file_path,
file_text,
&fmt_options,
&unstable_options,
ext.clone(),
)
},
) {
Ok(Some(formatted_text)) => {
@ -785,6 +823,7 @@ fn format_stdin(
&source,
&fmt_options.options,
&fmt_options.unstable,
None,
)?;
if fmt_flags.check {
#[allow(clippy::print_stdout)]
@ -1142,6 +1181,8 @@ fn is_supported_ext_fmt(path: &Path) -> bool {
| "svelte"
| "vue"
| "astro"
| "vto"
| "njk"
| "md"
| "mkd"
| "mkdn"
@ -1200,6 +1241,10 @@ mod test {
assert!(is_supported_ext_fmt(Path::new("foo.VUE")));
assert!(is_supported_ext_fmt(Path::new("foo.astro")));
assert!(is_supported_ext_fmt(Path::new("foo.AsTrO")));
assert!(is_supported_ext_fmt(Path::new("foo.vto")));
assert!(is_supported_ext_fmt(Path::new("foo.Vto")));
assert!(is_supported_ext_fmt(Path::new("foo.njk")));
assert!(is_supported_ext_fmt(Path::new("foo.NJk")));
assert!(is_supported_ext_fmt(Path::new("foo.yml")));
assert!(is_supported_ext_fmt(Path::new("foo.Yml")));
assert!(is_supported_ext_fmt(Path::new("foo.yaml")));
@ -1266,6 +1311,7 @@ mod test {
..Default::default()
},
&UnstableFmtOptions::default(),
None,
)
.unwrap()
.unwrap();

View file

@ -29,7 +29,7 @@ use crate::args::Flags;
use crate::args::InfoFlags;
use crate::display;
use crate::factory::CliFactory;
use crate::graph_util::graph_exit_lock_errors;
use crate::graph_util::graph_exit_integrity_errors;
use crate::npm::CliNpmResolver;
use crate::npm::ManagedCliNpmResolver;
use crate::util::checksum;
@ -75,14 +75,18 @@ pub async fn info(
// write out the lockfile if there is one
if let Some(lockfile) = &maybe_lockfile {
graph_exit_lock_errors(&graph);
graph_exit_integrity_errors(&graph);
lockfile.write_if_changed()?;
}
if info_flags.json {
let mut json_graph = serde_json::json!(graph);
if let Some(output) = json_graph.as_object_mut() {
output.insert("version".to_string(), JSON_SCHEMA_VERSION.into());
output.shift_insert(
0,
"version".to_string(),
JSON_SCHEMA_VERSION.into(),
);
}
add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref());
display::write_json_to_stdout(&json_graph)?;
@ -644,8 +648,21 @@ impl<'a> GraphDisplayContext<'a> {
ModuleError::InvalidTypeAssertion { .. } => {
self.build_error_msg(specifier, "(invalid import attribute)")
}
ModuleError::LoadingErr(_, _, _) => {
self.build_error_msg(specifier, "(loading error)")
ModuleError::LoadingErr(_, _, err) => {
use deno_graph::ModuleLoadError::*;
let message = match err {
HttpsChecksumIntegrity(_) => "(checksum integrity error)",
Decode(_) => "(loading decode error)",
Loader(err) => match deno_core::error::get_custom_error_class(err) {
Some("NotCapable") => "(not capable, requires --allow-import)",
_ => "(loading error)",
},
Jsr(_) => "(loading error)",
NodeUnknownBuiltinModule(_) => "(unknown node built-in error)",
Npm(_) => "(npm loading error)",
TooManyRedirects => "(too many redirects error)",
};
self.build_error_msg(specifier, message.as_ref())
}
ModuleError::ParseErr(_, _) => {
self.build_error_msg(specifier, "(parsing error)")

View file

@ -94,9 +94,16 @@ impl CliLinter {
&self,
file_path: &Path,
source_code: String,
ext: Option<&str>,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
let specifier = specifier_from_file_path(file_path)?;
let media_type = MediaType::from_specifier(&specifier);
let media_type = if let Some(ext) = ext {
MediaType::from_str(&format!("placeholder.{ext}"))
} else if file_path.extension().is_none() {
MediaType::TypeScript
} else {
MediaType::from_specifier(&specifier)
};
if self.fix {
self.lint_file_and_fix(&specifier, media_type, source_code, file_path)

View file

@ -117,6 +117,7 @@ pub async fn lint(
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
lint_config.clone(),
paths_with_options.dir,
@ -155,7 +156,7 @@ pub async fn lint(
start_dir.maybe_deno_json().map(|c| c.as_ref()),
)?;
let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
if let Some(ext) = &lint_flags.ext {
if let Some(ext) = cli_options.ext_flag() {
file_path.set_extension(ext);
}
let r = lint_stdin(&file_path, lint_rules, deno_lint_config);
@ -179,6 +180,7 @@ pub async fn lint(
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
deno_lint_config.clone(),
paths_with_options.dir,
@ -264,6 +266,7 @@ impl WorkspaceLinter {
pub async fn lint_files(
&mut self,
cli_options: &Arc<CliOptions>,
lint_options: LintOptions,
lint_config: LintConfig,
member_dir: WorkspaceDirectory,
@ -348,6 +351,7 @@ impl WorkspaceLinter {
let reporter_lock = self.reporter_lock.clone();
let maybe_incremental_cache = maybe_incremental_cache.clone();
let linter = linter.clone();
let cli_options = cli_options.clone();
async move {
run_parallelized(paths, {
move |file_path| {
@ -361,7 +365,11 @@ impl WorkspaceLinter {
}
}
let r = linter.lint_file(&file_path, file_text);
let r = linter.lint_file(
&file_path,
file_text,
cli_options.ext_flag().as_deref(),
);
if let Ok((file_source, file_diagnostics)) = &r {
if let Some(incremental_cache) = &maybe_incremental_cache {
if file_diagnostics.is_empty() {
@ -421,11 +429,14 @@ fn collect_lint_files(
cli_options: &CliOptions,
files: FilePatterns,
) -> Result<Vec<PathBuf>, AnyError> {
FileCollector::new(|e| is_script_ext(e.path))
.ignore_git_folder()
.ignore_node_modules()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
FileCollector::new(|e| {
is_script_ext(e.path)
|| (e.path.extension().is_none() && cli_options.ext_flag().is_some())
})
.ignore_git_folder()
.ignore_node_modules()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
}
#[allow(clippy::print_stdout)]
@ -497,7 +508,7 @@ fn lint_stdin(
});
linter
.lint_file(file_path, deno_ast::strip_bom(source_code))
.lint_file(file_path, deno_ast::strip_bom(source_code), None)
.map_err(AnyError::from)
}
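
`collect_lint_files` now also admits extension-less files, but only when an `--ext` flag tells the linter how to parse them. The predicate boils down to this (`is_script_ext` below is a plausible stand-in for the real helper):

use std::path::Path;

fn is_script_ext(path: &Path) -> bool {
    matches!(
        path.extension().and_then(|e| e.to_str()),
        Some("ts" | "tsx" | "js" | "jsx" | "mjs" | "cjs" | "mts" | "cts")
    )
}

fn should_collect(path: &Path, ext_flag: Option<&str>) -> bool {
    // Extension-less files only qualify when --ext says how to parse them.
    is_script_ext(path) || (path.extension().is_none() && ext_flag.is_some())
}

fn main() {
    assert!(should_collect(Path::new("mod.ts"), None));
    assert!(should_collect(Path::new("script"), Some("ts")));
    assert!(!should_collect(Path::new("script"), None));
}
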

View file

@ -14,7 +14,7 @@ use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::rules::LintRule;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::CliSloppyImportsResolver;
mod no_sloppy_imports;
mod no_slow_types;
@ -144,13 +144,13 @@ impl ConfiguredRules {
}
pub struct LintRuleProvider {
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Option<Arc<WorkspaceResolver>>,
}
impl LintRuleProvider {
pub fn new(
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Option<Arc<WorkspaceResolver>>,
) -> Self {
Self {

View file

@ -16,24 +16,25 @@ use deno_lint::diagnostic::LintDiagnosticRange;
use deno_lint::diagnostic::LintFix;
use deno_lint::diagnostic::LintFixChange;
use deno_lint::rules::LintRule;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use text_lines::LineAndColumnIndex;
use crate::graph_util::CliJsrUrlProvider;
use crate::resolver::SloppyImportsResolution;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::CliSloppyImportsResolver;
use super::ExtendedLintRule;
#[derive(Debug)]
pub struct NoSloppyImportsRule {
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
// None to make printing out the lint rules easy
workspace_resolver: Option<Arc<WorkspaceResolver>>,
}
impl NoSloppyImportsRule {
pub fn new(
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Option<Arc<WorkspaceResolver>>,
) -> Self {
NoSloppyImportsRule {
@ -172,7 +173,7 @@ impl LintRule for NoSloppyImportsRule {
#[derive(Debug)]
struct SloppyImportCaptureResolver<'a> {
workspace_resolver: &'a WorkspaceResolver,
sloppy_imports_resolver: &'a SloppyImportsResolver,
sloppy_imports_resolver: &'a CliSloppyImportsResolver,
captures: RefCell<HashMap<Range, SloppyImportsResolution>>,
}
@ -194,7 +195,13 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
}
| deno_config::workspace::MappedResolution::ImportMap {
specifier, ..
} => match self.sloppy_imports_resolver.resolve(&specifier, mode) {
} => match self.sloppy_imports_resolver.resolve(
&specifier,
match mode {
ResolutionMode::Execution => SloppyImportsResolutionMode::Execution,
ResolutionMode::Types => SloppyImportsResolutionMode::Types,
},
) {
Some(res) => {
self
.captures

View file

@ -3,7 +3,6 @@
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use deno_ast::diagnostics::Diagnostic;
use deno_ast::diagnostics::DiagnosticLevel;
@ -21,6 +20,7 @@ use deno_ast::SourceRanged;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_graph::FastCheckDiagnostic;
use deno_semver::Version;
@ -36,7 +36,7 @@ impl PublishDiagnosticsCollector {
pub fn print_and_error(&self) -> Result<(), AnyError> {
let mut errors = 0;
let mut has_slow_types_errors = false;
let mut diagnostics = self.diagnostics.lock().unwrap().take();
let mut diagnostics = self.diagnostics.lock().take();
diagnostics.sort_by_cached_key(|d| d.sorting_key());
@ -75,8 +75,16 @@ impl PublishDiagnosticsCollector {
}
}
pub fn has_error(&self) -> bool {
self
.diagnostics
.lock()
.iter()
.any(|d| matches!(d.level(), DiagnosticLevel::Error))
}
pub fn push(&self, diagnostic: PublishDiagnostic) {
self.diagnostics.lock().unwrap().push(diagnostic);
self.diagnostics.lock().push(diagnostic);
}
}
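
The collector switches from `std::sync::Mutex` to the `parking_lot::Mutex` re-exported by deno_core; parking_lot locks cannot be poisoned, which is why every `.lock().unwrap()` in this file becomes a bare `.lock()`. A minimal sketch, assuming a direct dependency on the parking_lot crate:

use parking_lot::Mutex;

struct Collector {
    diagnostics: Mutex<Vec<String>>,
}

impl Collector {
    fn push(&self, d: String) {
        // parking_lot's lock() returns the guard directly -- there is no
        // poisoning, hence no `.unwrap()` as with std::sync::Mutex.
        self.diagnostics.lock().push(d);
    }

    fn has_error(&self) -> bool {
        self.diagnostics.lock().iter().any(|d| d.starts_with("error:"))
    }
}

fn main() {
    let c = Collector { diagnostics: Mutex::new(Vec::new()) };
    c.push("error: something".to_string());
    assert!(c.has_error());
}
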

View file

@ -128,7 +128,7 @@ impl GraphDiagnosticsCollector {
follow_dynamic: true,
// search the entire graph and not just the fast check subset
prefer_fast_check_graph: false,
follow_type_only: true,
kind: deno_graph::GraphKind::All,
};
let mut iter = graph.walk(graph.roots.iter(), options);
while let Some((specifier, entry)) = iter.next() {
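
Switching the walk options to `kind: deno_graph::GraphKind::All` appears to preserve the old `follow_type_only: true` intent through deno_graph's newer API: follow every kind of dependency edge. The underlying idea is an ordinary exhaustive traversal; a toy breadth-first version over plain std collections (not the deno_graph API):

use std::collections::{HashMap, HashSet, VecDeque};

fn walk_all(roots: &[&str], deps: &HashMap<&str, Vec<&str>>) -> HashSet<String> {
    let mut seen = HashSet::new();
    let mut queue: VecDeque<&str> = roots.iter().copied().collect();
    while let Some(spec) = queue.pop_front() {
        if !seen.insert(spec.to_string()) {
            continue; // already visited
        }
        for dep in deps.get(spec).into_iter().flatten().copied() {
            queue.push_back(dep);
        }
    }
    seen
}

fn main() {
    let deps = HashMap::from([("a", vec!["b"]), ("b", vec!["a"])]);
    assert_eq!(walk_all(&["a"], &deps).len(), 2);
}
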

View file

@ -43,7 +43,8 @@ use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check::CheckOptions;
use crate::tools::lint::collect_no_slow_type_diagnostics;
use crate::tools::registry::diagnostics::PublishDiagnostic;
@ -108,7 +109,9 @@ pub async fn publish(
}
let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
if cli_options.unstable_sloppy_imports() {
Some(SloppyImportsResolver::new(cli_factory.fs().clone()))
Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
cli_factory.fs().clone(),
)))
} else {
None
},
@ -341,13 +344,11 @@ impl PublishPreparer {
bail!("Exiting due to DENO_INTERNAL_FAST_CHECK_OVERWRITE")
} else {
log::info!("Checking for slow types in the public API...");
let mut any_pkg_had_diagnostics = false;
for package in package_configs {
let export_urls = package.config_file.resolve_export_value_urls()?;
let diagnostics =
collect_no_slow_type_diagnostics(&graph, &export_urls);
if !diagnostics.is_empty() {
any_pkg_had_diagnostics = true;
for diagnostic in diagnostics {
diagnostics_collector
.push(PublishDiagnostic::FastCheck(diagnostic));
@ -355,7 +356,9 @@ impl PublishPreparer {
}
}
if any_pkg_had_diagnostics {
// skip type checking the slow types graph if there are any errors, because
// errors like remote modules existing in the graph will cause type checking to crash
if diagnostics_collector.has_error() {
Ok(Arc::new(graph))
} else {
// fast check passed, type check the output as a temporary measure
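
The replaced `any_pkg_had_diagnostics` flag only tracked fast-check diagnostics; asking the collector itself via `has_error()` also covers errors pushed by other phases. The resulting control flow, as an illustrative stand-alone sketch (types here are placeholders):

struct Diagnostics(Vec<String>);

impl Diagnostics {
    fn has_error(&self) -> bool {
        self.0.iter().any(|d| d.starts_with("error:"))
    }
}

fn maybe_type_check(graph: Vec<String>, diagnostics: &Diagnostics) -> Vec<String> {
    if diagnostics.has_error() {
        // Skip type checking: the collected errors (e.g. a remote module in
        // the graph) would make the type checker crash anyway.
        return graph;
    }
    type_check(graph)
}

fn type_check(graph: Vec<String>) -> Vec<String> {
    graph // stand-in for the real check
}

fn main() {
    let d = Diagnostics(vec!["error: remote module".into()]);
    assert_eq!(maybe_type_check(vec!["a".into()], &d).len(), 1);
}
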

View file

@ -5,6 +5,7 @@ mod cache_deps;
pub use cache_deps::cache_top_level_deps;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::VersionReq;
use std::borrow::Cow;
use std::path::PathBuf;
@ -437,18 +438,6 @@ pub async fn add(
}
let http_client = cli_factory.http_client_provider();
let mut selected_packages = Vec::with_capacity(add_flags.packages.len());
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
for entry_text in add_flags.packages.iter() {
let req = AddPackageReq::parse(entry_text).with_context(|| {
format!("Failed to parse package required: {}", entry_text)
})?;
package_reqs.push(req);
}
let deps_http_cache = cli_factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new(
deps_http_cache.clone(),
@ -463,6 +452,37 @@ pub async fn add(
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver = Arc::new(NpmFetchResolver::new(deps_file_fetcher));
let mut selected_packages = Vec::with_capacity(add_flags.packages.len());
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
for entry_text in add_flags.packages.iter() {
let req = AddPackageReq::parse(entry_text).with_context(|| {
format!("Failed to parse package required: {}", entry_text)
})?;
match req {
Ok(add_req) => package_reqs.push(add_req),
Err(package_req) => {
if jsr_resolver.req_to_nv(&package_req).await.is_some() {
bail!(
"{entry_text} is missing a prefix. Did you mean `{}`?",
crate::colors::yellow(format!("deno {cmd_name} jsr:{package_req}"))
)
} else if npm_resolver.req_to_nv(&package_req).await.is_some() {
bail!(
"{entry_text} is missing a prefix. Did you mean `{}`?",
crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}"))
)
} else {
bail!(
"{} was not found in either jsr or npm.",
crate::colors::red(entry_text)
);
}
}
}
}
let package_futures = package_reqs
.into_iter()
.map({
@ -636,7 +656,7 @@ struct AddPackageReq {
}
impl AddPackageReq {
pub fn parse(entry_text: &str) -> Result<Self, AnyError> {
pub fn parse(entry_text: &str) -> Result<Result<Self, PackageReq>, AnyError> {
enum Prefix {
Jsr,
Npm,
@ -675,13 +695,13 @@ impl AddPackageReq {
None => match parse_alias(entry_text) {
Some((alias, text)) => {
let (maybe_prefix, entry_text) = parse_prefix(text);
(
maybe_prefix.unwrap_or(Prefix::Jsr),
Some(alias.to_string()),
entry_text,
)
if maybe_prefix.is_none() {
return Ok(Err(PackageReq::from_str(entry_text)?));
}
(maybe_prefix.unwrap(), Some(alias.to_string()), entry_text)
}
None => (Prefix::Jsr, None, entry_text),
None => return Ok(Err(PackageReq::from_str(entry_text)?)),
},
};
@ -690,19 +710,30 @@ impl AddPackageReq {
let req_ref =
JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?;
let package_req = req_ref.into_inner().req;
Ok(AddPackageReq {
Ok(Ok(AddPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
value: AddPackageReqValue::Jsr(package_req),
})
}))
}
Prefix::Npm => {
let req_ref =
NpmPackageReqReference::from_str(&format!("npm:{}", entry_text))?;
let package_req = req_ref.into_inner().req;
Ok(AddPackageReq {
let mut package_req = req_ref.into_inner().req;
// deno_semver defaults to a version req of `*` if none is specified
// we want to default to `latest` instead
if package_req.version_req == *deno_semver::WILDCARD_VERSION_REQ
&& package_req.version_req.version_text() == "*"
&& !entry_text.contains("@*")
{
package_req.version_req = VersionReq::from_raw_text_and_inner(
"latest".into(),
deno_semver::RangeSetOrTag::Tag("latest".into()),
);
}
Ok(Ok(AddPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
value: AddPackageReqValue::Npm(package_req),
})
}))
}
}
}
@ -847,42 +878,44 @@ fn update_config_file_content<
#[cfg(test)]
mod test {
use deno_semver::VersionReq;
use super::*;
#[test]
fn test_parse_add_package_req() {
assert_eq!(
AddPackageReq::parse("jsr:foo").unwrap(),
AddPackageReq::parse("jsr:foo").unwrap().unwrap(),
AddPackageReq {
alias: "foo".to_string(),
value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
AddPackageReq::parse("alias@jsr:foo").unwrap(),
AddPackageReq::parse("alias@jsr:foo").unwrap().unwrap(),
AddPackageReq {
alias: "alias".to_string(),
value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
AddPackageReq::parse("@alias/pkg@npm:foo").unwrap(),
AddPackageReq::parse("@alias/pkg@npm:foo").unwrap().unwrap(),
AddPackageReq {
alias: "@alias/pkg".to_string(),
value: AddPackageReqValue::Npm(PackageReq::from_str("foo").unwrap())
value: AddPackageReqValue::Npm(
PackageReq::from_str("foo@latest").unwrap()
)
}
);
assert_eq!(
AddPackageReq::parse("@alias/pkg@jsr:foo").unwrap(),
AddPackageReq::parse("@alias/pkg@jsr:foo").unwrap().unwrap(),
AddPackageReq {
alias: "@alias/pkg".to_string(),
value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
AddPackageReq::parse("alias@jsr:foo@^1.5.0").unwrap(),
AddPackageReq::parse("alias@jsr:foo@^1.5.0")
.unwrap()
.unwrap(),
AddPackageReq {
alias: "alias".to_string(),
value: AddPackageReqValue::Jsr(
@ -891,15 +924,11 @@ mod test {
}
);
assert_eq!(
AddPackageReq::parse("@scope/pkg@tag").unwrap(),
AddPackageReq {
alias: "@scope/pkg".to_string(),
value: AddPackageReqValue::Jsr(PackageReq {
name: "@scope/pkg".to_string(),
// this is a tag
version_req: VersionReq::parse_from_specifier("tag").unwrap(),
}),
}
AddPackageReq::parse("@scope/pkg@tag")
.unwrap()
.unwrap_err()
.to_string(),
"@scope/pkg@tag",
);
}
}
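
Two behaviors land in this file: `AddPackageReq::parse` now returns `Ok(Err(...))` for bare specifiers so the caller can probe jsr and npm and suggest a prefixed command, and a bare npm specifier defaults to the `latest` dist-tag instead of deno_semver's `*` wildcard. The tag-defaulting rule in isolation, as a toy function:

fn default_version_req(entry_text: &str, version_text: &str) -> String {
    // deno_semver parses a missing version as the wildcard `*`; only an
    // explicit `@*` in the user's input should keep the wildcard.
    if version_text == "*" && !entry_text.contains("@*") {
        "latest".to_string()
    } else {
        version_text.to_string()
    }
}

fn main() {
    assert_eq!(default_version_req("chalk", "*"), "latest");
    assert_eq!(default_version_req("chalk@*", "*"), "*");
    assert_eq!(default_version_req("chalk@^5", "^5"), "^5");
}
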

View file

@ -16,6 +16,7 @@ pub async fn cache_top_level_deps(
) -> Result<(), AnyError> {
let npm_resolver = factory.npm_resolver().await?;
let cli_options = factory.cli_options()?;
let root_permissions = factory.root_permissions_container()?;
if let Some(npm_resolver) = npm_resolver.as_managed() {
if !npm_resolver.ensure_top_level_package_json_install().await? {
if let Some(lockfile) = cli_options.maybe_lockfile() {
@ -106,7 +107,8 @@ pub async fn cache_top_level_deps(
&roots,
false,
deno_config::deno_json::TsTypeLib::DenoWorker,
crate::file_fetcher::FetchPermissionsOption::AllowAll,
root_permissions.clone(),
None,
)
.await?;
}

View file

@ -12,9 +12,10 @@ use deno_graph::DynamicTemplatePart;
use deno_graph::ParserModuleAnalyzer;
use deno_graph::TypeScriptReference;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_runtime::deno_node::is_builtin_node_module;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::CliSloppyImportsResolver;
#[derive(Debug, Clone)]
pub enum SpecifierUnfurlerDiagnostic {
@ -42,14 +43,14 @@ impl SpecifierUnfurlerDiagnostic {
}
pub struct SpecifierUnfurler {
sloppy_imports_resolver: Option<SloppyImportsResolver>,
sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
workspace_resolver: WorkspaceResolver,
bare_node_builtins: bool,
}
impl SpecifierUnfurler {
pub fn new(
sloppy_imports_resolver: Option<SloppyImportsResolver>,
sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
workspace_resolver: WorkspaceResolver,
bare_node_builtins: bool,
) -> Self {
@ -179,7 +180,7 @@ impl SpecifierUnfurler {
let resolved =
if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
sloppy_imports_resolver
.resolve(&resolved, deno_graph::source::ResolutionMode::Execution)
.resolve(&resolved, SloppyImportsResolutionMode::Execution)
.map(|res| res.into_specifier())
.unwrap_or(resolved)
} else {
@ -388,6 +389,8 @@ fn to_range(
mod tests {
use std::sync::Arc;
use crate::resolver::SloppyImportsCachedFs;
use super::*;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
@ -455,7 +458,9 @@ mod tests {
);
let fs = Arc::new(RealFs);
let unfurler = SpecifierUnfurler::new(
Some(SloppyImportsResolver::new(fs)),
Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
fs,
))),
workspace_resolver,
true,
);
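
For context, sloppy-import resolution probes the filesystem for the file an extension-less (or mis-extensioned) specifier could mean; the `SloppyImportsCachedFs` wrapper introduced here memoizes those probes. A toy probe showing the idea, not the deno_resolver implementation:

use std::path::{Path, PathBuf};

fn probe_sloppy(path: &Path) -> Option<PathBuf> {
    const EXTS: [&str; 4] = ["ts", "tsx", "js", "jsx"];
    if path.extension().is_some() {
        return None; // the specifier is already explicit
    }
    // Try each candidate extension until one exists on disk.
    EXTS.iter()
        .map(|ext| path.with_extension(ext))
        .find(|candidate| candidate.is_file())
}

fn main() {
    println!("{:?}", probe_sloppy(Path::new("./mod")));
}
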

View file

@ -162,7 +162,7 @@ pub async fn run(
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let main_module = cli_options.resolve_main_module()?;
let permissions = factory.create_permissions_container()?;
let permissions = factory.root_permissions_container()?;
let npm_resolver = factory.npm_resolver().await?.clone();
let resolver = factory.resolver().await?.clone();
let file_fetcher = factory.file_fetcher()?;
@ -177,7 +177,7 @@ pub async fn run(
.create_custom_worker(
WorkerExecutionMode::Repl,
main_module.clone(),
permissions,
permissions.clone(),
vec![crate::ops::testing::deno_test::init_ops(test_event_sender)],
Default::default(),
)
@ -189,7 +189,7 @@ pub async fn run(
npm_resolver,
resolver,
worker,
main_module,
main_module.clone(),
test_event_receiver,
)
.await?;

View file

@ -60,10 +60,9 @@ pub async fn run_script(
maybe_npm_install(&factory).await?;
let permissions = factory.create_permissions_container()?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
let mut worker = worker_factory
.create_main_worker(mode, main_module, permissions)
.create_main_worker(mode, main_module.clone())
.await?;
let exit_code = worker.run().await?;
@ -79,7 +78,6 @@ pub async fn run_from_stdin(flags: Arc<Flags>) -> Result<i32, AnyError> {
let file_fetcher = factory.file_fetcher()?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
let permissions = factory.create_permissions_container()?;
let mut source = Vec::new();
std::io::stdin().read_to_end(&mut source)?;
// Save a fake file into file fetcher cache
@ -91,7 +89,7 @@ pub async fn run_from_stdin(flags: Arc<Flags>) -> Result<i32, AnyError> {
});
let mut worker = worker_factory
.create_main_worker(WorkerExecutionMode::Run, main_module, permissions)
.create_main_worker(WorkerExecutionMode::Run, main_module.clone())
.await?;
let exit_code = worker.run().await?;
Ok(exit_code)
@ -125,11 +123,10 @@ async fn run_with_watch(
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
let permissions = factory.create_permissions_container()?;
let mut worker = factory
.create_cli_main_worker_factory()
.await?
.create_main_worker(mode, main_module, permissions)
.create_main_worker(mode, main_module.clone())
.await?;
if watch_flags.hmr {
@ -173,10 +170,9 @@ pub async fn eval_command(
source: source_code.into_bytes().into(),
});
let permissions = factory.create_permissions_container()?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
let mut worker = worker_factory
.create_main_worker(WorkerExecutionMode::Eval, main_module, permissions)
.create_main_worker(WorkerExecutionMode::Eval, main_module.clone())
.await?;
let exit_code = worker.run().await?;
Ok(exit_code)
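
Across these run/eval entry points the explicit `permissions` argument disappears: the factory now owns a single root permissions container and worker creation clones it internally. A schematic of that ownership change (all types are stand-ins):

use std::sync::Arc;

#[derive(Clone)]
struct PermissionsContainer(Arc<()>); // stand-in for the real type

struct Factory {
    root_permissions: PermissionsContainer,
}

impl Factory {
    fn root_permissions_container(&self) -> &PermissionsContainer {
        &self.root_permissions
    }

    // Call sites no longer thread permissions through; each worker
    // clones the shared root container.
    fn create_main_worker(&self, main_module: &str) -> (String, PermissionsContainer) {
        (main_module.to_string(), self.root_permissions_container().clone())
    }
}

fn main() {
    let f = Factory { root_permissions: PermissionsContainer(Arc::new(())) };
    let _worker = f.create_main_worker("file:///main.ts");
}
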

View file

@ -5,7 +5,6 @@ use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::futures::TryFutureExt;
use deno_core::ModuleSpecifier;
use deno_runtime::deno_permissions::PermissionsContainer;
use super::run::check_permission_before_script;
use super::run::maybe_npm_install;
@ -44,13 +43,11 @@ pub async fn serve(
maybe_npm_install(&factory).await?;
let permissions = factory.create_permissions_container()?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
do_serve(
worker_factory,
main_module,
permissions,
main_module.clone(),
serve_flags.worker_count,
false,
)
@ -60,7 +57,6 @@ pub async fn serve(
async fn do_serve(
worker_factory: CliMainWorkerFactory,
main_module: ModuleSpecifier,
permissions: PermissionsContainer,
worker_count: Option<usize>,
hmr: bool,
) -> Result<i32, AnyError> {
@ -71,7 +67,6 @@ async fn do_serve(
worker_count,
},
main_module.clone(),
permissions.clone(),
)
.await?;
let worker_count = match worker_count {
@ -87,15 +82,13 @@ async fn do_serve(
for i in 0..extra_workers {
let worker_factory = worker_factory.clone();
let main_module = main_module.clone();
let permissions = permissions.clone();
let (tx, rx) = tokio::sync::oneshot::channel();
channels.push(rx);
std::thread::Builder::new()
.name(format!("serve-worker-{i}"))
.spawn(move || {
deno_runtime::tokio_util::create_and_run_current_thread(async move {
let result =
run_worker(i, worker_factory, main_module, permissions, hmr).await;
let result = run_worker(i, worker_factory, main_module, hmr).await;
let _ = tx.send(result);
});
})?;
@ -124,7 +117,6 @@ async fn run_worker(
worker_count: usize,
worker_factory: CliMainWorkerFactory,
main_module: ModuleSpecifier,
permissions: PermissionsContainer,
hmr: bool,
) -> Result<i32, AnyError> {
let mut worker = worker_factory
@ -134,7 +126,6 @@ async fn run_worker(
worker_count: Some(worker_count),
},
main_module,
permissions,
)
.await?;
if hmr {
@ -171,11 +162,9 @@ async fn serve_with_watch(
maybe_npm_install(&factory).await?;
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
let permissions = factory.create_permissions_container()?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
do_serve(worker_factory, main_module, permissions, worker_count, hmr)
do_serve(worker_factory, main_module.clone(), worker_count, hmr)
.await?;
Ok(())
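
Note the fan-out pattern `do_serve` keeps using: one OS thread per extra worker, each driving its own single-threaded async runtime, with a oneshot channel reporting the exit code back. A condensed sketch assuming plain tokio (Deno wraps this in `deno_runtime::tokio_util`):

use tokio::sync::oneshot;

fn spawn_workers(n: usize) -> Vec<oneshot::Receiver<i32>> {
    let mut channels = Vec::with_capacity(n);
    for i in 0..n {
        let (tx, rx) = oneshot::channel();
        channels.push(rx);
        std::thread::Builder::new()
            .name(format!("serve-worker-{i}"))
            .spawn(move || {
                // Each thread drives its own current-thread runtime.
                let rt = tokio::runtime::Builder::new_current_thread()
                    .enable_all()
                    .build()
                    .unwrap();
                let result = rt.block_on(async { /* run the worker */ 0 });
                let _ = tx.send(result);
            })
            .unwrap();
    }
    channels
}

fn main() {
    let receivers = spawn_workers(2);
    assert_eq!(receivers.len(), 2);
}
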

View file

@ -16,7 +16,7 @@ use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::normalize_path;
use deno_path_util::normalize_path;
use deno_task_shell::ShellCommand;
use crate::args::CliOptions;
@ -36,19 +36,7 @@ pub async fn execute_script(
let cli_options = factory.cli_options()?;
let start_dir = &cli_options.start_dir;
if !start_dir.has_deno_or_pkg_json() {
if task_flags.is_run {
bail!(
r#"deno run couldn't find deno.json(c).
If you meant to run a script, specify it, e.g., `deno run ./script.ts`.
To run a task, ensure the config file exists.
Examples:
- Script: `deno run ./script.ts`
- Task: `deno run dev`
See https://docs.deno.com/go/config"#
)
} else {
bail!("deno task couldn't find deno.json(c). See https://docs.deno.com/go/config")
}
bail!("deno task couldn't find deno.json(c). See https://docs.deno.com/go/config")
}
let force_use_pkg_json =
std::env::var_os(crate::task_runner::USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
@ -202,9 +190,7 @@ async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
custom_commands,
init_cwd: opts.cli_options.initial_cwd(),
argv: cli_options.argv(),
root_node_modules_dir: npm_resolver
.root_node_modules_path()
.map(|p| p.as_path()),
root_node_modules_dir: npm_resolver.root_node_modules_path(),
})
.await
}
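
The simplified `root_node_modules_dir` line reflects an API that now returns `Option<&Path>` directly, so the caller drops the `.map(|p| p.as_path())` adapter. The shape of that change, with illustrative types:

use std::path::{Path, PathBuf};

struct NpmResolver {
    root_node_modules: Option<PathBuf>,
}

impl NpmResolver {
    // Returning a borrowed Option<&Path> keeps the conversion on the
    // implementation side instead of at every call site.
    fn root_node_modules_path(&self) -> Option<&Path> {
        self.root_node_modules.as_deref()
    }
}

fn main() {
    let r = NpmResolver { root_node_modules: Some(PathBuf::from("node_modules")) };
    assert!(r.root_node_modules_path().is_some());
}
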

View file

@ -9,21 +9,18 @@ use crate::display;
use crate::factory::CliFactory;
use crate::file_fetcher::File;
use crate::file_fetcher::FileFetcher;
use crate::graph_container::MainModuleGraphContainer;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops;
use crate::util::extract::extract_doc_tests;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
use crate::util::path::get_extension;
use crate::util::path::is_script_ext;
use crate::util::path::mapped_specifier_for_tsc;
use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CoverageCollector;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::MediaType;
use deno_ast::SourceRangedForSpanned;
use deno_config::glob::FilePatterns;
use deno_config::glob::WalkEntry;
use deno_core::anyhow;
@ -151,6 +148,20 @@ pub enum TestMode {
Both,
}
impl TestMode {
/// Returns `true` if the test mode indicates that code snippet extraction is
/// needed.
fn needs_test_extraction(&self) -> bool {
matches!(self, Self::Documentation | Self::Both)
}
/// Returns `true` if the test mode indicates that the test should be
/// type-checked and run.
fn needs_test_run(&self) -> bool {
matches!(self, Self::Executable | Self::Both)
}
}
#[derive(Clone, Debug, Default)]
pub struct TestFilter {
pub substring: Option<String>,
@ -1174,233 +1185,6 @@ async fn wait_for_activity_to_stabilize(
})
}
fn extract_files_from_regex_blocks(
specifier: &ModuleSpecifier,
source: &str,
media_type: MediaType,
file_line_index: usize,
blocks_regex: &Regex,
lines_regex: &Regex,
) -> Result<Vec<File>, AnyError> {
let files = blocks_regex
.captures_iter(source)
.filter_map(|block| {
block.get(1)?;
let maybe_attributes: Option<Vec<_>> = block
.get(1)
.map(|attributes| attributes.as_str().split(' ').collect());
let file_media_type = if let Some(attributes) = maybe_attributes {
if attributes.contains(&"ignore") {
return None;
}
match attributes.first() {
Some(&"js") => MediaType::JavaScript,
Some(&"javascript") => MediaType::JavaScript,
Some(&"mjs") => MediaType::Mjs,
Some(&"cjs") => MediaType::Cjs,
Some(&"jsx") => MediaType::Jsx,
Some(&"ts") => MediaType::TypeScript,
Some(&"typescript") => MediaType::TypeScript,
Some(&"mts") => MediaType::Mts,
Some(&"cts") => MediaType::Cts,
Some(&"tsx") => MediaType::Tsx,
_ => MediaType::Unknown,
}
} else {
media_type
};
if file_media_type == MediaType::Unknown {
return None;
}
let line_offset = source[0..block.get(0).unwrap().start()]
.chars()
.filter(|c| *c == '\n')
.count();
let line_count = block.get(0).unwrap().as_str().split('\n').count();
let body = block.get(2).unwrap();
let text = body.as_str();
// TODO(caspervonb) generate an inline source map
let mut file_source = String::new();
for line in lines_regex.captures_iter(text) {
let text = line.get(1).unwrap();
writeln!(file_source, "{}", text.as_str()).unwrap();
}
let file_specifier = ModuleSpecifier::parse(&format!(
"{}${}-{}",
specifier,
file_line_index + line_offset + 1,
file_line_index + line_offset + line_count + 1,
))
.unwrap();
let file_specifier =
mapped_specifier_for_tsc(&file_specifier, file_media_type)
.map(|s| ModuleSpecifier::parse(&s).unwrap())
.unwrap_or(file_specifier);
Some(File {
specifier: file_specifier,
maybe_headers: None,
source: file_source.into_bytes().into(),
})
})
.collect();
Ok(files)
}
fn extract_files_from_source_comments(
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> Result<Vec<File>, AnyError> {
let parsed_source = deno_ast::parse_module(deno_ast::ParseParams {
specifier: specifier.clone(),
text: source,
media_type,
capture_tokens: false,
maybe_syntax: None,
scope_analysis: false,
})?;
let comments = parsed_source.comments().get_vec();
let blocks_regex = lazy_regex::regex!(r"```([^\r\n]*)\r?\n([\S\s]*?)```");
let lines_regex = lazy_regex::regex!(r"(?:\* ?)(?:\# ?)?(.*)");
let files = comments
.iter()
.filter(|comment| {
if comment.kind != CommentKind::Block || !comment.text.starts_with('*') {
return false;
}
true
})
.flat_map(|comment| {
extract_files_from_regex_blocks(
specifier,
&comment.text,
media_type,
parsed_source.text_info_lazy().line_index(comment.start()),
blocks_regex,
lines_regex,
)
})
.flatten()
.collect();
Ok(files)
}
fn extract_files_from_fenced_blocks(
specifier: &ModuleSpecifier,
source: &str,
media_type: MediaType,
) -> Result<Vec<File>, AnyError> {
// The pattern matches code blocks as well as anything in HTML comment syntax,
// but it stores the latter without any capturing groups. This way, a simple
// check can be done to see if a block is inside a comment (and skip typechecking)
// or not by checking for the presence of capturing groups in the matches.
let blocks_regex =
lazy_regex::regex!(r"(?s)<!--.*?-->|```([^\r\n]*)\r?\n([\S\s]*?)```");
let lines_regex = lazy_regex::regex!(r"(?:\# ?)?(.*)");
extract_files_from_regex_blocks(
specifier,
source,
media_type,
/* file line index */ 0,
blocks_regex,
lines_regex,
)
}
async fn fetch_inline_files(
file_fetcher: &FileFetcher,
specifiers: Vec<ModuleSpecifier>,
) -> Result<Vec<File>, AnyError> {
let mut files = Vec::new();
for specifier in specifiers {
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await?
.into_text_decoded()?;
let inline_files = if file.media_type == MediaType::Unknown {
extract_files_from_fenced_blocks(
&file.specifier,
&file.source,
file.media_type,
)
} else {
extract_files_from_source_comments(
&file.specifier,
file.source,
file.media_type,
)
};
files.extend(inline_files?);
}
Ok(files)
}
/// Type check a collection of module and document specifiers.
pub async fn check_specifiers(
file_fetcher: &FileFetcher,
main_graph_container: &Arc<MainModuleGraphContainer>,
specifiers: Vec<(ModuleSpecifier, TestMode)>,
) -> Result<(), AnyError> {
let inline_files = fetch_inline_files(
file_fetcher,
specifiers
.iter()
.filter_map(|(specifier, mode)| {
if *mode != TestMode::Executable {
Some(specifier.clone())
} else {
None
}
})
.collect(),
)
.await?;
let mut module_specifiers = specifiers
.into_iter()
.filter_map(|(specifier, mode)| {
if mode != TestMode::Documentation {
Some(specifier)
} else {
None
}
})
.collect::<Vec<_>>();
if !inline_files.is_empty() {
module_specifiers
.extend(inline_files.iter().map(|file| file.specifier.clone()));
for file in inline_files {
file_fetcher.insert_memory_files(file);
}
}
main_graph_container
.check_specifiers(&module_specifiers)
.await?;
Ok(())
}
static HAS_TEST_RUN_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false);
/// Test a collection of specifiers with test modes concurrently.
@ -1788,14 +1572,22 @@ pub async fn run_tests(
return Err(generic_error("No test modules found"));
}
let doc_tests = get_doc_tests(&specifiers_with_mode, file_fetcher).await?;
let specifiers_for_typecheck_and_test =
get_target_specifiers(specifiers_with_mode, &doc_tests);
for doc_test in doc_tests {
file_fetcher.insert_memory_files(doc_test);
}
let main_graph_container = factory.main_module_graph_container().await?;
check_specifiers(
file_fetcher,
main_graph_container,
specifiers_with_mode.clone(),
)
.await?;
// Typecheck
main_graph_container
.check_specifiers(
&specifiers_for_typecheck_and_test,
cli_options.ext_flag().as_ref(),
)
.await?;
if workspace_test_options.no_run {
return Ok(());
@ -1804,17 +1596,12 @@ pub async fn run_tests(
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
// Run tests
test_specifiers(
worker_factory,
&permissions,
permission_desc_parser,
specifiers_with_mode
.into_iter()
.filter_map(|(s, m)| match m {
TestMode::Documentation => None,
_ => Some(s),
})
.collect(),
specifiers_for_typecheck_and_test,
TestSpecifiersOptions {
cwd: Url::from_directory_path(cli_options.initial_cwd()).map_err(
|_| {
@ -1949,8 +1736,6 @@ pub async fn run_tests_with_watch(
test_modules.clone()
};
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
let specifiers_with_mode = fetch_specifiers_with_test_mode(
&cli_options,
file_fetcher,
@ -1962,30 +1747,37 @@ pub async fn run_tests_with_watch(
.filter(|(specifier, _)| test_modules_to_reload.contains(specifier))
.collect::<Vec<(ModuleSpecifier, TestMode)>>();
let doc_tests =
get_doc_tests(&specifiers_with_mode, file_fetcher).await?;
let specifiers_for_typecheck_and_test =
get_target_specifiers(specifiers_with_mode, &doc_tests);
for doc_test in doc_tests {
file_fetcher.insert_memory_files(doc_test);
}
let main_graph_container =
factory.main_module_graph_container().await?;
check_specifiers(
file_fetcher,
main_graph_container,
specifiers_with_mode.clone(),
)
.await?;
// Typecheck
main_graph_container
.check_specifiers(
&specifiers_for_typecheck_and_test,
cli_options.ext_flag().as_ref(),
)
.await?;
if workspace_test_options.no_run {
return Ok(());
}
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
test_specifiers(
worker_factory,
&permissions,
permission_desc_parser,
specifiers_with_mode
.into_iter()
.filter_map(|(s, m)| match m {
TestMode::Documentation => None,
_ => Some(s),
})
.collect(),
specifiers_for_typecheck_and_test,
TestSpecifiersOptions {
cwd: Url::from_directory_path(cli_options.initial_cwd()).map_err(
|_| {
@ -2020,6 +1812,38 @@ pub async fn run_tests_with_watch(
Ok(())
}
/// Extracts doc tests from files specified by the given specifiers.
async fn get_doc_tests(
specifiers_with_mode: &[(Url, TestMode)],
file_fetcher: &FileFetcher,
) -> Result<Vec<File>, AnyError> {
let specifiers_needing_extraction = specifiers_with_mode
.iter()
.filter(|(_, mode)| mode.needs_test_extraction())
.map(|(s, _)| s);
let mut doc_tests = Vec::new();
for s in specifiers_needing_extraction {
let file = file_fetcher.fetch_bypass_permissions(s).await?;
doc_tests.extend(extract_doc_tests(file)?);
}
Ok(doc_tests)
}
/// Get the list of specifiers that we need to type-check and run tests on.
/// The result includes "pseudo specifiers" for doc tests.
fn get_target_specifiers(
specifiers_with_mode: Vec<(Url, TestMode)>,
doc_tests: &[File],
) -> Vec<Url> {
specifiers_with_mode
.into_iter()
.filter_map(|(s, mode)| mode.needs_test_run().then_some(s))
.chain(doc_tests.iter().map(|d| d.specifier.clone()))
.collect()
}
/// Tracks failures for the `--fail-fast` argument in
/// order to tell when to stop running tests.
#[derive(Clone, Default)]
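
The regex-based extraction removed above now lives behind `util::extract::extract_doc_tests`, but the core trick is worth noting: one alternation matches both HTML comments (captureless) and fenced blocks (with capture groups), so comment-wrapped blocks drop out naturally. A compact sketch of that, assuming the `regex` crate:

use regex::Regex;

fn extract_fenced_blocks(source: &str) -> Vec<(String, String)> {
    // HTML comments match the captureless branch, so blocks inside them
    // yield no groups and are skipped by the filter below.
    let blocks =
        Regex::new(r"(?s)<!--.*?-->|```([^\r\n]*)\r?\n([\S\s]*?)```").unwrap();
    blocks
        .captures_iter(source)
        .filter_map(|c| {
            let lang = c.get(1)?.as_str().to_string();
            let body = c.get(2)?.as_str().to_string();
            Some((lang, body))
        })
        .collect()
}

fn main() {
    let md = "```ts\nconsole.log(1);\n```";
    assert_eq!(extract_fenced_blocks(md).len(), 1);
}
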

View file

@ -516,6 +516,7 @@ delete Object.prototype.__proto__;
/** @typedef {{
* ls: ts.LanguageService & { [k:string]: any },
* compilerOptions: ts.CompilerOptions,
* forceEnabledVerbatimModuleSyntax: boolean,
* }} LanguageServiceEntry */
/** @type {{ unscoped: LanguageServiceEntry, byScope: Map<string, LanguageServiceEntry> }} */
const languageServiceEntries = {
@ -1025,7 +1026,7 @@ delete Object.prototype.__proto__;
: ts.sortAndDeduplicateDiagnostics(
checkFiles.map((s) => program.getSemanticDiagnostics(s)).flat(),
)),
].filter(filterMapDiagnostic);
].filter(filterMapDiagnostic.bind(null, false));
// emit the tsbuildinfo file
// @ts-ignore: emitBuildInfo is not exposed (https://github.com/microsoft/TypeScript/issues/49871)
@ -1040,11 +1041,28 @@ delete Object.prototype.__proto__;
debug("<<< exec stop");
}
/** @param {ts.Diagnostic} diagnostic */
function filterMapDiagnostic(diagnostic) {
/**
* @param {boolean} isLsp
* @param {ts.Diagnostic} diagnostic
*/
function filterMapDiagnostic(isLsp, diagnostic) {
if (IGNORED_DIAGNOSTICS.includes(diagnostic.code)) {
return false;
}
if (isLsp) {
// TS1484: `...` is a type and must be imported using a type-only import when 'verbatimModuleSyntax' is enabled.
// We force-enable `verbatimModuleSyntax` in the LSP so the `type`
// modifier is used when auto-importing types. But we don't want this
// diagnostic unless it was explicitly enabled by the user.
if (diagnostic.code == 1484) {
const entry = (lastRequestScope
? languageServiceEntries.byScope.get(lastRequestScope)
: null) ?? languageServiceEntries.unscoped;
if (entry.forceEnabledVerbatimModuleSyntax) {
return false;
}
}
}
// make the diagnostic for using an `export =` in an es module a warning
if (diagnostic.code === 1203) {
diagnostic.category = ts.DiagnosticCategory.Warning;
@ -1136,14 +1154,17 @@ delete Object.prototype.__proto__;
"moduleResolution": "NodeNext",
"moduleDetection": "force",
"noEmit": true,
"noImplicitOverride": true,
"resolveJsonModule": true,
"strict": true,
"target": "esnext",
"useDefineForClassFields": true,
"verbatimModuleSyntax": true,
"jsx": "react",
"jsxFactory": "React.createElement",
"jsxFragmentFactory": "React.Fragment",
}),
forceEnabledVerbatimModuleSyntax: true,
};
setLogDebug(enableDebugLogging, "TSLS");
debug("serverInit()");
@ -1209,8 +1230,17 @@ delete Object.prototype.__proto__;
const ls = oldEntry
? oldEntry.ls
: ts.createLanguageService(host, documentRegistry);
let forceEnabledVerbatimModuleSyntax = false;
if (!config["verbatimModuleSyntax"]) {
config["verbatimModuleSyntax"] = true;
forceEnabledVerbatimModuleSyntax = true;
}
const compilerOptions = lspTsConfigToCompilerOptions(config);
newByScope.set(scope, { ls, compilerOptions });
newByScope.set(scope, {
ls,
compilerOptions,
forceEnabledVerbatimModuleSyntax,
});
languageServiceEntries.byScope.delete(scope);
}
for (const oldEntry of languageServiceEntries.byScope.values()) {
@ -1275,7 +1305,7 @@ delete Object.prototype.__proto__;
...ls.getSemanticDiagnostics(specifier),
...ls.getSuggestionDiagnostics(specifier),
...ls.getSyntacticDiagnostics(specifier),
].filter(filterMapDiagnostic));
].filter(filterMapDiagnostic.bind(null, true)));
}
return respond(id, diagnosticMap);
} catch (e) {

View file

@ -140,7 +140,9 @@ impl Diagnostic {
pub fn include_when_remote(&self) -> bool {
/// TS6133: value is declared but its value is never read (noUnusedParameters and noUnusedLocals)
const TS6133: u64 = 6133;
self.code != TS6133
/// TS4114: This member must have an 'override' modifier because it overrides a member in the base class 'X'.
const TS4114: u64 = 4114;
!matches!(self.code, TS6133 | TS4114)
}
fn fmt_category_and_code(&self, f: &mut fmt::Formatter) -> fmt::Result {

View file

@ -10,7 +10,7 @@
*
* @category Platform
*/
declare interface ImportMeta {
interface ImportMeta {
/** A string representation of the fully qualified module URL. When the
* module is loaded locally, the value will be a file URL (e.g.
* `file:///path/module.ts`).
@ -89,7 +89,7 @@ declare interface ImportMeta {
*
* @category Performance
*/
declare interface Performance {
interface Performance {
/** Stores a timestamp with the associated name (a "mark"). */
mark(markName: string, options?: PerformanceMarkOptions): PerformanceMark;
@ -109,7 +109,7 @@ declare interface Performance {
*
* @category Performance
*/
declare interface PerformanceMarkOptions {
interface PerformanceMarkOptions {
/** Metadata to be included in the mark. */
// deno-lint-ignore no-explicit-any
detail?: any;
@ -126,7 +126,7 @@ declare interface PerformanceMarkOptions {
*
* @category Performance
*/
declare interface PerformanceMeasureOptions {
interface PerformanceMeasureOptions {
/** Metadata to be included in the measure. */
// deno-lint-ignore no-explicit-any
detail?: any;
@ -317,6 +317,7 @@ declare namespace Deno {
*
* @category Errors */
export class NotADirectory extends Error {}
/**
* Raised when trying to perform an operation while the relevant Deno
* permission (like `--allow-read`) has not been granted.
@ -326,6 +327,8 @@ declare namespace Deno {
*
* @category Errors */
export class NotCapable extends Error {}
export {}; // only export exports
}
/** The current process ID of this instance of the Deno CLI.
@ -5407,7 +5410,9 @@ declare namespace Deno {
*
* @category FFI
*/
export type NativeStructType = { readonly struct: readonly NativeType[] };
export interface NativeStructType {
readonly struct: readonly NativeType[];
}
/**
* @category FFI
@ -5700,7 +5705,9 @@ declare namespace Deno {
*
* @category FFI
*/
export type PointerObject<T = unknown> = { [brand]: T };
export interface PointerObject<T = unknown> {
[brand]: T;
}
/** Pointers are represented either with a {@linkcode PointerObject}
* object or a `null` if the pointer is null.
@ -6034,9 +6041,11 @@ declare namespace Deno {
*
* @category Fetch
*/
export interface HttpClient extends Disposable {
export class HttpClient implements Disposable {
/** Close the HTTP client. */
close(): void;
[Symbol.dispose](): void;
}
/**
@ -6137,4 +6146,6 @@ declare namespace Deno {
| CreateHttpClientOptions
| (CreateHttpClientOptions & TlsCertifiedKeyPem),
): HttpClient;
export {}; // only export exports
}

View file

@ -13,6 +13,7 @@
/// <reference lib="deno.fetch" />
/// <reference lib="deno.websocket" />
/// <reference lib="deno.crypto" />
/// <reference lib="deno.ns" />
/** @category WASM */
declare namespace WebAssembly {
@ -412,7 +413,7 @@ declare function clearInterval(id?: number): void;
declare function clearTimeout(id?: number): void;
/** @category Platform */
declare interface VoidFunction {
interface VoidFunction {
(): void;
}
@ -444,7 +445,7 @@ declare function queueMicrotask(func: VoidFunction): void;
declare function dispatchEvent(event: Event): boolean;
/** @category Platform */
declare interface DOMStringList {
interface DOMStringList {
/** Returns the number of strings in strings. */
readonly length: number;
/** Returns true if strings contains string, and false otherwise. */
@ -455,13 +456,13 @@ declare interface DOMStringList {
}
/** @category Platform */
declare type BufferSource = ArrayBufferView | ArrayBuffer;
type BufferSource = ArrayBufferView | ArrayBuffer;
/** @category I/O */
declare var console: Console;
/** @category Events */
declare interface ErrorEventInit extends EventInit {
interface ErrorEventInit extends EventInit {
message?: string;
filename?: string;
lineno?: number;
@ -470,7 +471,7 @@ declare interface ErrorEventInit extends EventInit {
}
/** @category Events */
declare interface ErrorEvent extends Event {
interface ErrorEvent extends Event {
readonly message: string;
readonly filename: string;
readonly lineno: number;
@ -485,13 +486,13 @@ declare var ErrorEvent: {
};
/** @category Events */
declare interface PromiseRejectionEventInit extends EventInit {
interface PromiseRejectionEventInit extends EventInit {
promise: Promise<any>;
reason?: any;
}
/** @category Events */
declare interface PromiseRejectionEvent extends Event {
interface PromiseRejectionEvent extends Event {
readonly promise: Promise<any>;
readonly reason: any;
}
@ -506,24 +507,24 @@ declare var PromiseRejectionEvent: {
};
/** @category Workers */
declare interface AbstractWorkerEventMap {
interface AbstractWorkerEventMap {
"error": ErrorEvent;
}
/** @category Workers */
declare interface WorkerEventMap extends AbstractWorkerEventMap {
interface WorkerEventMap extends AbstractWorkerEventMap {
"message": MessageEvent;
"messageerror": MessageEvent;
}
/** @category Workers */
declare interface WorkerOptions {
interface WorkerOptions {
type?: "classic" | "module";
name?: string;
}
/** @category Workers */
declare interface Worker extends EventTarget {
interface Worker extends EventTarget {
onerror: (this: Worker, e: ErrorEvent) => any | null;
onmessage: (this: Worker, e: MessageEvent) => any | null;
onmessageerror: (this: Worker, e: MessageEvent) => any | null;
@ -559,10 +560,10 @@ declare var Worker: {
};
/** @category Performance */
declare type PerformanceEntryList = PerformanceEntry[];
type PerformanceEntryList = PerformanceEntry[];
/** @category Performance */
declare interface Performance extends EventTarget {
interface Performance extends EventTarget {
/** Returns a timestamp representing the start of the performance measurement. */
readonly timeOrigin: number;
@ -616,7 +617,7 @@ declare var Performance: {
declare var performance: Performance;
/** @category Performance */
declare interface PerformanceMarkOptions {
interface PerformanceMarkOptions {
/** Metadata to be included in the mark. */
detail?: any;
@ -625,7 +626,7 @@ declare interface PerformanceMarkOptions {
}
/** @category Performance */
declare interface PerformanceMeasureOptions {
interface PerformanceMeasureOptions {
/** Metadata to be included in the measure. */
detail?: any;
@ -647,7 +648,7 @@ declare interface PerformanceMeasureOptions {
*
* @category Performance
*/
declare interface PerformanceEntry {
interface PerformanceEntry {
readonly duration: number;
readonly entryType: string;
readonly name: string;
@ -674,7 +675,7 @@ declare var PerformanceEntry: {
*
* @category Performance
*/
declare interface PerformanceMark extends PerformanceEntry {
interface PerformanceMark extends PerformanceEntry {
readonly detail: any;
readonly entryType: "mark";
}
@ -698,7 +699,7 @@ declare var PerformanceMark: {
*
* @category Performance
*/
declare interface PerformanceMeasure extends PerformanceEntry {
interface PerformanceMeasure extends PerformanceEntry {
readonly detail: any;
readonly entryType: "measure";
}
@ -716,12 +717,12 @@ declare var PerformanceMeasure: {
};
/** @category Events */
declare interface CustomEventInit<T = any> extends EventInit {
interface CustomEventInit<T = any> extends EventInit {
detail?: T;
}
/** @category Events */
declare interface CustomEvent<T = any> extends Event {
interface CustomEvent<T = any> extends Event {
/** Returns any custom data event was created with. Typically used for
* synthetic events. */
readonly detail: T;
@ -734,9 +735,21 @@ declare var CustomEvent: {
};
/** @category Platform */
declare interface ErrorConstructor {
interface ErrorConstructor {
/** See https://v8.dev/docs/stack-trace-api#stack-trace-collection-for-custom-exceptions. */
captureStackTrace(error: Object, constructor?: Function): void;
// TODO(nayeemrmn): Support `Error.prepareStackTrace()`. We currently use this
// internally in a way that makes it unavailable for users.
}
/** The [Fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)
* which also supports setting a {@linkcode Deno.HttpClient} which provides a
* way to connect via proxies and use custom TLS certificates.
*
* @tags allow-net, allow-read
* @category Fetch
*/
declare function fetch(
input: Request | URL | string,
init?: RequestInit & { client: Deno.HttpClient },
): Promise<Response>;

View file

@ -28,9 +28,13 @@ declare namespace Deno {
*/
export class UnsafeWindowSurface {
constructor(
system: "cocoa" | "win32" | "x11" | "wayland",
windowHandle: Deno.PointerValue<unknown>,
displayHandle: Deno.PointerValue<unknown>,
options: {
system: "cocoa" | "win32" | "x11" | "wayland";
windowHandle: Deno.PointerValue<unknown>;
displayHandle: Deno.PointerValue<unknown>;
width: number;
height: number;
},
);
getContext(context: "webgpu"): GPUCanvasContext;
present(): void;
@ -208,7 +212,7 @@ declare namespace Deno {
* @category Cloud
* @experimental
*/
export function openKv(path?: string): Promise<Deno.Kv>;
export function openKv(path?: string): Promise<Kv>;
/** **UNSTABLE**: New API, yet to be vetted.
*
@ -471,7 +475,11 @@ declare namespace Deno {
* @category Cloud
* @experimental
*/
export type KvEntry<T> = { key: KvKey; value: T; versionstamp: string };
export interface KvEntry<T> {
key: KvKey;
value: T;
versionstamp: string;
}
/**
* **UNSTABLE**: New API, yet to be vetted.
@ -676,7 +684,7 @@ declare namespace Deno {
value: unknown,
options?: {
delay?: number;
keysIfUndelivered?: Deno.KvKey[];
keysIfUndelivered?: KvKey[];
backoffSchedule?: number[];
},
): this;
@ -907,7 +915,7 @@ declare namespace Deno {
value: unknown,
options?: {
delay?: number;
keysIfUndelivered?: Deno.KvKey[];
keysIfUndelivered?: KvKey[];
backoffSchedule?: number[];
},
): Promise<KvCommitResult>;
@ -1037,10 +1045,10 @@ declare namespace Deno {
* @category Jupyter
* @experimental
*/
export type VegaObject = {
export interface VegaObject {
$schema: string;
[key: string]: unknown;
};
}
/**
* A collection of supported media types and data for Jupyter frontends.
@ -1048,7 +1056,7 @@ declare namespace Deno {
* @category Jupyter
* @experimental
*/
export type MediaBundle = {
export interface MediaBundle {
"text/plain"?: string;
"text/html"?: string;
"image/svg+xml"?: string;
@ -1074,7 +1082,7 @@ declare namespace Deno {
// Must support a catch all for custom media types / mimetypes
[key: string]: string | object | undefined;
};
}
/**
* @category Jupyter
@ -1086,9 +1094,9 @@ declare namespace Deno {
* @category Jupyter
* @experimental
*/
export type Displayable = {
export interface Displayable {
[$display]: () => MediaBundle | Promise<MediaBundle>;
};
}
/**
* Display function for Jupyter Deno Kernel.
@ -1213,30 +1221,19 @@ declare namespace Deno {
buffers?: Uint8Array[];
},
): Promise<void>;
}
}
/** **UNSTABLE**: New API, yet to be vetted.
*
* The [Fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)
* which also supports setting a {@linkcode Deno.HttpClient} which provides a
* way to connect via proxies and use custom TLS certificates.
*
* @tags allow-net, allow-read
* @category Fetch
* @experimental
*/
declare function fetch(
input: Request | URL | string,
init?: RequestInit & { client: Deno.HttpClient },
): Promise<Response>;
export {}; // only export exports
}
export {}; // only export exports
}
/** **UNSTABLE**: New API, yet to be vetted.
*
* @category Workers
* @experimental
*/
declare interface WorkerOptions {
interface WorkerOptions {
/** **UNSTABLE**: New API, yet to be vetted.
*
* Configure permissions options to change the level of access the worker will
@ -1277,7 +1274,7 @@ declare interface WorkerOptions {
* @category WebSockets
* @experimental
*/
declare interface WebSocketStreamOptions {
interface WebSocketStreamOptions {
protocols?: string[];
signal?: AbortSignal;
headers?: HeadersInit;
@ -1288,7 +1285,7 @@ declare interface WebSocketStreamOptions {
* @category WebSockets
* @experimental
*/
declare interface WebSocketConnection {
interface WebSocketConnection {
readable: ReadableStream<string | Uint8Array>;
writable: WritableStream<string | Uint8Array>;
extensions: string;
@ -1300,7 +1297,7 @@ declare interface WebSocketConnection {
* @category WebSockets
* @experimental
*/
declare interface WebSocketCloseInfo {
interface WebSocketCloseInfo {
code?: number;
reason?: string;
}
@ -1311,7 +1308,7 @@ declare interface WebSocketCloseInfo {
* @category WebSockets
* @experimental
*/
declare interface WebSocketStream {
interface WebSocketStream {
url: string;
opened: Promise<WebSocketConnection>;
closed: Promise<WebSocketCloseInfo>;
@ -1335,7 +1332,7 @@ declare var WebSocketStream: {
* @category WebSockets
* @experimental
*/
declare interface WebSocketError extends DOMException {
interface WebSocketError extends DOMException {
readonly closeCode: number;
readonly reason: string;
}
@ -2895,7 +2892,7 @@ declare namespace Temporal {
* @category Temporal
* @experimental
*/
declare interface Date {
interface Date {
toTemporalInstant(): Temporal.Instant;
}
@ -2997,7 +2994,7 @@ declare namespace Intl {
* @category Platform
* @experimental
*/
declare interface Float16Array {
interface Float16Array {
/**
* The size in bytes of each element in the array.
*/
@ -3312,7 +3309,7 @@ declare interface Float16Array {
* @category Platform
* @experimental
*/
declare interface Float16ArrayConstructor {
interface Float16ArrayConstructor {
readonly prototype: Float16Array;
new (length: number): Float16Array;
new (array: ArrayLike<number> | ArrayBufferLike): Float16Array;
@ -3361,7 +3358,7 @@ declare var Float16Array: Float16ArrayConstructor;
* @category Platform
* @experimental
*/
declare interface Float16Array {
interface Float16Array {
[Symbol.iterator](): IterableIterator<number>;
/**
* Returns an array of key, value pairs for every entry in the array
@ -3381,7 +3378,7 @@ declare interface Float16Array {
* @category Platform
* @experimental
*/
declare interface Float16Constructor {
interface Float16Constructor {
new (elements: Iterable<number>): Float16Array;
/**
@ -3401,7 +3398,7 @@ declare interface Float16Constructor {
* @category Platform
* @experimental
*/
declare interface Float16Array {
interface Float16Array {
readonly [Symbol.toStringTag]: "Float16Array";
}
@ -3409,7 +3406,7 @@ declare interface Float16Array {
* @category Platform
* @experimental
*/
declare interface Float16Array {
interface Float16Array {
/**
* Determines whether an array includes a certain element, returning true or false as appropriate.
* @param searchElement The element to search for.
@ -3422,7 +3419,7 @@ declare interface Float16Array {
* @category Platform
* @experimental
*/
declare interface Float16ArrayConstructor {
interface Float16ArrayConstructor {
new (): Float16Array;
}
@ -3430,7 +3427,7 @@ declare interface Float16ArrayConstructor {
* @category Platform
* @experimental
*/
declare interface Float16Array {
interface Float16Array {
/**
* Returns the item located at the specified index.
* @param index The zero-based index of the desired code unit. A negative index will count back from the last item.
@ -3442,7 +3439,7 @@ declare interface Float16Array {
* @category Platform
* @experimental
*/
declare interface Float16Array {
interface Float16Array {
/**
* Returns the value of the last element in the array where predicate is true, and undefined
* otherwise.
@ -3518,7 +3515,7 @@ declare interface Float16Array {
* @category Platform
* @experimental
*/
declare interface DataView {
interface DataView {
/**
* Gets the Float16 value at the specified byte offset from the start of the view. There is
* no alignment constraint; multi-byte values may be fetched from any offset.

View file

@ -8,14 +8,14 @@
/// <reference lib="deno.cache" />
/** @category Platform */
declare interface WindowEventMap {
interface WindowEventMap {
"error": ErrorEvent;
"unhandledrejection": PromiseRejectionEvent;
"rejectionhandled": PromiseRejectionEvent;
}
/** @category Platform */
declare interface Window extends EventTarget {
interface Window extends EventTarget {
readonly window: Window & typeof globalThis;
readonly self: Window & typeof globalThis;
onerror: ((this: Window, ev: ErrorEvent) => any) | null;
@ -105,7 +105,7 @@ declare var sessionStorage: Storage;
declare var caches: CacheStorage;
/** @category Platform */
declare interface Navigator {
interface Navigator {
readonly gpu: GPU;
readonly hardwareConcurrency: number;
readonly userAgent: string;
@ -221,7 +221,7 @@ declare function removeEventListener(
*
* @category Platform
*/
declare interface Location {
interface Location {
/** Returns a DOMStringList object listing the origins of the ancestor
* browsing contexts, from the parent browsing context to the top-level
* browsing context.

View file

@ -6,12 +6,12 @@
/// <reference lib="esnext" />
/** @category GPU */
declare interface GPUObjectBase {
interface GPUObjectBase {
label: string;
}
/** @category GPU */
declare interface GPUObjectDescriptorBase {
interface GPUObjectDescriptorBase {
label?: string;
}
@ -84,13 +84,13 @@ declare class GPU {
}
/** @category GPU */
declare interface GPURequestAdapterOptions {
interface GPURequestAdapterOptions {
powerPreference?: GPUPowerPreference;
forceFallbackAdapter?: boolean;
}
/** @category GPU */
declare type GPUPowerPreference = "low-power" | "high-performance";
type GPUPowerPreference = "low-power" | "high-performance";
/** @category GPU */
declare class GPUAdapter {
@ -103,13 +103,13 @@ declare class GPUAdapter {
}
/** @category GPU */
declare interface GPUDeviceDescriptor extends GPUObjectDescriptorBase {
interface GPUDeviceDescriptor extends GPUObjectDescriptorBase {
requiredFeatures?: GPUFeatureName[];
requiredLimits?: Record<string, number>;
}
/** @category GPU */
declare type GPUFeatureName =
type GPUFeatureName =
| "depth-clip-control"
| "depth32float-stencil8"
| "pipeline-statistics-query"
@ -206,20 +206,20 @@ declare class GPUBuffer implements GPUObjectBase {
}
/** @category GPU */
declare type GPUBufferMapState = "unmapped" | "pending" | "mapped";
type GPUBufferMapState = "unmapped" | "pending" | "mapped";
/** @category GPU */
declare interface GPUBufferDescriptor extends GPUObjectDescriptorBase {
interface GPUBufferDescriptor extends GPUObjectDescriptorBase {
size: number;
usage: GPUBufferUsageFlags;
mappedAtCreation?: boolean;
}
/** @category GPU */
declare type GPUBufferUsageFlags = number;
type GPUBufferUsageFlags = number;
/** @category GPU */
declare type GPUFlagsConstant = number;
type GPUFlagsConstant = number;
/** @category GPU */
declare class GPUBufferUsage {
@ -236,7 +236,7 @@ declare class GPUBufferUsage {
}
/** @category GPU */
declare type GPUMapModeFlags = number;
type GPUMapModeFlags = number;
/** @category GPU */
declare class GPUMapMode {
@ -262,7 +262,7 @@ declare class GPUTexture implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUTextureDescriptor extends GPUObjectDescriptorBase {
interface GPUTextureDescriptor extends GPUObjectDescriptorBase {
size: GPUExtent3D;
mipLevelCount?: number;
sampleCount?: number;
@ -273,10 +273,10 @@ declare interface GPUTextureDescriptor extends GPUObjectDescriptorBase {
}
/** @category GPU */
declare type GPUTextureDimension = "1d" | "2d" | "3d";
type GPUTextureDimension = "1d" | "2d" | "3d";
/** @category GPU */
declare type GPUTextureUsageFlags = number;
type GPUTextureUsageFlags = number;
/** @category GPU */
declare class GPUTextureUsage {
@ -293,7 +293,7 @@ declare class GPUTextureView implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUTextureViewDescriptor extends GPUObjectDescriptorBase {
interface GPUTextureViewDescriptor extends GPUObjectDescriptorBase {
format?: GPUTextureFormat;
dimension?: GPUTextureViewDimension;
aspect?: GPUTextureAspect;
@ -304,7 +304,7 @@ declare interface GPUTextureViewDescriptor extends GPUObjectDescriptorBase {
}
/** @category GPU */
declare type GPUTextureViewDimension =
type GPUTextureViewDimension =
| "1d"
| "2d"
| "2d-array"
@ -313,10 +313,10 @@ declare type GPUTextureViewDimension =
| "3d";
/** @category GPU */
declare type GPUTextureAspect = "all" | "stencil-only" | "depth-only";
type GPUTextureAspect = "all" | "stencil-only" | "depth-only";
/** @category GPU */
declare type GPUTextureFormat =
type GPUTextureFormat =
| "r8unorm"
| "r8snorm"
| "r8uint"
@ -419,7 +419,7 @@ declare class GPUSampler implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUSamplerDescriptor extends GPUObjectDescriptorBase {
interface GPUSamplerDescriptor extends GPUObjectDescriptorBase {
addressModeU?: GPUAddressMode;
addressModeV?: GPUAddressMode;
addressModeW?: GPUAddressMode;
@ -433,16 +433,16 @@ declare interface GPUSamplerDescriptor extends GPUObjectDescriptorBase {
}
/** @category GPU */
declare type GPUAddressMode = "clamp-to-edge" | "repeat" | "mirror-repeat";
type GPUAddressMode = "clamp-to-edge" | "repeat" | "mirror-repeat";
/** @category GPU */
declare type GPUFilterMode = "nearest" | "linear";
type GPUFilterMode = "nearest" | "linear";
/** @category GPU */
declare type GPUMipmapFilterMode = "nearest" | "linear";
type GPUMipmapFilterMode = "nearest" | "linear";
/** @category GPU */
declare type GPUCompareFunction =
type GPUCompareFunction =
| "never"
| "less"
| "equal"
@ -458,12 +458,12 @@ declare class GPUBindGroupLayout implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUBindGroupLayoutDescriptor extends GPUObjectDescriptorBase {
interface GPUBindGroupLayoutDescriptor extends GPUObjectDescriptorBase {
entries: GPUBindGroupLayoutEntry[];
}
/** @category GPU */
declare interface GPUBindGroupLayoutEntry {
interface GPUBindGroupLayoutEntry {
binding: number;
visibility: GPUShaderStageFlags;
@ -474,7 +474,7 @@ declare interface GPUBindGroupLayoutEntry {
}
/** @category GPU */
declare type GPUShaderStageFlags = number;
type GPUShaderStageFlags = number;
/** @category GPU */
declare class GPUShaderStage {
@ -484,35 +484,35 @@ declare class GPUShaderStage {
}
/** @category GPU */
declare interface GPUBufferBindingLayout {
interface GPUBufferBindingLayout {
type?: GPUBufferBindingType;
hasDynamicOffset?: boolean;
minBindingSize?: number;
}
/** @category GPU */
declare type GPUBufferBindingType = "uniform" | "storage" | "read-only-storage";
type GPUBufferBindingType = "uniform" | "storage" | "read-only-storage";
/** @category GPU */
declare interface GPUSamplerBindingLayout {
interface GPUSamplerBindingLayout {
type?: GPUSamplerBindingType;
}
/** @category GPU */
declare type GPUSamplerBindingType =
type GPUSamplerBindingType =
| "filtering"
| "non-filtering"
| "comparison";
/** @category GPU */
declare interface GPUTextureBindingLayout {
interface GPUTextureBindingLayout {
sampleType?: GPUTextureSampleType;
viewDimension?: GPUTextureViewDimension;
multisampled?: boolean;
}
/** @category GPU */
declare type GPUTextureSampleType =
type GPUTextureSampleType =
| "float"
| "unfilterable-float"
| "depth"
@ -520,13 +520,13 @@ declare type GPUTextureSampleType =
| "uint";
/** @category GPU */
declare type GPUStorageTextureAccess =
type GPUStorageTextureAccess =
| "write-only"
| "read-only"
| "read-write";
/** @category GPU */
declare interface GPUStorageTextureBindingLayout {
interface GPUStorageTextureBindingLayout {
access: GPUStorageTextureAccess;
format: GPUTextureFormat;
viewDimension?: GPUTextureViewDimension;
@ -538,25 +538,25 @@ declare class GPUBindGroup implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUBindGroupDescriptor extends GPUObjectDescriptorBase {
interface GPUBindGroupDescriptor extends GPUObjectDescriptorBase {
layout: GPUBindGroupLayout;
entries: GPUBindGroupEntry[];
}
/** @category GPU */
declare type GPUBindingResource =
type GPUBindingResource =
| GPUSampler
| GPUTextureView
| GPUBufferBinding;
/** @category GPU */
declare interface GPUBindGroupEntry {
interface GPUBindGroupEntry {
binding: number;
resource: GPUBindingResource;
}
/** @category GPU */
declare interface GPUBufferBinding {
interface GPUBufferBinding {
buffer: GPUBuffer;
offset?: number;
size?: number;
@ -568,15 +568,15 @@ declare class GPUPipelineLayout implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUPipelineLayoutDescriptor extends GPUObjectDescriptorBase {
interface GPUPipelineLayoutDescriptor extends GPUObjectDescriptorBase {
bindGroupLayouts: GPUBindGroupLayout[];
}
/** @category GPU */
declare type GPUCompilationMessageType = "error" | "warning" | "info";
type GPUCompilationMessageType = "error" | "warning" | "info";
/** @category GPU */
declare interface GPUCompilationMessage {
interface GPUCompilationMessage {
readonly message: string;
readonly type: GPUCompilationMessageType;
readonly lineNum: number;
@ -584,7 +584,7 @@ declare interface GPUCompilationMessage {
}
/** @category GPU */
declare interface GPUCompilationInfo {
interface GPUCompilationInfo {
readonly messages: ReadonlyArray<GPUCompilationMessage>;
}
@ -596,12 +596,12 @@ declare class GPUPipelineError extends DOMException {
}
/** @category GPU */
declare interface GPUPipelineErrorInit {
interface GPUPipelineErrorInit {
reason: GPUPipelineErrorReason;
}
/** @category GPU */
declare type GPUPipelineErrorReason = "validation" | "internal";
type GPUPipelineErrorReason = "validation" | "internal";
/** @category GPU */
declare class GPUShaderModule implements GPUObjectBase {
@ -609,26 +609,26 @@ declare class GPUShaderModule implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUShaderModuleDescriptor extends GPUObjectDescriptorBase {
interface GPUShaderModuleDescriptor extends GPUObjectDescriptorBase {
code: string;
sourceMap?: any;
}
/** @category GPU */
declare type GPUAutoLayoutMode = "auto";
type GPUAutoLayoutMode = "auto";
/** @category GPU */
declare interface GPUPipelineDescriptorBase extends GPUObjectDescriptorBase {
interface GPUPipelineDescriptorBase extends GPUObjectDescriptorBase {
layout: GPUPipelineLayout | GPUAutoLayoutMode;
}
/** @category GPU */
declare interface GPUPipelineBase {
interface GPUPipelineBase {
getBindGroupLayout(index: number): GPUBindGroupLayout;
}
/** @category GPU */
declare interface GPUProgrammableStage {
interface GPUProgrammableStage {
module: GPUShaderModule;
entryPoint?: string;
constants?: Record<string, number>;
@ -642,8 +642,7 @@ declare class GPUComputePipeline implements GPUObjectBase, GPUPipelineBase {
}
/** @category GPU */
declare interface GPUComputePipelineDescriptor
extends GPUPipelineDescriptorBase {
interface GPUComputePipelineDescriptor extends GPUPipelineDescriptorBase {
compute: GPUProgrammableStage;
}
@ -655,8 +654,7 @@ declare class GPURenderPipeline implements GPUObjectBase, GPUPipelineBase {
}
/** @category GPU */
declare interface GPURenderPipelineDescriptor
extends GPUPipelineDescriptorBase {
interface GPURenderPipelineDescriptor extends GPUPipelineDescriptorBase {
vertex: GPUVertexState;
primitive?: GPUPrimitiveState;
depthStencil?: GPUDepthStencilState;
@ -665,7 +663,7 @@ declare interface GPURenderPipelineDescriptor
}
/** @category GPU */
declare interface GPUPrimitiveState {
interface GPUPrimitiveState {
topology?: GPUPrimitiveTopology;
stripIndexFormat?: GPUIndexFormat;
frontFace?: GPUFrontFace;
@ -674,7 +672,7 @@ declare interface GPUPrimitiveState {
}
/** @category GPU */
declare type GPUPrimitiveTopology =
type GPUPrimitiveTopology =
| "point-list"
| "line-list"
| "line-strip"
@ -682,25 +680,25 @@ declare type GPUPrimitiveTopology =
| "triangle-strip";
/** @category GPU */
declare type GPUFrontFace = "ccw" | "cw";
type GPUFrontFace = "ccw" | "cw";
/** @category GPU */
declare type GPUCullMode = "none" | "front" | "back";
type GPUCullMode = "none" | "front" | "back";
/** @category GPU */
declare interface GPUMultisampleState {
interface GPUMultisampleState {
count?: number;
mask?: number;
alphaToCoverageEnabled?: boolean;
}
/** @category GPU */
declare interface GPUFragmentState extends GPUProgrammableStage {
interface GPUFragmentState extends GPUProgrammableStage {
targets: (GPUColorTargetState | null)[];
}
/** @category GPU */
declare interface GPUColorTargetState {
interface GPUColorTargetState {
format: GPUTextureFormat;
blend?: GPUBlendState;
@ -708,13 +706,13 @@ declare interface GPUColorTargetState {
}
/** @category GPU */
declare interface GPUBlendState {
interface GPUBlendState {
color: GPUBlendComponent;
alpha: GPUBlendComponent;
}
/** @category GPU */
declare type GPUColorWriteFlags = number;
type GPUColorWriteFlags = number;
/** @category GPU */
declare class GPUColorWrite {
@ -726,14 +724,14 @@ declare class GPUColorWrite {
}
/** @category GPU */
declare interface GPUBlendComponent {
interface GPUBlendComponent {
operation?: GPUBlendOperation;
srcFactor?: GPUBlendFactor;
dstFactor?: GPUBlendFactor;
}
/** @category GPU */
declare type GPUBlendFactor =
type GPUBlendFactor =
| "zero"
| "one"
| "src"
@ -749,7 +747,7 @@ declare type GPUBlendFactor =
| "one-minus-constant";
/** @category GPU */
declare type GPUBlendOperation =
type GPUBlendOperation =
| "add"
| "subtract"
| "reverse-subtract"
@ -757,7 +755,7 @@ declare type GPUBlendOperation =
| "max";
/** @category GPU */
declare interface GPUDepthStencilState {
interface GPUDepthStencilState {
format: GPUTextureFormat;
depthWriteEnabled: boolean;
@ -775,7 +773,7 @@ declare interface GPUDepthStencilState {
}
/** @category GPU */
declare interface GPUStencilFaceState {
interface GPUStencilFaceState {
compare?: GPUCompareFunction;
failOp?: GPUStencilOperation;
depthFailOp?: GPUStencilOperation;
@ -783,7 +781,7 @@ declare interface GPUStencilFaceState {
}
/** @category GPU */
declare type GPUStencilOperation =
type GPUStencilOperation =
| "keep"
| "zero"
| "replace"
@ -794,10 +792,10 @@ declare type GPUStencilOperation =
| "decrement-wrap";
/** @category GPU */
declare type GPUIndexFormat = "uint16" | "uint32";
type GPUIndexFormat = "uint16" | "uint32";
/** @category GPU */
declare type GPUVertexFormat =
type GPUVertexFormat =
| "uint8x2"
| "uint8x4"
| "sint8x2"
@ -831,22 +829,22 @@ declare type GPUVertexFormat =
| "unorm10-10-10-2";
/** @category GPU */
declare type GPUVertexStepMode = "vertex" | "instance";
type GPUVertexStepMode = "vertex" | "instance";
/** @category GPU */
declare interface GPUVertexState extends GPUProgrammableStage {
interface GPUVertexState extends GPUProgrammableStage {
buffers?: (GPUVertexBufferLayout | null)[];
}
/** @category GPU */
declare interface GPUVertexBufferLayout {
interface GPUVertexBufferLayout {
arrayStride: number;
stepMode?: GPUVertexStepMode;
attributes: GPUVertexAttribute[];
}
/** @category GPU */
declare interface GPUVertexAttribute {
interface GPUVertexAttribute {
format: GPUVertexFormat;
offset: number;
@ -854,7 +852,7 @@ declare interface GPUVertexAttribute {
}
/** @category GPU */
declare interface GPUImageDataLayout {
interface GPUImageDataLayout {
offset?: number;
bytesPerRow?: number;
rowsPerImage?: number;
@ -866,7 +864,7 @@ declare class GPUCommandBuffer implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUCommandBufferDescriptor extends GPUObjectDescriptorBase {}
interface GPUCommandBufferDescriptor extends GPUObjectDescriptorBase {}
/** @category GPU */
declare class GPUCommandEncoder implements GPUObjectBase {
@ -927,15 +925,15 @@ declare class GPUCommandEncoder implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUCommandEncoderDescriptor extends GPUObjectDescriptorBase {}
interface GPUCommandEncoderDescriptor extends GPUObjectDescriptorBase {}
/** @category GPU */
declare interface GPUImageCopyBuffer extends GPUImageDataLayout {
interface GPUImageCopyBuffer extends GPUImageDataLayout {
buffer: GPUBuffer;
}
/** @category GPU */
declare interface GPUImageCopyTexture {
interface GPUImageCopyTexture {
texture: GPUTexture;
mipLevel?: number;
origin?: GPUOrigin3D;
@ -943,7 +941,7 @@ declare interface GPUImageCopyTexture {
}
/** @category GPU */
declare interface GPUProgrammablePassEncoder {
interface GPUProgrammablePassEncoder {
setBindGroup(
index: number,
bindGroup: GPUBindGroup,
@ -993,19 +991,19 @@ declare class GPUComputePassEncoder
}
/** @category GPU */
declare interface GPUComputePassTimestampWrites {
interface GPUComputePassTimestampWrites {
querySet: GPUQuerySet;
beginningOfPassWriteIndex?: number;
endOfPassWriteIndex?: number;
}
/** @category GPU */
declare interface GPUComputePassDescriptor extends GPUObjectDescriptorBase {
interface GPUComputePassDescriptor extends GPUObjectDescriptorBase {
timestampWrites?: GPUComputePassTimestampWrites;
}
/** @category GPU */
declare interface GPURenderEncoderBase {
interface GPURenderEncoderBase {
setPipeline(pipeline: GPURenderPipeline): undefined;
setIndexBuffer(
@ -1120,14 +1118,14 @@ declare class GPURenderPassEncoder
}
/** @category GPU */
declare interface GPURenderPassTimestampWrites {
interface GPURenderPassTimestampWrites {
querySet: GPUQuerySet;
beginningOfPassWriteIndex?: number;
endOfPassWriteIndex?: number;
}
/** @category GPU */
declare interface GPURenderPassDescriptor extends GPUObjectDescriptorBase {
interface GPURenderPassDescriptor extends GPUObjectDescriptorBase {
colorAttachments: (GPURenderPassColorAttachment | null)[];
depthStencilAttachment?: GPURenderPassDepthStencilAttachment;
occlusionQuerySet?: GPUQuerySet;
@ -1135,7 +1133,7 @@ declare interface GPURenderPassDescriptor extends GPUObjectDescriptorBase {
}
/** @category GPU */
declare interface GPURenderPassColorAttachment {
interface GPURenderPassColorAttachment {
view: GPUTextureView;
resolveTarget?: GPUTextureView;
@ -1145,7 +1143,7 @@ declare interface GPURenderPassColorAttachment {
}
/** @category GPU */
declare interface GPURenderPassDepthStencilAttachment {
interface GPURenderPassDepthStencilAttachment {
view: GPUTextureView;
depthClearValue?: number;
@ -1160,10 +1158,10 @@ declare interface GPURenderPassDepthStencilAttachment {
}
/** @category GPU */
declare type GPULoadOp = "load" | "clear";
type GPULoadOp = "load" | "clear";
/** @category GPU */
declare type GPUStoreOp = "store" | "discard";
type GPUStoreOp = "store" | "discard";
/** @category GPU */
declare class GPURenderBundle implements GPUObjectBase {
@ -1171,7 +1169,7 @@ declare class GPURenderBundle implements GPUObjectBase {
}
/** @category GPU */
declare interface GPURenderBundleDescriptor extends GPUObjectDescriptorBase {}
interface GPURenderBundleDescriptor extends GPUObjectDescriptorBase {}
/** @category GPU */
declare class GPURenderBundleEncoder
@ -1228,14 +1226,14 @@ declare class GPURenderBundleEncoder
}
/** @category GPU */
declare interface GPURenderPassLayout extends GPUObjectDescriptorBase {
interface GPURenderPassLayout extends GPUObjectDescriptorBase {
colorFormats: (GPUTextureFormat | null)[];
depthStencilFormat?: GPUTextureFormat;
sampleCount?: number;
}
/** @category GPU */
declare interface GPURenderBundleEncoderDescriptor extends GPURenderPassLayout {
interface GPURenderBundleEncoderDescriptor extends GPURenderPassLayout {
depthReadOnly?: boolean;
stencilReadOnly?: boolean;
}
@ -1275,19 +1273,19 @@ declare class GPUQuerySet implements GPUObjectBase {
}
/** @category GPU */
declare interface GPUQuerySetDescriptor extends GPUObjectDescriptorBase {
interface GPUQuerySetDescriptor extends GPUObjectDescriptorBase {
type: GPUQueryType;
count: number;
}
/** @category GPU */
declare type GPUQueryType = "occlusion" | "timestamp";
type GPUQueryType = "occlusion" | "timestamp";
/** @category GPU */
declare type GPUDeviceLostReason = "destroyed";
type GPUDeviceLostReason = "destroyed";
/** @category GPU */
declare interface GPUDeviceLostInfo {
interface GPUDeviceLostInfo {
readonly reason: GPUDeviceLostReason;
readonly message: string;
}
@ -1313,7 +1311,7 @@ declare class GPUInternalError extends GPUError {
}
/** @category GPU */
declare type GPUErrorFilter = "out-of-memory" | "validation" | "internal";
type GPUErrorFilter = "out-of-memory" | "validation" | "internal";
/** @category GPU */
declare class GPUUncapturedErrorEvent extends Event {
@ -1326,12 +1324,12 @@ declare class GPUUncapturedErrorEvent extends Event {
}
/** @category GPU */
declare interface GPUUncapturedErrorEventInit extends EventInit {
interface GPUUncapturedErrorEventInit extends EventInit {
error: GPUError;
}
/** @category GPU */
declare interface GPUColorDict {
interface GPUColorDict {
r: number;
g: number;
b: number;
@ -1339,44 +1337,42 @@ declare interface GPUColorDict {
}
/** @category GPU */
declare type GPUColor = number[] | GPUColorDict;
type GPUColor = number[] | GPUColorDict;
/** @category GPU */
declare interface GPUOrigin3DDict {
interface GPUOrigin3DDict {
x?: number;
y?: number;
z?: number;
}
/** @category GPU */
declare type GPUOrigin3D = number[] | GPUOrigin3DDict;
type GPUOrigin3D = number[] | GPUOrigin3DDict;
/** @category GPU */
declare interface GPUExtent3DDict {
interface GPUExtent3DDict {
width: number;
height?: number;
depthOrArrayLayers?: number;
}
/** @category GPU */
declare type GPUExtent3D = number[] | GPUExtent3DDict;
type GPUExtent3D = number[] | GPUExtent3DDict;
/** @category GPU */
declare type GPUCanvasAlphaMode = "opaque" | "premultiplied";
type GPUCanvasAlphaMode = "opaque" | "premultiplied";
/** @category GPU */
declare interface GPUCanvasConfiguration {
interface GPUCanvasConfiguration {
device: GPUDevice;
format: GPUTextureFormat;
usage?: GPUTextureUsageFlags;
viewFormats?: GPUTextureFormat[];
colorSpace?: "srgb" | "display-p3";
alphaMode?: GPUCanvasAlphaMode;
width: number;
height: number;
}
/** @category GPU */
declare interface GPUCanvasContext {
interface GPUCanvasContext {
configure(configuration: GPUCanvasConfiguration): undefined;
unconfigure(): undefined;
getCurrentTexture(): GPUTexture;
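Note that this declaration of GPUCanvasConfiguration, unlike the browser WebGPU spec, carries the surface size itself, so width and height are required. A type-level sketch against these declarations (how a GPUCanvasContext is obtained in Deno is environment-specific and left abstract here):

declare const context: GPUCanvasContext;
declare const device: GPUDevice;

const config: GPUCanvasConfiguration = {
  device,
  format: "bgra8unorm",
  alphaMode: "opaque",
  // width/height are required by this declaration, unlike the browser
  // spec where the canvas element supplies the dimensions.
  width: 800,
  height: 600,
};
context.configure(config);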

cli/util/extract.rs (new file, 1592 lines): diff suppressed because it is too large.

View file

@ -20,7 +20,6 @@ use deno_core::error::AnyError;
use deno_core::unsync::spawn_blocking;
use deno_core::ModuleSpecifier;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::PathClean;
use crate::util::path::get_atomic_file_path;
use crate::util::progress_bar::ProgressBar;
@ -37,10 +36,98 @@ pub fn atomic_write_file_with_retries<T: AsRef<[u8]>>(
file_path: &Path,
data: T,
mode: u32,
) -> std::io::Result<()> {
struct RealAtomicWriteFileFs {
mode: u32,
}
impl AtomicWriteFileFs for RealAtomicWriteFileFs {
fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> {
write_file(path, bytes, self.mode)
}
fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> {
std::fs::rename(from, to)
}
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
std::fs::remove_file(path)
}
fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> {
std::fs::create_dir_all(dir_path)
}
fn path_exists(&self, path: &Path) -> bool {
path.exists()
}
}
atomic_write_file_with_retries_and_fs(
&RealAtomicWriteFileFs { mode },
file_path,
data.as_ref(),
)
}
pub trait AtomicWriteFileFs {
fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()>;
fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()>;
fn remove_file(&self, path: &Path) -> std::io::Result<()>;
fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()>;
fn path_exists(&self, path: &Path) -> bool;
}
pub struct AtomicWriteFileFsAdapter<'a> {
pub fs: &'a dyn FileSystem,
pub write_mode: u32,
}
impl<'a> AtomicWriteFileFs for AtomicWriteFileFsAdapter<'a> {
fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> {
self
.fs
.write_file_sync(
path,
deno_runtime::deno_fs::OpenOptions::write(
true,
false,
false,
Some(self.write_mode),
),
None,
bytes,
)
.map_err(|e| e.into_io_error())
}
fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> {
self.fs.rename_sync(from, to).map_err(|e| e.into_io_error())
}
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
self
.fs
.remove_sync(path, false)
.map_err(|e| e.into_io_error())
}
fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> {
self
.fs
.mkdir_sync(dir_path, /* recursive */ true, None)
.map_err(|e| e.into_io_error())
}
fn path_exists(&self, path: &Path) -> bool {
self.fs.exists_sync(path)
}
}
pub fn atomic_write_file_with_retries_and_fs<T: AsRef<[u8]>>(
fs: &impl AtomicWriteFileFs,
file_path: &Path,
data: T,
) -> std::io::Result<()> {
let mut count = 0;
loop {
match atomic_write_file(file_path, data.as_ref(), mode) {
match atomic_write_file(fs, file_path, data.as_ref()) {
Ok(()) => return Ok(()),
Err(err) => {
if count >= 5 {
@ -61,63 +148,54 @@ pub fn atomic_write_file_with_retries<T: AsRef<[u8]>>(
///
/// This also handles creating the directory if a NotFound error
/// occurs.
fn atomic_write_file<T: AsRef<[u8]>>(
fn atomic_write_file(
fs: &impl AtomicWriteFileFs,
file_path: &Path,
data: T,
mode: u32,
data: &[u8],
) -> std::io::Result<()> {
fn atomic_write_file_raw(
fs: &impl AtomicWriteFileFs,
temp_file_path: &Path,
file_path: &Path,
data: &[u8],
mode: u32,
) -> std::io::Result<()> {
write_file(temp_file_path, data, mode)?;
std::fs::rename(temp_file_path, file_path).map_err(|err| {
fs.write_file(temp_file_path, data)?;
fs.rename_file(temp_file_path, file_path).map_err(|err| {
// clean up the created temp file on error
let _ = std::fs::remove_file(temp_file_path);
let _ = fs.remove_file(temp_file_path);
err
})
}
fn inner(file_path: &Path, data: &[u8], mode: u32) -> std::io::Result<()> {
let temp_file_path = get_atomic_file_path(file_path);
let temp_file_path = get_atomic_file_path(file_path);
if let Err(write_err) =
atomic_write_file_raw(&temp_file_path, file_path, data, mode)
{
if write_err.kind() == ErrorKind::NotFound {
let parent_dir_path = file_path.parent().unwrap();
match std::fs::create_dir_all(parent_dir_path) {
Ok(()) => {
return atomic_write_file_raw(
&temp_file_path,
file_path,
data,
mode,
)
if let Err(write_err) =
atomic_write_file_raw(fs, &temp_file_path, file_path, data)
{
if write_err.kind() == ErrorKind::NotFound {
let parent_dir_path = file_path.parent().unwrap();
match fs.create_dir_all(parent_dir_path) {
Ok(()) => {
return atomic_write_file_raw(fs, &temp_file_path, file_path, data)
.map_err(|err| add_file_context_to_err(file_path, err));
}
Err(create_err) => {
if !parent_dir_path.exists() {
return Err(Error::new(
create_err.kind(),
format!(
"{:#} (for '{}')\nCheck the permission of the directory.",
create_err,
parent_dir_path.display()
),
));
}
}
Err(create_err) => {
if !fs.path_exists(parent_dir_path) {
return Err(Error::new(
create_err.kind(),
format!(
"{:#} (for '{}')\nCheck the permission of the directory.",
create_err,
parent_dir_path.display()
),
));
}
}
}
return Err(add_file_context_to_err(file_path, write_err));
}
Ok(())
return Err(add_file_context_to_err(file_path, write_err));
}
inner(file_path, data.as_ref(), mode)
Ok(())
}
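The new AtomicWriteFileFs trait abstracts the five filesystem operations the retrying atomic-write path needs, so the same logic can run against std::fs or any virtual FileSystem via the adapter. For the shape of the underlying pattern, a minimal TypeScript sketch (illustrative only; atomicWriteFile is not a Deno API): write to a temp path, rename over the target, create the parent directory on NotFound, and clean up the temp file if the rename fails.

import { dirname } from "jsr:@std/path";

async function atomicWriteFile(path: string, data: Uint8Array): Promise<void> {
  const tmp = `${path}.tmp`;
  const writeAndRename = async () => {
    await Deno.writeFile(tmp, data);
    try {
      await Deno.rename(tmp, path);
    } catch (err) {
      await Deno.remove(tmp).catch(() => {}); // clean up the temp file on error
      throw err;
    }
  };
  try {
    await writeAndRename();
  } catch (err) {
    if (!(err instanceof Deno.errors.NotFound)) throw err;
    // Parent directory was missing: create it and retry once.
    await Deno.mkdir(dirname(path), { recursive: true });
    await writeAndRename();
  }
}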
/// Creates a std::fs::File handling if the parent does not exist.
@ -211,48 +289,18 @@ pub fn canonicalize_path(path: &Path) -> Result<PathBuf, Error> {
pub fn canonicalize_path_maybe_not_exists(
path: &Path,
) -> Result<PathBuf, Error> {
canonicalize_path_maybe_not_exists_with_custom_fn(path, canonicalize_path)
deno_path_util::canonicalize_path_maybe_not_exists(path, &canonicalize_path)
}
pub fn canonicalize_path_maybe_not_exists_with_fs(
path: &Path,
fs: &dyn FileSystem,
) -> Result<PathBuf, Error> {
canonicalize_path_maybe_not_exists_with_custom_fn(path, |path| {
deno_path_util::canonicalize_path_maybe_not_exists(path, &|path| {
fs.realpath_sync(path).map_err(|err| err.into_io_error())
})
}
fn canonicalize_path_maybe_not_exists_with_custom_fn(
path: &Path,
canonicalize: impl Fn(&Path) -> Result<PathBuf, Error>,
) -> Result<PathBuf, Error> {
let path = path.to_path_buf().clean();
let mut path = path.as_path();
let mut names_stack = Vec::new();
loop {
match canonicalize(path) {
Ok(mut canonicalized_path) => {
for name in names_stack.into_iter().rev() {
canonicalized_path = canonicalized_path.join(name);
}
return Ok(canonicalized_path);
}
Err(err) if err.kind() == ErrorKind::NotFound => {
names_stack.push(match path.file_name() {
Some(name) => name.to_owned(),
None => return Err(err),
});
path = match path.parent() {
Some(parent) => parent,
None => return Err(err),
};
}
Err(err) => return Err(err),
}
}
}
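The hand-rolled walk is replaced by deno_path_util::canonicalize_path_maybe_not_exists, which implements the same idea: canonicalize the deepest ancestor that exists, then re-append the missing components. A minimal TypeScript sketch of that algorithm (illustrative, not a Deno API):

import { basename, dirname, join } from "jsr:@std/path";

async function canonicalizeMaybeNotExists(path: string): Promise<string> {
  const missing: string[] = [];
  let p = path;
  while (true) {
    try {
      // Canonicalize the deepest ancestor that exists...
      let real = await Deno.realPath(p);
      // ...then re-append the components that did not exist yet.
      for (let i = missing.length - 1; i >= 0; i--) {
        real = join(real, missing[i]);
      }
      return real;
    } catch (err) {
      if (!(err instanceof Deno.errors.NotFound)) throw err;
      const parent = dirname(p);
      if (parent === p) throw err; // reached the filesystem root
      missing.push(basename(p));
      p = parent;
    }
  }
}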
/// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`.
/// Specifiers that start with http and https are left intact.
/// Note: This ignores all .git and node_modules folders.
@ -708,8 +756,8 @@ pub fn specifier_from_file_path(
mod tests {
use super::*;
use deno_core::futures;
use deno_core::normalize_path;
use deno_core::parking_lot::Mutex;
use deno_path_util::normalize_path;
use pretty_assertions::assert_eq;
use test_util::PathRef;
use test_util::TempDir;

View file

@ -7,6 +7,7 @@ pub mod console;
pub mod diff;
pub mod display;
pub mod draw_thread;
pub mod extract;
pub mod file_watcher;
pub mod fs;
pub mod logger;

View file

@ -165,48 +165,6 @@ pub fn relative_path(from: &Path, to: &Path) -> Option<PathBuf> {
pathdiff::diff_paths(to, from)
}
/// Gets if the provided character is not supported on all
/// kinds of file systems.
pub fn is_banned_path_char(c: char) -> bool {
matches!(c, '<' | '>' | ':' | '"' | '|' | '?' | '*')
}
/// Gets a safe local directory name for the provided url.
///
/// For example:
/// https://deno.land:8080/path -> deno.land_8080/path
pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf {
fn sanitize_segment(text: &str) -> String {
text
.chars()
.map(|c| if is_banned_segment_char(c) { '_' } else { c })
.collect()
}
fn is_banned_segment_char(c: char) -> bool {
matches!(c, '/' | '\\') || is_banned_path_char(c)
}
let mut result = String::new();
if let Some(domain) = root.domain() {
result.push_str(&sanitize_segment(domain));
}
if let Some(port) = root.port() {
if !result.is_empty() {
result.push('_');
}
result.push_str(&port.to_string());
}
let mut result = PathBuf::from(result);
if let Some(segments) = root.path_segments() {
for segment in segments.filter(|s| !s.is_empty()) {
result = result.join(sanitize_segment(segment));
}
}
result
}
/// Slightly different behaviour than the default matching
/// where an exact path needs to be matched to be opted-in
/// rather than just a partial directory match.

View file

@ -29,12 +29,14 @@ use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::ops::process::NpmProcessStateProviderRc;
use deno_runtime::ops::worker_host::CreateWebWorkerCb;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::web_worker::WebWorker;
use deno_runtime::web_worker::WebWorkerOptions;
use deno_runtime::web_worker::WebWorkerServiceOptions;
use deno_runtime::worker::MainWorker;
use deno_runtime::worker::WorkerOptions;
use deno_runtime::worker::WorkerServiceOptions;
use deno_runtime::BootstrapOptions;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
@ -62,13 +64,12 @@ pub trait ModuleLoaderFactory: Send + Sync {
fn create_for_main(
&self,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter;
fn create_for_worker(
&self,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
parent_permissions: PermissionsContainer,
permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter;
}
@ -134,8 +135,8 @@ struct SharedWorkerState {
module_loader_factory: Box<dyn ModuleLoaderFactory>,
node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
permission_desc_parser: Arc<RuntimePermissionDescriptorParser>,
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
root_permissions: PermissionsContainer,
shared_array_buffer_store: SharedArrayBufferStore,
storage_key_resolver: StorageKeyResolver,
options: CliMainWorkerOptions,
@ -147,13 +148,13 @@ impl SharedWorkerState {
NodeExtInitServices {
node_require_resolver: self.npm_resolver.clone().into_require_resolver(),
node_resolver: self.node_resolver.clone(),
npm_process_state_provider: self
.npm_resolver
.clone()
.into_process_state_provider(),
npm_resolver: self.npm_resolver.clone().into_npm_resolver(),
}
}
pub fn npm_process_state_provider(&self) -> NpmProcessStateProviderRc {
self.npm_resolver.clone().into_process_state_provider()
}
}
pub struct CliMainWorker {
@ -430,8 +431,8 @@ impl CliMainWorkerFactory {
module_loader_factory: Box<dyn ModuleLoaderFactory>,
node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
permission_parser: Arc<RuntimePermissionDescriptorParser>,
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
root_permissions: PermissionsContainer,
storage_key_resolver: StorageKeyResolver,
subcommand: DenoSubcommand,
options: CliMainWorkerOptions,
@ -450,8 +451,8 @@ impl CliMainWorkerFactory {
module_loader_factory,
node_resolver,
npm_resolver,
permission_desc_parser: permission_parser,
root_cert_store_provider,
root_permissions,
shared_array_buffer_store: Default::default(),
storage_key_resolver,
options,
@ -464,13 +465,12 @@ impl CliMainWorkerFactory {
&self,
mode: WorkerExecutionMode,
main_module: ModuleSpecifier,
permissions: PermissionsContainer,
) -> Result<CliMainWorker, AnyError> {
self
.create_custom_worker(
mode,
main_module,
permissions,
self.shared.root_permissions.clone(),
vec![],
Default::default(),
)
@ -526,16 +526,13 @@ impl CliMainWorkerFactory {
let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_));
(node_resolution.into_url(), is_main_cjs)
} else {
(main_module, false)
let is_cjs = main_module.path().ends_with(".cjs");
(main_module, is_cjs)
};
let ModuleLoaderAndSourceMapGetter { module_loader } =
shared.module_loader_factory.create_for_main(
PermissionsContainer::allow_all(
self.shared.permission_desc_parser.clone(),
),
permissions.clone(),
);
let ModuleLoaderAndSourceMapGetter { module_loader } = shared
.module_loader_factory
.create_for_main(permissions.clone());
let maybe_inspector_server = shared.maybe_inspector_server.clone();
let create_web_worker_cb =
@ -571,6 +568,22 @@ impl CliMainWorkerFactory {
}
}
let services = WorkerServiceOptions {
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
module_loader,
fs: shared.fs.clone(),
node_services: Some(shared.create_node_init_services()),
npm_process_state_provider: Some(shared.npm_process_state_provider()),
blob_store: shared.blob_store.clone(),
broadcast_channel: shared.broadcast_channel.clone(),
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
compiled_wasm_module_store: Some(
shared.compiled_wasm_module_store.clone(),
),
feature_checker,
permissions,
v8_code_cache: shared.code_cache.clone(),
};
let options = WorkerOptions {
bootstrap: BootstrapOptions {
deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
@ -605,7 +618,6 @@ impl CliMainWorkerFactory {
.options
.unsafely_ignore_certificate_errors
.clone(),
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
seed: shared.options.seed,
format_js_error_fn: Some(Arc::new(format_js_error)),
create_web_worker_cb,
@ -613,28 +625,16 @@ impl CliMainWorkerFactory {
should_break_on_first_statement: shared.options.inspect_brk,
should_wait_for_inspector_session: shared.options.inspect_wait,
strace_ops: shared.options.strace_ops.clone(),
module_loader,
fs: shared.fs.clone(),
node_services: Some(shared.create_node_init_services()),
get_error_class_fn: Some(&errors::get_error_class_name),
cache_storage_dir,
origin_storage_dir,
blob_store: shared.blob_store.clone(),
broadcast_channel: shared.broadcast_channel.clone(),
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
compiled_wasm_module_store: Some(
shared.compiled_wasm_module_store.clone(),
),
stdio,
feature_checker,
permission_desc_parser: shared.permission_desc_parser.clone(),
skip_op_registration: shared.options.skip_op_registration,
v8_code_cache: shared.code_cache.clone(),
};
let mut worker = MainWorker::bootstrap_from_options(
main_module.clone(),
permissions,
services,
options,
);
@ -766,7 +766,26 @@ fn create_web_worker_callback(
}
}
let services = WebWorkerServiceOptions {
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
module_loader,
fs: shared.fs.clone(),
node_services: Some(shared.create_node_init_services()),
blob_store: shared.blob_store.clone(),
broadcast_channel: shared.broadcast_channel.clone(),
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
compiled_wasm_module_store: Some(
shared.compiled_wasm_module_store.clone(),
),
maybe_inspector_server,
feature_checker,
npm_process_state_provider: Some(shared.npm_process_state_provider()),
permissions: args.permissions,
};
let options = WebWorkerOptions {
name: args.name,
main_module: args.main_module.clone(),
worker_id: args.worker_id,
bootstrap: BootstrapOptions {
deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
args: shared.options.argv.clone(),
@ -777,7 +796,7 @@ fn create_web_worker_callback(
enable_op_summary_metrics: shared.options.enable_op_summary_metrics,
enable_testing_features: shared.options.enable_testing_features,
locale: deno_core::v8::icu::get_language_tag(),
location: Some(args.main_module.clone()),
location: Some(args.main_module),
no_color: !colors::use_color(),
color_level: colors::get_color_level(),
is_stdout_tty: deno_terminal::is_stdout_tty(),
@ -799,38 +818,19 @@ fn create_web_worker_callback(
.options
.unsafely_ignore_certificate_errors
.clone(),
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
seed: shared.options.seed,
create_web_worker_cb,
format_js_error_fn: Some(Arc::new(format_js_error)),
module_loader,
fs: shared.fs.clone(),
node_services: Some(shared.create_node_init_services()),
worker_type: args.worker_type,
maybe_inspector_server,
get_error_class_fn: Some(&errors::get_error_class_name),
blob_store: shared.blob_store.clone(),
broadcast_channel: shared.broadcast_channel.clone(),
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
compiled_wasm_module_store: Some(
shared.compiled_wasm_module_store.clone(),
),
stdio: stdio.clone(),
cache_storage_dir,
feature_checker,
permission_desc_parser: shared.permission_desc_parser.clone(),
strace_ops: shared.options.strace_ops.clone(),
close_on_idle: args.close_on_idle,
maybe_worker_metadata: args.maybe_worker_metadata,
};
WebWorker::bootstrap_from_options(
args.name,
args.permissions,
args.main_module,
args.worker_id,
options,
)
WebWorker::bootstrap_from_options(services, options)
})
}
@ -840,23 +840,43 @@ fn create_web_worker_callback(
mod tests {
use super::*;
use deno_core::resolve_path;
use deno_core::FsModuleLoader;
use deno_fs::RealFs;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
fn create_test_worker() -> MainWorker {
let main_module =
resolve_path("./hello.js", &std::env::current_dir().unwrap()).unwrap();
let permissions = PermissionsContainer::new(
Arc::new(RuntimePermissionDescriptorParser::new(Arc::new(RealFs))),
Permissions::none_without_prompt(),
);
let fs = Arc::new(RealFs);
let permission_desc_parser =
Arc::new(RuntimePermissionDescriptorParser::new(fs.clone()));
let options = WorkerOptions {
startup_snapshot: crate::js::deno_isolate_init(),
..Default::default()
};
MainWorker::bootstrap_from_options(main_module, permissions, options)
MainWorker::bootstrap_from_options(
main_module,
WorkerServiceOptions {
module_loader: Rc::new(FsModuleLoader),
permissions: PermissionsContainer::new(
permission_desc_parser,
Permissions::none_without_prompt(),
),
blob_store: Default::default(),
broadcast_channel: Default::default(),
feature_checker: Default::default(),
node_services: Default::default(),
npm_process_state_provider: Default::default(),
root_cert_store_provider: Default::default(),
shared_array_buffer_store: Default::default(),
compiled_wasm_module_store: Default::default(),
v8_code_cache: Default::default(),
fs,
},
options,
)
}
#[tokio::test]

View file

@ -9,7 +9,7 @@
* @category Messaging
* @experimental
*/
declare interface BroadcastChannelEventMap {
interface BroadcastChannelEventMap {
"message": MessageEvent;
"messageerror": MessageEvent;
}
@ -18,7 +18,7 @@ declare interface BroadcastChannelEventMap {
* @category Messaging
* @experimental
*/
declare interface BroadcastChannel extends EventTarget {
interface BroadcastChannel extends EventTarget {
/**
* Returns the channel name (as passed to the constructor).
*/

View file

@ -9,7 +9,7 @@
declare var caches: CacheStorage;
/** @category Cache */
declare interface CacheStorage {
interface CacheStorage {
/** Open a cache storage for the provided name. */
open(cacheName: string): Promise<Cache>;
/** Check if cache already exists for the provided name. */
@ -19,7 +19,7 @@ declare interface CacheStorage {
}
/** @category Cache */
declare interface Cache {
interface Cache {
/**
* Put the provided request/response into the cache.
*
@ -65,7 +65,7 @@ declare var CacheStorage: {
};
/** @category Cache */
declare interface CacheQueryOptions {
interface CacheQueryOptions {
ignoreMethod?: boolean;
ignoreSearch?: boolean;
ignoreVary?: boolean;

View file

@ -12,14 +12,14 @@
*
* @category Canvas
*/
declare type ColorSpaceConversion = "default" | "none";
type ColorSpaceConversion = "default" | "none";
/**
* Specifies how the bitmap image should be oriented.
*
* @category Canvas
*/
declare type ImageOrientation = "flipY" | "from-image" | "none";
type ImageOrientation = "flipY" | "from-image" | "none";
/**
* Specifies whether the bitmap's color channels should be premultiplied by
@ -27,7 +27,7 @@ declare type ImageOrientation = "flipY" | "from-image" | "none";
*
* @category Canvas
*/
declare type PremultiplyAlpha = "default" | "none" | "premultiply";
type PremultiplyAlpha = "default" | "none" | "premultiply";
/**
* Specifies the algorithm to be used for resizing the input to match the
@ -35,20 +35,20 @@ declare type PremultiplyAlpha = "default" | "none" | "premultiply";
*
* @category Canvas
*/
declare type ResizeQuality = "high" | "low" | "medium" | "pixelated";
type ResizeQuality = "high" | "low" | "medium" | "pixelated";
/**
* The `ImageBitmapSource` type represents an image data source that can be
* used to create an `ImageBitmap`.
*
* @category Canvas */
declare type ImageBitmapSource = Blob | ImageData;
type ImageBitmapSource = Blob | ImageData;
/**
* The options of {@linkcode createImageBitmap}.
*
* @category Canvas */
declare interface ImageBitmapOptions {
interface ImageBitmapOptions {
/**
* Specifies whether the image should be decoded using color space
* conversion. Either none or default (default). The value default
@ -116,7 +116,7 @@ declare function createImageBitmap(
*
* @category Canvas
*/
declare interface ImageBitmap {
interface ImageBitmap {
/**
* The height of the bitmap.
*/

View file

@ -33,6 +33,7 @@ import {
op_get_non_index_property_names,
op_preview_entries,
} from "ext:core/ops";
import * as ops from "ext:core/ops";
const {
Array,
ArrayBufferPrototypeGetByteLength,
@ -83,6 +84,7 @@ const {
NumberIsInteger,
NumberIsNaN,
NumberParseInt,
NumberPrototypeToFixed,
NumberPrototypeToString,
NumberPrototypeValueOf,
ObjectAssign,
@ -151,11 +153,23 @@ const {
SymbolPrototypeToString,
SymbolPrototypeValueOf,
SymbolToStringTag,
TypedArrayPrototypeGetBuffer,
TypedArrayPrototypeGetByteLength,
TypedArrayPrototypeGetLength,
Uint8Array,
Uint32Array,
} = primordials;
let currentTime = DateNow;
if (ops.op_now) {
const hrU8 = new Uint8Array(8);
const hr = new Uint32Array(TypedArrayPrototypeGetBuffer(hrU8));
currentTime = function opNow() {
ops.op_now(hrU8);
return (hr[0] * 1000 + hr[1] / 1e6);
};
}
let noColorStdout = () => false;
let noColorStderr = () => false;
@ -3331,7 +3345,7 @@ class Console {
return;
}
MapPrototypeSet(timerMap, label, DateNow());
MapPrototypeSet(timerMap, label, currentTime());
};
timeLog = (label = "default", ...args) => {
@ -3343,7 +3357,16 @@ class Console {
}
const startTime = MapPrototypeGet(timerMap, label);
const duration = DateNow() - startTime;
let duration = currentTime() - startTime;
if (duration < 1) {
duration = NumberPrototypeToFixed(duration, 3);
} else if (duration < 10) {
duration = NumberPrototypeToFixed(duration, 2);
} else if (duration < 100) {
duration = NumberPrototypeToFixed(duration, 1);
} else {
duration = NumberPrototypeToFixed(duration, 0);
}
this.info(`${label}: ${duration}ms`, ...new SafeArrayIterator(args));
};
@ -3358,7 +3381,16 @@ class Console {
const startTime = MapPrototypeGet(timerMap, label);
MapPrototypeDelete(timerMap, label);
const duration = DateNow() - startTime;
let duration = currentTime() - startTime;
if (duration < 1) {
duration = NumberPrototypeToFixed(duration, 3);
} else if (duration < 10) {
duration = NumberPrototypeToFixed(duration, 2);
} else if (duration < 100) {
duration = NumberPrototypeToFixed(duration, 1);
} else {
duration = NumberPrototypeToFixed(duration, 0);
}
this.info(`${label}: ${duration}ms`);
};
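Together these hunks make console.time/timeLog/timeEnd use the high-resolution op_now clock when the op is available (the two u32s read back appear to decode whole seconds plus sub-second nanoseconds into fractional milliseconds) and scale the printed precision to the measured duration. Roughly:

console.time("sum");
let total = 0;
for (let i = 0; i < 1e6; i++) total += i;
console.timeEnd("sum");
// Example output: "sum: 3.47ms". Durations under 1ms print three decimals,
// under 10ms two, under 100ms one, and anything longer rounds to whole ms.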

View file

@ -6,7 +6,7 @@
/// <reference lib="esnext" />
/** @category I/O */
declare interface Console {
interface Console {
assert(condition?: boolean, ...data: any[]): void;
clear(): void;
count(label?: string): void;

View file

@ -939,7 +939,6 @@ class SubtleCrypto {
* @param {KeyUsages[]} keyUsages
* @returns {Promise<any>}
*/
// deno-lint-ignore require-await
async importKey(format, keyData, algorithm, extractable, keyUsages) {
webidl.assertBranded(this, SubtleCryptoPrototype);
const prefix = "Failed to execute 'importKey' on 'SubtleCrypto'";
@ -977,86 +976,24 @@ class SubtleCrypto {
const normalizedAlgorithm = normalizeAlgorithm(algorithm, "importKey");
const algorithmName = normalizedAlgorithm.name;
// 8.
const result = await importKeyInner(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
);
switch (algorithmName) {
case "HMAC": {
return importKeyHMAC(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
);
}
case "ECDH":
case "ECDSA": {
return importKeyEC(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
);
}
case "RSASSA-PKCS1-v1_5":
case "RSA-PSS":
case "RSA-OAEP": {
return importKeyRSA(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
);
}
case "HKDF": {
return importKeyHKDF(format, keyData, extractable, keyUsages);
}
case "PBKDF2": {
return importKeyPBKDF2(format, keyData, extractable, keyUsages);
}
case "AES-CTR":
case "AES-CBC":
case "AES-GCM": {
return importKeyAES(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
["encrypt", "decrypt", "wrapKey", "unwrapKey"],
);
}
case "AES-KW": {
return importKeyAES(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
["wrapKey", "unwrapKey"],
);
}
case "X25519": {
return importKeyX25519(
format,
keyData,
extractable,
keyUsages,
);
}
case "Ed25519": {
return importKeyEd25519(
format,
keyData,
extractable,
keyUsages,
);
}
default:
throw new DOMException("Not implemented", "NotSupportedError");
// 9.
if (
ArrayPrototypeIncludes(["private", "secret"], result[_type]) &&
keyUsages.length == 0
) {
throw new SyntaxError("Invalid key usage");
}
return result;
}
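The per-algorithm switch moves into importKeyInner so that step 9 of the spec algorithm (rejecting private or secret keys imported with an empty usages list) runs uniformly after every import path. A minimal sketch of the observable change:

const raw = crypto.getRandomValues(new Uint8Array(16));
try {
  // A secret key with an empty keyUsages array now rejects with SyntaxError.
  await crypto.subtle.importKey("raw", raw, { name: "AES-GCM" }, true, []);
} catch (err) {
  console.assert(err instanceof SyntaxError); // "Invalid key usage"
}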
/**
@ -3352,6 +3289,96 @@ function importKeyEC(
}
}
// deno-lint-ignore require-await
async function importKeyInner(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
) {
const algorithmName = normalizedAlgorithm.name;
switch (algorithmName) {
case "HMAC": {
return importKeyHMAC(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
);
}
case "ECDH":
case "ECDSA": {
return importKeyEC(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
);
}
case "RSASSA-PKCS1-v1_5":
case "RSA-PSS":
case "RSA-OAEP": {
return importKeyRSA(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
);
}
case "HKDF": {
return importKeyHKDF(format, keyData, extractable, keyUsages);
}
case "PBKDF2": {
return importKeyPBKDF2(format, keyData, extractable, keyUsages);
}
case "AES-CTR":
case "AES-CBC":
case "AES-GCM": {
return importKeyAES(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
["encrypt", "decrypt", "wrapKey", "unwrapKey"],
);
}
case "AES-KW": {
return importKeyAES(
format,
normalizedAlgorithm,
keyData,
extractable,
keyUsages,
["wrapKey", "unwrapKey"],
);
}
case "X25519": {
return importKeyX25519(
format,
keyData,
extractable,
keyUsages,
);
}
case "Ed25519": {
return importKeyEd25519(
format,
keyData,
extractable,
keyUsages,
);
}
default:
throw new DOMException("Not implemented", "NotSupportedError");
}
}
const SUPPORTED_KEY_USAGES = {
"RSASSA-PKCS1-v1_5": {
public: ["verify"],

View file

@ -33,6 +33,7 @@ p521 = "0.13.3"
rand.workspace = true
ring = { workspace = true, features = ["std"] }
rsa.workspace = true
sec1.workspace = true
serde.workspace = true
serde_bytes.workspace = true
sha1.workspace = true

View file

@ -254,7 +254,9 @@ fn export_key_ec(
point.as_ref().to_vec()
}
EcNamedCurve::P521 => {
return Err(data_error("Unsupported named curve"))
let point = key_data.as_ec_public_key_p521()?;
point.as_ref().to_vec()
}
};
Ok(ExportKeyResult::Raw(subject_public_key.into()))
@ -272,7 +274,9 @@ fn export_key_ec(
point.as_ref().to_vec()
}
EcNamedCurve::P521 => {
return Err(data_error("Unsupported named curve"))
let point = key_data.as_ec_public_key_p521()?;
point.as_ref().to_vec()
}
};
@ -285,9 +289,10 @@ fn export_key_ec(
oid: elliptic_curve::ALGORITHM_OID,
parameters: Some((&p384::NistP384::OID).into()),
},
EcNamedCurve::P521 => {
return Err(data_error("Unsupported named curve"))
}
EcNamedCurve::P521 => AlgorithmIdentifierOwned {
oid: elliptic_curve::ALGORITHM_OID,
parameters: Some((&p521::NistP521::OID).into()),
},
};
let alg_id = match algorithm {
@ -351,7 +356,24 @@ fn export_key_ec(
))
}
}
EcNamedCurve::P521 => Err(data_error("Unsupported named curve")),
EcNamedCurve::P521 => {
let point = key_data.as_ec_public_key_p521()?;
let coords = point.coordinates();
if let p521::elliptic_curve::sec1::Coordinates::Uncompressed { x, y } =
coords
{
Ok(ExportKeyResult::JwkPublicEc {
x: bytes_to_b64(x),
y: bytes_to_b64(y),
})
} else {
Err(custom_error(
"DOMExceptionOperationError",
"failed to decode public key",
))
}
}
},
ExportKeyFormat::JwkPrivate => {
let private_key = key_data.as_ec_private_key()?;

View file

@ -7,14 +7,12 @@ use deno_core::JsBuffer;
use deno_core::ToJsBuffer;
use elliptic_curve::pkcs8::PrivateKeyInfo;
use p256::pkcs8::EncodePrivateKey;
use ring::signature::EcdsaKeyPair;
use rsa::pkcs1::UintRef;
use rsa::pkcs8::der::Encode;
use serde::Deserialize;
use serde::Serialize;
use spki::der::Decode;
use crate::key::CryptoNamedCurve;
use crate::shared::*;
#[derive(Deserialize)]
@ -45,7 +43,9 @@ pub enum KeyData {
y: String,
},
JwkPrivateEc {
#[allow(dead_code)]
x: String,
#[allow(dead_code)]
y: String,
d: String,
},
@ -543,9 +543,7 @@ fn import_key_ec_jwk(
raw_data: RustRawKeyData::Public(point_bytes.into()),
})
}
KeyData::JwkPrivateEc { d, x, y } => {
jwt_b64_int_or_err!(private_d, &d, "invalid JWK private key");
let point_bytes = import_key_ec_jwk_to_point(x, y, named_curve)?;
KeyData::JwkPrivateEc { d, .. } => {
let pkcs8_der = match named_curve {
EcNamedCurve::P256 => {
let d = decode_b64url_to_field_bytes::<p256::NistP256>(&d)?;
@ -562,27 +560,14 @@ fn import_key_ec_jwk(
.map_err(|_| data_error("invalid JWK private key"))?
}
EcNamedCurve::P521 => {
return Err(data_error("Unsupported named curve"))
let d = decode_b64url_to_field_bytes::<p521::NistP521>(&d)?;
let pk = p521::SecretKey::from_bytes(&d)?;
pk.to_pkcs8_der()
.map_err(|_| data_error("invalid JWK private key"))?
}
};
// Import using ring, to validate key
let key_alg = match named_curve {
EcNamedCurve::P256 => CryptoNamedCurve::P256.into(),
EcNamedCurve::P384 => CryptoNamedCurve::P256.into(),
EcNamedCurve::P521 => {
return Err(data_error("Unsupported named curve"))
}
};
let rng = ring::rand::SystemRandom::new();
let _key_pair = EcdsaKeyPair::from_private_key_and_public_key(
key_alg,
private_d.as_bytes(),
point_bytes.as_ref(),
&rng,
);
Ok(ImportKeyResult::Ec {
raw_data: RustRawKeyData::Private(pkcs8_der.as_bytes().to_vec().into()),
})
@ -649,24 +634,15 @@ fn import_key_ec(
})
}
KeyData::Pkcs8(data) => {
// 2-7
// Deserialize PKCS8 - validate structure, extracts named_curve
let named_curve_alg = match named_curve {
EcNamedCurve::P256 | EcNamedCurve::P384 => {
let pk = PrivateKeyInfo::from_der(data.as_ref())
.map_err(|_| data_error("expected valid PKCS#8 data"))?;
pk.algorithm
.parameters
.ok_or_else(|| data_error("malformed parameters"))?
.try_into()
.unwrap()
}
EcNamedCurve::P521 => {
return Err(data_error("Unsupported named curve"))
}
};
let pk = PrivateKeyInfo::from_der(data.as_ref())
.map_err(|_| data_error("expected valid PKCS#8 data"))?;
let named_curve_alg = pk
.algorithm
.parameters
.ok_or_else(|| data_error("malformed parameters"))?
.try_into()
.unwrap();
// 8-9.
let pk_named_curve = match named_curve_alg {
// id-secp256r1
ID_SECP256R1_OID => Some(EcNamedCurve::P256),
@ -677,27 +653,8 @@ fn import_key_ec(
_ => None,
};
// 10.
if let Some(pk_named_curve) = pk_named_curve {
let signing_alg = match pk_named_curve {
EcNamedCurve::P256 => CryptoNamedCurve::P256.into(),
EcNamedCurve::P384 => CryptoNamedCurve::P384.into(),
EcNamedCurve::P521 => {
return Err(data_error("Unsupported named curve"))
}
};
let rng = ring::rand::SystemRandom::new();
// deserialize pkcs8 using ring crate, to VALIDATE public key
let _private_key = EcdsaKeyPair::from_pkcs8(signing_alg, &data, &rng)
.map_err(|_| data_error("invalid key"))?;
// 11.
if named_curve != pk_named_curve {
return Err(data_error("curve mismatch"));
}
} else {
return Err(data_error("Unsupported named curve"));
if pk_named_curve != Some(named_curve) {
return Err(data_error("curve mismatch"));
}
Ok(ImportKeyResult::Ec {

View file

@ -9,23 +9,23 @@
declare var crypto: Crypto;
/** @category Crypto */
declare interface Algorithm {
interface Algorithm {
name: string;
}
/** @category Crypto */
declare interface KeyAlgorithm {
interface KeyAlgorithm {
name: string;
}
/** @category Crypto */
declare type AlgorithmIdentifier = string | Algorithm;
type AlgorithmIdentifier = string | Algorithm;
/** @category Crypto */
declare type HashAlgorithmIdentifier = AlgorithmIdentifier;
type HashAlgorithmIdentifier = AlgorithmIdentifier;
/** @category Crypto */
declare type KeyType = "private" | "public" | "secret";
type KeyType = "private" | "public" | "secret";
/** @category Crypto */
declare type KeyUsage =
type KeyUsage =
| "decrypt"
| "deriveBits"
| "deriveKey"
@ -35,19 +35,19 @@ declare type KeyUsage =
| "verify"
| "wrapKey";
/** @category Crypto */
declare type KeyFormat = "jwk" | "pkcs8" | "raw" | "spki";
type KeyFormat = "jwk" | "pkcs8" | "raw" | "spki";
/** @category Crypto */
declare type NamedCurve = string;
type NamedCurve = string;
/** @category Crypto */
declare interface RsaOtherPrimesInfo {
interface RsaOtherPrimesInfo {
d?: string;
r?: string;
t?: string;
}
/** @category Crypto */
declare interface JsonWebKey {
interface JsonWebKey {
alg?: string;
crv?: string;
d?: string;
@ -56,6 +56,7 @@ declare interface JsonWebKey {
e?: string;
ext?: boolean;
k?: string;
// deno-lint-ignore camelcase
key_ops?: string[];
kty?: string;
n?: string;
@ -69,129 +70,129 @@ declare interface JsonWebKey {
}
/** @category Crypto */
declare interface AesCbcParams extends Algorithm {
interface AesCbcParams extends Algorithm {
iv: BufferSource;
}
/** @category Crypto */
declare interface AesGcmParams extends Algorithm {
interface AesGcmParams extends Algorithm {
iv: BufferSource;
additionalData?: BufferSource;
tagLength?: number;
}
/** @category Crypto */
declare interface AesCtrParams extends Algorithm {
interface AesCtrParams extends Algorithm {
counter: BufferSource;
length: number;
}
/** @category Crypto */
declare interface HmacKeyGenParams extends Algorithm {
interface HmacKeyGenParams extends Algorithm {
hash: HashAlgorithmIdentifier;
length?: number;
}
/** @category Crypto */
declare interface EcKeyGenParams extends Algorithm {
interface EcKeyGenParams extends Algorithm {
namedCurve: NamedCurve;
}
/** @category Crypto */
declare interface EcKeyImportParams extends Algorithm {
interface EcKeyImportParams extends Algorithm {
namedCurve: NamedCurve;
}
/** @category Crypto */
declare interface EcdsaParams extends Algorithm {
interface EcdsaParams extends Algorithm {
hash: HashAlgorithmIdentifier;
}
/** @category Crypto */
declare interface RsaHashedImportParams extends Algorithm {
interface RsaHashedImportParams extends Algorithm {
hash: HashAlgorithmIdentifier;
}
/** @category Crypto */
declare interface RsaHashedKeyGenParams extends RsaKeyGenParams {
interface RsaHashedKeyGenParams extends RsaKeyGenParams {
hash: HashAlgorithmIdentifier;
}
/** @category Crypto */
declare interface RsaKeyGenParams extends Algorithm {
interface RsaKeyGenParams extends Algorithm {
modulusLength: number;
publicExponent: Uint8Array;
}
/** @category Crypto */
declare interface RsaPssParams extends Algorithm {
interface RsaPssParams extends Algorithm {
saltLength: number;
}
/** @category Crypto */
declare interface RsaOaepParams extends Algorithm {
interface RsaOaepParams extends Algorithm {
label?: Uint8Array;
}
/** @category Crypto */
declare interface HmacImportParams extends Algorithm {
interface HmacImportParams extends Algorithm {
hash: HashAlgorithmIdentifier;
length?: number;
}
/** @category Crypto */
declare interface EcKeyAlgorithm extends KeyAlgorithm {
interface EcKeyAlgorithm extends KeyAlgorithm {
namedCurve: NamedCurve;
}
/** @category Crypto */
declare interface HmacKeyAlgorithm extends KeyAlgorithm {
interface HmacKeyAlgorithm extends KeyAlgorithm {
hash: KeyAlgorithm;
length: number;
}
/** @category Crypto */
declare interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm {
interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm {
hash: KeyAlgorithm;
}
/** @category Crypto */
declare interface RsaKeyAlgorithm extends KeyAlgorithm {
interface RsaKeyAlgorithm extends KeyAlgorithm {
modulusLength: number;
publicExponent: Uint8Array;
}
/** @category Crypto */
declare interface HkdfParams extends Algorithm {
interface HkdfParams extends Algorithm {
hash: HashAlgorithmIdentifier;
info: BufferSource;
salt: BufferSource;
}
/** @category Crypto */
declare interface Pbkdf2Params extends Algorithm {
interface Pbkdf2Params extends Algorithm {
hash: HashAlgorithmIdentifier;
iterations: number;
salt: BufferSource;
}
/** @category Crypto */
declare interface AesDerivedKeyParams extends Algorithm {
interface AesDerivedKeyParams extends Algorithm {
length: number;
}
/** @category Crypto */
declare interface EcdhKeyDeriveParams extends Algorithm {
interface EcdhKeyDeriveParams extends Algorithm {
public: CryptoKey;
}
/** @category Crypto */
declare interface AesKeyGenParams extends Algorithm {
interface AesKeyGenParams extends Algorithm {
length: number;
}
/** @category Crypto */
declare interface AesKeyAlgorithm extends KeyAlgorithm {
interface AesKeyAlgorithm extends KeyAlgorithm {
length: number;
}
@ -200,7 +201,7 @@ declare interface AesKeyAlgorithm extends KeyAlgorithm {
*
* @category Crypto
*/
declare interface CryptoKey {
interface CryptoKey {
readonly algorithm: KeyAlgorithm;
readonly extractable: boolean;
readonly type: KeyType;
@ -218,7 +219,7 @@ declare var CryptoKey: {
*
* @category Crypto
*/
declare interface CryptoKeyPair {
interface CryptoKeyPair {
privateKey: CryptoKey;
publicKey: CryptoKey;
}
@ -235,7 +236,7 @@ declare var CryptoKeyPair: {
*
* @category Crypto
*/
declare interface SubtleCrypto {
interface SubtleCrypto {
generateKey(
algorithm: RsaHashedKeyGenParams | EcKeyGenParams,
extractable: boolean,
@ -374,7 +375,7 @@ declare var SubtleCrypto: {
};
/** @category Crypto */
declare interface Crypto {
interface Crypto {
readonly subtle: SubtleCrypto;
getRandomValues<
T extends

View file

@ -98,11 +98,9 @@ impl V8RawKeyData {
pub fn as_ec_public_key_p256(&self) -> Result<p256::EncodedPoint, AnyError> {
match self {
V8RawKeyData::Public(data) => {
// public_key is a serialized EncodedPoint
p256::EncodedPoint::from_bytes(data)
.map_err(|_| type_error("expected valid public EC key"))
}
V8RawKeyData::Public(data) => p256::PublicKey::from_sec1_bytes(data)
.map(|p| p.to_encoded_point(false))
.map_err(|_| type_error("expected valid public EC key")),
V8RawKeyData::Private(data) => {
let signing_key = p256::SecretKey::from_pkcs8_der(data)
.map_err(|_| type_error("expected valid private EC key"))?;
@ -114,14 +112,29 @@ impl V8RawKeyData {
}
pub fn as_ec_public_key_p384(&self) -> Result<p384::EncodedPoint, AnyError> {
match self {
V8RawKeyData::Public(data) => p384::PublicKey::from_sec1_bytes(data)
.map(|p| p.to_encoded_point(false))
.map_err(|_| type_error("expected valid public EC key")),
V8RawKeyData::Private(data) => {
let signing_key = p384::SecretKey::from_pkcs8_der(data)
.map_err(|_| type_error("expected valid private EC key"))?;
Ok(signing_key.public_key().to_encoded_point(false))
}
// Should never reach here.
V8RawKeyData::Secret(_) => unreachable!(),
}
}
pub fn as_ec_public_key_p521(&self) -> Result<p521::EncodedPoint, AnyError> {
match self {
V8RawKeyData::Public(data) => {
// public_key is a serialized EncodedPoint
p384::EncodedPoint::from_bytes(data)
p521::EncodedPoint::from_bytes(data)
.map_err(|_| type_error("expected valid public EC key"))
}
V8RawKeyData::Private(data) => {
let signing_key = p384::SecretKey::from_pkcs8_der(data)
let signing_key = p521::SecretKey::from_pkcs8_der(data)
.map_err(|_| type_error("expected valid private EC key"))?;
Ok(signing_key.public_key().to_encoded_point(false))
}
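With as_ec_public_key_p521 added here (plus the matching export and import arms above), P-521 keys round-trip through exportKey/importKey instead of failing with "Unsupported named curve". A hedged sketch, assuming P-521 key generation is also available in this build:

const { publicKey } = await crypto.subtle.generateKey(
  { name: "ECDSA", namedCurve: "P-521" },
  true,
  ["sign", "verify"],
);
const jwk = await crypto.subtle.exportKey("jwk", publicKey);
console.log(jwk.crv); // "P-521"
await crypto.subtle.importKey(
  "jwk",
  jwk,
  { name: "ECDSA", namedCurve: "P-521" },
  true,
  ["verify"],
);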

View file

@ -122,7 +122,7 @@ function newInnerRequest(method, url, headerList, body, maybeBlob) {
try {
this.headerListInner = headerList();
} catch {
throw new TypeError("cannot read headers: request closed");
throw new TypeError("Cannot read headers: request closed");
}
}
return this.headerListInner;
@ -153,7 +153,7 @@ function newInnerRequest(method, url, headerList, body, maybeBlob) {
try {
this.urlListProcessed[currentIndex] = this.urlList[currentIndex]();
} catch {
throw new TypeError("cannot read url: request closed");
throw new TypeError("Cannot read url: request closed");
}
}
return this.urlListProcessed[currentIndex];
@ -193,7 +193,7 @@ function cloneInnerRequest(request, skipBody = false) {
try {
this.urlListProcessed[0] = this.urlList[0]();
} catch {
throw new TypeError("cannot read url: request closed");
throw new TypeError("Cannot read url: request closed");
}
}
return this.urlListProcessed[0];
@ -204,7 +204,7 @@ function cloneInnerRequest(request, skipBody = false) {
try {
this.urlListProcessed[currentIndex] = this.urlList[currentIndex]();
} catch {
throw new TypeError("cannot read url: request closed");
throw new TypeError("Cannot read url: request closed");
}
}
return this.urlListProcessed[currentIndex];
@ -236,13 +236,13 @@ const KNOWN_METHODS = {
*/
function validateAndNormalizeMethod(m) {
if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, m) === null) {
throw new TypeError("Method is not valid.");
throw new TypeError("Method is not valid");
}
const upperCase = byteUpperCase(m);
if (
upperCase === "CONNECT" || upperCase === "TRACE" || upperCase === "TRACK"
) {
throw new TypeError("Method is forbidden.");
throw new TypeError("Method is forbidden");
}
return upperCase;
}
@ -418,7 +418,7 @@ class Request {
((init.body !== undefined && init.body !== null) ||
inputBody !== null)
) {
throw new TypeError("Request with GET/HEAD method cannot have body.");
throw new TypeError("Request with GET/HEAD method cannot have body");
}
// 36.
@ -442,7 +442,7 @@ class Request {
// 41.
if (initBody === null && inputBody !== null) {
if (input[_body] && input[_body].unusable()) {
throw new TypeError("Input request's body is unusable.");
throw new TypeError("Input request's body is unusable");
}
finalBody = inputBody.createProxy();
}
@ -489,7 +489,7 @@ class Request {
const prefix = "Failed to execute 'Request.clone'";
webidl.assertBranded(this, RequestPrototype);
if (this[_body] && this[_body].unusable()) {
throw new TypeError("Body is unusable.");
throw new TypeError("Body is unusable");
}
const clonedReq = cloneInnerRequest(this[_request]);
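The retouched messages are easy to hit from user code; a minimal check (message text taken verbatim from the diff above):

// GET/HEAD requests must not carry a body.
try {
  new Request("https://example.com/", { method: "GET", body: "nope" });
} catch (err) {
  console.log(err instanceof TypeError, (err as Error).message);
  // -> true "Request with GET/HEAD method cannot have body"
}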

View file

@ -6,7 +6,7 @@
/// <reference lib="esnext" />
/** @category Platform */
declare interface DomIterable<K, V> {
interface DomIterable<K, V> {
keys(): IterableIterator<K>;
values(): IterableIterator<V>;
entries(): IterableIterator<[K, V]>;
@ -18,7 +18,7 @@ declare interface DomIterable<K, V> {
}
/** @category Fetch */
declare type FormDataEntryValue = File | string;
type FormDataEntryValue = File | string;
/** Provides a way to easily construct a set of key/value pairs representing
* form fields and their values, which can then be easily sent using the
@ -27,7 +27,7 @@ declare type FormDataEntryValue = File | string;
*
* @category Fetch
*/
declare interface FormData extends DomIterable<string, FormDataEntryValue> {
interface FormData extends DomIterable<string, FormDataEntryValue> {
append(name: string, value: string | Blob, fileName?: string): void;
delete(name: string): void;
get(name: string): FormDataEntryValue | null;
@ -43,7 +43,7 @@ declare var FormData: {
};
/** @category Fetch */
declare interface Body {
interface Body {
/** A simple getter used to expose a `ReadableStream` of the body contents. */
readonly body: ReadableStream<Uint8Array> | null;
/** Stores a `Boolean` that declares whether the body has been used in a
@ -77,7 +77,7 @@ declare interface Body {
}
/** @category Fetch */
declare type HeadersInit = Iterable<string[]> | Record<string, string>;
type HeadersInit = Iterable<string[]> | Record<string, string>;
/** This Fetch API interface allows you to perform various actions on HTTP
* request and response headers. These actions include retrieving, setting,
@ -89,7 +89,7 @@ declare type HeadersInit = Iterable<string[]> | Record<string, string>;
*
* @category Fetch
*/
declare interface Headers extends DomIterable<string, string> {
interface Headers extends DomIterable<string, string> {
/** Appends a new value onto an existing header inside a `Headers` object, or
* adds the header if it does not already exist.
*/
@ -130,9 +130,9 @@ declare var Headers: {
};
/** @category Fetch */
declare type RequestInfo = Request | string;
type RequestInfo = Request | string;
/** @category Fetch */
declare type RequestCache =
type RequestCache =
| "default"
| "force-cache"
| "no-cache"
@ -140,13 +140,13 @@ declare type RequestCache =
| "only-if-cached"
| "reload";
/** @category Fetch */
declare type RequestCredentials = "include" | "omit" | "same-origin";
type RequestCredentials = "include" | "omit" | "same-origin";
/** @category Fetch */
declare type RequestMode = "cors" | "navigate" | "no-cors" | "same-origin";
type RequestMode = "cors" | "navigate" | "no-cors" | "same-origin";
/** @category Fetch */
declare type RequestRedirect = "error" | "follow" | "manual";
type RequestRedirect = "error" | "follow" | "manual";
/** @category Fetch */
declare type ReferrerPolicy =
type ReferrerPolicy =
| ""
| "no-referrer"
| "no-referrer-when-downgrade"
@ -157,7 +157,7 @@ declare type ReferrerPolicy =
| "strict-origin-when-cross-origin"
| "unsafe-url";
/** @category Fetch */
declare type BodyInit =
type BodyInit =
| Blob
| BufferSource
| FormData
@ -165,7 +165,7 @@ declare type BodyInit =
| ReadableStream<Uint8Array>
| string;
/** @category Fetch */
declare type RequestDestination =
type RequestDestination =
| ""
| "audio"
| "audioworklet"
@ -186,7 +186,7 @@ declare type RequestDestination =
| "xslt";
/** @category Fetch */
declare interface RequestInit {
interface RequestInit {
/**
* A BodyInit object or null to set request's body.
*/
@ -254,7 +254,7 @@ declare interface RequestInit {
*
* @category Fetch
*/
declare interface Request extends Body {
interface Request extends Body {
/**
* Returns the cache mode associated with request, which is a string
* indicating how the request will interact with the browser's cache when
@ -350,14 +350,14 @@ declare var Request: {
};
/** @category Fetch */
declare interface ResponseInit {
interface ResponseInit {
headers?: HeadersInit;
status?: number;
statusText?: string;
}
/** @category Fetch */
declare type ResponseType =
type ResponseType =
| "basic"
| "cors"
| "default"
@ -369,7 +369,7 @@ declare type ResponseType =
*
* @category Fetch
*/
declare interface Response extends Body {
interface Response extends Body {
readonly headers: Headers;
readonly ok: boolean;
readonly redirected: boolean;
@ -413,14 +413,14 @@ declare function fetch(
/**
* @category Fetch
*/
declare interface EventSourceInit {
interface EventSourceInit {
withCredentials?: boolean;
}
/**
* @category Fetch
*/
declare interface EventSourceEventMap {
interface EventSourceEventMap {
"error": Event;
"message": MessageEvent;
"open": Event;
@ -429,7 +429,7 @@ declare interface EventSourceEventMap {
/**
* @category Fetch
*/
declare interface EventSource extends EventTarget {
interface EventSource extends EventTarget {
onerror: ((this: EventSource, ev: Event) => any) | null;
onmessage: ((this: EventSource, ev: MessageEvent) => any) | null;
onopen: ((this: EventSource, ev: Event) => any) | null;
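All of the dropped keywords are no-ops: at the top level of a .d.ts file every declaration is already ambient, so declare on interface and type adds nothing, and the diff keeps declare var only for the value-side declarations. An illustrative fragment (not from this diff):

// example.d.ts
interface A { x: number }          // ambient even without `declare`
declare interface B { x: number }  // identical meaning
type AInit = A | number;           // type aliases are ambient too
declare var a: A;                  // value declarations keep `declare` by convention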

View file

@ -97,7 +97,7 @@ pub(crate) fn from_env() -> Proxies {
if env::var_os("REQUEST_METHOD").is_none() {
if let Some(proxy) = parse_env_var("HTTP_PROXY", Filter::Http) {
intercepts.push(proxy);
} else if let Some(proxy) = parse_env_var("http_proxy", Filter::Https) {
} else if let Some(proxy) = parse_env_var("http_proxy", Filter::Http) {
intercepts.push(proxy);
}
}
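The one-word fix matters because the lowercase fallback was registered as an HTTPS intercept and therefore never matched plain-http traffic; the surrounding REQUEST_METHOD check skips env-var proxies under CGI (the usual httpoxy mitigation). Assuming Deno's documented proxy support for fetch, either spelling now routes an http request:

// main.ts: run with either spelling (flags illustrative):
//   HTTP_PROXY=http://127.0.0.1:8080 deno run --allow-net main.ts
//   http_proxy=http://127.0.0.1:8080 deno run --allow-net main.ts
const res = await fetch("http://example.com/");
console.log(res.status); // goes through the configured proxy either way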

View file

@ -21,6 +21,7 @@ async-trait.workspace = true
base32.workspace = true
deno_core.workspace = true
deno_io.workspace = true
deno_path_util.workspace = true
deno_permissions.workspace = true
filetime.workspace = true
libc.workspace = true

View file

@ -12,12 +12,12 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use deno_core::normalize_path;
use deno_core::parking_lot::Mutex;
use deno_io::fs::File;
use deno_io::fs::FsError;
use deno_io::fs::FsResult;
use deno_io::fs::FsStat;
use deno_path_util::normalize_path;
use crate::interface::AccessCheckCb;
use crate::interface::FsDirEntry;
@ -44,7 +44,7 @@ impl InMemoryFs {
pub fn setup_text_files(&self, files: Vec<(String, String)>) {
for (path, text) in files {
let path = PathBuf::from(path);
self.mkdir_sync(path.parent().unwrap(), true, 0).unwrap();
self.mkdir_sync(path.parent().unwrap(), true, None).unwrap();
self
.write_file_sync(
&path,
@ -101,7 +101,7 @@ impl FileSystem for InMemoryFs {
&self,
path: &Path,
recursive: bool,
_mode: u32,
_mode: Option<u32>,
) -> FsResult<()> {
let path = normalize_path(path);
@ -119,7 +119,7 @@ impl FileSystem for InMemoryFs {
},
None => {
if recursive {
self.mkdir_sync(parent, true, 0)?;
self.mkdir_sync(parent, true, None)?;
} else {
return Err(FsError::Io(Error::new(
ErrorKind::NotFound,
@ -149,7 +149,7 @@ impl FileSystem for InMemoryFs {
&self,
path: PathBuf,
recursive: bool,
mode: u32,
mode: Option<u32>,
) -> FsResult<()> {
self.mkdir_sync(&path, recursive, mode)
}

View file

@ -121,13 +121,17 @@ pub trait FileSystem: std::fmt::Debug + MaybeSend + MaybeSync {
access_check: Option<AccessCheckCb<'a>>,
) -> FsResult<Rc<dyn File>>;
fn mkdir_sync(&self, path: &Path, recursive: bool, mode: u32)
-> FsResult<()>;
fn mkdir_sync(
&self,
path: &Path,
recursive: bool,
mode: Option<u32>,
) -> FsResult<()>;
async fn mkdir_async(
&self,
path: PathBuf,
recursive: bool,
mode: u32,
mode: Option<u32>,
) -> FsResult<()>;
fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()>;

View file

@ -197,7 +197,7 @@ where
.check_write(&path, "Deno.mkdirSync()")?;
let fs = state.borrow::<FileSystemRc>();
fs.mkdir_sync(&path, recursive, mode)
fs.mkdir_sync(&path, recursive, Some(mode))
.context_path("mkdir", &path)?;
Ok(())
@ -221,7 +221,7 @@ where
(state.borrow::<FileSystemRc>().clone(), path)
};
fs.mkdir_async(path.clone(), recursive, mode)
fs.mkdir_async(path.clone(), recursive, Some(mode))
.await
.context_path("mkdir", &path)?;
@ -886,7 +886,7 @@ where
const MAX_TRIES: u32 = 10;
for _ in 0..MAX_TRIES {
let path = tmp_name(&mut rng, &dir, prefix.as_deref(), suffix.as_deref())?;
match fs.mkdir_sync(&path, false, 0o700) {
match fs.mkdir_sync(&path, false, Some(0o700)) {
Ok(_) => {
// PERMISSIONS: ensure the absolute path is not leaked
let path = strip_dir_prefix(&dir, dir_arg.as_deref(), path)?;
@ -928,7 +928,11 @@ where
const MAX_TRIES: u32 = 10;
for _ in 0..MAX_TRIES {
let path = tmp_name(&mut rng, &dir, prefix.as_deref(), suffix.as_deref())?;
match fs.clone().mkdir_async(path.clone(), false, 0o700).await {
match fs
.clone()
.mkdir_async(path.clone(), false, Some(0o700))
.await
{
Ok(_) => {
// PERMISSIONS: ensure the absolute path is not leaked
let path = strip_dir_prefix(&dir, dir_arg.as_deref(), path)?;
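Note the division of labor: the public mkdir ops still pass a concrete mode, now wrapped in Some(...), while None is left to internal callers with no preference (such as the in-memory fs setup above), and temp dirs are always created owner-only with Some(0o700). At the TypeScript surface that looks like (mode is unix-only; defaults per the documented API):

Deno.mkdirSync("demo", { recursive: true });         // default mode (0o777 before umask)
Deno.mkdirSync("demo/locked", { mode: 0o700 });      // explicit mode -> Some(0o700)
const tmp = Deno.makeTempDirSync({ prefix: "ex_" }); // backend uses Some(0o700)
console.log(tmp);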

View file

@ -11,13 +11,13 @@ use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use deno_core::normalize_path;
use deno_core::unsync::spawn_blocking;
use deno_io::fs::File;
use deno_io::fs::FsError;
use deno_io::fs::FsResult;
use deno_io::fs::FsStat;
use deno_io::StdFileResourceInner;
use deno_path_util::normalize_path;
use crate::interface::AccessCheckCb;
use crate::interface::FsDirEntry;
@ -101,7 +101,7 @@ impl FileSystem for RealFs {
&self,
path: &Path,
recursive: bool,
mode: u32,
mode: Option<u32>,
) -> FsResult<()> {
mkdir(path, recursive, mode)
}
@ -109,7 +109,7 @@ impl FileSystem for RealFs {
&self,
path: PathBuf,
recursive: bool,
mode: u32,
mode: Option<u32>,
) -> FsResult<()> {
spawn_blocking(move || mkdir(&path, recursive, mode)).await?
}
@ -407,11 +407,11 @@ impl FileSystem for RealFs {
}
}
fn mkdir(path: &Path, recursive: bool, mode: u32) -> FsResult<()> {
fn mkdir(path: &Path, recursive: bool, mode: Option<u32>) -> FsResult<()> {
let mut builder = fs::DirBuilder::new();
builder.recursive(recursive);
#[cfg(unix)]
{
if let Some(mode) = mode {
use std::os::unix::fs::DirBuilderExt;
builder.mode(mode);
}
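With None the builder.mode(...) call is skipped entirely, so the platform default applies: on unix, 0o777 filtered by the process umask (an explicit mode is masked by umask as well, per mkdir(2)). A rough way to observe the difference from a script, assuming a unix host:

Deno.mkdirSync("m_default");              // no mode: 0o777 & ~umask
Deno.mkdirSync("m_700", { mode: 0o700 }); // explicit mode
// FileInfo.mode includes the file-type bits, e.g. 0o40755 for a directory.
console.log(Deno.statSync("m_default").mode?.toString(8));
console.log(Deno.statSync("m_700").mode?.toString(8));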

Some files were not shown because too many files have changed in this diff.