mirror of https://github.com/denoland/deno.git synced 2025-02-01 12:16:11 -05:00

Merge branch 'main' into premature_stdin_closure

Bartek Iwańczuk, 2024-10-26 23:56:49 +01:00 (committed by GitHub)
commit 957f63922c
No known key found for this signature in database; GPG key ID: B5690EEEBB952194
3315 changed files with 74750 additions and 45660 deletions


@@ -1,9 +1,8 @@
 FROM mcr.microsoft.com/vscode/devcontainers/rust:1-bullseye
-# Install cmake and protobuf-compiler
+# Install cmake
 RUN apt-get update \
 && apt-get install -y cmake \
-&& apt-get install -y protobuf-compiler \
 && rm -rf /var/lib/apt/lists/*
 # Install Deno


@@ -4,6 +4,7 @@
 "include": [
 "ban-untagged-todo",
 "camelcase",
+"no-console",
 "guard-for-in"
 ],
 "exclude": [


@@ -39,10 +39,14 @@
 "tests/node_compat/runner/TODO.md",
 "tests/node_compat/test",
 "tests/registry/",
+"tests/specs/bench/default_ts",
 "tests/specs/fmt",
 "tests/specs/lint/bom",
+"tests/specs/lint/default_ts",
 "tests/specs/lint/syntax_error_reporting",
 "tests/specs/publish/no_check_surfaces_syntax_error",
+"tests/specs/run/default_ts",
+"tests/specs/test/default_ts",
 "tests/testdata/byte_order_mark.ts",
 "tests/testdata/encoding",
 "tests/testdata/file_extensions/ts_with_js_extension.js",
@@ -56,7 +60,6 @@
 "tests/testdata/run/byte_order_mark.ts",
 "tests/testdata/run/error_syntax_empty_trailing_line.mjs",
 "tests/testdata/run/inline_js_source_map*",
-"tests/testdata/test/glob/",
 "tests/testdata/test/markdown_windows.md",
 "tests/util/std",
 "tests/wpt/runner/expectation.json",
@@ -65,11 +68,11 @@
 "third_party"
 ],
 "plugins": [
-"https://plugins.dprint.dev/typescript-0.91.6.wasm",
+"https://plugins.dprint.dev/typescript-0.93.0.wasm",
-"https://plugins.dprint.dev/json-0.19.3.wasm",
+"https://plugins.dprint.dev/json-0.19.4.wasm",
-"https://plugins.dprint.dev/markdown-0.17.5.wasm",
+"https://plugins.dprint.dev/markdown-0.17.8.wasm",
-"https://plugins.dprint.dev/toml-0.6.2.wasm",
+"https://plugins.dprint.dev/toml-0.6.3.wasm",
 "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
-"https://plugins.dprint.dev/g-plane/pretty_yaml-v0.4.0.wasm"
+"https://plugins.dprint.dev/g-plane/pretty_yaml-v0.5.0.wasm"
 ]
 }


@@ -2,6 +2,11 @@ name: cargo_publish
 on: workflow_dispatch
+# Ensures only one publish is running at a time
+concurrency:
+group: ${{ github.workflow }}
+cancel-in-progress: true
 jobs:
 build:
 name: cargo publish
@@ -28,16 +33,10 @@ jobs:
 - uses: dsherret/rust-toolchain-file@v1
 - name: Install deno
-uses: denoland/setup-deno@v1
+uses: denoland/setup-deno@v2
 with:
 deno-version: v1.x
-- name: Install protoc
-uses: arduino/setup-protoc@v3
-with:
-version: '21.12'
-repo-token: '${{ secrets.GITHUB_TOKEN }}'
 - name: Publish
 env:
 CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}


@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 11;
+const cacheVersion = 22;
 const ubuntuX86Runner = "ubuntu-22.04";
 const ubuntuX86XlRunner = "ubuntu-22.04-xl";
@@ -191,14 +191,9 @@ const installNodeStep = {
 uses: "actions/setup-node@v4",
 with: { "node-version": 18 },
 };
-const installProtocStep = {
-name: "Install protoc",
-uses: "arduino/setup-protoc@v3",
-with: { "version": "21.12", "repo-token": "${{ secrets.GITHUB_TOKEN }}" },
-};
 const installDenoStep = {
 name: "Install Deno",
-uses: "denoland/setup-deno@v1",
+uses: "denoland/setup-deno@v2",
 with: { "deno-version": "v1.x" },
 };
@@ -354,7 +349,7 @@ const ci = {
 needs: ["pre_build"],
 if: "${{ needs.pre_build.outputs.skip_build != 'true' }}",
 "runs-on": "${{ matrix.runner }}",
-"timeout-minutes": 150,
+"timeout-minutes": 180,
 defaults: {
 run: {
 // GH actions does not fail fast by default on
@@ -494,7 +489,6 @@ const ci = {
 if: "matrix.job == 'bench' || matrix.job == 'test'",
 ...installNodeStep,
 },
-installProtocStep,
 {
 if: [
 "matrix.profile == 'release' &&",
@@ -649,7 +643,7 @@ const ci = {
 name: "test_format.js",
 if: "matrix.job == 'lint' && matrix.os == 'linux'",
 run:
-"deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check",
+"deno run --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check",
 },
 {
 name: "Lint PR title",
@@ -664,7 +658,7 @@ const ci = {
 name: "lint.js",
 if: "matrix.job == 'lint'",
 run:
-"deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js",
+"deno run --allow-write --allow-read --allow-run --allow-net ./tools/lint.js",
 },
 {
 name: "jsdoc_checker.js",
@@ -758,8 +752,10 @@ const ci = {
 run: [
 "cd target/release",
 "zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno",
+"shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
 "strip denort",
 "zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
+"shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
 "./deno types > lib.deno.d.ts",
 ].join("\n"),
 },
@@ -784,8 +780,10 @@ const ci = {
 "--entitlements-xml-file=cli/entitlements.plist",
 "cd target/release",
 "zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno",
+"shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
 "strip denort",
 "zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort",
+"shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
 ]
 .join("\n"),
 },
@@ -800,7 +798,9 @@ const ci = {
 shell: "pwsh",
 run: [
 "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip",
+"Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
 "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip",
+"Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
 ].join("\n"),
 },
 {
@@ -813,6 +813,7 @@ const ci = {
 ].join("\n"),
 run: [
 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
+'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
 "echo ${{ github.sha }} > canary-latest.txt",
 'gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt',
 ].join("\n"),
@@ -826,7 +827,7 @@ const ci = {
 "!startsWith(github.ref, 'refs/tags/')",
 ].join("\n"),
 run:
-"target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js",
+"target/release/deno run -A --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js",
 },
 {
 name: "Test (full, debug)",
@@ -879,9 +880,9 @@ const ci = {
 DENO_BIN: "./target/debug/deno",
 },
 run: [
-"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
+"deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
 " ./tests/wpt/wpt.ts setup",
-"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
+"deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
 ' ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"',
 ].join("\n"),
 },
@@ -892,9 +893,9 @@ const ci = {
 DENO_BIN: "./target/release/deno",
 },
 run: [
-"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
+"deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
 " ./tests/wpt/wpt.ts setup",
-"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
+"deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
 " ./tests/wpt/wpt.ts run --quiet --release \\",
 ' --binary="$DENO_BIN" \\',
 " --json=wpt.json \\",
@@ -958,8 +959,7 @@ const ci = {
 "git clone --depth 1 --branch gh-pages \\",
 " https://${DENOBOT_PAT}@github.com/denoland/benchmark_data.git \\",
 " gh-pages",
-"./target/release/deno run --allow-all --unstable \\",
-" ./tools/build_benchmark_jsons.js --release",
+"./target/release/deno run --allow-all ./tools/build_benchmark_jsons.js --release",
 "cd gh-pages",
 'git config user.email "propelml@gmail.com"',
 'git config user.name "denobot"',
@@ -995,8 +995,10 @@ const ci = {
 "github.repository == 'denoland/deno' &&",
 "startsWith(github.ref, 'refs/tags/')",
 ].join("\n"),
-run:
+run: [
 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
+'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
+].join("\n"),
 },
 {
 name: "Upload release to dl.deno.land (windows)",
@@ -1010,8 +1012,10 @@ const ci = {
 env: {
 CLOUDSDK_PYTHON: "${{env.pythonLocation}}\\python.exe",
 },
-run:
+run: [
 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
+'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
+].join("\n"),
 },
 {
 name: "Create release notes",
@@ -1041,15 +1045,25 @@ const ci = {
 with: {
 files: [
 "target/release/deno-x86_64-pc-windows-msvc.zip",
+"target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum",
 "target/release/denort-x86_64-pc-windows-msvc.zip",
+"target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum",
 "target/release/deno-x86_64-unknown-linux-gnu.zip",
+"target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum",
 "target/release/denort-x86_64-unknown-linux-gnu.zip",
+"target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum",
 "target/release/deno-x86_64-apple-darwin.zip",
+"target/release/deno-x86_64-apple-darwin.zip.sha256sum",
 "target/release/denort-x86_64-apple-darwin.zip",
+"target/release/denort-x86_64-apple-darwin.zip.sha256sum",
 "target/release/deno-aarch64-unknown-linux-gnu.zip",
+"target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum",
 "target/release/denort-aarch64-unknown-linux-gnu.zip",
+"target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum",
 "target/release/deno-aarch64-apple-darwin.zip",
+"target/release/deno-aarch64-apple-darwin.zip.sha256sum",
 "target/release/denort-aarch64-apple-darwin.zip",
+"target/release/denort-aarch64-apple-darwin.zip.sha256sum",
 "target/release/deno_src.tar.gz",
 "target/release/lib.deno.d.ts",
 ].join("\n"),
@@ -1068,6 +1082,7 @@ const ci = {
 "./target",
 "!./target/*/gn_out",
 "!./target/*/*.zip",
+"!./target/*/*.sha256sum",
 "!./target/*/*.tar.gz",
 ].join("\n"),
 key: prCacheKeyPrefix + "${{ github.sha }}",
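The shasum and Get-FileHash steps added above publish a `.sha256sum` file next to every release and canary zip. A minimal, hypothetical Deno sketch of how a downloaded archive could be checked against that file (the file name is illustrative, and it assumes the checksum file starts with the hex digest, as `shasum -a 256` output does):

// verify_checksum.ts (hypothetical, not part of this commit): compare a local
// release zip against the published .sha256sum file.
const zipName = "deno-x86_64-unknown-linux-gnu.zip"; // illustrative file name
const zipBytes = await Deno.readFile(zipName);
const digest = await crypto.subtle.digest("SHA-256", zipBytes);
const actual = Array.from(new Uint8Array(digest))
  .map((b) => b.toString(16).padStart(2, "0"))
  .join("");
// `shasum -a 256` output is "<hex digest>  <file name>"; take the first field.
const expected = (await Deno.readTextFile(`${zipName}.sha256sum`))
  .trim()
  .split(/\s+/)[0];
console.log(actual === expected.toLowerCase() ? "checksum OK" : "checksum MISMATCH");

Run it with something like `deno run --allow-read verify_checksum.ts` in the directory holding the downloaded files.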


@@ -48,7 +48,7 @@ jobs:
 - pre_build
 if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
 runs-on: '${{ matrix.runner }}'
-timeout-minutes: 150
+timeout-minutes: 180
 defaults:
 run:
 shell: bash
@@ -178,7 +178,7 @@ jobs:
 if: '!(matrix.skip)'
 - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
 name: Install Deno
-uses: denoland/setup-deno@v1
+uses: denoland/setup-deno@v2
 with:
 deno-version: v1.x
 - name: Install Python
@@ -199,12 +199,6 @@ jobs:
 uses: actions/setup-node@v4
 with:
 node-version: 18
-- name: Install protoc
-uses: arduino/setup-protoc@v3
-with:
-version: '21.12'
-repo-token: '${{ secrets.GITHUB_TOKEN }}'
-if: '!(matrix.skip)'
 - if: |-
 !(matrix.skip) && (matrix.profile == 'release' &&
 matrix.job == 'test' &&
@@ -367,8 +361,8 @@ jobs:
 path: |-
 ~/.cargo/registry/index
 ~/.cargo/registry/cache
-key: '11-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+key: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-restore-keys: '11-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+restore-keys: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
 if: '!(matrix.skip)'
 - name: Restore cache build output (PR)
 uses: actions/cache/restore@v4
@@ -381,7 +375,7 @@ jobs:
 !./target/*/*.zip
 !./target/*/*.tar.gz
 key: never_saved
-restore-keys: '11-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+restore-keys: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
 - name: Apply and update mtime cache
 if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
 uses: ./.github/mtime_cache
@@ -389,7 +383,7 @@ jobs:
 cache-path: ./target
 - name: test_format.js
 if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == ''linux'')'
-run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check
+run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check
 - name: Lint PR title
 if: '!(matrix.skip) && (matrix.job == ''lint'' && github.event_name == ''pull_request'' && matrix.os == ''linux'')'
 env:
@@ -397,7 +391,7 @@ jobs:
 run: deno run ./tools/verify_pr_title.js "$PR_TITLE"
 - name: lint.js
 if: '!(matrix.skip) && (matrix.job == ''lint'')'
-run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js
+run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/lint.js
 - name: jsdoc_checker.js
 if: '!(matrix.skip) && (matrix.job == ''lint'')'
 run: deno run --allow-read --allow-env --allow-sys ./tools/jsdoc_checker.js
@@ -449,8 +443,10 @@ jobs:
 run: |-
 cd target/release
 zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno
+shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
 strip denort
 zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
+shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
 ./deno types > lib.deno.d.ts
 - name: Pre-release (mac)
 if: |-
@@ -466,8 +462,10 @@ jobs:
 rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist
 cd target/release
 zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno
+shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum
 strip denort
 zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort
+shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum
 - name: Pre-release (windows)
 if: |-
 !(matrix.skip) && (matrix.os == 'windows' &&
@@ -477,7 +475,9 @@ jobs:
 shell: pwsh
 run: |-
 Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip
+Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
 Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
+Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
 - name: Upload canary to dl.deno.land
 if: |-
 !(matrix.skip) && (matrix.job == 'test' &&
@@ -486,6 +486,7 @@ jobs:
 github.ref == 'refs/heads/main')
 run: |-
 gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/
+gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/
 echo ${{ github.sha }} > canary-latest.txt
 gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt
 - name: Autobahn testsuite
@@ -494,7 +495,7 @@ jobs:
 matrix.job == 'test' &&
 matrix.profile == 'release' &&
 !startsWith(github.ref, 'refs/tags/'))
-run: target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js
+run: target/release/deno run -A --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js
 - name: 'Test (full, debug)'
 if: |-
 !(matrix.skip) && (matrix.job == 'test' &&
@@ -531,18 +532,18 @@ jobs:
 env:
 DENO_BIN: ./target/debug/deno
 run: |-
-deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
+deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
 ./tests/wpt/wpt.ts setup
-deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
+deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
 ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"
 - name: Run web platform tests (release)
 if: '!(matrix.skip) && (matrix.wpt && matrix.profile == ''release'')'
 env:
 DENO_BIN: ./target/release/deno
 run: |-
-deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
+deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
 ./tests/wpt/wpt.ts setup
-deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
+deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
 ./tests/wpt/wpt.ts run --quiet --release \
 --binary="$DENO_BIN" \
 --json=wpt.json \
@@ -590,8 +591,7 @@ jobs:
 git clone --depth 1 --branch gh-pages \
 https://${DENOBOT_PAT}@github.com/denoland/benchmark_data.git \
 gh-pages
-./target/release/deno run --allow-all --unstable \
-./tools/build_benchmark_jsons.js --release
+./target/release/deno run --allow-all ./tools/build_benchmark_jsons.js --release
 cd gh-pages
 git config user.email "propelml@gmail.com"
 git config user.name "denobot"
@@ -616,7 +616,9 @@ jobs:
 matrix.profile == 'release' &&
 github.repository == 'denoland/deno' &&
 startsWith(github.ref, 'refs/tags/'))
-run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
+run: |-
+gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
+gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
 - name: Upload release to dl.deno.land (windows)
 if: |-
 !(matrix.skip) && (matrix.os == 'windows' &&
@@ -626,7 +628,9 @@ jobs:
 startsWith(github.ref, 'refs/tags/'))
 env:
 CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
-run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
+run: |-
+gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
+gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
 - name: Create release notes
 if: |-
 !(matrix.skip) && (matrix.job == 'test' &&
@@ -648,15 +652,25 @@ jobs:
 with:
 files: |-
 target/release/deno-x86_64-pc-windows-msvc.zip
+target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum
 target/release/denort-x86_64-pc-windows-msvc.zip
+target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum
 target/release/deno-x86_64-unknown-linux-gnu.zip
+target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum
 target/release/denort-x86_64-unknown-linux-gnu.zip
+target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum
 target/release/deno-x86_64-apple-darwin.zip
+target/release/deno-x86_64-apple-darwin.zip.sha256sum
 target/release/denort-x86_64-apple-darwin.zip
+target/release/denort-x86_64-apple-darwin.zip.sha256sum
 target/release/deno-aarch64-unknown-linux-gnu.zip
+target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum
 target/release/denort-aarch64-unknown-linux-gnu.zip
+target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum
 target/release/deno-aarch64-apple-darwin.zip
+target/release/deno-aarch64-apple-darwin.zip.sha256sum
 target/release/denort-aarch64-apple-darwin.zip
+target/release/denort-aarch64-apple-darwin.zip.sha256sum
 target/release/deno_src.tar.gz
 target/release/lib.deno.d.ts
 body_path: target/release/release-notes.md
@@ -669,8 +683,9 @@ jobs:
 ./target
 !./target/*/gn_out
 !./target/*/*.zip
+!./target/*/*.sha256sum
 !./target/*/*.tar.gz
-key: '11-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+key: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
 publish-canary:
 name: publish canary
 runs-on: ubuntu-22.04


@@ -1,15 +1,22 @@
-name: promote_to_rc
+name: promote_to_release
 on:
 workflow_dispatch:
 inputs:
+releaseKind:
+description: 'Kind of release'
+type: choice
+options:
+- rc
+- lts
+required: true
 commitHash:
-description: Commit to promote to the Release Candidate
+description: Commit to promote to release
 required: true
 jobs:
-promote-to-rc:
+promote-to-release:
-name: Promote to Release Candidate
+name: Promote to Release
 runs-on: macOS-latest
 if: github.repository == 'denoland/deno'
 steps:
@@ -33,7 +40,7 @@ jobs:
 project_id: denoland
 - name: Install deno
-uses: denoland/setup-deno@v1
+uses: denoland/setup-deno@v2
 with:
 deno-version: v1.x
@@ -42,14 +49,14 @@ jobs:
 ./tools/install_prebuilt.js rcodesign
 echo $GITHUB_WORKSPACE/third_party/prebuilt/mac >> $GITHUB_PATH
-- name: Promote to RC
+- name: Promote to Release
 env:
 APPLE_CODESIGN_KEY: '${{ secrets.APPLE_CODESIGN_KEY }}'
 APPLE_CODESIGN_PASSWORD: '${{ secrets.APPLE_CODESIGN_PASSWORD }}'
 run: |
-deno run -A ./tools/release/promote_to_rc.ts ${{github.event.inputs.commitHash}}
+deno run -A ./tools/release/promote_to_release.ts ${{github.event.inputs.releaseKind}} ${{github.event.inputs.commitHash}}
 - name: Upload archives to dl.deno.land
 run: |
-gsutil -h "Cache-Control: public, max-age=3600" cp ./*.zip gs://dl.deno.land/release/$(cat release-rc-latest.txt)/
+gsutil -h "Cache-Control: public, max-age=3600" cp ./*.zip gs://dl.deno.land/release/$(cat release-${{github.event.inputs.releaseKind}}-latest.txt)/
-gsutil -h "Cache-Control: no-cache" cp release-rc-latest.txt gs://dl.deno.land/release-rc-latest.txt
+gsutil -h "Cache-Control: no-cache" cp release-${{github.event.inputs.releaseKind}}-latest.txt gs://dl.deno.land/release-${{github.event.inputs.releaseKind}}-latest.txt


@@ -34,7 +34,7 @@ jobs:
 uses: actions/checkout@v4
 - name: Install deno
-uses: denoland/setup-deno@v1
+uses: denoland/setup-deno@v2
 with:
 deno-version: v1.x


@@ -39,7 +39,7 @@ jobs:
 - uses: dsherret/rust-toolchain-file@v1
 - name: Install deno
-uses: denoland/setup-deno@v1
+uses: denoland/setup-deno@v2
 with:
 deno-version: v1.x


@@ -30,7 +30,7 @@ jobs:
 persist-credentials: false
 - name: Setup Deno
-uses: denoland/setup-deno@v1
+uses: denoland/setup-deno@v2
 with:
 deno-version: ${{ matrix.deno-version }}
@@ -66,9 +66,9 @@ jobs:
 - name: Run web platform tests
 shell: bash
 run: |
-deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
+deno run -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
 ./tests/wpt/wpt.ts setup
-deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
+deno run -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
 ./tests/wpt/wpt.ts run \ \
 --binary=$(which deno) --quiet --release --no-ignore --json=wpt.json --wptreport=wptreport.json --exit-zero

Cargo.lock (generated, 1156 changed lines): file diff suppressed because it is too large.


@@ -5,7 +5,6 @@ resolver = "2"
 members = [
 "bench_util",
 "cli",
-"cli/napi/sym",
 "ext/broadcast_channel",
 "ext/cache",
 "ext/canvas",
@@ -19,15 +18,17 @@ members = [
 "ext/io",
 "ext/kv",
 "ext/napi",
+"ext/napi/sym",
 "ext/net",
 "ext/node",
-"ext/node_resolver",
 "ext/url",
 "ext/web",
 "ext/webgpu",
 "ext/webidl",
 "ext/websocket",
 "ext/webstorage",
+"resolvers/deno",
+"resolvers/node",
 "runtime",
 "runtime/permissions",
 "tests",
@@ -44,47 +45,53 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"
 [workspace.dependencies]
-deno_ast = { version = "=0.41.2", features = ["transpiling"] }
+deno_ast = { version = "=0.42.2", features = ["transpiling"] }
-deno_core = { version = "0.304.0" }
+deno_core = { version = "0.314.2" }
-deno_bench_util = { version = "0.158.0", path = "./bench_util" }
+deno_bench_util = { version = "0.168.0", path = "./bench_util" }
-deno_lockfile = "0.21.1"
+deno_lockfile = "=0.23.1"
 deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
-deno_permissions = { version = "0.24.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.173.0", path = "./runtime" }
+deno_npm = "=0.25.4"
+deno_path_util = "=0.2.1"
+deno_permissions = { version = "0.34.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.183.0", path = "./runtime" }
+deno_semver = "=0.5.16"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.94.0", path = "./cli/napi/sym" }
+napi_sym = { version = "0.104.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
 denokv_proto = "0.8.1"
 denokv_remote = "0.8.1"
 # denokv_sqlite brings in bundled sqlite if we don't disable the default features
-denokv_sqlite = { default-features = false, version = "0.8.1" }
+denokv_sqlite = { default-features = false, version = "0.8.2" }
 # exts
-deno_broadcast_channel = { version = "0.158.0", path = "./ext/broadcast_channel" }
+deno_broadcast_channel = { version = "0.168.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.96.0", path = "./ext/cache" }
+deno_cache = { version = "0.106.0", path = "./ext/cache" }
-deno_canvas = { version = "0.33.0", path = "./ext/canvas" }
+deno_canvas = { version = "0.43.0", path = "./ext/canvas" }
-deno_console = { version = "0.164.0", path = "./ext/console" }
+deno_console = { version = "0.174.0", path = "./ext/console" }
-deno_cron = { version = "0.44.0", path = "./ext/cron" }
+deno_cron = { version = "0.54.0", path = "./ext/cron" }
-deno_crypto = { version = "0.178.0", path = "./ext/crypto" }
+deno_crypto = { version = "0.188.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.188.0", path = "./ext/fetch" }
+deno_fetch = { version = "0.198.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.151.0", path = "./ext/ffi" }
+deno_ffi = { version = "0.161.0", path = "./ext/ffi" }
-deno_fs = { version = "0.74.0", path = "./ext/fs" }
+deno_fs = { version = "0.84.0", path = "./ext/fs" }
-deno_http = { version = "0.162.0", path = "./ext/http" }
+deno_http = { version = "0.172.0", path = "./ext/http" }
-deno_io = { version = "0.74.0", path = "./ext/io" }
+deno_io = { version = "0.84.0", path = "./ext/io" }
-deno_kv = { version = "0.72.0", path = "./ext/kv" }
+deno_kv = { version = "0.82.0", path = "./ext/kv" }
-deno_napi = { version = "0.95.0", path = "./ext/napi" }
+deno_napi = { version = "0.105.0", path = "./ext/napi" }
-deno_net = { version = "0.156.0", path = "./ext/net" }
+deno_net = { version = "0.166.0", path = "./ext/net" }
-deno_node = { version = "0.101.0", path = "./ext/node" }
+deno_node = { version = "0.111.0", path = "./ext/node" }
-deno_tls = { version = "0.151.0", path = "./ext/tls" }
+deno_tls = { version = "0.161.0", path = "./ext/tls" }
-deno_url = { version = "0.164.0", path = "./ext/url" }
+deno_url = { version = "0.174.0", path = "./ext/url" }
-deno_web = { version = "0.195.0", path = "./ext/web" }
+deno_web = { version = "0.205.0", path = "./ext/web" }
-deno_webgpu = { version = "0.131.0", path = "./ext/webgpu" }
+deno_webgpu = { version = "0.141.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.164.0", path = "./ext/webidl" }
+deno_webidl = { version = "0.174.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.169.0", path = "./ext/websocket" }
+deno_websocket = { version = "0.179.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.159.0", path = "./ext/webstorage" }
+deno_webstorage = { version = "0.169.0", path = "./ext/webstorage" }
-node_resolver = { version = "0.3.0", path = "./ext/node_resolver" }
+# resolvers
+deno_resolver = { version = "0.6.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.13.0", path = "./resolvers/node" }
 aes = "=0.8.3"
 anyhow = "1.0.57"
@@ -99,14 +106,16 @@ cbc = { version = "=0.1.2", features = ["alloc"] }
 # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
 # Instead use util::time::utc_now()
 chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
+color-print = "0.3.5"
 console_static_text = "=0.8.1"
+dashmap = "5.5.3"
 data-encoding = "2.3.3"
 data-url = "=0.3.0"
-deno_cache_dir = "=0.10.2"
+deno_cache_dir = "=0.13.0"
-deno_package_json = { version = "=0.1.1", default-features = false }
+deno_package_json = { version = "0.1.2", default-features = false }
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
-elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
+elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
 encoding_rs = "=0.8.33"
 fast-socks5 = "0.9.6"
 faster-hex = "0.9"
@@ -128,12 +137,12 @@ hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy
 hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
 indexmap = { version = "2", features = ["serde"] }
 ipnet = "2.3"
-jsonc-parser = { version = "=0.23.0", features = ["serde"] }
+jsonc-parser = { version = "=0.26.2", features = ["serde"] }
 lazy-regex = "3"
 libc = "0.2.126"
-libz-sys = { version = "1.1", default-features = false }
+libz-sys = { version = "1.1.20", default-features = false }
 log = "0.4.20"
-lsp-types = "=0.94.1" # used by tower-lsp and "proposed" feature is unstable in patch releases
+lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
 memmem = "0.1.1"
 monch = "=0.5.0"
 notify = "=6.1.1"
@@ -141,8 +150,8 @@ num-bigint = { version = "0.4", features = ["rand"] }
 once_cell = "1.17.1"
 os_pipe = { version = "=1.1.5", features = ["io_safety"] }
 p224 = { version = "0.13.0", features = ["ecdh"] }
-p256 = { version = "0.13.2", features = ["ecdh"] }
+p256 = { version = "0.13.2", features = ["ecdh", "jwk"] }
-p384 = { version = "0.13.0", features = ["ecdh"] }
+p384 = { version = "0.13.0", features = ["ecdh", "jwk"] }
 parking_lot = "0.12.0"
 percent-encoding = "2.3.0"
 phf = { version = "0.11", features = ["macros"] }
@@ -154,7 +163,7 @@ rand = "=0.8.5"
 regex = "^1.7.0"
 reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
 ring = "^0.17.0"
-rusqlite = { version = "=0.29.0", features = ["unlock_notify", "bundled"] }
+rusqlite = { version = "0.32.0", features = ["unlock_notify", "bundled"] }
 rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] }
 rustls-pemfile = "2"
 rustls-tokio-stream = "=0.3.0"
@@ -162,6 +171,7 @@ rustls-webpki = "0.102"
 rustyline = "=13.0.0"
 saffron = "=0.1.0"
 scopeguard = "1.2.0"
+sec1 = "0.7"
 serde = { version = "1.0.149", features = ["derive"] }
 serde_bytes = "0.11"
 serde_json = "1.0.85"
@@ -177,21 +187,23 @@ tar = "=0.4.40"
 tempfile = "3.4.0"
 termcolor = "1.1.3"
 thiserror = "1.0.61"
-tokio = { version = "=1.36.0", features = ["full"] }
+tokio = { version = "1.36.0", features = ["full"] }
 tokio-metrics = { version = "0.3.0", features = ["rt"] }
 tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
 tokio-socks = "0.5.1"
 tokio-util = "0.7.4"
 tower = { version = "0.4.13", default-features = false, features = ["util"] }
-tower-http = { version = "0.5.2", features = ["decompression-br", "decompression-gzip"] }
+tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] }
-tower-lsp = { version = "=0.20.0", features = ["proposed"] }
+tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
 tower-service = "0.3.2"
 twox-hash = "=1.6.3"
 # Upgrading past 2.4.1 may cause WPT failures
 url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
 uuid = { version = "1.3.0", features = ["v4"] }
+webpki-root-certs = "0.26.5"
 webpki-roots = "0.26"
 which = "4.2.5"
+yoke = { version = "0.7.4", features = ["derive"] }
 zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
 zstd = "=0.12.4"
@@ -209,15 +221,14 @@ quote = "1"
 syn = { version = "2", features = ["full", "extra-traits"] }
 # unix
-nix = "=0.26.2"
+nix = "=0.27.1"
 # windows deps
 junction = "=0.2.0"
 winapi = "=0.3.9"
-windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry"] }
+windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel"] }
 winres = "=0.1.12"
-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
 [profile.release]
 codegen-units = 1
 incremental = true
@@ -235,13 +246,6 @@ inherits = "release"
 codegen-units = 128
 lto = "thin"
-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
-[profile.bench]
-codegen-units = 1
-incremental = true
-lto = true
-opt-level = 'z' # Optimize for size
 # Key generation is too slow on `debug`
 [profile.dev.package.num-bigint-dig]
 opt-level = 3
@@ -250,80 +254,6 @@ opt-level = 3
 [profile.dev.package.v8]
 opt-level = 1
-# Optimize these packages for performance.
-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
-[profile.bench.package.async-compression]
-opt-level = 3
-[profile.bench.package.base64-simd]
-opt-level = 3
-[profile.bench.package.brotli]
-opt-level = 3
-[profile.bench.package.brotli-decompressor]
-opt-level = 3
-[profile.bench.package.bytes]
-opt-level = 3
-[profile.bench.package.deno_bench_util]
-opt-level = 3
-[profile.bench.package.deno_broadcast_channel]
-opt-level = 3
-[profile.bench.package.deno_core]
-opt-level = 3
-[profile.bench.package.deno_crypto]
-opt-level = 3
-[profile.bench.package.deno_fetch]
-opt-level = 3
-[profile.bench.package.deno_ffi]
-opt-level = 3
-[profile.bench.package.deno_http]
-opt-level = 3
-[profile.bench.package.deno_napi]
-opt-level = 3
-[profile.bench.package.deno_net]
-opt-level = 3
-[profile.bench.package.deno_node]
-opt-level = 3
-[profile.bench.package.deno_runtime]
-opt-level = 3
-[profile.bench.package.deno_tls]
-opt-level = 3
-[profile.bench.package.deno_url]
-opt-level = 3
-[profile.bench.package.deno_web]
-opt-level = 3
-[profile.bench.package.deno_websocket]
-opt-level = 3
-[profile.bench.package.fastwebsockets]
-opt-level = 3
-[profile.bench.package.flate2]
-opt-level = 3
-[profile.bench.package.futures-util]
-opt-level = 3
-[profile.bench.package.hyper]
-opt-level = 3
-[profile.bench.package.miniz_oxide]
-opt-level = 3
-[profile.bench.package.num-bigint-dig]
-opt-level = 3
-[profile.bench.package.rand]
-opt-level = 3
-[profile.bench.package.serde]
-opt-level = 3
-[profile.bench.package.serde_v8]
-opt-level = 3
-[profile.bench.package.test_napi]
-opt-level = 3
-[profile.bench.package.tokio]
-opt-level = 3
-[profile.bench.package.url]
-opt-level = 3
-[profile.bench.package.v8]
-opt-level = 3
-[profile.bench.package.zstd]
-opt-level = 3
-[profile.bench.package.zstd-sys]
-opt-level = 3
-# NB: the `bench` and `release` profiles must remain EXACTLY the same.
 [profile.release.package.async-compression]
 opt-level = 3
 [profile.release.package.base64-simd]
@@ -382,6 +312,8 @@ opt-level = 3
 opt-level = 3
 [profile.release.package.serde_v8]
 opt-level = 3
+[profile.release.package.libsui]
+opt-level = 3
 [profile.release.package.test_napi]
 opt-level = 3
 [profile.release.package.tokio]


@@ -6,6 +6,778 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install
### 2.0.3 / 2024.10.25
- feat(lsp): interactive inlay hints (#26382)
- fix: support node-api in denort (#26389)
- fix(check): support `--frozen` on deno check (#26479)
- fix(cli): increase size of blocking task threadpool on windows (#26465)
- fix(config): schemas for lint rule and tag autocompletion (#26515)
- fix(ext/console): ignore casing for named colors in css parsing (#26466)
- fix(ext/ffi): return u64/i64 as bigints from nonblocking ffi calls (#26486)
- fix(ext/node): cancel pending ipc writes on channel close (#26504)
- fix(ext/node): map `ERROR_INVALID_NAME` to `ENOENT` on windows (#26475)
- fix(ext/node): only set our end of child process pipe to nonblocking mode
(#26495)
- fix(ext/node): properly map reparse point error in readlink (#26375)
- fix(ext/node): refactor http.ServerResponse into function class (#26210)
- fix(ext/node): stub HTTPParser internal binding (#26401)
- fix(ext/node): use primordials in `ext/node/polyfills/https.ts` (#26323)
- fix(fmt): --ext flag requires to pass files (#26525)
- fix(fmt): upgrade formatters (#26469)
- fix(help): missing package specifier (#26380)
- fix(info): resolve workspace member mappings (#26350)
- fix(install): better json editing (#26450)
- fix(install): cache all exports of JSR packages listed in `deno.json` (#26501)
- fix(install): cache type only module deps in `deno install` (#26497)
- fix(install): don't cache json exports of JSR packages (for now) (#26530)
- fix(install): update lockfile when using package.json (#26458)
- fix(lsp): import-map-remap quickfix for type imports (#26454)
- fix(node/util): support array formats in `styleText` (#26507)
- fix(node:tls): set TLSSocket.alpnProtocol for client connections (#26476)
- fix(npm): ensure scoped package name is encoded in URLs (#26390)
- fix(npm): support version ranges with && or comma (#26453)
- fix: `.npmrc` settings not being passed to install/add command (#26473)
- fix: add 'fmt-component' to unstable features in schema file (#26526)
- fix: share inotify fd across watchers (#26200)
- fix: unpin tokio version (#26457)
- perf(compile): pass module source data from binary directly to v8 (#26494)
- perf: avoid multiple calls to runMicrotask (#26378)
### 2.0.2 / 2024.10.17
- fix(cli): set napi object property properly (#26344)
- fix(ext/node): add null check for kStreamBaseField (#26368)
- fix(install): don't attempt to cache specifiers that point to directories
(#26369)
- fix(jupyter): fix panics for overslow subtraction (#26371)
- fix(jupyter): update to the new logo (#26353)
- fix(net): don't try to set nodelay on upgrade streams (#26342)
- fix(node/fs): copyFile with `COPYFILE_EXCL` should not throw if the
destination doesn't exist (#26360)
- fix(node/http): normalize header names in `ServerResponse` (#26339)
- fix(runtime): send ws ping frames from inspector server (#26352)
- fix: don't warn on ignored signals on windows (#26332)
### 2.0.1 / 2024.10.16
- feat(lsp): "deno/didRefreshDenoConfigurationTree" notifications (#26215)
- feat(unstable): `--unstable-detect-cjs` for respecting explicit
`"type": "commonjs"` (#26149)
- fix(add): create deno.json when running `deno add jsr:<pkg>` (#26275)
- fix(add): exact version should not have range `^` specifier (#26302)
- fix(child_process): map node `--no-warnings` flag to `--quiet` (#26288)
- fix(cli): add prefix to install commands in help (#26318)
- fix(cli): consolidate pkg parser for install & remove (#26298)
- fix(cli): named export takes precedence over default export in doc testing
(#26112)
- fix(cli): improve deno info output for npm packages (#25906)
- fix(console/ext/repl): support using parseFloat() (#25900)
- fix(ext/console): apply coloring for console.table (#26280)
- fix(ext/napi): pass user context to napi_threadsafe_fn finalizers (#26229)
- fix(ext/node): allow writing to tty columns (#26201)
- fix(ext/node): compute pem length (upper bound) for key exports (#26231)
- fix(ext/node): fix dns.lookup result ordering (#26264)
- fix(ext/node): handle http2 server ending stream (#26235)
- fix(ext/node): implement TCP.setNoDelay (#26263)
- fix(ext/node): timingSafeEqual account for AB byteOffset (#26292)
- fix(ext/node): use primordials in `ext/node/polyfills/internal/buffer.mjs`
(#24993)
- fix(ext/webgpu): allow GL backend on Windows (#26206)
- fix(install): duplicate dependencies in `package.json` (#26128)
- fix(install): handle pkg with dep on self when pkg part of peer dep resolution
(#26277)
- fix(install): retry downloads of registry info / tarballs (#26278)
- fix(install): support installing npm package with alias (#26246)
- fix(jupyter): copy kernels icons to the kernel directory (#26084)
- fix(jupyter): keep running event loop when waiting for messages (#26049)
- fix(lsp): relative completions for bare import-mapped specifiers (#26137)
- fix(node): make `process.stdout.isTTY` writable (#26130)
- fix(node/util): export `styleText` from `node:util` (#26194)
- fix(npm): support `--allow-scripts` on `deno run` (and `deno add`,
`deno test`, etc) (#26075)
- fix(repl): importing json files (#26053)
- fix(repl): remove check flags (#26140)
- fix(unstable/worker): ensure import permissions are passed (#26101)
- fix: add hint for missing `document` global in terminal error (#26218)
- fix: do not panic on wsl share file paths on windows (#26081)
- fix: do not panic running remote cjs module (#26259)
- fix: do not panic when using methods on classes and interfaces in deno doc
html output (#26100)
- fix: improve suggestions and hints when using CommonJS modules (#26287)
- fix: node-api function call should use preamble (#26297)
- fix: panic in `prepare_stack_trace_callback` when global interceptor throws
(#26241)
- fix: use syntect for deno doc html generation (#26322)
- perf(http): avoid clone getting request method and url (#26250)
- perf(http): cache webidl.converters lookups in ext/fetch/23_response.js
(#26256)
- perf(http): make heap allocation for path conditional (#26289)
- perf: use fast calls for microtask ops (#26236)
### 2.0.0 / 2024.10.09
Read announcement blog post at: https://deno.com/blog/v2
- BREAKING: `DENO_FUTURE=1` by default, or welcome to Deno 2.0 (#25213)
- BREAKING: disallow `new Deno.FsFile()` (#25478)
- BREAKING: drop support for Deno.run.{clearEnv,gid,uid} (#25371)
- BREAKING: improve types for `Deno.serve` (#25369)
- BREAKING: improved error code accuracy (#25383)
- BREAKING: make supported compilerOptions an allow list (#25432)
- BREAKING: move `width` and `height` options to `UnsafeWindowSurface`
constructor (#24200)
- BREAKING: remove --allow-hrtime (#25367)
- BREAKING: remove "emit" and "map" from deno info output (#25468)
- BREAKING: remove `--allow-none` flag (#25337)
- BREAKING: remove `--jobs` flag (#25336)
- BREAKING: remove `--trace-ops` (#25344)
- BREAKING: remove `--ts` flag (#25338)
- BREAKING: remove `--unstable` flag (#25522)
- BREAKING: remove `deno bundle` (#25339)
- BREAKING: remove `deno vendor` (#25343)
- BREAKING: remove `Deno.[Tls]Listener.prototype.rid` (#25556)
- BREAKING: remove `Deno.{Conn,TlsConn,TcpConn,UnixConn}.prototype.rid` (#25446)
- BREAKING: remove `Deno.{Reader,Writer}[Sync]` and `Deno.Closer` (#25524)
- BREAKING: remove `Deno.Buffer` (#25441)
- BREAKING: remove `Deno.close()` (#25347)
- BREAKING: remove `Deno.ConnectTlsOptions.{certChain,certFile,privateKey}` and
`Deno.ListenTlsOptions.certChain,certFile,keyFile}` (#25525)
- BREAKING: remove `Deno.copy()` (#25345)
- BREAKING: remove `Deno.customInspect` (#25348)
- BREAKING: remove `Deno.fdatasync[Sync]()` (#25520)
- BREAKING: remove `Deno.File` (#25447)
- BREAKING: remove `Deno.flock[Sync]()` (#25350)
- BREAKING: remove `Deno.FsFile.prototype.rid` (#25499)
- BREAKING: remove `Deno.fstat[Sync]()` (#25351)
- BREAKING: remove `Deno.FsWatcher.prototype.rid` (#25444)
- BREAKING: remove `Deno.fsync[Sync]()` (#25448)
- BREAKING: remove `Deno.ftruncate[Sync]()` (#25412)
- BREAKING: remove `Deno.funlock[Sync]()` (#25442)
- BREAKING: remove `Deno.futime[Sync]()` (#25252)
- BREAKING: remove `Deno.iter[Sync]()` (#25346)
- BREAKING: remove `Deno.read[Sync]()` (#25409)
- BREAKING: remove `Deno.readAll[Sync]()` (#25386)
- BREAKING: remove `Deno.seek[Sync]()` (#25449)
- BREAKING: remove `Deno.Seeker[Sync]` (#25551)
- BREAKING: remove `Deno.shutdown()` (#25253)
- BREAKING: remove `Deno.write[Sync]()` (#25408)
- BREAKING: remove `Deno.writeAll[Sync]()` (#25407)
- BREAKING: remove deprecated `UnsafeFnPointer` constructor type with untyped
`Deno.PointerObject` parameter (#25577)
- BREAKING: remove deprecated files config (#25535)
- BREAKING: Remove obsoleted Temporal APIs part 2 (#25505)
- BREAKING: remove remaining web types for compatibility (#25334)
- BREAKING: remove support for remote import maps in deno.json (#25836)
- BREAKING: rename "deps" remote cache folder to "remote" (#25969)
- BREAKING: soft-remove `Deno.isatty()` (#25410)
- BREAKING: soft-remove `Deno.run()` (#25403)
- BREAKING: soft-remove `Deno.serveHttp()` (#25451)
- BREAKING: undeprecate `Deno.FsWatcher.prototype.return()` (#25623)
- feat: add `--allow-import` flag (#25469)
- feat: Add a hint on error about 'Relative import path ... not prefixed with
...' (#25430)
- feat: Add better error messages for unstable APIs (#25519)
- feat: Add suggestion for packages using Node-API addons (#25975)
- feat: Allow importing .cjs files (#25426)
- feat: default to TS for file extension and support ext flag in more scenarios
(#25472)
- feat: deprecate import assertions (#25281)
- feat: Don't warn about --allow-scripts when using esbuild (#25894)
- feat: hide several --unstable-* flags (#25378)
- feat: improve lockfile v4 to store normalized version constraints and be more
terse (#25247)
- feat: improve warnings for deprecations and lifecycle script for npm packages
(#25694)
- feat: include version number in all --json based outputs (#25335)
- feat: lockfile v4 by default (#25165)
- feat: make 'globalThis.location' a configurable property (#25812)
- feat: print `Listening on` messages on stderr instead of stdout (#25491)
- feat: remove `--lock-write` flag (#25214)
- feat: require jsr prefix for `deno install` and `deno add` (#25698)
- feat: require(esm) (#25501)
- feat: Show hints when using `window` global (#25805)
- feat: stabilize `Deno.createHttpClient()` (#25569)
- feat: suggest `deno install --entrypoint` instead of `deno cache` (#25228)
- feat: support DENO_LOG env var instead of RUST_LOG (#25356)
- feat: TypeScript 5.6 and `npm:@types/node@22` (#25614)
- feat: Update no-window lint rule (#25486)
- feat: update warning message for --allow-run with no list (#25693)
- feat: warn when using `--allow-run` with no allow list (#25215)
- feat(add): Add npm packages to package.json if present (#25477)
- feat(add): strip package subpath when adding a package (#25419)
- feat(add/install): Flag to add dev dependency to package.json (#25495)
- feat(byonm): support `deno run npm:<package>` when package is not in
package.json (#25981)
- feat(check): turn on noImplicitOverride (#25695)
- feat(check): turn on useUnknownInCatchVariables (#25465)
- feat(cli): evaluate code snippets in JSDoc and markdown (#25220)
- feat(cli): give access to `process` global everywhere (#25291)
- feat(cli): use NotCapable error for permission errors (#25431)
- feat(config): Node modules option for 2.0 (#25299)
- feat(ext/crypto): import and export p521 keys (#25789)
- feat(ext/crypto): X448 support (#26043)
- feat(ext/kv): configurable limit params (#25174)
- feat(ext/node): add abort helpers, process & streams fix (#25262)
- feat(ext/node): add rootCertificates to node:tls (#25707)
- feat(ext/node): buffer.transcode() (#25972)
- feat(ext/node): export 'promises' symbol from 'node:timers' (#25589)
- feat(ext/node): export missing constants from 'zlib' module (#25584)
- feat(ext/node): export missing symbols from domain, punycode, repl, tls
(#25585)
- feat(ext/node): export more symbols from streams and timers/promises (#25582)
- feat(ext/node): expose ES modules for _ modules (#25588)
- feat(flags): allow double commas to escape values in path based flags (#25453)
- feat(flags): support user provided args in repl subcommand (#25605)
- feat(fmt): better error on malformed HTML files (#25853)
- feat(fmt): stabilize CSS, HTML and YAML formatters (#25753)
- feat(fmt): support vto and njk extensions (#25831)
- feat(fmt): upgrade markup_fmt (#25768)
- feat(install): deno install with entrypoint (#25411)
- feat(install): warn repeatedly about not-run lifecycle scripts on explicit
installs (#25878)
- feat(lint): add `no-process-global` lint rule (#25709)
- feat(lsp): add a message when someone runs 'deno lsp' manually (#26051)
- feat(lsp): auto-import types with 'import type' (#25662)
- feat(lsp): html/css/yaml file formatting (#25353)
- feat(lsp): quick fix for @deno-types="npm:@types/*" (#25954)
- feat(lsp): turn on useUnknownInCatchVariables (#25474)
- feat(lsp): unstable setting as list (#25552)
- feat(permissions): `Deno.mainModule` doesn't require permissions (#25667)
- feat(permissions): allow importing from cdn.jsdelivr.net by default (#26013)
- feat(serve): Support second parameter in deno serve (#25606)
- feat(tools/doc): display subitems in symbol overviews where applicable
(#25885)
- feat(uninstall): alias to 'deno remove' if -g flag missing (#25461)
- feat(upgrade): better error message on failure (#25503)
- feat(upgrade): print info links for Deno 2 RC releases (#25225)
- feat(upgrade): support LTS release channel (#25123)
- fix: add link to env var docs (#25557)
- fix: add suggestion how to fix importing CJS module (#21764)
- fix: add test ensuring als works across dynamic import (#25593)
- fix: better error for Deno.UnsafeWindowSurface, correct HttpClient name,
cleanup unused code (#25833)
- fix: cjs resolution cases (#25739)
- fix: consistent with deno_config and treat `"experimentalDecorators"` as
deprecated (#25735)
- fix: delete old Deno 1.x headers file when loading cache (#25283)
- fix: do not panic running invalid file specifier (#25530)
- fix: don't include extensionless files in file collection for lint & fmt by
default (#25721)
- fix: don't prompt when using `Deno.permissions.request` with `--no-prompt`
(#25811)
- fix: eagerly error for specifier with empty version constraint (#25944)
- fix: enable `Win32_Security` feature in `windows-sys` (#26007)
- fix: error on unsupported compiler options (#25714)
- fix: error out if a valid flag is passed before a subcommand (#25830)
- fix: fix jupyter display function type (#25326)
- fix: Float16Array type (#25506)
- fix: handle showing warnings while the progress bar is shown (#25187)
- fix: Hide 'deno cache' from help output (#25960)
- fix: invalid ipv6 hostname on `deno serve` (#25482)
- fix: linux canonicalization checks (#24641)
- fix: lock down allow-run permissions more (#25370)
- fix: make some warnings more standard (#25324)
- fix: no cmd prefix in help output go links (#25459)
- fix: only enable byonm if workspace root has pkg json (#25379)
- fix: panic when require(esm) (#25769)
- fix: precompile preserve SVG camelCase attributes (#25945)
- fix: reland async context (#25140)
- fix: remove --allow-run warning when using deno without args or subcommand
(#25684)
- fix: remove entrypoint hack for Deno 2.0 (#25332)
- fix: remove recently added deno.json node_modules aliasing (#25542)
- fix: remove the typo in the help message (#25962)
- fix: removed unstable-http from deno help (#25216)
- fix: replace `npm install` hint with `deno install` hint (#25244)
- fix: trim space around DENO_AUTH_TOKENS (#25147)
- fix: update deno_doc (#25290)
- fix: Update deno_npm to fix `deno install` with crossws (#25837)
- fix: update hint for `deno add <package>` (#25455)
- fix: update malva in deno to support astro css comments (#25553)
- fix: update nodeModulesDir config JSON schema (#25653)
- fix: update patchver to 0.2 (#25952)
- fix: update sui to 0.4 (#25942)
- fix: upgrade deno_ast 0.42 (#25313)
- fix: upgrade deno_core to 0.307.0 (#25287)
- fix(add/install): default to "latest" tag for npm packages in
`deno add npm:pkg` (#25858)
- fix(bench): Fix table column alignments and NO_COLOR=1 (#25190)
- fix(BREAKING): make dns record types have consistent naming (#25357)
- fix(byonm): resolve npm deps of jsr deps (#25399)
- fix(check): ignore noImplicitOverrides in remote modules (#25854)
- fix(check): move is cjs check from resolving to loading (#25597)
- fix(check): properly surface dependency errors in types file of js file
(#25860)
- fix(cli): `deno task` exit with status 0 (#25637)
- fix(cli): Default to auto with --node-modules-dir flag (#25772)
- fix(cli): handle edge cases around `export`s in doc tests and default export
(#25720)
- fix(cli): Map error kind to `PermissionDenied` when symlinking fails due to
permissions (#25398)
- fix(cli): Only set allow net flag for deno serve if not already allowed all
(#25743)
- fix(cli): Warn on not-run lifecycle scripts with global cache (#25786)
- fix(cli/tools): correct `deno init --serve` template behavior (#25318)
- fix(compile): support 'deno compile' in RC and LTS releases (#25875)
- fix(config): validate export names (#25436)
- fix(coverage): ignore urls from doc testing (#25736)
- fix(doc): surface graph errors as warnings (#25888)
- fix(dts): stabilize `fetch` declaration for use with `Deno.HttpClient`
(#25683)
- fix(ext/console): more precision in console.time (#25723)
- fix(ext/console): prevent duplicate error printing when the cause is assigned
(#25327)
- fix(ext/crypto): ensure EC public keys are exported uncompressed (#25766)
- fix(ext/crypto): fix identity test for x25519 derive bits (#26011)
- fix(ext/crypto): reject empty usages in SubtleCrypto#importKey (#25759)
- fix(ext/crypto): support md4 digest algorithm (#25656)
- fix(ext/crypto): throw DataError for invalid EC key import (#25181)
- fix(ext/fetch): fix lowercase http_proxy classified as https (#25686)
- fix(ext/fetch): percent decode userinfo when parsing proxies (#25229)
- fix(ext/http): do not set localhost to hostname unnecessarily (#24777)
- fix(ext/http): gracefully handle Response.error responses (#25712)
- fix(ext/node): add `FileHandle#writeFile` (#25555)
- fix(ext/node): add `vm.constants` (#25630)
- fix(ext/node): Add missing `node:path` exports (#25567)
- fix(ext/node): Add missing node:fs and node:constants exports (#25568)
- fix(ext/node): add stubs for `node:trace_events` (#25628)
- fix(ext/node): attach console stream properties (#25617)
- fix(ext/node): avoid showing `UNKNOWN` error from TCP handle (#25550)
- fix(ext/node): close upgraded socket when the underlying http connection is
closed (#25387)
- fix(ext/node): delay accept() call 2 ticks in net.Server#listen (#25481)
- fix(ext/node): don't throw error for unsupported signal binding on windows
(#25699)
- fix(ext/node): emit `online` event after worker thread is initialized (#25243)
- fix(ext/node): export `process.allowedNodeEnvironmentFlags` (#25629)
- fix(ext/node): export JWK public key (#25239)
- fix(ext/node): export request and response classes from `http2` module (#25592)
- fix(ext/node): fix `Cipheriv#update(string, undefined)` (#25571)
- fix(ext/node): fix Decipheriv when autoPadding disabled (#25598)
- fix(ext/node): fix process.stdin.pause() (#25864)
- fix(ext/node): Fix vm sandbox object panic (#24985)
- fix(ext/node): http2session ready state (#25143)
- fix(ext/node): Implement detached option in `child_process` (#25218)
- fix(ext/node): import EC JWK keys (#25266)
- fix(ext/node): import JWK octet key pairs (#25180)
- fix(ext/node): import RSA JWK keys (#25267)
- fix(ext/node): register `node:wasi` built-in (#25134)
- fix(ext/node): remove unimplemented promiseHook stubs (#25979)
- fix(ext/node): report freemem() on Linux in bytes (#25511)
- fix(ext/node): Rewrite `node:v8` serialize/deserialize (#25439)
- fix(ext/node): session close during stream setup (#25170)
- fix(ext/node): Stream should be instance of EventEmitter (#25527)
- fix(ext/node): stub `inspector/promises` (#25635)
- fix(ext/node): stub `process.cpuUsage()` (#25462)
- fix(ext/node): stub cpu_info() for OpenBSD (#25807)
- fix(ext/node): support x509 certificates in `createPublicKey` (#25731)
- fix(ext/node): throw when loading `cpu-features` module (#25257)
- fix(ext/node): update aead-gcm-stream to 0.3 (#25261)
- fix(ext/node): use primordials in `ext/node/polyfills/console.ts` (#25572)
- fix(ext/node): use primordials in ext/node/polyfills/wasi.ts (#25608)
- fix(ext/node): validate input lengths in `Cipheriv` and `Decipheriv` (#25570)
- fix(ext/web): don't ignore capture in EventTarget.removeEventListener (#25788)
- fix(ext/webgpu): allow to build on unsupported platforms (#25202)
- fix(ext/webgpu): sync category comment (#25580)
- fix(ext/webstorage): make `getOwnPropertyDescriptor` with symbol return
`undefined` (#13348)
- fix(flags): --allow-all should conflict with lower permissions (#25909)
- fix(flags): don't treat empty run command as task subcommand (#25708)
- fix(flags): move some content from docs.deno.com into help output (#25951)
- fix(flags): properly error out for urls (#25770)
- fix(flags): require global flag for permission flags in install subcommand
(#25391)
- fix(fmt): --check was broken for CSS, YAML and HTML (#25848)
- fix(fmt): fix incorrect quotes in components (#25249)
- fix(fmt): fix tabs in YAML (#25536)
- fix(fmt/markdown): fix regression with multi-line footnotes and inline math
(#25222)
- fix(info): error instead of panic for npm specifiers when using byonm (#25947)
- fix(info): move "version" field to top of json output (#25890)
- fix(inspector): Fix panic when re-entering runtime ops (#25537)
- fix(install): compare versions directly to decide whether to create a child
node_modules dir for a workspace member (#26001)
- fix(install): Make sure target node_modules exists when symlinking (#25494)
- fix(install): recommend using `deno install -g` when using a single http url
(#25388)
- fix(install): store tags associated with package in node_modules dir (#26000)
- fix(install): surface package.json dependency errors (#26023)
- fix(install): Use relative symlinks in deno install (#25164)
- fix(install): make bin entries executable even if not put in
`node_modules/.bin` (#25873)
- fix(jupyter): allow unstable flags (#25483)
- fix(lint): correctly handle old jsx in linter (#25902)
- fix(lint): support linting jsr pkg without version field (#25230)
- fix(lockfile): use loose deserialization for version constraints (#25660)
- fix(lsp): encode url parts before parsing as uri (#25509)
- fix(lsp): exclude missing import quick fixes with bad resolutions (#26025)
- fix(lsp): panic on url_to_uri() (#25238)
- fix(lsp): properly resolve jsxImportSource for caching (#25688)
- fix(lsp): update diagnostics on npm install (#25352)
- fix(napi): Don't run microtasks in napi_resolve_deferred (#25246)
- fix(napi): Fix worker threads importing already-loaded NAPI addon (#25245)
- fix(no-slow-types): better `override` handling (#25989)
- fix(node): Don't error out if we fail to statically analyze CJS re-export
(#25748)
- fix(node): fix worker_threads issues blocking Angular support (#26024)
- fix(node): implement libuv APIs needed to support `npm:sqlite3` (#25893)
- fix(node): Include "node" condition during CJS re-export analysis (#25785)
- fix(node): Pass NPM_PROCESS_STATE to subprocesses via temp file instead of env
var (#25896)
- fix(node/byonm): do not accidentally resolve bare node built-ins (#25543)
- fix(node/cluster): improve stubs to make log4js work (#25146)
- fix(npm): better error handling for remote npm deps (#25670)
- fix(npm): root package has peer dependency on itself (#26022)
- fix(permissions): disallow any `LD_` or `DYLD_` prefixed env var without full
--allow-run permissions (#25271)
- fix(permissions): disallow launching subprocess with LD_PRELOAD env var
without full run permissions (#25221)
- fix(publish): ensure provenance is spec compliant (#25200)
- fix(regression): do not expose resolved path in Deno.Command permission denied
error (#25434)
- fix(runtime): don't error `child.output()` on consumed stream (#25657)
- fix(runtime): use more null proto objects again (#25040)
- fix(runtime/web_worker): populate `SnapshotOptions` for `WebWorker` when
instantiated without snapshot (#25280)
- fix(task): correct name for scoped npm package binaries (#25390)
- fix(task): support tasks with colons in name in `deno run` (#25233)
- fix(task): use current executable for deno even when not named deno (#26019)
- fix(types): simplify mtls related types (#25658)
- fix(upgrade): more informative error message on invalid version (#25319)
- fix(windows): Deno.Command - align binary resolution with linux and mac
(#25429)
- fix(workspace): handle when config has members when specified via --config
(#25988)
- perf: fast path for cached dyn imports (#25636)
- perf: Use -O3 for sui in release builds (#26010)
- perf(cache): single cache file for remote modules (#24983)
- perf(cache): single cache file for typescript emit (#24994)
- perf(ext/fetch): improve decompression throughput by upgrading `tower_http`
(#25806)
- perf(ext/node): reduce some allocations in require (#25197)
- perf(ext/web): optimize performance.measure() (#25774)

### 1.46.3 / 2024.09.04

- feat(upgrade): print info links for Deno 2 RC releases (#25225)
- fix(cli): Map error kind to `PermissionDenied` when symlinking fails due to
permissions (#25398)
- fix(cli/tools): correct `deno init --serve` template behavior (#25318)
- fix(ext/node): session close during stream setup (#25170)
- fix(publish): ensure provenance is spec compliant (#25200)
- fix(upgrade): more informative error message on invalid version (#25319)
- fix: fix jupyter display function type (#25326)

### 1.46.2 / 2024.08.29

- Revert "feat(fetch): accept async iterables for body" (#25207)
- fix(bench): Fix table column alignments and NO_COLOR=1 (#25190)
- fix(ext/crypto): throw DataError for invalid EC key import (#25181)
- fix(ext/fetch): percent decode userinfo when parsing proxies (#25229)
- fix(ext/node): emit `online` event after worker thread is initialized (#25243)
- fix(ext/node): export JWK public key (#25239)
- fix(ext/node): import EC JWK keys (#25266)
- fix(ext/node): import JWK octet key pairs (#25180)
- fix(ext/node): import RSA JWK keys (#25267)
- fix(ext/node): throw when loading `cpu-features` module (#25257)
- fix(ext/node): update aead-gcm-stream to 0.3 (#25261)
- fix(ext/webgpu): allow to build on unsupported platforms (#25202)
- fix(fmt): fix incorrect quotes in components (#25249)
- fix(fmt/markdown): fix regression with multi-line footnotes and inline math
(#25222)
- fix(install): Use relative symlinks in deno install (#25164)
- fix(lsp): panic on url_to_uri() (#25238)
- fix(napi): Don't run microtasks in napi_resolve_deferred (#25246)
- fix(napi): Fix worker threads importing already-loaded NAPI addon (#25245)
- fix(node/cluster): improve stubs to make log4js work (#25146)
- fix(runtime/web_worker): populate `SnapshotOptions` for `WebWorker` when
instantiated without snapshot (#25280)
- fix(task): support tasks with colons in name in `deno run` (#25233)
- fix: handle showing warnings while the progress bar is shown (#25187)
- fix: reland async context (#25140)
- fix: removed unstable-http from deno help (#25216)
- fix: replace `npm install` hint with `deno install` hint (#25244)
- fix: update deno_doc (#25290)
- fix: upgrade deno_core to 0.307.0 (#25287)
- perf(ext/node): reduce some allocations in require (#25197)

### 1.46.1 / 2024.08.22

- fix(ext/node): http2session ready state (#25143)
- fix(ext/node): register `node:wasi` built-in (#25134)
- fix(urlpattern): fallback to empty string for undefined group values (#25151)
- fix: trim space around DENO_AUTH_TOKENS (#25147)

### 1.46.0 / 2024.08.22

- BREAKING(temporal/unstable): Remove obsoleted Temporal APIs (#24836)
- BREAKING(webgpu/unstable): Replace async .requestAdapterInfo() with sync .info
(#24783)
- feat: `deno compile --icon <ico>` (#25039)
- feat: `deno init --serve` (#24897)
- feat: `deno upgrade --rc` (#24905)
- feat: Add Deno.ServeDefaultExport type (#24879)
- feat: async context (#24402)
- feat: better help output (#24958)
- feat: codesign for deno compile binaries (#24604)
- feat: deno clean (#24950)
- feat: deno remove (#24952)
- feat: deno run <task> (#24891)
- feat: Deprecate "import assertions" with a warning (#24743)
- feat: glob and directory support for `deno check` and `deno cache` cli arg
paths (#25001)
- feat: Print deprecation message for npm packages (#24992)
- feat: refresh "Download" progress bar with a spinner (#24913)
- feat: Rename --unstable-hmr to --watch-hmr (#24975)
- feat: support short flags for permissions (#24883)
- feat: treat bare deno command with run arguments as deno run (#24887)
- feat: upgrade deno_core (#24886)
- feat: upgrade deno_core (#25042)
- feat: upgrade V8 to 12.8 (#24693)
- feat: Upgrade V8 to 12.9 (#25138)
- feat: vm rewrite (#24596)
- feat(clean): add progress bar (#25026)
- feat(cli): Add --env-file as alternative to --env (#24555)
- feat(cli/tools): add a subcommand `--hide-stacktraces` for test (#24095)
- feat(config): Support frozen lockfile config option in deno.json (#25100)
- feat(config/jsr): add license field (#25056)
- feat(coverage): add breadcrumbs to deno coverage `--html` report (#24860)
- feat(ext/node): rewrite crypto keys (#24463)
- feat(ext/node): support http2session.socket (#24786)
- feat(fetch): accept async iterables for body (#24623)
- feat(flags): improve help output and make `deno run` list tasks (#25108)
- feat(fmt): support CSS, SCSS, Sass and Less (#24870)
- feat(fmt): support HTML, Svelte, Vue, Astro and Angular (#25019)
- feat(fmt): support YAML (#24717)
- feat(FUTURE): terse lockfile (v4) (#25059)
- feat(install): change 'Add ...' message (#24949)
- feat(lint): Add lint for usage of node globals (with autofix) (#25048)
- feat(lsp): node specifier completions (#24904)
- feat(lsp): registry completions for import-mapped specifiers (#24792)
- feat(node): support `username` and `_password` in `.npmrc` file (#24793)
- feat(permissions): link to docs in permission prompt (#24948)
- feat(publish): error on missing license file (#25011)
- feat(publish): suggest importing `jsr:@std/` for `deno.land/std` urls (#25046)
- feat(serve): Opt-in parallelism for `deno serve` (#24920)
- feat(test): rename --allow-none to --permit-no-files (#24809)
- feat(unstable): ability to use a local copy of jsr packages (#25068)
- feat(unstable/fmt): move yaml formatting behind unstable flag (#24848)
- feat(upgrade): refresh output (#24911)
- feat(upgrade): support `deno upgrade 1.46.0` (#25096)
- feat(urlpattern): add ignoreCase option & hasRegExpGroups property, and fix
spec discrepancies (#24741)
- feat(watch): add watch paths to test subcommand (#24771)
- fix: `node:inspector` not being registered (#25007)
- fix: `rename` watch event missing (#24893)
- fix: actually add missing `node:readline/promises` module (#24772)
- fix: adapt to new jupyter runtime API and include session IDs (#24762)
- fix: add permission name when accessing a special file errors (#25085)
- fix: adjust suggestion for lockfile regeneration (#25107)
- fix: cache bust jsr meta file when version not found in dynamic branches
(#24928)
- fix: CFunctionInfo and CTypeInfo leaks (#24634)
- fix: clean up flag help output (#24686)
- fix: correct JSON config schema to show vendor option as stable (#25090)
- fix: dd-trace http message compat (#25021)
- fix: deserialize lockfile v3 straight (#25121)
- fix: Don't panic if fail to handle JS stack frame (#25122)
- fix: Don't panic if failed to add system certificate (#24823)
- fix: Don't shell out to `unzip` in deno upgrade/compile (#24926)
- fix: enable the reporting of parsing related problems when running deno lint
(#24332)
- fix: errors with CallSite methods (#24907)
- fix: include already seen deps in lockfile dep tracking (#24556)
- fix: log current version when using deno upgrade (#25079)
- fix: make `deno add` output more deterministic (#25083)
- fix: make vendor cache manifest more deterministic (#24658)
- fix: missing `emitWarning` import (#24587)
- fix: regressions around Error.prepareStackTrace (#24839)
- fix: stub `node:module.register()` (#24965)
- fix: support `npm:bindings` and `npm:callsites` packages (#24727)
- fix: unblock fsevents native module (#24542)
- fix: update deno_doc (#24972)
- fix: update dry run success message (#24885)
- fix: update lsp error message of 'relative import path' to 'use deno add' for
npm/jsr packages (#24524)
- fix: upgrade deno_core to 0.298.0 (#24709)
- fix: warn about import assertions when using typescript (#25135)
- fix(add): better error message providing scoped pkg missing leading `@` symbol
(#24961)
- fix(add): Better error message when missing npm specifier (#24970)
- fix(add): error when config file contains importMap field (#25115)
- fix(add): Handle packages without root exports (#25102)
- fix(add): Support dist tags in deno add (#24960)
- fix(cli): add NAPI support in standalone mode (#24642)
- fix(cli): Create child node_modules for conflicting dependency versions,
respect aliases in package.json (#24609)
- fix(cli): Respect implied BYONM from DENO_FUTURE in `deno task` (#24652)
- fix(cli): shorten examples in help text (#24374)
- fix(cli): support --watch when running cjs npm packages (#25038)
- fix(cli): Unhide publish subcommand help string (#24787)
- fix(cli): update permission prompt message for compiled binaries (#24081)
- fix(cli/init): broken link in deno init sample template (#24545)
- fix(compile): adhoc codesign mach-o by default (#24824)
- fix(compile): make output more deterministic (#25092)
- fix(compile): support workspace members importing other members (#24909)
- fix(compile/windows): handle cjs re-export of relative path with parent
component (#24795)
- fix(config): regression - should not discover npm workspace for nested
deno.json not in workspace (#24559)
- fix(cron): improve error message for invalid cron names (#24644)
- fix(docs): fix some deno.land/manual broken urls (#24557)
- fix(ext/console): Error Cause Not Inspect-Formatted when printed (#24526)
- fix(ext/console): render properties of Intl.Locale (#24827)
- fix(ext/crypto): respect offsets when writing into ab views in randomFillSync
(#24816)
- fix(ext/fetch): include TCP src/dst socket info in error messages (#24939)
- fix(ext/fetch): include URL and error details on fetch failures (#24910)
- fix(ext/fetch): respect authority from URL (#24705)
- fix(ext/fetch): use correct ALPN to proxies (#24696)
- fix(ext/fetch): use correct ALPN to socks5 proxies (#24817)
- fix(ext/http): correctly consume response body in `Deno.serve` (#24811)
- fix(ext/net): validate port in Deno.{connect,serve,listen} (#24399)
- fix(ext/node): add `CipherIv.setAutoPadding()` (#24940)
- fix(ext/node): add crypto.diffieHellman (#24938)
- fix(ext/node): client closing streaming request shouldn't terminate http
server (#24946)
- fix(ext/node): createBrotliCompress params (#24984)
- fix(ext/node): do not expose `self` global in node (#24637)
- fix(ext/node): don't concat set-cookie in ServerResponse.appendHeader (#25000)
- fix(ext/node): don't throw when calling PerformanceObserver.observe (#25036)
- fix(ext/node): ed25519 signing and cipheriv autopadding fixes (#24957)
- fix(ext/node): fix prismjs compatibility in Web Worker (#25062)
- fix(ext/node): handle node child_process with --v8-options flag (#24804)
- fix(ext/node): handle prefix mapping for IPv4-mapped IPv6 addresses (#24546)
- fix(ext/node): http request uploads of subarray of buffer should work (#24603)
- fix(ext/node): improve shelljs compat with managed npm execution (#24912)
- fix(ext/node): node:zlib coerces quality 10 to 9.5 (#24850)
- fix(ext/node): pass content-disposition header as string instead of bytes
(#25128)
- fix(ext/node): prevent panic in http2.connect with uppercase header names
(#24780)
- fix(ext/node): read correct CPU usage stats on Linux (#24732)
- fix(ext/node): rewrite X509Certificate resource and add `publicKey()` (#24988)
- fix(ext/node): stat.mode on windows (#24434)
- fix(ext/node): support ieee-p1363 ECDSA signatures and pss salt len (#24981)
- fix(ext/node): use pem private keys in createPublicKey (#24969)
- fix(ext/node/net): emit `error` before `close` when connection is refused
(#24656)
- fix(ext/web): make CompressionResource garbage collectable (#24884)
- fix(ext/web): make TextDecoderResource use cppgc (#24888)
- fix(ext/webgpu): assign missing `constants` property of shader about
`GPUDevice.createRenderPipeline[Async]` (#24803)
- fix(ext/webgpu): don't crash while constructing GPUOutOfMemoryError (#24807)
- fix(ext/webgpu): GPUDevice.createRenderPipelineAsync should return a Promise
(#24349)
- fix(ext/websocket): unhandled close rejection in WebsocketStream (#25125)
- fix(fmt): handle using stmt in for of stmt (#24834)
- fix(fmt): regression with pipe in code blocks in tables (#25098)
- fix(fmt): upgrade to dprint-plugin-markdown 0.17.4 (#25075)
- fix(fmt): was sometimes putting comments in front of commas in parameter lists
(#24650)
- fix(future): Emit `deno install` warning less often, suggest `deno install` in
error message (#24706)
- fix(http): Adjust hostname display for Windows when using 0.0.0.0 (#24698)
- fix(init): use bare specifier for `jsr:@std/assert` (#24581)
- fix(install): Properly handle dist tags when setting up node_modules (#24968)
- fix(lint): support linting tsx/jsx from stdin (#24955)
- fix(lsp): directly use file referrer when loading document (#24997)
- fix(lsp): don't always use byonm resolver when DENO_FUTURE=1 (#24865)
- fix(lsp): hang when caching failed (#24651)
- fix(lsp): import map lookup for jsr subpath auto import (#25025)
- fix(lsp): include scoped import map keys in completions (#25047)
- fix(lsp): resolve jsx import source with types mode (#25064)
- fix(lsp): rewrite import for 'infer return type' action (#24685)
- fix(lsp): scope attribution for asset documents (#24663)
- fix(lsp): support npm workspaces and fix some resolution issues (#24627)
- fix(node): better detection for when to surface node resolution errors
(#24653)
- fix(node): cjs pkg dynamically importing esm-only pkg fails (#24730)
- fix(node): Create additional pipes for child processes (#25016)
- fix(node): Fix `--allow-scripts` with no `deno.json` (#24533)
- fix(node): Fix node IPC serialization for objects with undefined values
(#24894)
- fix(node): revert invalid package target change (#24539)
- fix(node): Rework node:child_process IPC (#24763)
- fix(node): Run node compat tests listed in the `ignore` field (and fix the
ones that fail) (#24631)
- fix(node): support `tty.hasColors()` and `tty.getColorDepth()` (#24619)
- fix(node): support wildcards in package.json imports (#24794)
- fix(node/crypto): Assign publicKey and privateKey with let instead of const
(#24943)
- fix(node/fs): node:fs.read and write should accept typed arrays other than
Uint8Array (#25030)
- fix(node/fs): Use correct offset and length in node:fs.read and write (#25049)
- fix(node/fs/promises): watch should be async iterable (#24805)
- fix(node/http): wrong `req.url` value (#25081)
- fix(node/inspector): Session constructor should not throw (#25041)
- fix(node/timers/promises): add scheduler APIs (#24802)
- fix(node/tty): fix `tty.WriteStream.hasColor` with different args (#25094)
- fix(node/util): add missing `debug` alias of `debuglog` (#24944)
- fix(node/worker_threads): support `port.once()` (#24725)
- fix(npm): handle packages with only pre-released 0.0.0 versions (#24563)
- fix(npm): use start directory deno.json as "root deno.json config" in npm
workspace (#24538)
- fix(npmrc): skip loading .npmrc in home dir on permission error (#24758)
- fix(publish): show dirty files on dirty check failure (#24541)
- fix(publish): surface syntax errors when using --no-check (#24620)
- fix(publish): warn about missing license file (#24677)
- fix(publish): workspace included license file had incorrect path (#24747)
- fix(repl): Prevent panic on broken pipe (#21945)
- fix(runtime/windows): fix calculation of console size (#23873)
- fix(std/http2): release window capacity back to remote stream (#24576)
- fix(tls): print a warning if a system certificate can't be loaded (#25023)
- fix(types): Conform lib.deno_web.d.ts to lib.dom.d.ts and lib.webworker.d.ts
(#24599)
- fix(types): fix streams types (#24770)
- fix(unstable): move sloppy-import warnings to lint rule (#24710)
- fix(unstable): panic when running deno install with DENO_FUTURE=1 (#24866)
- fix(unstable/compile): handle byonm import in sub dir (#24755)
- fix(upgrade): better error message when check_exe fails (#25133)
- fix(upgrade): correctly compute latest version based on current release
channel (#25087)
- fix(upgrade): do not error if config in cwd invalid (#24689)
- fix(upgrade): fallback to Content-Length header for progress bar (#24923)
- fix(upgrade): return no RC versions if fetching fails (#25013)
- fix(upgrade): support RC release with --version flag (#25091)
- fix(upgrade): use proper version display (#25029)
- fix(urlpattern): correct typings for added APIs (#24881)
- fix(webgpu): Fix `GPUAdapter#isFallbackAdapter` and `GPUAdapter#info`
properties (#24914)
- fix(workspace): do not resolve to self for npm pkg depending on matching req
(#24591)
- fix(workspace): support resolving bare specifiers to npm pkgs within a
workspace (#24611)
- fix(workspaces/publish): include the license file from the workspace root if
not in pkg (#24714)
- perf: skip saving to emit cache after first failure (#24896)
- perf: update deno_ast to 0.41 (#24819)
- perf: update deno_doc (#24700)
- perf(ext/crypto): make randomUUID() 5x faster (#24510)
- perf(ext/fetch): speed up `resp.clone()` (#24812)
- perf(ext/http): Reduce size of `ResponseBytesInner` (#24840)
- perf(ext/node): improve `Buffer` from string performance (#24567)
- perf(ext/node): optimize fs.exists[Sync] (#24613)
- perf(lsp): remove fallback config scopes for workspace folders (#24868)
- refactor: `version` module exports a single const struct (#25014)
- refactor: decouple node resolution from deno_core (#24724)
- refactor: move importMap with imports/scopes diagnostic to deno_config
(#24553)
- refactor: remove version::is_canary(), use ReleaseChannel instead (#25053)
- refactor: show release channel in `deno --version` (#25061)
- refactor: update to deno_config 0.25 (#24645)
- refactor: update to use deno_package_json (#24688)
- refactor(ext/node): create separate ops for node:http module (#24788)
- refactor(fetch): reimplement fetch with hyper instead of reqwest (#24237)
- refactor(lint): move reporters to separate module (#24757)
- refactor(node): internally add `.code()` to node resolution errors (#24610)
- refactor(upgrade): cleanup pass (#24954)
- refactor(upgrade): make fetching latest version async (#24919)
- Reland "fix: CFunctionInfo and CTypeInfo leaks (#24634)" (#24692)
- Reland "refactor(fetch): reimplement fetch with hyper instead of reqwest"
(#24593)

### 1.45.5 / 2024.07.31

- fix(cli): Unhide publish subcommand help string (#24787)

@@ -2,7 +2,7 @@
 [package]
 name = "deno_bench_util"
-version = "0.158.0"
+version = "0.168.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

@@ -2,7 +2,7 @@
 [package]
 name = "deno"
-version = "1.46.0-rc.2"
+version = "2.0.3"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -38,6 +38,11 @@ path = "./bench/lsp_bench_standalone.rs"
 [features]
 default = ["upgrade", "__vendored_zlib_ng"]
+# A feature that enables heap profiling with dhat on Linux.
+# 1. Compile with `cargo build --profile=release-with-debug --features=dhat-heap`
+# 2. Run the executable. It will output a dhat-heap.json file.
+# 3. Open the json file in https://nnethercote.github.io/dh_view/dh_view.html
+dhat-heap = ["dhat"]
 # A feature that enables the upgrade subcommand and the background check for
 # available updates (of deno binary). This is typically disabled for (Linux)
 # distribution packages.
@@ -65,44 +70,44 @@ winres.workspace = true
 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
 deno_cache_dir = { workspace = true }
-deno_config = { version = "=0.30.0", features = ["workspace", "sync"] }
+deno_config = { version = "=0.37.2", features = ["workspace", "sync"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "0.146.0", features = ["html", "syntect"] }
-deno_emit = "=0.44.0"
-deno_graph = { version = "=0.81.2" }
-deno_lint = { version = "=0.63.1", features = ["docs"] }
+deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] }
+deno_graph = { version = "=0.83.4" }
+deno_lint = { version = "=0.67.0", features = ["docs"] }
 deno_lockfile.workspace = true
-deno_npm = "=0.23.0"
+deno_npm.workspace = true
 deno_package_json.workspace = true
+deno_path_util.workspace = true
+deno_resolver.workspace = true
 deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_semver = "=0.5.10"
-deno_task_shell = "=0.17.0"
+deno_semver.workspace = true
+deno_task_shell = "=0.18.1"
 deno_terminal.workspace = true
-eszip = "=0.75.0"
-libsui = "0.3.0"
-napi_sym.workspace = true
+libsui = "0.4.0"
 node_resolver.workspace = true

+anstream = "0.6.14"
 async-trait.workspace = true
+base32.workspace = true
 base64.workspace = true
 bincode = "=1.3.3"
 bytes.workspace = true
 cache_control.workspace = true
 chrono = { workspace = true, features = ["now"] }
-clap = { version = "=4.5.13", features = ["env", "string", "wrap_help"] }
-clap_complete = "=4.5.12"
+clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
+clap_complete = "=4.5.24"
 clap_complete_fig = "=4.5.2"
-color-print = "0.3.5"
+color-print.workspace = true
 console_static_text.workspace = true
-dashmap = "5.5.3"
+dashmap.workspace = true
 data-encoding.workspace = true
+dhat = { version = "0.3.3", optional = true }
 dissimilar = "=1.0.4"
 dotenvy = "0.15.7"
-dprint-plugin-json = "=0.19.3"
-dprint-plugin-jupyter = "=0.1.3"
-dprint-plugin-markdown = "=0.17.5"
-dprint-plugin-typescript = "=0.91.6"
+dprint-plugin-json = "=0.19.4"
+dprint-plugin-jupyter = "=0.1.5"
+dprint-plugin-markdown = "=0.17.8"
+dprint-plugin-typescript = "=0.93.0"
 env_logger = "=0.10.0"
 fancy-regex = "=0.10.0"
 faster-hex.workspace = true
@@ -114,17 +119,17 @@ http.workspace = true
 http-body.workspace = true
 http-body-util.workspace = true
 hyper-util.workspace = true
-import_map = { version = "=0.20.0", features = ["ext"] }
+import_map = { version = "=0.20.1", features = ["ext"] }
 indexmap.workspace = true
-jsonc-parser.workspace = true
+jsonc-parser = { workspace = true, features = ["cst", "serde"] }
 jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
 lazy-regex.workspace = true
 libc.workspace = true
 libz-sys.workspace = true
 log = { workspace = true, features = ["serde"] }
 lsp-types.workspace = true
-malva = "=0.9.0"
-markup_fmt = "=0.12.0"
+malva = "=0.11.0"
+markup_fmt = "=0.14.0"
 memmem.workspace = true
 monch.workspace = true
 notify.workspace = true
@@ -134,7 +139,7 @@ p256.workspace = true
 pathdiff = "0.2.1"
 percent-encoding.workspace = true
 phf.workspace = true
-pretty_yaml = "=0.4.0"
+pretty_yaml = "=0.5.0"
 quick-junit = "^0.3.5"
 rand = { workspace = true, features = ["small_rng"] }
 regex.workspace = true
@@ -155,6 +160,7 @@ thiserror.workspace = true
 tokio.workspace = true
 tokio-util.workspace = true
 tower-lsp.workspace = true
+tracing = { version = "0.1", features = ["log", "default"] }
 twox-hash.workspace = true
 typed-arena = "=2.0.2"
 uuid = { workspace = true, features = ["serde"] }

@@ -2,6 +2,7 @@
 use std::collections::HashSet;

+use deno_config::deno_json::TsConfigForEmit;
 use deno_core::serde_json;
 use deno_semver::jsr::JsrDepPackageReq;
 use deno_semver::jsr::JsrPackageReqReference;
@@ -105,3 +106,18 @@ fn values_to_set<'a>(
   }
   entries
 }
+
+pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) {
+  if let Some(ignored_options) = &ts_config.maybe_ignored_options {
+    log::warn!("{}", ignored_options);
+  }
+  let serde_json::Value::Object(obj) = &ts_config.ts_config.0 else {
+    return;
+  };
+  if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true)) {
+    log::warn!(
+      "{} experimentalDecorators compiler option is deprecated and may be removed at any time",
+      deno_runtime::colors::yellow("Warning"),
+    );
+  }
+}

File diff suppressed because it is too large


@@ -50,7 +50,7 @@ pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
         out.push(format!("{}:{}", host, port.0));
       }
     } else {
-      host_and_port.parse::<NetDescriptor>().map_err(|e| {
+      NetDescriptor::parse(&host_and_port).map_err(|e| {
        clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}"))
      })?;
      out.push(host_and_port)

@@ -3,7 +3,6 @@
 use deno_core::error::AnyError;
 use deno_core::serde_json;
 use deno_core::url::Url;
-use deno_runtime::deno_permissions::PermissionsContainer;

 use crate::file_fetcher::FileFetcher;
@@ -17,7 +16,7 @@ pub async fn resolve_import_map_value_from_specifier(
     Ok(serde_json::from_str(&data_url_text)?)
   } else {
     let file = file_fetcher
-      .fetch(specifier, &PermissionsContainer::allow_all())
+      .fetch_bypass_permissions(specifier)
       .await?
       .into_text_decoded()?;
     Ok(serde_json::from_str(&file.source)?)

@@ -1,6 +1,6 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

-use std::collections::BTreeSet;
+use std::collections::HashSet;
 use std::path::PathBuf;

 use deno_config::deno_json::ConfigFile;
@@ -12,6 +12,7 @@ use deno_core::parking_lot::MutexGuard;
 use deno_lockfile::WorkspaceMemberConfig;
 use deno_package_json::PackageJsonDepValue;
 use deno_runtime::deno_node::PackageJson;
+use deno_semver::jsr::JsrDepPackageReq;

 use crate::cache;
 use crate::util::fs::atomic_write_file_with_retries;
@@ -23,11 +24,20 @@ use crate::args::InstallKind;

 use deno_lockfile::Lockfile;

+#[derive(Debug)]
+pub struct CliLockfileReadFromPathOptions {
+  pub file_path: PathBuf,
+  pub frozen: bool,
+  /// Causes the lockfile to only be read from, but not written to.
+  pub skip_write: bool,
+}
+
 #[derive(Debug)]
 pub struct CliLockfile {
   lockfile: Mutex<Lockfile>,
   pub filename: PathBuf,
-  pub frozen: bool,
+  frozen: bool,
+  skip_write: bool,
 }

 pub struct Guard<'a, T> {
@@ -49,15 +59,6 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
 }

 impl CliLockfile {
-  pub fn new(lockfile: Lockfile, frozen: bool) -> Self {
-    let filename = lockfile.filename.clone();
-    Self {
-      lockfile: Mutex::new(lockfile),
-      filename,
-      frozen,
-    }
-  }
-
   /// Get the inner deno_lockfile::Lockfile.
   pub fn lock(&self) -> Guard<Lockfile> {
     Guard {
@@ -77,6 +78,10 @@ impl CliLockfile {
   }

   pub fn write_if_changed(&self) -> Result<(), AnyError> {
+    if self.skip_write {
+      return Ok(());
+    }
+
     self.error_if_changed()?;
     let mut lockfile = self.lockfile.lock();
     let Some(bytes) = lockfile.resolve_write_bytes() else {
@@ -98,7 +103,9 @@ impl CliLockfile {
     flags: &Flags,
     workspace: &Workspace,
   ) -> Result<Option<CliLockfile>, AnyError> {
-    fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> {
+    fn pkg_json_deps(
+      maybe_pkg_json: Option<&PackageJson>,
+    ) -> HashSet<JsrDepPackageReq> {
       let Some(pkg_json) = maybe_pkg_json else {
         return Default::default();
       };
@@ -107,21 +114,21 @@
         .values()
         .filter_map(|dep| dep.as_ref().ok())
         .filter_map(|dep| match dep {
-          PackageJsonDepValue::Req(req) => Some(req),
+          PackageJsonDepValue::Req(req) => {
+            Some(JsrDepPackageReq::npm(req.clone()))
+          }
           PackageJsonDepValue::Workspace(_) => None,
         })
-        .map(|r| format!("npm:{}", r))
         .collect()
     }

     fn deno_json_deps(
       maybe_deno_json: Option<&ConfigFile>,
-    ) -> BTreeSet<String> {
+    ) -> HashSet<JsrDepPackageReq> {
       maybe_deno_json
         .map(|c| {
           crate::args::deno_json::deno_json_deps(c)
             .into_iter()
-            .map(|req| req.to_string())
             .collect()
         })
         .unwrap_or_default()
@@ -139,7 +146,7 @@ impl CliLockfile {
       return Ok(None);
     }

-    let filename = match flags.lock {
+    let file_path = match flags.lock {
       Some(ref lock) => PathBuf::from(lock),
       None => match workspace.resolve_lockfile_path()? {
         Some(path) => path,
@@ -157,15 +164,11 @@ impl CliLockfile {
           .unwrap_or(false)
       });

-    let lockfile = if flags.lock_write {
-      log::warn!(
-        "{} \"--lock-write\" flag is deprecated and will be removed in Deno 2.",
-        crate::colors::yellow("Warning")
-      );
-      CliLockfile::new(Lockfile::new_empty(filename, true), frozen)
-    } else {
-      Self::read_from_path(filename, frozen)?
-    };
+    let lockfile = Self::read_from_path(CliLockfileReadFromPathOptions {
+      file_path,
+      frozen,
+      skip_write: flags.internal.lockfile_skip_write,
+    })?;

     // initialize the lockfile with the workspace's configuration
     let root_url = workspace.root_dir();
@@ -215,40 +218,31 @@ impl CliLockfile {
     Ok(Some(lockfile))
   }

   pub fn read_from_path(
-    file_path: PathBuf,
-    frozen: bool,
+    opts: CliLockfileReadFromPathOptions,
   ) -> Result<CliLockfile, AnyError> {
-    match std::fs::read_to_string(&file_path) {
-      Ok(text) => Ok(CliLockfile::new(
-        Lockfile::new(deno_lockfile::NewLockfileOptions {
-          file_path,
+    let lockfile = match std::fs::read_to_string(&opts.file_path) {
+      Ok(text) => Lockfile::new(deno_lockfile::NewLockfileOptions {
+        file_path: opts.file_path,
         content: &text,
         overwrite: false,
-        is_deno_future: *super::DENO_FUTURE,
       })?,
-        frozen,
-      )),
       Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-        Ok(CliLockfile::new(
-          if *super::DENO_FUTURE {
-            // force version 4 for deno future
-            Lockfile::new(deno_lockfile::NewLockfileOptions {
-              file_path,
-              content: r#"{"version":"4"}"#,
-              overwrite: false,
-              is_deno_future: true,
-            })?
-          } else {
-            Lockfile::new_empty(file_path, false)
-          },
-          frozen,
-        ))
+        Lockfile::new_empty(opts.file_path, false)
       }
-      Err(err) => Err(err).with_context(|| {
-        format!("Failed reading lockfile '{}'", file_path.display())
-      }),
+      Err(err) => {
+        return Err(err).with_context(|| {
+          format!("Failed reading lockfile '{}'", opts.file_path.display())
+        });
       }
-    }
+    };
+    Ok(CliLockfile {
+      filename: lockfile.filename.clone(),
+      lockfile: Mutex::new(lockfile),
+      frozen: opts.frozen,
+      skip_write: opts.skip_write,
+    })
   }

   pub fn error_if_changed(&self) -> Result<(), AnyError> {
@@ -257,12 +251,6 @@ impl CliLockfile {
     }
     let lockfile = self.lockfile.lock();
     if lockfile.has_content_changed {
-      let suggested = if *super::DENO_FUTURE {
-        "`deno cache --frozen=false`, `deno install --frozen=false`,"
-      } else {
-        "`deno cache --frozen=false`"
-      };
-
       let contents =
         std::fs::read_to_string(&lockfile.filename).unwrap_or_default();
       let new_contents = lockfile.as_json_string();
@@ -270,7 +258,7 @@ impl CliLockfile {
       // has an extra newline at the end
       let diff = diff.trim_end();
       Err(deno_core::anyhow::anyhow!(
-        "The lockfile is out of date. Run {suggested} or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
+        "The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
       ))
     } else {
       Ok(())

@@ -8,7 +8,9 @@ mod lockfile;
 mod package_json;

 use deno_ast::SourceMapOption;
+use deno_config::deno_json::NodeModulesDirMode;
 use deno_config::workspace::CreateResolverOptions;
+use deno_config::workspace::FolderConfigs;
 use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::VendorEnablement;
 use deno_config::workspace::Workspace;
@@ -18,14 +20,13 @@ use deno_config::workspace::WorkspaceDiscoverOptions;
 use deno_config::workspace::WorkspaceDiscoverStart;
 use deno_config::workspace::WorkspaceLintConfig;
 use deno_config::workspace::WorkspaceResolver;
-use deno_core::normalize_path;
 use deno_core::resolve_url_or_path;
 use deno_graph::GraphKind;
 use deno_npm::npm_rc::NpmRc;
 use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
 use deno_npm::NpmSystemInfo;
-use deno_runtime::deno_permissions::PermissionsContainer;
+use deno_path_util::normalize_path;
 use deno_semver::npm::NpmPackageReqReference;
 use import_map::resolve_import_map_value_from_specifier;
@@ -40,9 +41,11 @@ pub use deno_config::deno_json::TsConfigForEmit;
 pub use deno_config::deno_json::TsConfigType;
 pub use deno_config::deno_json::TsTypeLib;
 pub use deno_config::glob::FilePatterns;
+pub use deno_json::check_warn_tsconfig;
 pub use flags::*;
 pub use lockfile::CliLockfile;
-pub use package_json::PackageJsonInstallDepsProvider;
+pub use lockfile::CliLockfileReadFromPathOptions;
+pub use package_json::NpmInstallDepsProvider;

 use deno_ast::ModuleSpecifier;
 use deno_core::anyhow::bail;
@@ -50,7 +53,6 @@ use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::serde_json;
 use deno_core::url::Url;
-use deno_runtime::deno_node::PackageJson;
 use deno_runtime::deno_permissions::PermissionsOptions;
 use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
 use deno_runtime::deno_tls::rustls;
@@ -63,10 +65,13 @@ use dotenvy::from_filename;
 use once_cell::sync::Lazy;
 use serde::Deserialize;
 use serde::Serialize;
+use std::borrow::Cow;
 use std::collections::HashMap;
 use std::env;
 use std::io::BufReader;
 use std::io::Cursor;
+use std::io::Read;
+use std::io::Seek;
 use std::net::SocketAddr;
 use std::num::NonZeroUsize;
 use std::path::Path;
@@ -116,9 +121,6 @@ pub static DENO_DISABLE_PEDANTIC_NODE_WARNINGS: Lazy<bool> = Lazy::new(|| {
     .is_some()
 });

-pub static DENO_FUTURE: Lazy<bool> =
-  Lazy::new(|| std::env::var("DENO_FUTURE").ok().is_some());
-
 pub fn jsr_url() -> &'static Url {
   static JSR_URL: Lazy<Url> = Lazy::new(|| {
     let env_var_name = "JSR_URL";
@@ -282,10 +284,7 @@ impl BenchOptions {

 #[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
 pub struct UnstableFmtOptions {
-  pub css: bool,
-  pub html: bool,
   pub component: bool,
-  pub yaml: bool,
 }

 #[derive(Clone, Debug)]
@@ -318,10 +317,7 @@ impl FmtOptions {
     Self {
       options: resolve_fmt_options(fmt_flags, fmt_config.options),
       unstable: UnstableFmtOptions {
-        css: unstable.css || fmt_flags.unstable_css,
-        html: unstable.html || fmt_flags.unstable_html,
         component: unstable.component || fmt_flags.unstable_component,
-        yaml: unstable.yaml || fmt_flags.unstable_yaml,
       },
       files: fmt_config.files,
     }
@@ -370,7 +366,7 @@ pub struct WorkspaceTestOptions {
   pub doc: bool,
   pub no_run: bool,
   pub fail_fast: Option<NonZeroUsize>,
-  pub allow_none: bool,
+  pub permit_no_files: bool,
   pub filter: Option<String>,
   pub shuffle: Option<u64>,
   pub concurrent_jobs: NonZeroUsize,
@@ -383,7 +379,7 @@ pub struct WorkspaceTestOptions {
 impl WorkspaceTestOptions {
   pub fn resolve(test_flags: &TestFlags) -> Self {
     Self {
-      allow_none: test_flags.allow_none,
+      permit_no_files: test_flags.permit_no_files,
       concurrent_jobs: test_flags
         .concurrent_jobs
         .unwrap_or_else(|| NonZeroUsize::new(1).unwrap()),
@@ -582,6 +578,7 @@ fn discover_npmrc(
   let resolved = npmrc
     .as_resolved(npm_registry_url())
     .context("Failed to resolve .npmrc options")?;
+  log::debug!(".npmrc found at: '{}'", path.display());
   Ok(Arc::new(resolved))
 }
@@ -749,15 +746,33 @@ pub enum NpmProcessStateKind {
   Byonm,
 }

-pub(crate) const NPM_RESOLUTION_STATE_ENV_VAR_NAME: &str =
-  "DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE";
-
 static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| {
-  let state = std::env::var(NPM_RESOLUTION_STATE_ENV_VAR_NAME).ok()?;
-  let state: NpmProcessState = serde_json::from_str(&state).ok()?;
-  // remove the environment variable so that sub processes
-  // that are spawned do not also use this.
+  use deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
+  let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
+  std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
+  let fd = fd.parse::<usize>().ok()?;
std::env::remove_var(NPM_RESOLUTION_STATE_ENV_VAR_NAME); let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// seek to beginning. after the file is written the position will be inherited by this subprocess,
// and also this file might have been read before
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state) Some(state)
}); });
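Note on the hunk above: the npm resolution state is no longer smuggled through an environment variable's value; the parent now hands the child an inherited file handle and only passes the descriptor number via the env var. A minimal, Unix-only sketch of the child side of that handshake, assuming serde/serde_json and a caller-supplied variable name (the real code goes through deno_runtime::deno_io::FromRawIoHandle so it also covers Windows handles):

use std::io::{Read, Seek, SeekFrom};
use std::os::fd::FromRawFd;

fn read_inherited_state<T: serde::de::DeserializeOwned>(
  fd_env_var: &str,
) -> Option<T> {
  let fd: i32 = std::env::var(fd_env_var).ok()?.parse().ok()?;
  // Clear the variable so nested subprocesses don't try to reuse the handle.
  std::env::remove_var(fd_env_var);
  // SAFETY: the parent guarantees this descriptor is open, readable, and was
  // intentionally inherited by this process.
  let mut file = unsafe { std::fs::File::from_raw_fd(fd) };
  // The offset is inherited from the parent and the file may already have
  // been read, so rewind before reading.
  file.seek(SeekFrom::Start(0)).ok()?;
  let mut buf = Vec::new();
  file.read_to_end(&mut buf).ok()?;
  serde_json::from_slice(&buf).ok()
}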
@ -776,13 +791,12 @@ pub struct CliOptions {
// application need not concern itself with, so keep these private // application need not concern itself with, so keep these private
flags: Arc<Flags>, flags: Arc<Flags>,
initial_cwd: PathBuf, initial_cwd: PathBuf,
main_module_cell: std::sync::OnceLock<Result<ModuleSpecifier, AnyError>>,
maybe_node_modules_folder: Option<PathBuf>, maybe_node_modules_folder: Option<PathBuf>,
npmrc: Arc<ResolvedNpmRc>, npmrc: Arc<ResolvedNpmRc>,
maybe_lockfile: Option<Arc<CliLockfile>>, maybe_lockfile: Option<Arc<CliLockfile>>,
overrides: CliOptionOverrides, overrides: CliOptionOverrides,
pub start_dir: Arc<WorkspaceDirectory>, pub start_dir: Arc<WorkspaceDirectory>,
pub disable_deprecated_api_warning: bool,
pub verbose_deprecated_api_warning: bool,
pub deno_dir_provider: Arc<DenoDirProvider>, pub deno_dir_provider: Arc<DenoDirProvider>,
} }
@ -813,27 +827,18 @@ impl CliOptions {
} }
let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache); let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache);
let root_folder = start_dir.workspace.root_folder_configs();
let deno_dir_provider = let deno_dir_provider =
Arc::new(DenoDirProvider::new(flags.cache_path.clone())); Arc::new(DenoDirProvider::new(flags.internal.cache_path.clone()));
let maybe_node_modules_folder = resolve_node_modules_folder( let maybe_node_modules_folder = resolve_node_modules_folder(
&initial_cwd, &initial_cwd,
&flags, &flags,
root_folder.deno_json.as_deref(), &start_dir.workspace,
root_folder.pkg_json.as_deref(),
&deno_dir_provider, &deno_dir_provider,
) )
.with_context(|| "Resolving node_modules folder.")?; .with_context(|| "Resolving node_modules folder.")?;
load_env_variables_from_env_file(flags.env_file.as_ref()); load_env_variables_from_env_file(flags.env_file.as_ref());
let disable_deprecated_api_warning = flags.log_level
== Some(log::Level::Error)
|| std::env::var("DENO_NO_DEPRECATION_WARNINGS").ok().is_some();
let verbose_deprecated_api_warning =
std::env::var("DENO_VERBOSE_WARNINGS").ok().is_some();
Ok(Self { Ok(Self {
flags, flags,
initial_cwd, initial_cwd,
@ -841,9 +846,8 @@ impl CliOptions {
npmrc, npmrc,
maybe_node_modules_folder, maybe_node_modules_folder,
overrides: Default::default(), overrides: Default::default(),
main_module_cell: std::sync::OnceLock::new(),
start_dir, start_dir,
disable_deprecated_api_warning,
verbose_deprecated_api_warning,
deno_dir_provider, deno_dir_provider,
}) })
} }
@ -960,6 +964,9 @@ impl CliOptions {
match self.sub_command() { match self.sub_command() {
DenoSubcommand::Cache(_) => GraphKind::All, DenoSubcommand::Cache(_) => GraphKind::All,
DenoSubcommand::Check(_) => GraphKind::TypesOnly, DenoSubcommand::Check(_) => GraphKind::TypesOnly,
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(_),
}) => GraphKind::All,
_ => self.type_check_mode().as_graph_kind(), _ => self.type_check_mode().as_graph_kind(),
} }
} }
@ -1083,27 +1090,13 @@ impl CliOptions {
None => None, None => None,
} }
}; };
Ok( Ok(self.workspace().create_resolver(
self
.workspace()
.create_resolver(
CreateResolverOptions { CreateResolverOptions {
pkg_json_dep_resolution, pkg_json_dep_resolution,
specified_import_map: cli_arg_specified_import_map, specified_import_map: cli_arg_specified_import_map,
}, },
|specifier| { |path| Ok(std::fs::read_to_string(path)?),
let specifier = specifier.clone(); )?)
async move {
let file = file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all())
.await?
.into_text_decoded()?;
Ok(file.source.to_string())
}
},
)
.await?,
)
} }
pub fn node_ipc_fd(&self) -> Option<i64> { pub fn node_ipc_fd(&self) -> Option<i64> {
@ -1137,34 +1130,23 @@ impl CliOptions {
self.flags.env_file.as_ref() self.flags.env_file.as_ref()
} }
pub fn enable_future_features(&self) -> bool { pub fn resolve_main_module(&self) -> Result<&ModuleSpecifier, AnyError> {
*DENO_FUTURE self
} .main_module_cell
.get_or_init(|| {
pub fn resolve_main_module(&self) -> Result<ModuleSpecifier, AnyError> {
let main_module = match &self.flags.subcommand { let main_module = match &self.flags.subcommand {
DenoSubcommand::Bundle(bundle_flags) => {
resolve_url_or_path(&bundle_flags.source_file, self.initial_cwd())?
}
DenoSubcommand::Compile(compile_flags) => { DenoSubcommand::Compile(compile_flags) => {
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())? resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
} }
DenoSubcommand::Eval(_) => { DenoSubcommand::Eval(_) => {
resolve_url_or_path("./$deno$eval", self.initial_cwd())? resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())?
} }
DenoSubcommand::Repl(_) => { DenoSubcommand::Repl(_) => {
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())? resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
} }
DenoSubcommand::Run(run_flags) => { DenoSubcommand::Run(run_flags) => {
if run_flags.is_stdin() { if run_flags.is_stdin() {
std::env::current_dir() resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())?
.context("Unable to get CWD")
.and_then(|cwd| {
resolve_url_or_path("./$deno$stdin.ts", &cwd)
.map_err(AnyError::from)
})?
} else if NpmPackageReqReference::from_str(&run_flags.script).is_ok() {
ModuleSpecifier::parse(&run_flags.script)?
} else { } else {
resolve_url_or_path(&run_flags.script, self.initial_cwd())? resolve_url_or_path(&run_flags.script, self.initial_cwd())?
} }
@ -1178,6 +1160,9 @@ impl CliOptions {
}; };
Ok(main_module) Ok(main_module)
})
.as_ref()
.map_err(|err| deno_core::anyhow::anyhow!("{}", err))
} }
pub fn resolve_file_header_overrides( pub fn resolve_file_header_overrides(
@ -1198,7 +1183,7 @@ impl CliOptions {
(maybe_main_specifier, maybe_content_type) (maybe_main_specifier, maybe_content_type)
{ {
HashMap::from([( HashMap::from([(
main_specifier, main_specifier.clone(),
HashMap::from([("content-type".to_string(), content_type.to_string())]), HashMap::from([("content-type".to_string(), content_type.to_string())]),
)]) )])
} else { } else {
@ -1227,11 +1212,6 @@ impl CliOptions {
NPM_PROCESS_STATE.is_some() NPM_PROCESS_STATE.is_some()
} }
/// Overrides the import map specifier to use.
pub fn set_import_map_specifier(&mut self, path: Option<ModuleSpecifier>) {
self.overrides.import_map_specifier = Some(path);
}
pub fn has_node_modules_dir(&self) -> bool { pub fn has_node_modules_dir(&self) -> bool {
self.maybe_node_modules_folder.is_some() self.maybe_node_modules_folder.is_some()
} }
@ -1240,26 +1220,13 @@ impl CliOptions {
self.maybe_node_modules_folder.as_ref() self.maybe_node_modules_folder.as_ref()
} }
pub fn with_node_modules_dir_path(&self, path: PathBuf) -> Self { pub fn node_modules_dir(
Self { &self,
flags: self.flags.clone(), ) -> Result<Option<NodeModulesDirMode>, AnyError> {
initial_cwd: self.initial_cwd.clone(), if let Some(flag) = self.flags.node_modules_dir {
maybe_node_modules_folder: Some(path), return Ok(Some(flag));
npmrc: self.npmrc.clone(),
maybe_lockfile: self.maybe_lockfile.clone(),
start_dir: self.start_dir.clone(),
overrides: self.overrides.clone(),
disable_deprecated_api_warning: self.disable_deprecated_api_warning,
verbose_deprecated_api_warning: self.verbose_deprecated_api_warning,
deno_dir_provider: self.deno_dir_provider.clone(),
} }
} self.workspace().node_modules_dir().map_err(Into::into)
pub fn node_modules_dir_enablement(&self) -> Option<bool> {
self
.flags
.node_modules_dir
.or_else(|| self.workspace().node_modules_dir())
} }
pub fn vendor_dir_path(&self) -> Option<&PathBuf> { pub fn vendor_dir_path(&self) -> Option<&PathBuf> {
@ -1270,23 +1237,7 @@ impl CliOptions {
&self, &self,
config_type: TsConfigType, config_type: TsConfigType,
) -> Result<TsConfigForEmit, AnyError> { ) -> Result<TsConfigForEmit, AnyError> {
let result = self.workspace().resolve_ts_config_for_emit(config_type); self.workspace().resolve_ts_config_for_emit(config_type)
match result {
Ok(mut ts_config_for_emit) => {
if matches!(self.flags.subcommand, DenoSubcommand::Bundle(..)) {
// For backwards compatibility, force `experimentalDecorators` setting
// to true.
*ts_config_for_emit
.ts_config
.0
.get_mut("experimentalDecorators")
.unwrap() = serde_json::Value::Bool(true);
}
Ok(ts_config_for_emit)
}
Err(err) => Err(err),
}
} }
pub fn resolve_inspector_server( pub fn resolve_inspector_server(
@ -1354,10 +1305,7 @@ impl CliOptions {
pub fn resolve_config_unstable_fmt_options(&self) -> UnstableFmtOptions { pub fn resolve_config_unstable_fmt_options(&self) -> UnstableFmtOptions {
let workspace = self.workspace(); let workspace = self.workspace();
UnstableFmtOptions { UnstableFmtOptions {
css: workspace.has_unstable("fmt-css"),
html: workspace.has_unstable("fmt-html"),
component: workspace.has_unstable("fmt-component"), component: workspace.has_unstable("fmt-component"),
yaml: workspace.has_unstable("fmt-yaml"),
} }
} }
@ -1398,11 +1346,9 @@ impl CliOptions {
)?; )?;
Ok(deno_lint::linter::LintConfig { Ok(deno_lint::linter::LintConfig {
default_jsx_factory: transpile_options default_jsx_factory: (!transpile_options.jsx_automatic)
.jsx_automatic
.then(|| transpile_options.jsx_factory.clone()), .then(|| transpile_options.jsx_factory.clone()),
default_jsx_fragment_factory: transpile_options default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
.jsx_automatic
.then(|| transpile_options.jsx_fragment_factory.clone()), .then(|| transpile_options.jsx_fragment_factory.clone()),
}) })
} }
@ -1555,8 +1501,35 @@ impl CliOptions {
&self.flags.permissions &self.flags.permissions
} }
pub fn permissions_options(&self) -> Result<PermissionsOptions, AnyError> { pub fn permissions_options(&self) -> PermissionsOptions {
self.flags.permissions.to_options(Some(&self.initial_cwd)) fn files_to_urls(files: &[String]) -> Vec<Cow<'_, Url>> {
files
.iter()
.filter_map(|f| Url::parse(f).ok().map(Cow::Owned))
.collect()
}
// get a list of urls to imply for --allow-import
let cli_arg_urls = self
.resolve_main_module()
.ok()
.map(|url| vec![Cow::Borrowed(url)])
.or_else(|| match &self.flags.subcommand {
DenoSubcommand::Cache(cache_flags) => {
Some(files_to_urls(&cache_flags.files))
}
DenoSubcommand::Check(check_flags) => {
Some(files_to_urls(&check_flags.files))
}
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(flags),
}) => Url::parse(&flags.module_url)
.ok()
.map(|url| vec![Cow::Owned(url)]),
_ => None,
})
.unwrap_or_default();
self.flags.permissions.to_options(&cli_arg_urls)
} }
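permissions_options() above now derives an implied allow-import list from the command line: the main module if it resolves, otherwise the URL-shaped arguments of cache/check/global install. A rough sketch of the idea, using plain strings instead of Url and borrowing only (illustrative, not the actual flags API):

use std::borrow::Cow;

fn implied_import_urls<'a>(
  main_module: Option<&'a str>,
  subcommand_files: &'a [String],
) -> Vec<Cow<'a, str>> {
  main_module
    .map(|m| vec![Cow::Borrowed(m)])
    .unwrap_or_else(|| {
      subcommand_files
        .iter()
        .filter(|f| f.contains("://")) // keep only absolute URLs
        .map(|f| Cow::Borrowed(f.as_str()))
        .collect()
    })
}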
pub fn reload_flag(&self) -> bool { pub fn reload_flag(&self) -> bool {
@ -1602,18 +1575,28 @@ impl CliOptions {
&self.flags.unsafely_ignore_certificate_errors &self.flags.unsafely_ignore_certificate_errors
} }
pub fn legacy_unstable_flag(&self) -> bool {
self.flags.unstable_config.legacy_flag_enabled
}
pub fn unstable_bare_node_builtins(&self) -> bool { pub fn unstable_bare_node_builtins(&self) -> bool {
self.flags.unstable_config.bare_node_builtins self.flags.unstable_config.bare_node_builtins
|| self.workspace().has_unstable("bare-node-builtins") || self.workspace().has_unstable("bare-node-builtins")
} }
pub fn unstable_detect_cjs(&self) -> bool {
self.flags.unstable_config.detect_cjs
|| self.workspace().has_unstable("detect-cjs")
}
fn byonm_enabled(&self) -> bool {
// check if enabled via unstable
self.node_modules_dir().ok().flatten() == Some(NodeModulesDirMode::Manual)
|| NPM_PROCESS_STATE
.as_ref()
.map(|s| matches!(s.kind, NpmProcessStateKind::Byonm))
.unwrap_or(false)
}
pub fn use_byonm(&self) -> bool { pub fn use_byonm(&self) -> bool {
if self.enable_future_features() if self.node_modules_dir().ok().flatten().is_none()
&& self.node_modules_dir_enablement().is_none() && self.maybe_node_modules_folder.is_some()
&& self && self
.workspace() .workspace()
.config_folders() .config_folders()
@ -1623,13 +1606,7 @@ impl CliOptions {
return true; return true;
} }
// check if enabled via unstable self.byonm_enabled()
self.flags.unstable_config.byonm
|| NPM_PROCESS_STATE
.as_ref()
.map(|s| matches!(s.kind, NpmProcessStateKind::Byonm))
.unwrap_or(false)
|| self.workspace().has_unstable("byonm")
} }
pub fn unstable_sloppy_imports(&self) -> bool { pub fn unstable_sloppy_imports(&self) -> bool {
@ -1651,38 +1628,18 @@ impl CliOptions {
} }
}); });
if *DENO_FUTURE {
let future_features = [
deno_runtime::deno_ffi::UNSTABLE_FEATURE_NAME.to_string(),
deno_runtime::deno_fs::UNSTABLE_FEATURE_NAME.to_string(),
deno_runtime::deno_webgpu::UNSTABLE_FEATURE_NAME.to_string(),
];
future_features.iter().for_each(|future_feature| {
if !from_config_file.contains(future_feature) {
from_config_file.push(future_feature.to_string());
}
});
}
if !from_config_file.is_empty() { if !from_config_file.is_empty() {
// collect unstable granular flags let all_valid_unstable_flags: Vec<&str> = crate::UNSTABLE_GRANULAR_FLAGS
let mut all_valid_unstable_flags: Vec<&str> =
crate::UNSTABLE_GRANULAR_FLAGS
.iter() .iter()
.map(|granular_flag| granular_flag.0) .map(|granular_flag| granular_flag.name)
.collect(); .chain([
let mut another_unstable_flags = Vec::from([
"sloppy-imports", "sloppy-imports",
"byonm", "byonm",
"bare-node-builtins", "bare-node-builtins",
"fmt-css",
"fmt-html",
"fmt-component", "fmt-component",
"fmt-yaml", "detect-cjs",
]); ])
// add more unstable flags to the same vector holding granular flags .collect();
all_valid_unstable_flags.append(&mut another_unstable_flags);
// check and warn if the unstable flag of config file isn't supported, by // check and warn if the unstable flag of config file isn't supported, by
// iterating through the vector holding the unstable flags // iterating through the vector holding the unstable flags
@ -1745,14 +1702,14 @@ impl CliOptions {
pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig { pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig {
LifecycleScriptsConfig { LifecycleScriptsConfig {
allowed: self.flags.allow_scripts.clone(), allowed: self.flags.allow_scripts.clone(),
initial_cwd: if matches!( initial_cwd: self.initial_cwd.clone(),
self.flags.allow_scripts, root_dir: self.workspace().root_dir_path(),
PackagesAllowedScripts::None explicit_install: matches!(
) { self.sub_command(),
None DenoSubcommand::Install(_)
} else { | DenoSubcommand::Cache(_)
Some(self.initial_cwd.clone()) | DenoSubcommand::Add(_)
}, ),
} }
} }
} }
@ -1761,42 +1718,55 @@ impl CliOptions {
fn resolve_node_modules_folder( fn resolve_node_modules_folder(
cwd: &Path, cwd: &Path,
flags: &Flags, flags: &Flags,
maybe_config_file: Option<&ConfigFile>, workspace: &Workspace,
maybe_package_json: Option<&PackageJson>,
deno_dir_provider: &Arc<DenoDirProvider>, deno_dir_provider: &Arc<DenoDirProvider>,
) -> Result<Option<PathBuf>, AnyError> { ) -> Result<Option<PathBuf>, AnyError> {
let use_node_modules_dir = flags fn resolve_from_root(root_folder: &FolderConfigs, cwd: &Path) -> PathBuf {
.node_modules_dir root_folder
.or_else(|| maybe_config_file.and_then(|c| c.json.node_modules_dir)) .deno_json
.as_ref()
.map(|c| Cow::Owned(c.dir_path()))
.or_else(|| {
root_folder
.pkg_json
.as_ref()
.map(|c| Cow::Borrowed(c.dir_path()))
})
.unwrap_or(Cow::Borrowed(cwd))
.join("node_modules")
}
let root_folder = workspace.root_folder_configs();
let use_node_modules_dir = if let Some(mode) = flags.node_modules_dir {
Some(mode.uses_node_modules_dir())
} else {
workspace
.node_modules_dir()?
.map(|m| m.uses_node_modules_dir())
.or(flags.vendor) .or(flags.vendor)
.or_else(|| maybe_config_file.and_then(|c| c.json.vendor)); .or_else(|| root_folder.deno_json.as_ref().and_then(|c| c.json.vendor))
};
let path = if use_node_modules_dir == Some(false) { let path = if use_node_modules_dir == Some(false) {
return Ok(None); return Ok(None);
} else if let Some(state) = &*NPM_PROCESS_STATE { } else if let Some(state) = &*NPM_PROCESS_STATE {
return Ok(state.local_node_modules_path.as_ref().map(PathBuf::from)); return Ok(state.local_node_modules_path.as_ref().map(PathBuf::from));
} else if let Some(package_json_path) = maybe_package_json.map(|c| &c.path) { } else if root_folder.pkg_json.is_some() {
let node_modules_dir = resolve_from_root(root_folder, cwd);
if let Ok(deno_dir) = deno_dir_provider.get_or_create() { if let Ok(deno_dir) = deno_dir_provider.get_or_create() {
// `deno_dir.root` can be symlink in macOS // `deno_dir.root` can be symlink in macOS
if let Ok(root) = canonicalize_path_maybe_not_exists(&deno_dir.root) { if let Ok(root) = canonicalize_path_maybe_not_exists(&deno_dir.root) {
if package_json_path.starts_with(root) { if node_modules_dir.starts_with(root) {
// if the package.json is in deno_dir, then do not use node_modules // if the package.json is in deno_dir, then do not use node_modules
// next to it as local node_modules dir // next to it as local node_modules dir
return Ok(None); return Ok(None);
} }
} }
} }
// auto-discover the local_node_modules_folder when a package.json exists node_modules_dir
// and it's not in deno_dir
package_json_path.parent().unwrap().join("node_modules")
} else if use_node_modules_dir.is_none() { } else if use_node_modules_dir.is_none() {
return Ok(None); return Ok(None);
} else if let Some(config_path) = maybe_config_file
.as_ref()
.and_then(|c| c.specifier.to_file_path().ok())
{
config_path.parent().unwrap().join("node_modules")
} else { } else {
cwd.join("node_modules") resolve_from_root(root_folder, cwd)
}; };
Ok(Some(canonicalize_path_maybe_not_exists(&path)?)) Ok(Some(canonicalize_path_maybe_not_exists(&path)?))
} }
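In short, the rewritten resolver consults the workspace instead of a single deno.json/package.json pair: an explicit --node-modules-dir flag wins, then the workspace setting, then --vendor, then `vendor` in the root deno.json, and the directory itself is rooted at the root folder's deno.json or package.json (falling back to the cwd). A condensed sketch of that precedence with stand-in types, not deno_config's actual API:

#[derive(Clone, Copy)]
enum NodeModulesMode {
  Auto,
  Manual,
  None,
}

impl NodeModulesMode {
  fn uses_node_modules_dir(self) -> bool {
    !matches!(self, NodeModulesMode::None)
  }
}

fn wants_node_modules_dir(
  cli_flag: Option<NodeModulesMode>,       // --node-modules-dir
  workspace_mode: Option<NodeModulesMode>, // workspace "nodeModulesDir"
  vendor_flag: Option<bool>,               // --vendor
  root_deno_json_vendor: Option<bool>,     // "vendor" in root deno.json
) -> Option<bool> {
  if let Some(mode) = cli_flag {
    return Some(mode.uses_node_modules_dir());
  }
  workspace_mode
    .map(NodeModulesMode::uses_node_modules_dir)
    .or(vendor_flag)
    .or(root_deno_json_vendor)
}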
@ -1886,19 +1856,18 @@ pub fn npm_pkg_req_ref_to_binary_command(
pub fn config_to_deno_graph_workspace_member( pub fn config_to_deno_graph_workspace_member(
config: &ConfigFile, config: &ConfigFile,
) -> Result<deno_graph::WorkspaceMember, AnyError> { ) -> Result<deno_graph::WorkspaceMember, AnyError> {
let nv = deno_semver::package::PackageNv { let name = match &config.json.name {
name: match &config.json.name {
Some(name) => name.clone(), Some(name) => name.clone(),
None => bail!("Missing 'name' field in config file."), None => bail!("Missing 'name' field in config file."),
}, };
version: match &config.json.version { let version = match &config.json.version {
Some(name) => deno_semver::Version::parse_standard(name)?, Some(name) => Some(deno_semver::Version::parse_standard(name)?),
None => bail!("Missing 'version' field in config file."), None => None,
},
}; };
Ok(deno_graph::WorkspaceMember { Ok(deno_graph::WorkspaceMember {
base: config.specifier.join("./").unwrap(), base: config.specifier.join("./").unwrap(),
nv, name,
version,
exports: config.to_exports_config()?.into_map(), exports: config.to_exports_config()?.into_map(),
}) })
} }


@ -4,48 +4,94 @@ use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
use deno_core::serde_json;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmRemotePkg { pub struct InstallNpmRemotePkg {
pub alias: String, pub alias: Option<String>,
// todo(24419): use this when setting up the node_modules dir
#[allow(dead_code)]
pub base_dir: PathBuf, pub base_dir: PathBuf,
pub req: PackageReq, pub req: PackageReq,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmWorkspacePkg { pub struct InstallNpmWorkspacePkg {
pub alias: String, pub alias: Option<String>,
// todo(24419): use this when setting up the node_modules dir
#[allow(dead_code)]
pub base_dir: PathBuf,
pub target_dir: PathBuf, pub target_dir: PathBuf,
} }
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct PackageJsonInstallDepsProvider { pub struct NpmInstallDepsProvider {
remote_pkgs: Vec<InstallNpmRemotePkg>, remote_pkgs: Vec<InstallNpmRemotePkg>,
workspace_pkgs: Vec<InstallNpmWorkspacePkg>, workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
pkg_json_dep_errors: Vec<PackageJsonDepValueParseError>,
} }
impl PackageJsonInstallDepsProvider { impl NpmInstallDepsProvider {
pub fn empty() -> Self { pub fn empty() -> Self {
Self::default() Self::default()
} }
pub fn from_workspace(workspace: &Arc<Workspace>) -> Self { pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
// todo(dsherret): estimate capacity?
let mut workspace_pkgs = Vec::new(); let mut workspace_pkgs = Vec::new();
let mut remote_pkgs = Vec::new(); let mut remote_pkgs = Vec::new();
let mut pkg_json_dep_errors = Vec::new();
let workspace_npm_pkgs = workspace.npm_packages(); let workspace_npm_pkgs = workspace.npm_packages();
for pkg_json in workspace.package_jsons() {
for (_, folder) in workspace.config_folders() {
// deal with the deno.json first because it takes precedence during resolution
if let Some(deno_json) = &folder.deno_json {
// don't bother with externally referenced import maps as users
// should inline their import map to get this behaviour
if let Some(serde_json::Value::Object(obj)) = &deno_json.json.imports {
let mut pkg_pkgs = Vec::with_capacity(obj.len());
for (_alias, value) in obj {
let serde_json::Value::String(specifier) = value else {
continue;
};
let Ok(npm_req_ref) = NpmPackageReqReference::from_str(specifier)
else {
continue;
};
let pkg_req = npm_req_ref.into_inner().req;
let workspace_pkg = workspace_npm_pkgs
.iter()
.find(|pkg| pkg.matches_req(&pkg_req));
if let Some(pkg) = workspace_pkg {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias: None,
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
} else {
pkg_pkgs.push(InstallNpmRemotePkg {
alias: None,
base_dir: deno_json.dir_path(),
req: pkg_req,
});
}
}
// sort within each package (more like npm resolution)
pkg_pkgs.sort_by(|a, b| a.req.cmp(&b.req));
remote_pkgs.extend(pkg_pkgs);
}
}
if let Some(pkg_json) = &folder.pkg_json {
let deps = pkg_json.resolve_local_package_json_deps(); let deps = pkg_json.resolve_local_package_json_deps();
let mut pkg_pkgs = Vec::with_capacity(deps.len()); let mut pkg_pkgs = Vec::with_capacity(deps.len());
for (alias, dep) in deps { for (alias, dep) in deps {
let Ok(dep) = dep else { let dep = match dep {
Ok(dep) => dep,
Err(err) => {
pkg_json_dep_errors.push(err);
continue; continue;
}
}; };
match dep { match dep {
PackageJsonDepValue::Req(pkg_req) => { PackageJsonDepValue::Req(pkg_req) => {
@ -57,13 +103,12 @@ impl PackageJsonInstallDepsProvider {
if let Some(pkg) = workspace_pkg { if let Some(pkg) = workspace_pkg {
workspace_pkgs.push(InstallNpmWorkspacePkg { workspace_pkgs.push(InstallNpmWorkspacePkg {
alias, alias: Some(alias),
base_dir: pkg_json.dir_path().to_path_buf(),
target_dir: pkg.pkg_json.dir_path().to_path_buf(), target_dir: pkg.pkg_json.dir_path().to_path_buf(),
}); });
} else { } else {
pkg_pkgs.push(InstallNpmRemotePkg { pkg_pkgs.push(InstallNpmRemotePkg {
alias, alias: Some(alias),
base_dir: pkg_json.dir_path().to_path_buf(), base_dir: pkg_json.dir_path().to_path_buf(),
req: pkg_req, req: pkg_req,
}); });
@ -74,32 +119,38 @@ impl PackageJsonInstallDepsProvider {
pkg.matches_name_and_version_req(&alias, &version_req) pkg.matches_name_and_version_req(&alias, &version_req)
}) { }) {
workspace_pkgs.push(InstallNpmWorkspacePkg { workspace_pkgs.push(InstallNpmWorkspacePkg {
alias, alias: Some(alias),
base_dir: pkg_json.dir_path().to_path_buf(),
target_dir: pkg.pkg_json.dir_path().to_path_buf(), target_dir: pkg.pkg_json.dir_path().to_path_buf(),
}); });
} }
} }
} }
} }
// sort within each package
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
// sort within each package as npm does
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
remote_pkgs.extend(pkg_pkgs); remote_pkgs.extend(pkg_pkgs);
} }
}
remote_pkgs.shrink_to_fit(); remote_pkgs.shrink_to_fit();
workspace_pkgs.shrink_to_fit(); workspace_pkgs.shrink_to_fit();
Self { Self {
remote_pkgs, remote_pkgs,
workspace_pkgs, workspace_pkgs,
pkg_json_dep_errors,
} }
} }
pub fn remote_pkgs(&self) -> &Vec<InstallNpmRemotePkg> { pub fn remote_pkgs(&self) -> &[InstallNpmRemotePkg] {
&self.remote_pkgs &self.remote_pkgs
} }
pub fn workspace_pkgs(&self) -> &Vec<InstallNpmWorkspacePkg> { pub fn workspace_pkgs(&self) -> &[InstallNpmWorkspacePkg] {
&self.workspace_pkgs &self.workspace_pkgs
} }
pub fn pkg_json_dep_errors(&self) -> &[PackageJsonDepValueParseError] {
&self.pkg_json_dep_errors
}
} }
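The deno.json branch added above scans each folder's `imports` map for bare `npm:` specifiers and splits them into workspace packages (installed from a local directory) and remote packages (handed to the npm installer, sorted the way npm would). A stripped-down, string-only sketch of that classification; the real code parses specifiers with deno_semver's NpmPackageReqReference and deno_config's workspace types:

use std::collections::BTreeMap;

/// `imports` entries such as "chalk" -> "npm:chalk@5"; `workspace_names`
/// are npm packages defined inside this workspace.
fn partition_npm_imports(
  imports: &BTreeMap<String, String>,
  workspace_names: &[&str],
) -> (Vec<String>, Vec<String>) {
  let mut workspace = Vec::new();
  let mut remote = Vec::new();
  for specifier in imports.values() {
    let Some(req) = specifier.strip_prefix("npm:") else {
      continue; // not an npm specifier, nothing to install
    };
    // Crude name extraction (everything before a trailing "@<version>",
    // ignoring a leading scope '@'); stands in for NpmPackageReqReference.
    let name = match req.rfind('@') {
      Some(i) if i > 0 => &req[..i],
      _ => req,
    };
    if workspace_names.contains(&name) {
      workspace.push(name.to_string());
    } else {
      remote.push(req.to_string());
    }
  }
  // Mirror the "sort within each package" step from the hunk above.
  remote.sort();
  (workspace, remote)
}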


@ -123,19 +123,19 @@ impl AuthTokens {
pub fn new(maybe_tokens_str: Option<String>) -> Self { pub fn new(maybe_tokens_str: Option<String>) -> Self {
let mut tokens = Vec::new(); let mut tokens = Vec::new();
if let Some(tokens_str) = maybe_tokens_str { if let Some(tokens_str) = maybe_tokens_str {
for token_str in tokens_str.split(';') { for token_str in tokens_str.trim().split(';') {
if token_str.contains('@') { if token_str.contains('@') {
let pair: Vec<&str> = token_str.rsplitn(2, '@').collect(); let mut iter = token_str.rsplitn(2, '@');
let token = pair[1]; let host = AuthDomain::from(iter.next().unwrap());
let host = AuthDomain::from(pair[0]); let token = iter.next().unwrap();
if token.contains(':') { if token.contains(':') {
let pair: Vec<&str> = token.rsplitn(2, ':').collect(); let mut iter = token.rsplitn(2, ':');
let username = pair[1].to_string(); let password = iter.next().unwrap().to_owned();
let password = pair[0].to_string(); let username = iter.next().unwrap().to_owned();
tokens.push(AuthToken { tokens.push(AuthToken {
host, host,
token: AuthTokenData::Basic { username, password }, token: AuthTokenData::Basic { username, password },
}) });
} else { } else {
tokens.push(AuthToken { tokens.push(AuthToken {
host, host,
@ -211,6 +211,40 @@ mod tests {
); );
} }
#[test]
fn test_auth_tokens_space() {
let auth_tokens = AuthTokens::new(Some(
" abc123@deno.land;def456@example.com\t".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_newline() {
let auth_tokens = AuthTokens::new(Some(
"\nabc123@deno.land;def456@example.com\n".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
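The two tests added above pin down the retooled DENO_AUTH_TOKENS parsing: the whole string is trimmed, entries are split on ';', and each `token@host` / `user:pass@host` entry is split on the last '@' and ':' so credentials may themselves contain those characters. A self-contained sketch of that rule with illustrative names, not the CLI's AuthToken types:

#[derive(Debug, PartialEq)]
enum Credential {
  Bearer(String),
  Basic { username: String, password: String },
}

/// Parse one "token@host" or "user:pass@host" entry into (host, credential).
fn parse_entry(entry: &str) -> Option<(String, Credential)> {
  let mut at = entry.rsplitn(2, '@');
  let host = at.next()?.to_string(); // text after the last '@'
  let token = at.next()?; // everything before it; None when there is no '@'
  let credential = match token.rsplit_once(':') {
    Some((username, password)) => Credential::Basic {
      username: username.to_string(),
      password: password.to_string(),
    },
    None => Credential::Bearer(token.to_string()),
  };
  Some((host, credential))
}

fn parse_tokens(raw: &str) -> Vec<(String, Credential)> {
  raw.trim().split(';').filter_map(parse_entry).collect()
}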
#[test] #[test]
fn test_auth_tokens_port() { fn test_auth_tokens_port() {
let auth_tokens = let auth_tokens =


@ -1,3 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
const count = 100000; const count = 100000;
for (let i = 0; i < count; i++) console.log("Hello World"); for (let i = 0; i < count; i++) console.log("Hello World");


@ -46,8 +46,7 @@ Deno.bench("b64_rt_short", { n: 1e6 }, () => {
const buf = new Uint8Array(100); const buf = new Uint8Array(100);
const file = Deno.openSync("/dev/zero"); const file = Deno.openSync("/dev/zero");
Deno.bench("read_zero", { n: 5e5 }, () => { Deno.bench("read_zero", { n: 5e5 }, () => {
// deno-lint-ignore no-deprecated-deno-api file.readSync(buf);
Deno.readSync(file.rid, buf);
}); });
} }


@ -1,4 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args
: [process.argv[2], process.argv[3]]; : [process.argv[2], process.argv[3]];


@ -1,4 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args
: [process.argv[2], process.argv[3]]; : [process.argv[2], process.argv[3]];


@ -1,167 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::net::TcpStream;
use std::path::Path;
use std::process::Command;
use std::sync::atomic::AtomicU16;
use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::Instant;
use super::Result;
pub use test_util::parse_wrk_output;
pub use test_util::WrkOutput as HttpBenchmarkResult;
// Some of the benchmarks in this file have been renamed. In case the history
// somehow gets messed up:
// "node_http" was once called "node"
// "deno_tcp" was once called "deno"
// "deno_http" was once called "deno_net_http"
const DURATION: &str = "10s";
pub fn benchmark(
target_path: &Path,
) -> Result<HashMap<String, HttpBenchmarkResult>> {
let deno_exe = test_util::deno_exe_path();
let deno_exe = deno_exe.to_string();
let hyper_hello_exe = target_path.join("test_server");
let hyper_hello_exe = hyper_hello_exe.to_str().unwrap();
let mut res = HashMap::new();
let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
let http_dir = manifest_dir.join("bench").join("http");
for entry in std::fs::read_dir(&http_dir)? {
let entry = entry?;
let pathbuf = entry.path();
let path = pathbuf.to_str().unwrap();
if path.ends_with(".lua") {
continue;
}
let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap();
let lua_script = http_dir.join(format!("{file_stem}.lua"));
let mut maybe_lua = None;
if lua_script.exists() {
maybe_lua = Some(lua_script.to_str().unwrap());
}
let port = get_port();
// deno run -A --unstable <path> <addr>
res.insert(
file_stem.to_string(),
run(
&[
deno_exe.as_str(),
"run",
"--allow-all",
"--unstable",
"--enable-testing-features-do-not-use",
path,
&server_addr(port),
],
port,
None,
None,
maybe_lua,
)?,
);
}
res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?);
Ok(res)
}
fn run(
server_cmd: &[&str],
port: u16,
env: Option<Vec<(String, String)>>,
origin_cmd: Option<&[&str]>,
lua_script: Option<&str>,
) -> Result<HttpBenchmarkResult> {
// Wait for port 4544 to become available.
// TODO Need to use SO_REUSEPORT with tokio::net::TcpListener.
std::thread::sleep(Duration::from_secs(5));
let mut origin = None;
if let Some(cmd) = origin_cmd {
let mut com = Command::new(cmd[0]);
com.args(&cmd[1..]);
if let Some(env) = env.clone() {
com.envs(env);
}
origin = Some(com.spawn()?);
};
println!("{}", server_cmd.join(" "));
let mut server = {
let mut com = Command::new(server_cmd[0]);
com.args(&server_cmd[1..]);
if let Some(env) = env {
com.envs(env);
}
com.spawn()?
};
// Wait for server to wake up.
let now = Instant::now();
let addr = format!("127.0.0.1:{port}");
while now.elapsed().as_secs() < 30 {
if TcpStream::connect(&addr).is_ok() {
break;
}
std::thread::sleep(Duration::from_millis(10));
}
TcpStream::connect(&addr).expect("Failed to connect to server in time");
println!("Server took {} ms to start", now.elapsed().as_millis());
let wrk = test_util::prebuilt_tool_path("wrk");
assert!(wrk.is_file());
let addr = format!("http://{addr}/");
let wrk = wrk.to_string();
let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr];
if let Some(lua_script) = lua_script {
wrk_cmd.push("-s");
wrk_cmd.push(lua_script);
}
println!("{}", wrk_cmd.join(" "));
let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0;
std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy.
println!("{output}");
assert!(
server.try_wait()?.map(|s| s.success()).unwrap_or(true),
"server ended with error"
);
server.kill()?;
if let Some(mut origin) = origin {
origin.kill()?;
}
Ok(parse_wrk_output(&output))
}
static NEXT_PORT: AtomicU16 = AtomicU16::new(4544);
pub(crate) fn get_port() -> u16 {
let p = NEXT_PORT.load(Ordering::SeqCst);
NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst);
p
}
fn server_addr(port: u16) -> String {
format!("0.0.0.0:{port}")
}
fn hyper_http(exe: &str) -> Result<HttpBenchmarkResult> {
let port = get_port();
println!("http_benchmark testing RUST hyper");
run(&[exe, &port.to_string()], port, None, None, None)
}


@ -1,10 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts";
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const app = new Hono();
app.get("/", (c) => c.text("Hello, World!"));
Deno.serve({ port: Number(port), hostname }, app.fetch);


@ -1,14 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
const path = new URL("../testdata/128k.bin", import.meta.url).pathname;
function handler() {
const file = Deno.openSync(path);
return new Response(file.readable);
}
serve({ hostname, port: Number(port) }, handler);


@ -1,5 +0,0 @@
wrk.headers["foo"] = "bar"
wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
wrk.headers["Viewport-Width"] = "1920"
wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
wrk.headers["Accept-Language"] = "en,la;q=0.9"


@ -1,11 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] ?? "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
function handler() {
return new Response("Hello World");
}
serve({ hostname, port: Number(port), reusePort: true }, handler);


@ -1,5 +0,0 @@
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/octet-stream"
file = io.open("./cli/bench/testdata/128k.bin", "rb")
wrk.body = file:read("*a")


@ -1,3 +0,0 @@
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/json"
wrk.body = '{"hello":"deno"}'


@ -1,25 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { renderToReadableStream } from "https://esm.run/react-dom/server";
import * as React from "https://esm.run/react";
const { serve } = Deno;
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const App = () => (
<html>
<body>
<h1>Hello World</h1>
</body>
</html>
);
const headers = {
headers: {
"Content-Type": "text/html",
},
};
serve({ hostname, port: Number(port) }, async () => {
return new Response(await renderToReadableStream(<App />), headers);
});


@ -1,33 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Used for benchmarking Deno's networking.
// TODO(bartlomieju): Replace this with a real HTTP server once
// https://github.com/denoland/deno/issues/726 is completed.
// Note: this is a keep-alive server.
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const listener = Deno.listen({ hostname, port: Number(port) });
const response = new TextEncoder().encode(
"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n",
);
async function handle(conn: Deno.Conn): Promise<void> {
const buffer = new Uint8Array(1024);
try {
while (true) {
await conn.read(buffer);
await conn.write(response);
}
} catch (e) {
if (
!(e instanceof Deno.errors.BrokenPipe) &&
!(e instanceof Deno.errors.ConnectionReset)
) {
throw e;
}
}
conn.close();
}
console.log("Listening on", addr);
for await (const conn of listener) {
handle(conn);
}


@ -4,9 +4,10 @@ use deno_core::serde::Deserialize;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::serde_json::Value; use deno_core::serde_json::Value;
use deno_core::url::Url; use lsp_types::Uri;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::str::FromStr;
use std::time::Duration; use std::time::Duration;
use test_util::lsp::LspClientBuilder; use test_util::lsp::LspClientBuilder;
use test_util::PathRef; use test_util::PathRef;
@ -91,7 +92,7 @@ fn bench_deco_apps_edits(deno_exe: &Path) -> Duration {
.build(); .build();
client.initialize(|c| { client.initialize(|c| {
c.set_workspace_folders(vec![lsp_types::WorkspaceFolder { c.set_workspace_folders(vec![lsp_types::WorkspaceFolder {
uri: Url::from_file_path(&apps).unwrap(), uri: apps.uri_dir(),
name: "apps".to_string(), name: "apps".to_string(),
}]); }]);
c.set_deno_enable(true); c.set_deno_enable(true);
@ -149,7 +150,11 @@ fn bench_big_file_edits(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",
@ -205,6 +210,8 @@ fn bench_code_lens(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { client.change_configuration(json!({ "deno": {
"enable": true, "enable": true,
"codeLens": { "codeLens": {
@ -213,6 +220,8 @@ fn bench_code_lens(deno_exe: &Path) -> Duration {
"test": true, "test": true,
}, },
} })); } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",
@ -256,7 +265,11 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
for i in 0..10 { for i in 0..10 {
client.write_notification( client.write_notification(
@ -283,7 +296,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
"textDocument/didChange", "textDocument/didChange",
lsp::DidChangeTextDocumentParams { lsp::DidChangeTextDocumentParams {
text_document: lsp::VersionedTextDocumentIdentifier { text_document: lsp::VersionedTextDocumentIdentifier {
uri: Url::parse(&file_name).unwrap(), uri: Uri::from_str(&file_name).unwrap(),
version: 2, version: 2,
}, },
content_changes: vec![lsp::TextDocumentContentChangeEvent { content_changes: vec![lsp::TextDocumentContentChangeEvent {
@ -310,7 +323,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
"textDocument/formatting", "textDocument/formatting",
lsp::DocumentFormattingParams { lsp::DocumentFormattingParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: Url::parse(&file_name).unwrap(), uri: Uri::from_str(&file_name).unwrap(),
}, },
options: lsp::FormattingOptions { options: lsp::FormattingOptions {
tab_size: 2, tab_size: 2,
@ -340,7 +353,11 @@ fn bench_startup_shutdown(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",


@ -13,7 +13,11 @@ use test_util::lsp::LspClientBuilder;
fn incremental_change_wait(bench: &mut Bencher) { fn incremental_change_wait(bench: &mut Bencher) {
let mut client = LspClientBuilder::new().use_diagnostic_sync(false).build(); let mut client = LspClientBuilder::new().use_diagnostic_sync(false).build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",


@ -17,7 +17,6 @@ use std::process::Stdio;
use std::time::SystemTime; use std::time::SystemTime;
use test_util::PathRef; use test_util::PathRef;
mod http;
mod lsp; mod lsp;
fn read_json(filename: &Path) -> Result<Value> { fn read_json(filename: &Path) -> Result<Value> {
@ -143,29 +142,6 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
], ],
None, None,
), ),
(
"bundle",
&[
"bundle",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
),
(
"bundle_no_check",
&[
"bundle",
"--no-check",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
),
]; ];
const RESULT_KEYS: &[&str] = const RESULT_KEYS: &[&str] =
@ -314,40 +290,6 @@ fn get_binary_sizes(target_dir: &Path) -> Result<HashMap<String, i64>> {
Ok(sizes) Ok(sizes)
} }
const BUNDLES: &[(&str, &str)] = &[
("file_server", "./tests/util/std/http/file_server.ts"),
("welcome", "./tests/testdata/welcome.ts"),
];
fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
let mut sizes = HashMap::<String, i64>::new();
for (name, url) in BUNDLES {
let path = format!("{name}.bundle.js");
test_util::run(
&[
deno_exe.to_str().unwrap(),
"bundle",
"--unstable",
"--config",
"tests/config/deno.json",
url,
&path,
],
None,
None,
None,
true,
);
let file = PathBuf::from(path);
assert!(file.is_file());
sizes.insert(name.to_string(), file.metadata()?.len() as i64);
let _ = fs::remove_file(file);
}
Ok(sizes)
}
fn run_max_mem_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> { fn run_max_mem_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
let mut results = HashMap::<String, i64>::new(); let mut results = HashMap::<String, i64>::new();
@ -402,9 +344,11 @@ struct BenchResult {
binary_size: HashMap<String, i64>, binary_size: HashMap<String, i64>,
bundle_size: HashMap<String, i64>, bundle_size: HashMap<String, i64>,
cargo_deps: usize, cargo_deps: usize,
// TODO(bartlomieju): remove
max_latency: HashMap<String, f64>, max_latency: HashMap<String, f64>,
max_memory: HashMap<String, i64>, max_memory: HashMap<String, i64>,
lsp_exec_time: HashMap<String, i64>, lsp_exec_time: HashMap<String, i64>,
// TODO(bartlomieju): remove
req_per_sec: HashMap<String, i64>, req_per_sec: HashMap<String, i64>,
syscall_count: HashMap<String, i64>, syscall_count: HashMap<String, i64>,
thread_count: HashMap<String, i64>, thread_count: HashMap<String, i64>,
@ -415,12 +359,10 @@ async fn main() -> Result<()> {
let mut args = env::args(); let mut args = env::args();
let mut benchmarks = vec![ let mut benchmarks = vec![
"bundle",
"exec_time", "exec_time",
"binary_size", "binary_size",
"cargo_deps", "cargo_deps",
"lsp", "lsp",
"http",
"strace", "strace",
"mem_usage", "mem_usage",
]; ];
@ -465,11 +407,6 @@ async fn main() -> Result<()> {
..Default::default() ..Default::default()
}; };
if benchmarks.contains(&"bundle") {
let bundle_size = bundle_benchmark(&deno_exe)?;
new_data.bundle_size = bundle_size;
}
if benchmarks.contains(&"exec_time") { if benchmarks.contains(&"exec_time") {
let exec_times = run_exec_time(&deno_exe, &target_dir)?; let exec_times = run_exec_time(&deno_exe, &target_dir)?;
new_data.benchmark = exec_times; new_data.benchmark = exec_times;
@ -490,21 +427,6 @@ async fn main() -> Result<()> {
new_data.lsp_exec_time = lsp_exec_times; new_data.lsp_exec_time = lsp_exec_times;
} }
if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) {
let stats = http::benchmark(target_dir.as_path())?;
let req_per_sec = stats
.iter()
.map(|(name, result)| (name.clone(), result.requests as i64))
.collect();
new_data.req_per_sec = req_per_sec;
let max_latency = stats
.iter()
.map(|(name, result)| (name.clone(), result.latency))
.collect();
new_data.max_latency = max_latency;
}
if cfg!(target_os = "linux") && benchmarks.contains(&"strace") { if cfg!(target_os = "linux") && benchmarks.contains(&"strace") {
use std::io::Read; use std::io::Read;


@ -1,4 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args


@ -1,4 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args
: [process.argv[2], process.argv[3]]; : [process.argv[2], process.argv[3]];

View file

@ -2,6 +2,8 @@
// //
// From https://github.com/just-js/benchmarks/tree/main/01-stdio // From https://github.com/just-js/benchmarks/tree/main/01-stdio
// deno-lint-ignore-file no-console
const blocksize = parseInt(Deno.args[0] || 65536); const blocksize = parseInt(Deno.args[0] || 65536);
const buf = new Uint8Array(blocksize); const buf = new Uint8Array(blocksize);
let size = 0; let size = 0;


@ -1,4 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args


@ -1,4 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args


@ -1,5 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
// Note: when benchmarking across different Deno version, make sure to clear // Note: when benchmarking across different Deno version, make sure to clear
// the DENO_DIR cache. // the DENO_DIR cache.
let [total, count] = typeof Deno !== "undefined" ? Deno.args : []; let [total, count] = typeof Deno !== "undefined" ? Deno.args : [];


@ -1,4 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args


@ -13,7 +13,6 @@ mod ts {
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::op2; use deno_core::op2;
use deno_core::OpState; use deno_core::OpState;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use serde::Serialize; use serde::Serialize;
use std::collections::HashMap; use std::collections::HashMap;
use std::io::Write; use std::io::Write;
@ -25,7 +24,6 @@ mod ts {
struct BuildInfoResponse { struct BuildInfoResponse {
build_specifier: String, build_specifier: String,
libs: Vec<String>, libs: Vec<String>,
node_built_in_module_names: Vec<String>,
} }
#[op2] #[op2]
@ -37,14 +35,9 @@ mod ts {
.iter() .iter()
.map(|s| s.to_string()) .map(|s| s.to_string())
.collect(); .collect();
let node_built_in_module_names = SUPPORTED_BUILTIN_NODE_MODULES
.iter()
.map(|s| s.to_string())
.collect();
BuildInfoResponse { BuildInfoResponse {
build_specifier, build_specifier,
libs: build_libs, libs: build_libs,
node_built_in_module_names,
} }
} }
@ -243,6 +236,7 @@ mod ts {
"esnext.decorators", "esnext.decorators",
"esnext.disposable", "esnext.disposable",
"esnext.intl", "esnext.intl",
"esnext.iterator",
"esnext.object", "esnext.object",
"esnext.promise", "esnext.promise",
"esnext.regexp", "esnext.regexp",
@ -371,6 +365,9 @@ fn main() {
return; return;
} }
deno_napi::print_linker_flags("deno");
deno_napi::print_linker_flags("denort");
// Host snapshots won't work when cross compiling. // Host snapshots won't work when cross compiling.
let target = env::var("TARGET").unwrap(); let target = env::var("TARGET").unwrap();
let host = env::var("HOST").unwrap(); let host = env::var("HOST").unwrap();
@ -380,56 +377,6 @@ fn main() {
panic!("Cross compiling with snapshot is not supported."); panic!("Cross compiling with snapshot is not supported.");
} }
let symbols_file_name = match env::consts::OS {
"android" | "freebsd" | "openbsd" => {
"generated_symbol_exports_list_linux.def".to_string()
}
os => format!("generated_symbol_exports_list_{}.def", os),
};
let symbols_path = std::path::Path::new("napi")
.join(symbols_file_name)
.canonicalize()
.expect(
"Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
);
#[cfg(target_os = "windows")]
println!(
"cargo:rustc-link-arg-bin=deno=/DEF:{}",
symbols_path.display()
);
#[cfg(target_os = "macos")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}",
symbols_path.display()
);
#[cfg(target_os = "linux")]
{
// If a custom compiler is set, the glibc version is not reliable.
// Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list.
if env::var("CC").is_err()
&& glibc_version::get_version()
.map(|ver| ver.major <= 2 && ver.minor < 35)
.unwrap_or(false)
{
println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. Please use glibc 2.35 or later for an optimised build.");
println!("cargo:rustc-link-arg-bin=deno=-rdynamic");
} else {
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
}
}
#[cfg(target_os = "android")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
// To debug snapshot issues uncomment: // To debug snapshot issues uncomment:
// op_fetch_asset::trace_serializer(); // op_fetch_asset::trace_serializer();
@ -446,7 +393,7 @@ fn main() {
); );
let ts_version = ts::version(); let ts_version = ts::version();
debug_assert_eq!(ts_version, "5.5.2"); // bump this assertion when it changes debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
println!("cargo:rustc-env=TS_VERSION={}", ts_version); println!("cargo:rustc-env=TS_VERSION={}", ts_version);
println!("cargo:rerun-if-env-changed=TS_VERSION"); println!("cargo:rerun-if-env-changed=TS_VERSION");


@ -80,10 +80,6 @@ impl CodeCache {
data, data,
)); ));
} }
pub fn remove_code_cache(&self, specifier: &str) {
Self::ensure_ok(self.inner.remove_code_cache(specifier))
}
} }
impl code_cache::CodeCache for CodeCache { impl code_cache::CodeCache for CodeCache {
@ -162,15 +158,6 @@ impl CodeCacheInner {
self.conn.execute(sql, params)?; self.conn.execute(sql, params)?;
Ok(()) Ok(())
} }
pub fn remove_code_cache(&self, specifier: &str) -> Result<(), AnyError> {
let sql = "
DELETE FROM codecache
WHERE specifier=$1;";
let params = params![specifier];
self.conn.execute(sql, params)?;
Ok(())
}
} }
fn serialize_code_cache_type( fn serialize_code_cache_type(


@ -126,9 +126,9 @@ impl DenoDir {
self.root.join("registries") self.root.join("registries")
} }
/// Path to the dependencies cache folder. /// Path to the remote cache folder.
pub fn deps_folder_path(&self) -> PathBuf { pub fn remote_folder_path(&self) -> PathBuf {
self.root.join("deps") self.root.join("remote")
} }
/// Path to the origin data cache folder. /// Path to the origin data cache folder.

cli/cache/emit.rs

@ -5,33 +5,25 @@ use std::path::PathBuf;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::unsync::sync::AtomicFlag; use deno_core::unsync::sync::AtomicFlag;
use serde::Deserialize;
use serde::Serialize;
use super::DiskCache; use super::DiskCache;
use super::FastInsecureHasher;
#[derive(Debug, Deserialize, Serialize)]
struct EmitMetadata {
pub source_hash: u64,
pub emit_hash: u64,
}
/// The cache that stores previously emitted files. /// The cache that stores previously emitted files.
pub struct EmitCache { pub struct EmitCache {
disk_cache: DiskCache, disk_cache: DiskCache,
cli_version: &'static str,
emit_failed_flag: AtomicFlag, emit_failed_flag: AtomicFlag,
file_serializer: EmitFileSerializer,
} }
impl EmitCache { impl EmitCache {
pub fn new(disk_cache: DiskCache) -> Self { pub fn new(disk_cache: DiskCache) -> Self {
Self { Self {
disk_cache, disk_cache,
cli_version: crate::version::DENO_VERSION_INFO.deno,
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
file_serializer: EmitFileSerializer {
cli_version: crate::version::DENO_VERSION_INFO.deno,
},
} }
} }
@ -47,38 +39,12 @@ impl EmitCache {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
expected_source_hash: u64, expected_source_hash: u64,
) -> Option<Vec<u8>> { ) -> Option<String> {
let meta_filename = self.get_meta_filename(specifier)?;
let emit_filename = self.get_emit_filename(specifier)?; let emit_filename = self.get_emit_filename(specifier)?;
let bytes = self.disk_cache.get(&emit_filename).ok()?;
// load and verify the meta data file is for this source and CLI version
let bytes = self.disk_cache.get(&meta_filename).ok()?;
let meta: EmitMetadata = serde_json::from_slice(&bytes).ok()?;
if meta.source_hash != expected_source_hash {
return None;
}
// load and verify the emit is for the meta data
let emit_bytes = self.disk_cache.get(&emit_filename).ok()?;
if meta.emit_hash != compute_emit_hash(&emit_bytes, self.cli_version) {
return None;
}
// everything looks good, return it
Some(emit_bytes)
}
/// Gets the filepath which stores the emit.
pub fn get_emit_filepath(
&self,
specifier: &ModuleSpecifier,
) -> Option<PathBuf> {
Some(
self self
.disk_cache .file_serializer
.location .deserialize(bytes, expected_source_hash)
.join(self.get_emit_filename(specifier)?),
)
} }
/// Sets the emit code in the cache. /// Sets the emit code in the cache.
@ -107,34 +73,15 @@ impl EmitCache {
return Ok(()); return Ok(());
} }
let meta_filename = self
.get_meta_filename(specifier)
.ok_or_else(|| anyhow!("Could not get meta filename."))?;
let emit_filename = self let emit_filename = self
.get_emit_filename(specifier) .get_emit_filename(specifier)
.ok_or_else(|| anyhow!("Could not get emit filename."))?; .ok_or_else(|| anyhow!("Could not get emit filename."))?;
let cache_data = self.file_serializer.serialize(code, source_hash);
// save the metadata self.disk_cache.set(&emit_filename, &cache_data)?;
let metadata = EmitMetadata {
source_hash,
emit_hash: compute_emit_hash(code, self.cli_version),
};
self
.disk_cache
.set(&meta_filename, &serde_json::to_vec(&metadata)?)?;
// save the emit source
self.disk_cache.set(&emit_filename, code)?;
Ok(()) Ok(())
} }
fn get_meta_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
self
.disk_cache
.get_cache_filename_with_extension(specifier, "meta")
}
fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> { fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
self self
.disk_cache .disk_cache
@ -142,15 +89,68 @@ impl EmitCache {
} }
} }
fn compute_emit_hash(bytes: &[u8], cli_version: &str) -> u64 { const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";
struct EmitFileSerializer {
cli_version: &'static str,
}
impl EmitFileSerializer {
pub fn deserialize(
&self,
mut bytes: Vec<u8>,
expected_source_hash: u64,
) -> Option<String> {
let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
let (content, last_line) = bytes.split_at(last_newline_index);
let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
let hashes = String::from_utf8_lossy(hashes);
let (source_hash, emit_hash) = hashes.split_once(',')?;
// verify the meta data file is for this source and CLI version
let source_hash = source_hash.parse::<u64>().ok()?;
if source_hash != expected_source_hash {
return None;
}
let emit_hash = emit_hash.parse::<u64>().ok()?;
// prevent using an emit from a different cli version or emits that were tampered with
if emit_hash != self.compute_emit_hash(content) {
return None;
}
// everything looks good, truncate and return it
bytes.truncate(content.len());
String::from_utf8(bytes).ok()
}
pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
let source_hash = source_hash.to_string();
let emit_hash = self.compute_emit_hash(code).to_string();
let capacity = code.len()
+ LAST_LINE_PREFIX.len()
+ source_hash.len()
+ 1
+ emit_hash.len();
let mut cache_data = Vec::with_capacity(capacity);
cache_data.extend(code);
cache_data.extend(LAST_LINE_PREFIX.as_bytes());
cache_data.extend(source_hash.as_bytes());
cache_data.push(b',');
cache_data.extend(emit_hash.as_bytes());
debug_assert_eq!(cache_data.len(), capacity);
cache_data
}
fn compute_emit_hash(&self, bytes: &[u8]) -> u64 {
// it's ok to use an insecure hash here because // it's ok to use an insecure hash here because
// if someone can change the emit source then they // if someone can change the emit source then they
// can also change the version hash // can also change the version hash
FastInsecureHasher::new_without_deno_version() // use cli_version param instead crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
.write(bytes) .write(bytes)
// emit should not be re-used between cli versions // emit should not be re-used between cli versions
.write_str(cli_version) .write_str(self.cli_version)
.finish() .finish()
}
} }
#[cfg(test)] #[cfg(test)]
@ -165,11 +165,11 @@ mod test {
let disk_cache = DiskCache::new(temp_dir.path().as_path()); let disk_cache = DiskCache::new(temp_dir.path().as_path());
let cache = EmitCache { let cache = EmitCache {
disk_cache: disk_cache.clone(), disk_cache: disk_cache.clone(),
file_serializer: EmitFileSerializer {
cli_version: "1.0.0", cli_version: "1.0.0",
},
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
}; };
let to_string =
|bytes: Vec<u8>| -> String { String::from_utf8(bytes).unwrap() };
let specifier1 = let specifier1 =
ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts")) ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts"))
@ -186,18 +186,17 @@ mod test {
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
// providing the correct source hash // providing the correct source hash
assert_eq!( assert_eq!(
cache.get_emit_code(&specifier1, 10).map(to_string), cache.get_emit_code(&specifier1, 10),
Some(emit_code1.clone()), Some(emit_code1.clone()),
); );
assert_eq!( assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2));
cache.get_emit_code(&specifier2, 2).map(to_string),
Some(emit_code2)
);
// try changing the cli version (should not load previous ones) // try changing the cli version (should not load previous ones)
let cache = EmitCache { let cache = EmitCache {
disk_cache: disk_cache.clone(), disk_cache: disk_cache.clone(),
file_serializer: EmitFileSerializer {
cli_version: "2.0.0", cli_version: "2.0.0",
},
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
}; };
assert_eq!(cache.get_emit_code(&specifier1, 10), None); assert_eq!(cache.get_emit_code(&specifier1, 10), None);
@ -206,21 +205,17 @@ mod test {
// recreating the cache should still load the data because the CLI version is the same // recreating the cache should still load the data because the CLI version is the same
let cache = EmitCache { let cache = EmitCache {
disk_cache, disk_cache,
file_serializer: EmitFileSerializer {
cli_version: "2.0.0", cli_version: "2.0.0",
},
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
}; };
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
cache.get_emit_code(&specifier1, 5).map(to_string),
Some(emit_code1)
);
// adding when already exists should not cause issue // adding when already exists should not cause issue
let emit_code3 = "asdf".to_string(); let emit_code3 = "asdf".to_string();
cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes()); cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes());
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
cache.get_emit_code(&specifier1, 20).map(to_string),
Some(emit_code3)
);
} }
} }
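For context, the emit cache hunks above drop the separate `.meta` file and instead append the metadata as the final line of the cached emit, in the form `// denoCacheMetadata=<source_hash>,<emit_hash>`. A minimal standalone sketch of reading that trailer, assuming only the format shown above (`split_trailer` is an illustrative name, not part of the real `EmitFileSerializer`):

// Sketch only: split a cached emit into its code and the two hashes stored
// on the trailing `// denoCacheMetadata=` line.
const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";

fn split_trailer(cached: &str) -> Option<(&str, u64, u64)> {
  let idx = cached.rfind(LAST_LINE_PREFIX)?;
  let (code, trailer) = cached.split_at(idx);
  let hashes = trailer.strip_prefix(LAST_LINE_PREFIX)?;
  let (source_hash, emit_hash) = hashes.trim_end().split_once(',')?;
  Some((code, source_hash.parse().ok()?, emit_hash.parse().ok()?))
}

fn main() {
  let cached = "console.log(1);\n// denoCacheMetadata=10,42";
  assert_eq!(split_trailer(cached), Some(("console.log(1);", 10, 42)));
}

As in the diff, a hash that fails to parse or does not match is treated as a cache miss (`None`) rather than an error.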

cli/cache/mod.rs

@ -1,13 +1,21 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::jsr_url;
use crate::args::CacheSetting; use crate::args::CacheSetting;
use crate::errors::get_error_class_name; use crate::errors::get_error_class_name;
use crate::file_fetcher::FetchNoFollowOptions; use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect; use crate::file_fetcher::FileOrRedirect;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CliNodeResolver;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use crate::util::path::specifier_has_extension;
use crate::util::text_encoding::arc_str_to_bytes;
use crate::util::text_encoding::from_utf8_lossy_owned;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_core::futures; use deno_core::futures;
@ -52,6 +60,7 @@ pub use fast_check::FastCheckCache;
pub use incremental::IncrementalCache; pub use incremental::IncrementalCache;
pub use module_info::ModuleInfoCache; pub use module_info::ModuleInfoCache;
pub use node::NodeAnalysisCache; pub use node::NodeAnalysisCache;
pub use parsed_source::EsmOrCjsChecker;
pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::LazyGraphSourceParser;
pub use parsed_source::ParsedSourceCache; pub use parsed_source::ParsedSourceCache;
@ -62,12 +71,8 @@ pub const CACHE_PERM: u32 = 0o644;
pub struct RealDenoCacheEnv; pub struct RealDenoCacheEnv;
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv { impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Option<Vec<u8>>> { fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
match std::fs::read(path) { std::fs::read(path)
Ok(s) => Ok(Some(s)),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(err),
}
} }
fn atomic_write_file( fn atomic_write_file(
@ -78,6 +83,14 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
atomic_write_file_with_retries(path, bytes, CACHE_PERM) atomic_write_file_with_retries(path, bytes, CACHE_PERM)
} }
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
crate::util::fs::canonicalize_path(path)
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
std::fs::create_dir_all(path)
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> { fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
match std::fs::metadata(path) { match std::fs::metadata(path) {
Ok(metadata) => Ok(Some( Ok(metadata) => Ok(Some(
@ -97,43 +110,117 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
} }
} }
#[derive(Debug, Clone)]
pub struct DenoCacheEnvFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
self
.0
.read_file_sync(path, None)
.map_err(|err| err.into_io_error())
}
fn atomic_write_file(
&self,
path: &Path,
bytes: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries_and_fs(
&AtomicWriteFileFsAdapter {
fs: self.0,
write_mode: CACHE_PERM,
},
path,
bytes,
)
}
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
self
.0
.mkdir_sync(path, true, None)
.map_err(|e| e.into_io_error())
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
self
.0
.stat_sync(path)
.map(|stat| {
stat
.mtime
.map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
})
.map_err(|e| e.into_io_error())
}
fn is_file(&self, path: &Path) -> bool {
self.0.is_file_sync(path)
}
fn time_now(&self) -> SystemTime {
SystemTime::now()
}
}
pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>; pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>; pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
pub type LocalLspHttpCache = pub type LocalLspHttpCache =
deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>; deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
pub use deno_cache_dir::HttpCache; pub use deno_cache_dir::HttpCache;
pub struct FetchCacherOptions {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
pub permissions: PermissionsContainer,
/// If we're publishing for `deno publish`.
pub is_deno_publish: bool,
pub unstable_detect_cjs: bool,
}
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs. /// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher { pub struct FetchCacher {
emit_cache: Arc<EmitCache>, pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer, permissions: PermissionsContainer,
is_deno_publish: bool,
unstable_detect_cjs: bool,
cache_info_enabled: bool, cache_info_enabled: bool,
} }
impl FetchCacher { impl FetchCacher {
pub fn new( pub fn new(
emit_cache: Arc<EmitCache>, esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer, options: FetchCacherOptions,
) -> Self { ) -> Self {
Self { Self {
emit_cache,
file_fetcher, file_fetcher,
file_header_overrides, esm_or_cjs_checker,
global_http_cache, global_http_cache,
node_resolver,
npm_resolver, npm_resolver,
module_info_cache, module_info_cache,
permissions, file_header_overrides: options.file_header_overrides,
permissions: options.permissions,
is_deno_publish: options.is_deno_publish,
unstable_detect_cjs: options.unstable_detect_cjs,
cache_info_enabled: false, cache_info_enabled: false,
} }
} }
@ -144,15 +231,7 @@ impl FetchCacher {
self.cache_info_enabled = true; self.cache_info_enabled = true;
} }
// DEPRECATED: Where the file is stored and how it's stored should be an implementation /// Only use this for `deno info`.
// detail of the cache.
//
// todo(dsheret): remove once implementing
// * https://github.com/denoland/deno/issues/17707
// * https://github.com/denoland/deno/issues/17703
#[deprecated(
note = "There should not be a way to do this because the file may not be cached at a local path in the future."
)]
fn get_local_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> { fn get_local_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
// TODO(@kitsonk) fix when deno_graph does not query cache for synthetic // TODO(@kitsonk) fix when deno_graph does not query cache for synthetic
// modules // modules
@ -179,15 +258,7 @@ impl Loader for FetchCacher {
#[allow(deprecated)] #[allow(deprecated)]
let local = self.get_local_path(specifier)?; let local = self.get_local_path(specifier)?;
if local.is_file() { if local.is_file() {
let emit = self Some(CacheInfo { local: Some(local) })
.emit_cache
.get_emit_filepath(specifier)
.filter(|p| p.is_file());
Some(CacheInfo {
local: Some(local),
emit,
map: None,
})
} else { } else {
None None
} }
@ -200,9 +271,8 @@ impl Loader for FetchCacher {
) -> LoadFuture { ) -> LoadFuture {
use deno_graph::source::CacheSetting as LoaderCacheSetting; use deno_graph::source::CacheSetting as LoaderCacheSetting;
if specifier.scheme() == "file" if specifier.scheme() == "file" {
&& specifier.path().contains("/node_modules/") if specifier.path().contains("/node_modules/") {
{
// The specifier might be in a completely different symlinked tree than // The specifier might be in a completely different symlinked tree than
// what the node_modules url is in (ex. `/my-project-1/node_modules` // what the node_modules url is in (ex. `/my-project-1/node_modules`
// symlinked to `/my-project-2/node_modules`), so first we checked if the path // symlinked to `/my-project-2/node_modules`), so first we checked if the path
@ -217,10 +287,74 @@ impl Loader for FetchCacher {
} }
} }
// make local CJS modules external to the graph
if specifier_has_extension(specifier, "cjs") {
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
}
if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") {
if let Ok(Some(pkg_json)) =
self.node_resolver.get_closest_package_json(specifier)
{
if pkg_json.typ == "commonjs" {
if let Ok(path) = specifier.to_file_path() {
if let Ok(bytes) = std::fs::read(&path) {
let text: Arc<str> = from_utf8_lossy_owned(bytes).into();
let is_es_module = match self.esm_or_cjs_checker.is_esm(
specifier,
text.clone(),
MediaType::JavaScript,
) {
Ok(value) => value,
Err(err) => {
return Box::pin(futures::future::ready(Err(err.into())));
}
};
if !is_es_module {
self.node_resolver.mark_cjs_resolution(specifier.clone());
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
} else {
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::Module {
specifier: specifier.clone(),
content: arc_str_to_bytes(text),
maybe_headers: None,
},
))));
}
}
}
}
}
}
}
if self.is_deno_publish
&& matches!(specifier.scheme(), "http" | "https")
&& !specifier.as_str().starts_with(jsr_url().as_str())
{
// mark non-JSR remote modules as external so we don't need --allow-import
// permissions as these will error out later when publishing
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
}
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
let file_header_overrides = self.file_header_overrides.clone(); let file_header_overrides = self.file_header_overrides.clone();
let permissions = self.permissions.clone(); let permissions = self.permissions.clone();
let specifier = specifier.clone(); let specifier = specifier.clone();
let is_statically_analyzable = !options.was_dynamic_root;
async move { async move {
let maybe_cache_setting = match options.cache_setting { let maybe_cache_setting = match options.cache_setting {
@ -239,7 +373,12 @@ impl Loader for FetchCacher {
.fetch_no_follow_with_options(FetchNoFollowOptions { .fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions { fetch_options: FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: &permissions, permissions: if is_statically_analyzable {
FetchPermissionsOptionRef::StaticContainer(&permissions)
} else {
FetchPermissionsOptionRef::DynamicContainer(&permissions)
},
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(), maybe_cache_setting: maybe_cache_setting.as_ref(),
}, },
@ -293,6 +432,7 @@ impl Loader for FetchCacher {
fn cache_module_info( fn cache_module_info(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType,
source: &Arc<[u8]>, source: &Arc<[u8]>,
module_info: &deno_graph::ModuleInfo, module_info: &deno_graph::ModuleInfo,
) { ) {
@ -300,7 +440,7 @@ impl Loader for FetchCacher {
let source_hash = CacheDBHash::from_source(source); let source_hash = CacheDBHash::from_source(source);
let result = self.module_info_cache.set_module_info( let result = self.module_info_cache.set_module_info(
specifier, specifier,
MediaType::from_specifier(specifier), media_type,
source_hash, source_hash,
module_info, module_info,
); );


@ -5,6 +5,7 @@ use std::sync::Arc;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnostic;
use deno_ast::ParsedSource; use deno_ast::ParsedSource;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_graph::CapturingModuleParser; use deno_graph::CapturingModuleParser;
@ -149,3 +150,42 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCache {
} }
} }
} }
pub struct EsmOrCjsChecker {
parsed_source_cache: Arc<ParsedSourceCache>,
}
impl EsmOrCjsChecker {
pub fn new(parsed_source_cache: Arc<ParsedSourceCache>) -> Self {
Self {
parsed_source_cache,
}
}
pub fn is_esm(
&self,
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> Result<bool, ParseDiagnostic> {
// todo(dsherret): add a file cache here to avoid parsing with swc on each run
let source = match self.parsed_source_cache.get_parsed_source(specifier) {
Some(source) => source.clone(),
None => {
let source = deno_ast::parse_program(deno_ast::ParseParams {
specifier: specifier.clone(),
text: source,
media_type,
capture_tokens: true, // capture because it's used for cjs export analysis
scope_analysis: false,
maybe_syntax: None,
})?;
self
.parsed_source_cache
.set_parsed_source(specifier.clone(), source.clone());
source
}
};
Ok(source.is_module())
}
}
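For context, `EsmOrCjsChecker::is_esm` above decides ESM vs CJS purely by parsing and asking whether the result is a module. A rough standalone sketch using the same `deno_ast` calls that appear in this hunk; the exact `ParseParams` fields and `ModuleSpecifier::parse` usage are assumptions tied to the crate version used here:

use std::sync::Arc;

// Parses JavaScript and reports whether it is an ES module (import/export
// syntax present); plain CommonJS-style code parses as a script instead.
fn is_esm_source(text: &str) -> Result<bool, deno_ast::ParseDiagnostic> {
  let parsed = deno_ast::parse_program(deno_ast::ParseParams {
    specifier: deno_ast::ModuleSpecifier::parse("file:///probe.js").unwrap(),
    text: Arc::from(text),
    media_type: deno_ast::MediaType::JavaScript,
    capture_tokens: false,
    scope_analysis: false,
    maybe_syntax: None,
  })?;
  Ok(parsed.is_module())
}

fn main() {
  assert!(is_esm_source("export const a = 1;").unwrap());
  assert!(!is_esm_source("module.exports = {};").unwrap());
}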


@ -4,3 +4,6 @@ disallowed-methods = [
disallowed-types = [ disallowed-types = [
{ path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" }, { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },
] ]
ignore-interior-mutability = [
"lsp_types::Uri",
]


@ -5,12 +5,14 @@ use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use deno_ast::SourceMapOption; use deno_ast::SourceMapOption;
use deno_ast::SourceRange;
use deno_ast::SourceRanged;
use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileResult; use deno_ast::TranspileResult;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::ModuleCodeBytes;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::MediaType; use deno_graph::MediaType;
use deno_graph::Module; use deno_graph::Module;
@ -57,6 +59,7 @@ impl Emitter {
continue; continue;
}; };
// todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable()
let is_emittable = matches!( let is_emittable = matches!(
module.media_type, module.media_type,
MediaType::TypeScript MediaType::TypeScript
@ -90,7 +93,7 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
source: &str, source: &str,
) -> Option<Vec<u8>> { ) -> Option<String> {
let source_hash = self.get_source_hash(source); let source_hash = self.get_source_hash(source);
self.emit_cache.get_emit_code(specifier, source_hash) self.emit_cache.get_emit_code(specifier, source_hash)
} }
@ -100,7 +103,7 @@ impl Emitter {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the sync version below // Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, source) {
@ -109,7 +112,7 @@ impl Emitter {
let parsed_source_cache = self.parsed_source_cache.clone(); let parsed_source_cache = self.parsed_source_cache.clone();
let transpile_and_emit_options = let transpile_and_emit_options =
self.transpile_and_emit_options.clone(); self.transpile_and_emit_options.clone();
let transpile_result = deno_core::unsync::spawn_blocking({ let transpiled_source = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone(); let specifier = specifier.clone();
let source = source.clone(); let source = source.clone();
move || -> Result<_, AnyError> { move || -> Result<_, AnyError> {
@ -125,11 +128,12 @@ impl Emitter {
}) })
.await .await
.unwrap()?; .unwrap()?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -139,13 +143,13 @@ impl Emitter {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the async version above // Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let transpile_result = EmitParsedSourceHelper::transpile( let transpiled_source = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache, &self.parsed_source_cache,
specifier, specifier,
source.clone(), source.clone(),
@ -153,11 +157,12 @@ impl Emitter {
&self.transpile_and_emit_options.0, &self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1, &self.transpile_and_emit_options.1,
)?; )?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -223,7 +228,7 @@ impl Emitter {
} }
enum PreEmitResult { enum PreEmitResult {
Cached(ModuleCodeBytes), Cached(String),
NotCached { source_hash: u64 }, NotCached { source_hash: u64 },
} }
@ -241,7 +246,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
if let Some(emit_code) = if let Some(emit_code) =
self.0.emit_cache.get_emit_code(specifier, source_hash) self.0.emit_cache.get_emit_code(specifier, source_hash)
{ {
PreEmitResult::Cached(emit_code.into_boxed_slice().into()) PreEmitResult::Cached(emit_code)
} else { } else {
PreEmitResult::NotCached { source_hash } PreEmitResult::NotCached { source_hash }
} }
@ -254,20 +259,14 @@ impl<'a> EmitParsedSourceHelper<'a> {
media_type: MediaType, media_type: MediaType,
transpile_options: &deno_ast::TranspileOptions, transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions, emit_options: &deno_ast::EmitOptions,
) -> Result<TranspileResult, AnyError> { ) -> Result<String, AnyError> {
// nothing else needs the parsed source at this point, so remove from // nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned // the cache in order to not transpile owned
let parsed_source = parsed_source_cache let parsed_source = parsed_source_cache
.remove_or_parse_module(specifier, source, media_type)?; .remove_or_parse_module(specifier, source, media_type)?;
Ok(parsed_source.transpile(transpile_options, emit_options)?) ensure_no_import_assertion(&parsed_source)?;
} let transpile_result =
parsed_source.transpile(transpile_options, emit_options)?;
pub fn post_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
transpile_result: TranspileResult,
source_hash: u64,
) -> ModuleCodeBytes {
let transpiled_source = match transpile_result { let transpiled_source = match transpile_result {
TranspileResult::Owned(source) => source, TranspileResult::Owned(source) => source,
TranspileResult::Cloned(source) => { TranspileResult::Cloned(source) => {
@ -276,11 +275,90 @@ impl<'a> EmitParsedSourceHelper<'a> {
} }
}; };
debug_assert!(transpiled_source.source_map.is_none()); debug_assert!(transpiled_source.source_map.is_none());
let text = String::from_utf8(transpiled_source.source)?;
Ok(text)
}
pub fn post_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
transpiled_source: &str,
source_hash: u64,
) {
self.0.emit_cache.set_emit_code( self.0.emit_cache.set_emit_code(
specifier, specifier,
source_hash, source_hash,
&transpiled_source.source, transpiled_source.as_bytes(),
); );
transpiled_source.source.into_boxed_slice().into()
} }
} }
// todo(dsherret): this is a temporary measure until we have swc erroring for this
fn ensure_no_import_assertion(
parsed_source: &deno_ast::ParsedSource,
) -> Result<(), AnyError> {
fn has_import_assertion(text: &str) -> bool {
// good enough
text.contains(" assert ") && !text.contains(" with ")
}
fn create_err(
parsed_source: &deno_ast::ParsedSource,
range: SourceRange,
) -> AnyError {
let text_info = parsed_source.text_info_lazy();
let loc = text_info.line_and_column_display(range.start);
let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string();
msg.push_str("\n\n");
msg.push_str(range.text_fast(text_info));
msg.push_str("\n\n");
msg.push_str(&format!(
" at {}:{}:{}\n",
parsed_source.specifier(),
loc.line_number,
loc.column_number,
));
deno_core::anyhow::anyhow!("{}", msg)
}
let Some(module) = parsed_source.program_ref().as_module() else {
return Ok(());
};
for item in &module.body {
match item {
deno_ast::swc::ast::ModuleItem::ModuleDecl(decl) => match decl {
deno_ast::swc::ast::ModuleDecl::Import(n) => {
if n.with.is_some()
&& has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
{
return Err(create_err(parsed_source, n.range()));
}
}
deno_ast::swc::ast::ModuleDecl::ExportAll(n) => {
if n.with.is_some()
&& has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
{
return Err(create_err(parsed_source, n.range()));
}
}
deno_ast::swc::ast::ModuleDecl::ExportNamed(n) => {
if n.with.is_some()
&& has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
{
return Err(create_err(parsed_source, n.range()));
}
}
deno_ast::swc::ast::ModuleDecl::ExportDecl(_)
| deno_ast::swc::ast::ModuleDecl::ExportDefaultDecl(_)
| deno_ast::swc::ast::ModuleDecl::ExportDefaultExpr(_)
| deno_ast::swc::ast::ModuleDecl::TsImportEquals(_)
| deno_ast::swc::ast::ModuleDecl::TsExportAssignment(_)
| deno_ast::swc::ast::ModuleDecl::TsNamespaceExport(_) => {}
},
deno_ast::swc::ast::ModuleItem::Stmt(_) => {}
}
}
Ok(())
}
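The `has_import_assertion` helper above is intentionally a coarse text check ("good enough"), and the error is only raised when swc also reports `n.with.is_some()`. A tiny sketch of just the text heuristic:

// Flags statements that still use the legacy `assert` keyword and not the
// newer `with` keyword (text heuristic only; the diff additionally requires
// `n.with.is_some()` on the swc node before erroring).
fn has_import_assertion(text: &str) -> bool {
  text.contains(" assert ") && !text.contains(" with ")
}

fn main() {
  assert!(has_import_assertion(r#"import a from "./a.json" assert { type: "json" };"#));
  assert!(!has_import_assertion(r#"import a from "./a.json" with { type: "json" };"#));
}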


@ -1,11 +1,12 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::check_warn_tsconfig;
use crate::args::get_root_cert_store; use crate::args::get_root_cert_store;
use crate::args::CaData; use crate::args::CaData;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::Flags; use crate::args::Flags;
use crate::args::PackageJsonInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver; use crate::args::StorageKeyResolver;
use crate::args::TsConfigType; use crate::args::TsConfigType;
use crate::cache::Caches; use crate::cache::Caches;
@ -13,6 +14,7 @@ use crate::cache::CodeCache;
use crate::cache::DenoDir; use crate::cache::DenoDir;
use crate::cache::DenoDirProvider; use crate::cache::DenoDirProvider;
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache; use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache; use crate::cache::HttpCache;
use crate::cache::LocalHttpCache; use crate::cache::LocalHttpCache;
@ -31,17 +33,19 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::node::CliCjsCodeAnalyzer; use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeCodeTranslator;
use crate::npm::create_cli_npm_resolver; use crate::npm::create_cli_npm_resolver;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::resolver::CjsResolutionStore; use crate::resolver::CjsResolutionStore;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver; use crate::resolver::CliNodeResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::NpmModuleLoader; use crate::resolver::NpmModuleLoader;
use crate::resolver::SloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs;
use crate::standalone::DenoCompileBinaryWriter; use crate::standalone::DenoCompileBinaryWriter;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector; use crate::tools::coverage::CoverageCollector;
@ -64,10 +68,13 @@ use deno_core::FeatureChecker;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore; use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer; use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use log::warn; use log::warn;
use node_resolver::analyze::NodeCodeTranslator; use node_resolver::analyze::NodeCodeTranslator;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
@ -165,6 +172,7 @@ struct CliFactoryServices {
http_client_provider: Deferred<Arc<HttpClientProvider>>, http_client_provider: Deferred<Arc<HttpClientProvider>>,
emit_cache: Deferred<Arc<EmitCache>>, emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>, emitter: Deferred<Arc<Emitter>>,
esm_or_cjs_checker: Deferred<Arc<EsmOrCjsChecker>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>, fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>, main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>, maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
@ -180,7 +188,9 @@ struct CliFactoryServices {
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>, node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>, node_resolver: Deferred<Arc<NodeResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>, npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
sloppy_imports_resolver: Deferred<Option<Arc<SloppyImportsResolver>>>, permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
root_permissions_container: Deferred<PermissionsContainer>,
sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,
text_only_progress_bar: Deferred<ProgressBar>, text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>, type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>, cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
@ -290,10 +300,16 @@ impl CliFactory {
.get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly))
} }
pub fn esm_or_cjs_checker(&self) -> &Arc<EsmOrCjsChecker> {
self.services.esm_or_cjs_checker.get_or_init(|| {
Arc::new(EsmOrCjsChecker::new(self.parsed_source_cache().clone()))
})
}
pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> { pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
self.services.global_http_cache.get_or_try_init(|| { self.services.global_http_cache.get_or_try_init(|| {
Ok(Arc::new(GlobalHttpCache::new( Ok(Arc::new(GlobalHttpCache::new(
self.deno_dir()?.deps_folder_path(), self.deno_dir()?.remote_folder_path(),
crate::cache::RealDenoCacheEnv, crate::cache::RealDenoCacheEnv,
))) )))
}) })
@ -304,8 +320,11 @@ impl CliFactory {
let global_cache = self.global_http_cache()?.clone(); let global_cache = self.global_http_cache()?.clone();
match self.cli_options()?.vendor_dir_path() { match self.cli_options()?.vendor_dir_path() {
Some(local_path) => { Some(local_path) => {
let local_cache = let local_cache = LocalHttpCache::new(
LocalHttpCache::new(local_path.clone(), global_cache); local_path.clone(),
global_cache,
deno_cache_dir::GlobalToLocalCopy::Allow,
);
Ok(Arc::new(local_cache)) Ok(Arc::new(local_cache))
} }
None => Ok(global_cache), None => Ok(global_cache),
@ -351,8 +370,8 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
// For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory. // For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory.
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) { create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: fs.clone(), fs: CliDenoResolverFs(fs.clone()),
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() { root_node_modules_dir: Some(match cli_options.node_modules_dir_path() {
Some(node_modules_path) => node_modules_path.to_path_buf(), Some(node_modules_path) => node_modules_path.to_path_buf(),
// path needs to be canonicalized for node resolution // path needs to be canonicalized for node resolution
@ -383,9 +402,7 @@ impl CliFactory {
cache_setting: cli_options.cache_setting(), cache_setting: cli_options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(), text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(), maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(),
package_json_deps_provider: Arc::new(PackageJsonInstallDepsProvider::from_workspace( npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::from_workspace(cli_options.workspace())),
cli_options.workspace(),
)),
npm_system_info: cli_options.npm_system_info(), npm_system_info: cli_options.npm_system_info(),
npmrc: cli_options.npmrc().clone(), npmrc: cli_options.npmrc().clone(),
lifecycle_scripts: cli_options.lifecycle_scripts_config(), lifecycle_scripts: cli_options.lifecycle_scripts_config(),
@ -397,17 +414,16 @@ impl CliFactory {
pub fn sloppy_imports_resolver( pub fn sloppy_imports_resolver(
&self, &self,
) -> Result<Option<&Arc<SloppyImportsResolver>>, AnyError> { ) -> Result<Option<&Arc<CliSloppyImportsResolver>>, AnyError> {
self self
.services .services
.sloppy_imports_resolver .sloppy_imports_resolver
.get_or_try_init(|| { .get_or_try_init(|| {
Ok( Ok(self.cli_options()?.unstable_sloppy_imports().then(|| {
self Arc::new(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
.cli_options()? self.fs().clone(),
.unstable_sloppy_imports() )))
.then(|| Arc::new(SloppyImportsResolver::new(self.fs().clone()))), }))
)
}) })
.map(|maybe| maybe.as_ref()) .map(|maybe| maybe.as_ref())
} }
@ -519,9 +535,7 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
let ts_config_result = let ts_config_result =
cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?; cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
if let Some(ignored_options) = ts_config_result.maybe_ignored_options { check_warn_tsconfig(&ts_config_result);
warn!("{}", ignored_options);
}
let (transpile_options, emit_options) = let (transpile_options, emit_options) =
crate::args::ts_config_to_transpile_and_emit_options( crate::args::ts_config_to_transpile_and_emit_options(
ts_config_result.ts_config, ts_config_result.ts_config,
@ -568,8 +582,13 @@ impl CliFactory {
let caches = self.caches()?; let caches = self.caches()?;
let node_analysis_cache = let node_analysis_cache =
NodeAnalysisCache::new(caches.node_analysis_db()); NodeAnalysisCache::new(caches.node_analysis_db());
let cjs_esm_analyzer = let node_resolver = self.cli_node_resolver().await?.clone();
CliCjsCodeAnalyzer::new(node_analysis_cache, self.fs().clone()); let cjs_esm_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache,
self.fs().clone(),
node_resolver,
Some(self.parsed_source_cache().clone()),
);
Ok(Arc::new(NodeCodeTranslator::new( Ok(Arc::new(NodeCodeTranslator::new(
cjs_esm_analyzer, cjs_esm_analyzer,
@ -609,16 +628,18 @@ impl CliFactory {
Ok(Arc::new(ModuleGraphBuilder::new( Ok(Arc::new(ModuleGraphBuilder::new(
cli_options.clone(), cli_options.clone(),
self.caches()?.clone(), self.caches()?.clone(),
self.esm_or_cjs_checker().clone(),
self.fs().clone(), self.fs().clone(),
self.resolver().await?.clone(), self.resolver().await?.clone(),
self.cli_node_resolver().await?.clone(),
self.npm_resolver().await?.clone(), self.npm_resolver().await?.clone(),
self.module_info_cache()?.clone(), self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
cli_options.maybe_lockfile().cloned(), cli_options.maybe_lockfile().cloned(),
self.maybe_file_watcher_reporter().clone(), self.maybe_file_watcher_reporter().clone(),
self.emit_cache()?.clone(),
self.file_fetcher()?.clone(), self.file_fetcher()?.clone(),
self.global_http_cache()?.clone(), self.global_http_cache()?.clone(),
self.root_permissions_container()?.clone(),
))) )))
}) })
.await .await
@ -652,6 +673,7 @@ impl CliFactory {
Ok(Arc::new(MainModuleGraphContainer::new( Ok(Arc::new(MainModuleGraphContainer::new(
self.cli_options()?.clone(), self.cli_options()?.clone(),
self.module_load_preparer().await?.clone(), self.module_load_preparer().await?.clone(),
self.root_permissions_container()?.clone(),
))) )))
}) })
.await .await
@ -709,20 +731,24 @@ impl CliFactory {
.await .await
} }
pub fn permission_desc_parser(
&self,
) -> Result<&Arc<RuntimePermissionDescriptorParser>, AnyError> {
self.services.permission_desc_parser.get_or_try_init(|| {
let fs = self.fs().clone();
Ok(Arc::new(RuntimePermissionDescriptorParser::new(fs)))
})
}
pub fn feature_checker(&self) -> Result<&Arc<FeatureChecker>, AnyError> { pub fn feature_checker(&self) -> Result<&Arc<FeatureChecker>, AnyError> {
self.services.feature_checker.get_or_try_init(|| { self.services.feature_checker.get_or_try_init(|| {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
let mut checker = FeatureChecker::default(); let mut checker = FeatureChecker::default();
checker.set_exit_cb(Box::new(crate::unstable_exit_cb)); checker.set_exit_cb(Box::new(crate::unstable_exit_cb));
checker.set_warn_cb(Box::new(crate::unstable_warn_cb));
if cli_options.legacy_unstable_flag() {
checker.enable_legacy_unstable();
checker.warn_on_legacy_unstable();
}
let unstable_features = cli_options.unstable_features(); let unstable_features = cli_options.unstable_features();
for (flag_name, _, _) in crate::UNSTABLE_GRANULAR_FLAGS { for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
if unstable_features.contains(&flag_name.to_string()) { if unstable_features.contains(&granular_flag.name.to_string()) {
checker.enable_feature(flag_name); checker.enable_feature(granular_flag.name);
} }
} }
@ -736,6 +762,7 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new( Ok(DenoCompileBinaryWriter::new(
self.deno_dir()?, self.deno_dir()?,
self.emitter()?,
self.file_fetcher()?, self.file_fetcher()?,
self.http_client_provider(), self.http_client_provider(),
self.npm_resolver().await?.as_ref(), self.npm_resolver().await?.as_ref(),
@ -744,6 +771,22 @@ impl CliFactory {
)) ))
} }
pub fn root_permissions_container(
&self,
) -> Result<&PermissionsContainer, AnyError> {
self
.services
.root_permissions_container
.get_or_try_init(|| {
let desc_parser = self.permission_desc_parser()?.clone();
let permissions = Permissions::from_options(
desc_parser.as_ref(),
&self.cli_options()?.permissions_options(),
)?;
Ok(PermissionsContainer::new(desc_parser, permissions))
})
}
pub async fn create_cli_main_worker_factory( pub async fn create_cli_main_worker_factory(
&self, &self,
) -> Result<CliMainWorkerFactory, AnyError> { ) -> Result<CliMainWorkerFactory, AnyError> {
@ -752,6 +795,7 @@ impl CliFactory {
let npm_resolver = self.npm_resolver().await?; let npm_resolver = self.npm_resolver().await?;
let fs = self.fs(); let fs = self.fs();
let cli_node_resolver = self.cli_node_resolver().await?; let cli_node_resolver = self.cli_node_resolver().await?;
let cli_npm_resolver = self.npm_resolver().await?.clone();
let maybe_file_watcher_communicator = if cli_options.has_hmr() { let maybe_file_watcher_communicator = if cli_options.has_hmr() {
Some(self.watcher_communicator.clone().unwrap()) Some(self.watcher_communicator.clone().unwrap())
} else { } else {
@ -759,11 +803,18 @@ impl CliFactory {
}; };
Ok(CliMainWorkerFactory::new( Ok(CliMainWorkerFactory::new(
StorageKeyResolver::from_options(cli_options),
cli_options.sub_command().clone(),
npm_resolver.clone(),
node_resolver.clone(),
self.blob_store().clone(), self.blob_store().clone(),
self.cjs_resolutions().clone(),
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
},
self.feature_checker()?.clone(),
self.fs().clone(),
maybe_file_watcher_communicator,
self.maybe_inspector_server()?.clone(),
cli_options.maybe_lockfile().cloned(),
Box::new(CliModuleLoaderFactory::new( Box::new(CliModuleLoaderFactory::new(
cli_options, cli_options,
if cli_options.code_cache_enabled() { if cli_options.code_cache_enabled() {
@ -775,6 +826,7 @@ impl CliFactory {
self.main_module_graph_container().await?.clone(), self.main_module_graph_container().await?.clone(),
self.module_load_preparer().await?.clone(), self.module_load_preparer().await?.clone(),
cli_node_resolver.clone(), cli_node_resolver.clone(),
cli_npm_resolver.clone(),
NpmModuleLoader::new( NpmModuleLoader::new(
self.cjs_resolutions().clone(), self.cjs_resolutions().clone(),
self.node_code_translator().await?.clone(), self.node_code_translator().await?.clone(),
@ -784,26 +836,13 @@ impl CliFactory {
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
self.resolver().await?.clone(), self.resolver().await?.clone(),
)), )),
node_resolver.clone(),
npm_resolver.clone(),
self.root_cert_store_provider().clone(), self.root_cert_store_provider().clone(),
self.fs().clone(), self.root_permissions_container()?.clone(),
maybe_file_watcher_communicator, StorageKeyResolver::from_options(cli_options),
self.maybe_inspector_server()?.clone(), cli_options.sub_command().clone(),
cli_options.maybe_lockfile().cloned(),
self.feature_checker()?.clone(),
self.create_cli_main_worker_options()?, self.create_cli_main_worker_options()?,
cli_options.node_ipc_fd(),
cli_options.serve_port(),
cli_options.serve_host(),
cli_options.enable_future_features(),
// TODO(bartlomieju): temporarily disabled
// cli_options.disable_deprecated_api_warning,
true,
cli_options.verbose_deprecated_api_warning,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
},
)) ))
} }
@ -865,9 +904,12 @@ impl CliFactory {
unsafely_ignore_certificate_errors: cli_options unsafely_ignore_certificate_errors: cli_options
.unsafely_ignore_certificate_errors() .unsafely_ignore_certificate_errors()
.clone(), .clone(),
unstable: cli_options.legacy_unstable_flag(),
create_hmr_runner, create_hmr_runner,
create_coverage_collector, create_coverage_collector,
node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(),
unstable_detect_cjs: cli_options.unstable_detect_cjs(),
}) })
} }
} }


@ -11,7 +11,6 @@ use crate::http_util::HttpClientProvider;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::generic_error; use deno_core::error::generic_error;
@ -22,8 +21,10 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::LoaderChecksum; use deno_graph::source::LoaderChecksum;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::BlobStore; use deno_runtime::deno_web::BlobStore;
use http::header;
use log::debug; use log::debug;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
@ -52,6 +53,25 @@ pub enum FileOrRedirect {
Redirect(ModuleSpecifier), Redirect(ModuleSpecifier),
} }
impl FileOrRedirect {
fn from_deno_cache_entry(
specifier: &ModuleSpecifier,
cache_entry: deno_cache_dir::CacheEntry,
) -> Result<Self, AnyError> {
if let Some(redirect_to) = cache_entry.metadata.headers.get("location") {
let redirect =
deno_core::resolve_import(redirect_to, specifier.as_str())?;
Ok(FileOrRedirect::Redirect(redirect))
} else {
Ok(FileOrRedirect::File(File {
specifier: specifier.clone(),
maybe_headers: Some(cache_entry.metadata.headers),
source: Arc::from(cache_entry.content),
}))
}
}
}
/// A structure representing a source file. /// A structure representing a source file.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub struct File { pub struct File {
@ -117,14 +137,23 @@ impl MemoryFiles {
/// Fetch a source file from the local file system. /// Fetch a source file from the local file system.
fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> { fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
let local = specifier.to_file_path().map_err(|_| { let local = url_to_file_path(specifier).map_err(|_| {
uri_error(format!("Invalid file path.\n Specifier: {specifier}")) uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
})?; })?;
// If it doesn't have an extension, we want to treat it as TypeScript by default
let headers = if local.extension().is_none() {
Some(HashMap::from([(
"content-type".to_string(),
"application/typescript".to_string(),
)]))
} else {
None
};
let bytes = fs::read(local)?; let bytes = fs::read(local)?;
Ok(File { Ok(File {
specifier: specifier.clone(), specifier: specifier.clone(),
maybe_headers: None, maybe_headers: headers,
source: bytes.into(), source: bytes.into(),
}) })
} }
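The `fetch_local` change above now defaults extensionless local files to a TypeScript content type instead of attaching no headers at all. A standalone sketch of just that header decision (`default_headers_for` is an illustrative name, not the CLI's API):

use std::collections::HashMap;
use std::path::Path;

// Extensionless local files get a default TypeScript content type; files
// with an extension keep `maybe_headers: None` as before.
fn default_headers_for(path: &Path) -> Option<HashMap<String, String>> {
  if path.extension().is_none() {
    Some(HashMap::from([(
      "content-type".to_string(),
      "application/typescript".to_string(),
    )]))
  } else {
    None
  }
}

fn main() {
  assert!(default_headers_for(Path::new("/usr/local/bin/deno_task")).is_some());
  assert!(default_headers_for(Path::new("mod.ts")).is_none());
}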
@ -143,9 +172,17 @@ fn get_validated_scheme(
} }
} }
#[derive(Debug, Copy, Clone)]
pub enum FetchPermissionsOptionRef<'a> {
AllowAll,
DynamicContainer(&'a PermissionsContainer),
StaticContainer(&'a PermissionsContainer),
}
pub struct FetchOptions<'a> { pub struct FetchOptions<'a> {
pub specifier: &'a ModuleSpecifier, pub specifier: &'a ModuleSpecifier,
pub permissions: &'a PermissionsContainer, pub permissions: FetchPermissionsOptionRef<'a>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_accept: Option<&'a str>, pub maybe_accept: Option<&'a str>,
pub maybe_cache_setting: Option<&'a CacheSetting>, pub maybe_cache_setting: Option<&'a CacheSetting>,
} }
@ -238,45 +275,32 @@ impl FileFetcher {
); );
let cache_key = self.http_cache.cache_item_key(specifier)?; // compute this once let cache_key = self.http_cache.cache_item_key(specifier)?; // compute this once
let Some(headers) = self.http_cache.read_headers(&cache_key)? else { let result = self.http_cache.get(
return Ok(None);
};
if let Some(redirect_to) = headers.get("location") {
let redirect =
deno_core::resolve_import(redirect_to, specifier.as_str())?;
return Ok(Some(FileOrRedirect::Redirect(redirect)));
}
let result = self.http_cache.read_file_bytes(
&cache_key, &cache_key,
maybe_checksum maybe_checksum
.as_ref() .as_ref()
.map(|c| deno_cache_dir::Checksum::new(c.as_str())), .map(|c| deno_cache_dir::Checksum::new(c.as_str())),
deno_cache_dir::GlobalToLocalCopy::Allow,
); );
let bytes = match result { match result {
Ok(Some(bytes)) => bytes, Ok(Some(cache_data)) => Ok(Some(FileOrRedirect::from_deno_cache_entry(
Ok(None) => return Ok(None), specifier, cache_data,
)?)),
Ok(None) => Ok(None),
Err(err) => match err { Err(err) => match err {
deno_cache_dir::CacheReadFileError::Io(err) => return Err(err.into()), deno_cache_dir::CacheReadFileError::Io(err) => Err(err.into()),
deno_cache_dir::CacheReadFileError::ChecksumIntegrity(err) => { deno_cache_dir::CacheReadFileError::ChecksumIntegrity(err) => {
// convert to the equivalent deno_graph error so that it // convert to the equivalent deno_graph error so that it
// enhances it if this is passed to deno_graph // enhances it if this is passed to deno_graph
return Err( Err(
deno_graph::source::ChecksumIntegrityError { deno_graph::source::ChecksumIntegrityError {
actual: err.actual, actual: err.actual,
expected: err.expected, expected: err.expected,
} }
.into(), .into(),
); )
} }
}, },
}; }
Ok(Some(FileOrRedirect::File(File {
specifier: specifier.clone(),
maybe_headers: Some(headers),
source: Arc::from(bytes),
})))
} }
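The cached-fetch path above now reads headers and body as a single cache entry and turns a `location` header into a redirect. A simplified, std-only model of that branching (the real code also resolves the redirect target against the requesting specifier and verifies checksums, which is omitted here):

```rust
use std::collections::HashMap;

// Illustrative cache entry: headers and content travel together.
struct CacheEntry {
    headers: HashMap<String, String>,
    content: Vec<u8>,
}

enum FileOrRedirect {
    File(Vec<u8>),
    Redirect(String),
}

// A `location` header makes the entry a redirect; otherwise it is a file.
fn from_cache_entry(entry: CacheEntry) -> FileOrRedirect {
    match entry.headers.get("location") {
        Some(location) => FileOrRedirect::Redirect(location.clone()),
        None => FileOrRedirect::File(entry.content),
    }
}

fn main() {
    let entry = CacheEntry {
        headers: HashMap::from([(
            "location".to_string(),
            "https://example.com/b.ts".to_string(),
        )]),
        content: Vec::new(),
    };
    match from_cache_entry(entry) {
        FileOrRedirect::Redirect(to) => println!("redirect to {to}"),
        FileOrRedirect::File(bytes) => println!("file with {} bytes", bytes.len()),
    }
}
```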
/// Convert a data URL into a file, resulting in an error if the URL is /// Convert a data URL into a file, resulting in an error if the URL is
@ -311,7 +335,7 @@ impl FileFetcher {
) )
})?; })?;
let bytes = blob.read_all().await?; let bytes = blob.read_all().await;
let headers = let headers =
HashMap::from([("content-type".to_string(), blob.media_type.clone())]); HashMap::from([("content-type".to_string(), blob.media_type.clone())]);
@ -328,6 +352,7 @@ impl FileFetcher {
maybe_accept: Option<&str>, maybe_accept: Option<&str>,
cache_setting: &CacheSetting, cache_setting: &CacheSetting,
maybe_checksum: Option<&LoaderChecksum>, maybe_checksum: Option<&LoaderChecksum>,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<FileOrRedirect, AnyError> { ) -> Result<FileOrRedirect, AnyError> {
debug!( debug!(
"FileFetcher::fetch_remote_no_follow - specifier: {}", "FileFetcher::fetch_remote_no_follow - specifier: {}",
@ -363,12 +388,30 @@ impl FileFetcher {
); );
} }
let maybe_etag = self let maybe_etag_cache_entry = self
.http_cache .http_cache
.cache_item_key(specifier) .cache_item_key(specifier)
.ok() .ok()
.and_then(|key| self.http_cache.read_headers(&key).ok().flatten()) .and_then(|key| {
.and_then(|headers| headers.get("etag").cloned()); self
.http_cache
.get(
&key,
maybe_checksum
.as_ref()
.map(|c| deno_cache_dir::Checksum::new(c.as_str())),
)
.ok()
.flatten()
})
.and_then(|cache_entry| {
cache_entry
.metadata
.headers
.get("etag")
.cloned()
.map(|etag| (cache_entry, etag))
});
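The etag is now pulled from the already-validated cache entry and kept alongside it, so a later `304 Not Modified` response can reuse that entry without a second cache read. A sketch of the pairing, again with illustrative types:

```rust
use std::collections::HashMap;

struct CacheEntry {
    headers: HashMap<String, String>,
    content: Vec<u8>,
}

// Keep the cache entry and its etag together; if either is missing, no
// conditional request is made.
fn etag_with_entry(entry: Option<CacheEntry>) -> Option<(CacheEntry, String)> {
    let entry = entry?;
    let etag = entry.headers.get("etag").cloned()?;
    Some((entry, etag))
}

fn main() {
    let entry = CacheEntry {
        headers: HashMap::from([("etag".to_string(), "\"abc\"".to_string())]),
        content: b"export {};".to_vec(),
    };
    let (cached, etag) = etag_with_entry(Some(entry)).unwrap();
    assert_eq!(etag, "\"abc\"");
    assert_eq!(cached.content, b"export {};".to_vec());
}
```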
let maybe_auth_token = self.auth_tokens.get(specifier); let maybe_auth_token = self.auth_tokens.get(specifier);
async fn handle_request_or_server_error( async fn handle_request_or_server_error(
@ -390,7 +433,6 @@ impl FileFetcher {
} }
} }
let mut maybe_etag = maybe_etag;
let mut retried = false; // retry intermittent failures let mut retried = false; // retry intermittent failures
let result = loop { let result = loop {
let result = match self let result = match self
@ -399,31 +441,18 @@ impl FileFetcher {
.fetch_no_follow(FetchOnceArgs { .fetch_no_follow(FetchOnceArgs {
url: specifier.clone(), url: specifier.clone(),
maybe_accept: maybe_accept.map(ToOwned::to_owned), maybe_accept: maybe_accept.map(ToOwned::to_owned),
maybe_etag: maybe_etag.clone(), maybe_etag: maybe_etag_cache_entry
.as_ref()
.map(|(_, etag)| etag.clone()),
maybe_auth_token: maybe_auth_token.clone(), maybe_auth_token: maybe_auth_token.clone(),
maybe_auth: maybe_auth.clone(),
maybe_progress_guard: maybe_progress_guard.as_ref(), maybe_progress_guard: maybe_progress_guard.as_ref(),
}) })
.await? .await?
{ {
FetchOnceResult::NotModified => { FetchOnceResult::NotModified => {
let file_or_redirect = let (cache_entry, _) = maybe_etag_cache_entry.unwrap();
self.fetch_cached_no_follow(specifier, maybe_checksum)?; FileOrRedirect::from_deno_cache_entry(specifier, cache_entry)
match file_or_redirect {
Some(file_or_redirect) => Ok(file_or_redirect),
None => {
// Someone may have deleted the body from the cache since
// it's currently stored in a separate file from the headers,
// so delete the etag and try again
if maybe_etag.is_some() {
debug!("Cache body not found. Trying again without etag.");
maybe_etag = None;
continue;
} else {
// should never happen
bail!("Your deno cache directory is in an unrecoverable state. Please delete it and try again.")
}
}
}
} }
FetchOnceResult::Redirect(redirect_url, headers) => { FetchOnceResult::Redirect(redirect_url, headers) => {
self.http_cache.set(specifier, headers, &[])?; self.http_cache.set(specifier, headers, &[])?;
@ -507,16 +536,54 @@ impl FileFetcher {
} }
} }
#[inline(always)]
pub async fn fetch_bypass_permissions(
&self,
specifier: &ModuleSpecifier,
) -> Result<File, AnyError> {
self
.fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll)
.await
}
#[inline(always)]
pub async fn fetch_bypass_permissions_with_maybe_auth(
&self,
specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<File, AnyError> {
self
.fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll)
.await
}
/// Fetch a source file and asynchronously return it. /// Fetch a source file and asynchronously return it.
#[inline(always)]
pub async fn fetch( pub async fn fetch(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
permissions: &PermissionsContainer, permissions: &PermissionsContainer,
) -> Result<File, AnyError> {
self
.fetch_inner(
specifier,
None,
FetchPermissionsOptionRef::StaticContainer(permissions),
)
.await
}
async fn fetch_inner(
&self,
specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
permissions: FetchPermissionsOptionRef<'_>,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
self self
.fetch_with_options(FetchOptions { .fetch_with_options(FetchOptions {
specifier, specifier,
permissions, permissions,
maybe_auth,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: None, maybe_cache_setting: None,
}) })
@ -536,12 +603,14 @@ impl FileFetcher {
max_redirect: usize, max_redirect: usize,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
let mut specifier = Cow::Borrowed(options.specifier); let mut specifier = Cow::Borrowed(options.specifier);
let mut maybe_auth = options.maybe_auth.clone();
for _ in 0..=max_redirect { for _ in 0..=max_redirect {
match self match self
.fetch_no_follow_with_options(FetchNoFollowOptions { .fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions { fetch_options: FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: options.permissions, permissions: options.permissions,
maybe_auth: maybe_auth.clone(),
maybe_accept: options.maybe_accept, maybe_accept: options.maybe_accept,
maybe_cache_setting: options.maybe_cache_setting, maybe_cache_setting: options.maybe_cache_setting,
}, },
@ -553,6 +622,10 @@ impl FileFetcher {
return Ok(file); return Ok(file);
} }
FileOrRedirect::Redirect(redirect_specifier) => { FileOrRedirect::Redirect(redirect_specifier) => {
// If we were redirected to another origin, don't send the auth header anymore.
if redirect_specifier.origin() != specifier.origin() {
maybe_auth = None;
}
specifier = Cow::Owned(redirect_specifier); specifier = Cow::Owned(redirect_specifier);
} }
} }
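The redirect loop above drops the explicit auth header as soon as a redirect leaves the original origin, so credentials are never forwarded to an unrelated host. A minimal model of that rule (`Origin` stands in for `Url::origin()`):

```rust
// Simplified origin: scheme + host + port, as compared by Url::origin().
#[derive(PartialEq, Eq, Clone, Debug)]
struct Origin {
    scheme: String,
    host: String,
    port: u16,
}

// Keep the auth header only while the redirect stays on the same origin.
fn auth_for_redirect(
    current: &Origin,
    redirect: &Origin,
    auth: Option<(String, String)>,
) -> Option<(String, String)> {
    if redirect == current {
        auth
    } else {
        None
    }
}

fn main() {
    let a = Origin { scheme: "https".into(), host: "deno.land".into(), port: 443 };
    let b = Origin { scheme: "https".into(), host: "example.com".into(), port: 443 };
    let auth = Some(("authorization".to_string(), "Bearer token".to_string()));
    assert!(auth_for_redirect(&a, &a, auth.clone()).is_some());
    assert!(auth_for_redirect(&a, &b, auth).is_none());
}
```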
@ -575,7 +648,23 @@ impl FileFetcher {
specifier specifier
); );
let scheme = get_validated_scheme(specifier)?; let scheme = get_validated_scheme(specifier)?;
options.permissions.check_specifier(specifier)?; match options.permissions {
FetchPermissionsOptionRef::AllowAll => {
// allow
}
FetchPermissionsOptionRef::StaticContainer(permissions) => {
permissions.check_specifier(
specifier,
deno_runtime::deno_permissions::CheckSpecifierKind::Static,
)?;
}
FetchPermissionsOptionRef::DynamicContainer(permissions) => {
permissions.check_specifier(
specifier,
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic,
)?;
}
}
if let Some(file) = self.memory_files.get(specifier) { if let Some(file) = self.memory_files.get(specifier) {
Ok(FileOrRedirect::File(file)) Ok(FileOrRedirect::File(file))
} else if scheme == "file" { } else if scheme == "file" {
@ -601,6 +690,7 @@ impl FileFetcher {
options.maybe_accept, options.maybe_accept,
options.maybe_cache_setting.unwrap_or(&self.cache_setting), options.maybe_cache_setting.unwrap_or(&self.cache_setting),
maybe_checksum, maybe_checksum,
options.maybe_auth,
) )
.await .await
} }
@ -661,7 +751,7 @@ mod tests {
maybe_temp_dir: Option<TempDir>, maybe_temp_dir: Option<TempDir>,
) -> (FileFetcher, TempDir, Arc<BlobStore>) { ) -> (FileFetcher, TempDir, Arc<BlobStore>) {
let temp_dir = maybe_temp_dir.unwrap_or_default(); let temp_dir = maybe_temp_dir.unwrap_or_default();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let blob_store: Arc<BlobStore> = Default::default(); let blob_store: Arc<BlobStore> = Default::default();
let file_fetcher = FileFetcher::new( let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)), Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)),
@ -676,9 +766,7 @@ mod tests {
async fn test_fetch(specifier: &ModuleSpecifier) -> (File, FileFetcher) { async fn test_fetch(specifier: &ModuleSpecifier) -> (File, FileFetcher) {
let (file_fetcher, _) = setup(CacheSetting::ReloadAll, None); let (file_fetcher, _) = setup(CacheSetting::ReloadAll, None);
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(specifier).await;
.fetch(specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
(result.unwrap(), file_fetcher) (result.unwrap(), file_fetcher)
} }
@ -692,7 +780,8 @@ mod tests {
.fetch_with_options_and_max_redirect( .fetch_with_options_and_max_redirect(
FetchOptions { FetchOptions {
specifier, specifier,
permissions: &PermissionsContainer::allow_all(), permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -788,9 +877,7 @@ mod tests {
}; };
file_fetcher.insert_memory_files(file.clone()); file_fetcher.insert_memory_files(file.clone());
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let result_file = result.unwrap(); let result_file = result.unwrap();
assert_eq!(result_file, file); assert_eq!(result_file, file);
@ -801,9 +888,7 @@ mod tests {
let (file_fetcher, _) = setup(CacheSetting::Use, None); let (file_fetcher, _) = setup(CacheSetting::Use, None);
let specifier = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap(); let specifier = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -832,9 +917,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -854,9 +937,7 @@ mod tests {
let specifier = let specifier =
ModuleSpecifier::parse("http://localhost:4545/subdir/mod2.ts").unwrap(); ModuleSpecifier::parse("http://localhost:4545/subdir/mod2.ts").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -874,9 +955,7 @@ mod tests {
.set(&specifier, headers.clone(), file.source.as_bytes()) .set(&specifier, headers.clone(), file.source.as_bytes())
.unwrap(); .unwrap();
let result = file_fetcher_01 let result = file_fetcher_01.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -900,9 +979,7 @@ mod tests {
.set(&specifier, headers.clone(), file.source.as_bytes()) .set(&specifier, headers.clone(), file.source.as_bytes())
.unwrap(); .unwrap();
let result = file_fetcher_02 let result = file_fetcher_02.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -913,7 +990,7 @@ mod tests {
// This creates a totally new instance, simulating another Deno process // This creates a totally new instance, simulating another Deno process
// invocation and indicates to "cache bust". // invocation and indicates to "cache bust".
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = FileFetcher::new( let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new( Arc::new(GlobalHttpCache::new(
location, location,
@ -925,9 +1002,7 @@ mod tests {
Default::default(), Default::default(),
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -941,7 +1016,7 @@ mod tests {
async fn test_fetch_uses_cache() { async fn test_fetch_uses_cache() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let specifier = let specifier =
resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap(); resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap();
@ -958,9 +1033,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let cache_key = let cache_key =
file_fetcher.http_cache.cache_item_key(&specifier).unwrap(); file_fetcher.http_cache.cache_item_key(&specifier).unwrap();
@ -994,9 +1067,7 @@ mod tests {
Default::default(), Default::default(),
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let cache_key = let cache_key =
@ -1033,9 +1104,7 @@ mod tests {
resolve_url("http://localhost:4545/subdir/redirects/redirect1.js") resolve_url("http://localhost:4545/subdir/redirects/redirect1.js")
.unwrap(); .unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
assert_eq!(file.specifier, redirected_specifier); assert_eq!(file.specifier, redirected_specifier);
@ -1074,9 +1143,7 @@ mod tests {
resolve_url("http://localhost:4545/subdir/redirects/redirect1.js") resolve_url("http://localhost:4545/subdir/redirects/redirect1.js")
.unwrap(); .unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
assert_eq!(file.specifier, redirected_02_specifier); assert_eq!(file.specifier, redirected_02_specifier);
@ -1115,7 +1182,7 @@ mod tests {
async fn test_fetch_uses_cache_with_redirects() { async fn test_fetch_uses_cache_with_redirects() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let specifier = let specifier =
resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap(); resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap();
let redirected_specifier = let redirected_specifier =
@ -1134,9 +1201,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let cache_key = file_fetcher let cache_key = file_fetcher
@ -1174,7 +1239,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher
.fetch(&redirected_specifier, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&redirected_specifier)
.await; .await;
assert!(result.is_ok()); assert!(result.is_ok());
@ -1215,7 +1280,8 @@ mod tests {
.fetch_with_options_and_max_redirect( .fetch_with_options_and_max_redirect(
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: &PermissionsContainer::allow_all(), permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1228,7 +1294,8 @@ mod tests {
.fetch_with_options_and_max_redirect( .fetch_with_options_and_max_redirect(
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: &PermissionsContainer::allow_all(), permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1256,9 +1323,7 @@ mod tests {
resolve_url("http://localhost:4550/subdir/redirects/redirect1.js") resolve_url("http://localhost:4550/subdir/redirects/redirect1.js")
.unwrap(); .unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
assert_eq!(file.specifier, redirected_specifier); assert_eq!(file.specifier, redirected_specifier);
@ -1287,7 +1352,7 @@ mod tests {
async fn test_fetch_no_remote() { async fn test_fetch_no_remote() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = FileFetcher::new( let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new( Arc::new(GlobalHttpCache::new(
location, location,
@ -1302,9 +1367,7 @@ mod tests {
let specifier = let specifier =
resolve_url("http://localhost:4545/run/002_hello.ts").unwrap(); resolve_url("http://localhost:4545/run/002_hello.ts").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_err()); assert!(result.is_err());
let err = result.unwrap_err(); let err = result.unwrap_err();
assert_eq!(get_custom_error_class(&err), Some("NoRemote")); assert_eq!(get_custom_error_class(&err), Some("NoRemote"));
@ -1315,7 +1378,7 @@ mod tests {
async fn test_fetch_cache_only() { async fn test_fetch_cache_only() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher_01 = FileFetcher::new( let file_fetcher_01 = FileFetcher::new(
Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)), Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)),
CacheSetting::Only, CacheSetting::Only,
@ -1335,22 +1398,16 @@ mod tests {
let specifier = let specifier =
resolve_url("http://localhost:4545/run/002_hello.ts").unwrap(); resolve_url("http://localhost:4545/run/002_hello.ts").unwrap();
let result = file_fetcher_01 let result = file_fetcher_01.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_err()); assert!(result.is_err());
let err = result.unwrap_err(); let err = result.unwrap_err();
assert_eq!(err.to_string(), "Specifier not found in cache: \"http://localhost:4545/run/002_hello.ts\", --cached-only is specified."); assert_eq!(err.to_string(), "Specifier not found in cache: \"http://localhost:4545/run/002_hello.ts\", --cached-only is specified.");
assert_eq!(get_custom_error_class(&err), Some("NotCached")); assert_eq!(get_custom_error_class(&err), Some("NotCached"));
let result = file_fetcher_02 let result = file_fetcher_02.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let result = file_fetcher_01 let result = file_fetcher_01.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
} }
@ -1360,17 +1417,13 @@ mod tests {
let fixture_path = temp_dir.path().join("mod.ts"); let fixture_path = temp_dir.path().join("mod.ts");
let specifier = ModuleSpecifier::from_file_path(&fixture_path).unwrap(); let specifier = ModuleSpecifier::from_file_path(&fixture_path).unwrap();
fs::write(fixture_path.clone(), r#"console.log("hello deno");"#).unwrap(); fs::write(fixture_path.clone(), r#"console.log("hello deno");"#).unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!(&*file.source, r#"console.log("hello deno");"#); assert_eq!(&*file.source, r#"console.log("hello deno");"#);
fs::write(fixture_path, r#"console.log("goodbye deno");"#).unwrap(); fs::write(fixture_path, r#"console.log("goodbye deno");"#).unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!(&*file.source, r#"console.log("goodbye deno");"#); assert_eq!(&*file.source, r#"console.log("goodbye deno");"#);
@ -1384,18 +1437,14 @@ mod tests {
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let specifier = let specifier =
ModuleSpecifier::parse("http://localhost:4545/dynamic").unwrap(); ModuleSpecifier::parse("http://localhost:4545/dynamic").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let first = file.source; let first = file.source;
let (file_fetcher, _) = let (file_fetcher, _) =
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let second = file.source; let second = file.source;
@ -1411,18 +1460,14 @@ mod tests {
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let specifier = let specifier =
ModuleSpecifier::parse("http://localhost:4545/dynamic_cache").unwrap(); ModuleSpecifier::parse("http://localhost:4545/dynamic_cache").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let first = file.source; let first = file.source;
let (file_fetcher, _) = let (file_fetcher, _) =
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let second = file.source; let second = file.source;
@ -1480,13 +1525,10 @@ mod tests {
let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap(); let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap();
let bytes = file_fetcher let bytes = file_fetcher
.http_cache .http_cache
.read_file_bytes( .get(&cache_key, None)
&cache_key,
None,
deno_cache_dir::GlobalToLocalCopy::Allow,
)
.unwrap() .unwrap()
.unwrap(); .unwrap()
.content;
String::from_utf8(bytes).unwrap() String::from_utf8(bytes).unwrap()
} }

View file

@ -3,15 +3,18 @@
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPatternSet;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock; use deno_core::parking_lot::RwLock;
use deno_core::resolve_url_or_path;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use deno_runtime::colors; use deno_runtime::colors;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::module_loader::ModuleLoadPreparer; use crate::module_loader::ModuleLoadPreparer;
use crate::util::fs::collect_specifiers;
use crate::util::path::is_script_ext;
pub trait ModuleGraphContainer: Clone + 'static { pub trait ModuleGraphContainer: Clone + 'static {
/// Acquires a permit to modify the module graph without other code /// Acquires a permit to modify the module graph without other code
@ -42,12 +45,14 @@ pub struct MainModuleGraphContainer {
inner: Arc<RwLock<Arc<ModuleGraph>>>, inner: Arc<RwLock<Arc<ModuleGraph>>>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
root_permissions: PermissionsContainer,
} }
impl MainModuleGraphContainer { impl MainModuleGraphContainer {
pub fn new( pub fn new(
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
root_permissions: PermissionsContainer,
) -> Self { ) -> Self {
Self { Self {
update_queue: Default::default(), update_queue: Default::default(),
@ -56,12 +61,14 @@ impl MainModuleGraphContainer {
)))), )))),
cli_options, cli_options,
module_load_preparer, module_load_preparer,
root_permissions,
} }
} }
pub async fn check_specifiers( pub async fn check_specifiers(
&self, &self,
specifiers: &[ModuleSpecifier], specifiers: &[ModuleSpecifier],
ext_overwrite: Option<&String>,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let mut graph_permit = self.acquire_update_permit().await; let mut graph_permit = self.acquire_update_permit().await;
let graph = graph_permit.graph_mut(); let graph = graph_permit.graph_mut();
@ -72,7 +79,8 @@ impl MainModuleGraphContainer {
specifiers, specifiers,
false, false,
self.cli_options.ts_type_lib_window(), self.cli_options.ts_type_lib_window(),
PermissionsContainer::allow_all(), self.root_permissions.clone(),
ext_overwrite,
) )
.await?; .await?;
graph_permit.commit(); graph_permit.commit();
@ -91,7 +99,7 @@ impl MainModuleGraphContainer {
log::warn!("{} No matching files found.", colors::yellow("Warning")); log::warn!("{} No matching files found.", colors::yellow("Warning"));
} }
self.check_specifiers(&specifiers).await self.check_specifiers(&specifiers, None).await
} }
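The container above now stores the root permissions it was constructed with and reuses them when checking specifiers, instead of building an allow-all set per call. A rough, self-contained illustration of that ownership pattern (the `Permissions` type and its single flag are invented for the example):

```rust
#[derive(Clone)]
struct Permissions {
    allow_net: bool,
}

struct GraphContainer {
    // Stored once at construction and reused for every later check.
    root_permissions: Permissions,
}

impl GraphContainer {
    fn new(root_permissions: Permissions) -> Self {
        Self { root_permissions }
    }

    fn check_specifiers(&self, specifiers: &[&str]) -> Result<(), String> {
        for s in specifiers {
            if s.starts_with("http") && !self.root_permissions.allow_net {
                return Err(format!("network access required for {s}"));
            }
        }
        Ok(())
    }
}

fn main() {
    let container = GraphContainer::new(Permissions { allow_net: false });
    assert!(container.check_specifiers(&["file:///main.ts"]).is_ok());
    assert!(container
        .check_specifiers(&["https://deno.land/x/a/mod.ts"])
        .is_err());
}
```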
pub fn collect_specifiers( pub fn collect_specifiers(
@ -99,24 +107,20 @@ impl MainModuleGraphContainer {
files: &[String], files: &[String],
) -> Result<Vec<ModuleSpecifier>, AnyError> { ) -> Result<Vec<ModuleSpecifier>, AnyError> {
let excludes = self.cli_options.workspace().resolve_config_excludes()?; let excludes = self.cli_options.workspace().resolve_config_excludes()?;
Ok( let include_patterns =
files PathOrPatternSet::from_include_relative_path_or_patterns(
.iter() self.cli_options.initial_cwd(),
.filter_map(|file| { files,
let file_url = )?;
resolve_url_or_path(file, self.cli_options.initial_cwd()).ok()?; let file_patterns = FilePatterns {
if file_url.scheme() != "file" { base: self.cli_options.initial_cwd().to_path_buf(),
return Some(file_url); include: Some(include_patterns),
} exclude: excludes,
// ignore local files that match any of files listed in `exclude` option };
let file_path = file_url.to_file_path().ok()?; collect_specifiers(
if excludes.matches_path(&file_path) { file_patterns,
None self.cli_options.vendor_dir_path().map(ToOwned::to_owned),
} else { |e| is_script_ext(e.path),
Some(file_url)
}
})
.collect::<Vec<_>>(),
) )
} }
} }
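`collect_specifiers` now resolves the CLI file arguments relative to the initial working directory as include patterns and keeps only script-like files. A std-only approximation of that flow (the real implementation goes through deno_config's `FilePatterns` and respects the configured excludes and vendor directory):

```rust
use std::path::{Path, PathBuf};

// Rough equivalent of is_script_ext: keep files with script extensions.
fn is_script_ext(path: &Path) -> bool {
    matches!(
        path.extension().and_then(|e| e.to_str()),
        Some("ts" | "tsx" | "mts" | "cts" | "js" | "jsx" | "mjs" | "cjs")
    )
}

// Resolve CLI arguments against the working directory, then filter.
fn collect(cwd: &Path, files: &[String]) -> Vec<PathBuf> {
    files
        .iter()
        .map(|f| cwd.join(f))
        .filter(|p| is_script_ext(p))
        .collect()
}

fn main() {
    let cwd = Path::new("/project");
    let collected = collect(cwd, &["main.ts".to_string(), "README.md".to_string()]);
    assert_eq!(collected, vec![PathBuf::from("/project/main.ts")]);
}
```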

View file

@ -6,6 +6,7 @@ use crate::args::CliLockfile;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::cache; use crate::cache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache; use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache; use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
@ -14,51 +15,55 @@ use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolver;
use crate::resolver::SloppyImportsResolver; use crate::resolver::CliNodeResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check; use crate::tools::check;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path; use crate::util::fs::canonicalize_path;
use deno_config::workspace::JsrPackageConfig; use deno_config::workspace::JsrPackageConfig;
use deno_emit::LoaderChecksum; use deno_core::anyhow::bail;
use deno_graph::source::LoaderChecksum;
use deno_graph::FillFromLockfileOptions;
use deno_graph::JsrLoadError; use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError; use deno_graph::ModuleLoadError;
use deno_graph::WorkspaceFastCheckOption; use deno_graph::WorkspaceFastCheckOption;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_core::anyhow::bail;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::Loader; use deno_graph::source::Loader;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError; use deno_graph::source::ResolveError;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_graph::Module;
use deno_graph::ModuleError; use deno_graph::ModuleError;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError; use deno_graph::ModuleGraphError;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use deno_graph::SpecifierError; use deno_graph::SpecifierError;
use deno_path_util::url_to_file_path;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use import_map::ImportMapError; use import_map::ImportMapError;
use std::collections::HashSet; use std::collections::HashSet;
use std::error::Error;
use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
#[derive(Clone, Copy)] #[derive(Clone)]
pub struct GraphValidOptions { pub struct GraphValidOptions {
pub check_js: bool, pub check_js: bool,
pub follow_type_only: bool, pub kind: GraphKind,
pub is_vendoring: bool, /// Whether to exit the process for integrity check errors such as
/// Whether to exit the process for lockfile errors. /// lockfile checksum mismatches and JSR integrity failures.
/// Otherwise, surfaces lockfile errors as errors. /// Otherwise, surfaces integrity errors as errors.
pub exit_lockfile_errors: bool, pub exit_integrity_errors: bool,
} }
/// Check if `roots` and their deps are available. Returns `Ok(())` if /// Check if `roots` and their deps are available. Returns `Ok(())` if
@ -74,17 +79,54 @@ pub fn graph_valid(
roots: &[ModuleSpecifier], roots: &[ModuleSpecifier],
options: GraphValidOptions, options: GraphValidOptions,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
if options.exit_lockfile_errors { if options.exit_integrity_errors {
graph_exit_lock_errors(graph); graph_exit_integrity_errors(graph);
} }
let mut errors = graph let mut errors = graph_walk_errors(
graph,
fs,
roots,
GraphWalkErrorsOptions {
check_js: options.check_js,
kind: options.kind,
},
);
if let Some(error) = errors.next() {
Err(error)
} else {
// finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error(
get_error_class_name(err),
format_deno_graph_error(err.as_ref().deref()),
));
}
Ok(())
}
}
#[derive(Clone)]
pub struct GraphWalkErrorsOptions {
pub check_js: bool,
pub kind: GraphKind,
}
/// Walks the errors found in the module graph that should be surfaced to users
/// and enhances them with CLI information.
pub fn graph_walk_errors<'a>(
graph: &'a ModuleGraph,
fs: &'a Arc<dyn FileSystem>,
roots: &'a [ModuleSpecifier],
options: GraphWalkErrorsOptions,
) -> impl Iterator<Item = AnyError> + 'a {
graph
.walk( .walk(
roots.iter(), roots.iter(),
deno_graph::WalkOptions { deno_graph::WalkOptions {
check_js: options.check_js, check_js: options.check_js,
follow_type_only: options.follow_type_only, kind: options.kind,
follow_dynamic: options.is_vendoring, follow_dynamic: false,
prefer_fast_check_graph: false, prefer_fast_check_graph: false,
}, },
) )
@ -108,9 +150,9 @@ pub fn graph_valid(
) )
} }
ModuleGraphError::ModuleError(error) => { ModuleGraphError::ModuleError(error) => {
enhanced_lockfile_error_message(error) enhanced_integrity_error_message(error)
.or_else(|| enhanced_sloppy_imports_error_message(fs, error)) .or_else(|| enhanced_sloppy_imports_error_message(fs, error))
.unwrap_or_else(|| format!("{}", error)) .unwrap_or_else(|| format_deno_graph_error(error))
} }
}; };
@ -131,53 +173,18 @@ pub fn graph_valid(
return None; return None;
} }
if options.is_vendoring {
// warn about failing dynamic imports when vendoring, but don't fail completely
if matches!(
error,
ModuleGraphError::ModuleError(ModuleError::MissingDynamic(_, _))
) {
log::warn!("Ignoring: {}", message);
return None;
}
// ignore invalid downgrades and invalid local imports when vendoring
match &error {
ModuleGraphError::ResolutionError(err)
| ModuleGraphError::TypesResolutionError(err) => {
if matches!(
err,
ResolutionError::InvalidDowngrade { .. }
| ResolutionError::InvalidLocalImport { .. }
) {
return None;
}
}
ModuleGraphError::ModuleError(_) => {}
}
}
Some(custom_error(get_error_class_name(&error.into()), message)) Some(custom_error(get_error_class_name(&error.into()), message))
}); })
if let Some(error) = errors.next() {
Err(error)
} else {
// finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error(get_error_class_name(err), format!("{}", err)));
}
Ok(())
}
} }
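Validation is split so that one function lazily yields enhanced graph errors and the other stops at the first error before surfacing the separate npm resolution result. A bare-bones sketch of that control flow using plain strings in place of the graph error types:

```rust
// Yield "enhanced" errors one by one; callers can stop at the first.
fn walk_errors<'a>(raw: &'a [&'a str]) -> impl Iterator<Item = String> + 'a {
    raw.iter().map(|e| format!("enhanced: {e}"))
}

// Fail on the first walked error, otherwise surface the npm resolution result.
fn valid(raw: &[&str], npm_result: Result<(), String>) -> Result<(), String> {
    if let Some(first) = walk_errors(raw).next() {
        return Err(first);
    }
    npm_result
}

fn main() {
    assert!(valid(&[], Ok(())).is_ok());
    assert_eq!(
        valid(&["missing module"], Ok(())).unwrap_err(),
        "enhanced: missing module"
    );
}
```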
pub fn graph_exit_lock_errors(graph: &ModuleGraph) { pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
for error in graph.module_errors() { for error in graph.module_errors() {
exit_for_lockfile_error(error); exit_for_integrity_error(error);
} }
} }
fn exit_for_lockfile_error(err: &ModuleError) { fn exit_for_integrity_error(err: &ModuleError) {
if let Some(err_message) = enhanced_lockfile_error_message(err) { if let Some(err_message) = enhanced_integrity_error_message(err) {
log::error!("{} {}", colors::red("error:"), err_message); log::error!("{} {}", colors::red("error:"), err_message);
std::process::exit(10); std::process::exit(10);
} }
@ -245,6 +252,19 @@ impl ModuleGraphCreator {
package_configs: &[JsrPackageConfig], package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool, build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> { ) -> Result<ModuleGraph, AnyError> {
fn graph_has_external_remote(graph: &ModuleGraph) -> bool {
// Earlier on, we marked external non-JSR modules as external.
// If the graph contains any of those, it would cause type checking
// to crash, so since publishing is going to fail anyway, skip type
// checking.
graph.modules().any(|module| match module {
deno_graph::Module::External(external_module) => {
matches!(external_module.specifier.scheme(), "http" | "https")
}
_ => false,
})
}
let mut roots = Vec::new(); let mut roots = Vec::new();
for package_config in package_configs { for package_config in package_configs {
roots.extend(package_config.config_file.resolve_export_value_urls()?); roots.extend(package_config.config_file.resolve_export_value_urls()?);
@ -258,9 +278,12 @@ impl ModuleGraphCreator {
}) })
.await?; .await?;
self.graph_valid(&graph)?; self.graph_valid(&graph)?;
if self.options.type_check_mode().is_true() { if self.options.type_check_mode().is_true()
&& !graph_has_external_remote(&graph)
{
self.type_check_graph(graph.clone()).await?; self.type_check_graph(graph.clone()).await?;
} }
if build_fast_check_graph { if build_fast_check_graph {
let fast_check_workspace_members = package_configs let fast_check_workspace_members = package_configs
.iter() .iter()
@ -275,6 +298,7 @@ impl ModuleGraphCreator {
}, },
)?; )?;
} }
Ok(graph) Ok(graph)
} }
@ -357,16 +381,18 @@ pub struct BuildFastCheckGraphOptions<'a> {
pub struct ModuleGraphBuilder { pub struct ModuleGraphBuilder {
options: Arc<CliOptions>, options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, resolver: Arc<CliGraphResolver>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: Arc<cache::EmitCache>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
root_permissions_container: PermissionsContainer,
} }
impl ModuleGraphBuilder { impl ModuleGraphBuilder {
@ -374,30 +400,34 @@ impl ModuleGraphBuilder {
pub fn new( pub fn new(
options: Arc<CliOptions>, options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, resolver: Arc<CliGraphResolver>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: Arc<cache::EmitCache>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
root_permissions_container: PermissionsContainer,
) -> Self { ) -> Self {
Self { Self {
options, options,
caches, caches,
esm_or_cjs_checker,
fs, fs,
resolver, resolver,
node_resolver,
npm_resolver, npm_resolver,
module_info_cache, module_info_cache,
parsed_source_cache, parsed_source_cache,
lockfile, lockfile,
maybe_file_watcher_reporter, maybe_file_watcher_reporter,
emit_cache,
file_fetcher, file_fetcher,
global_http_cache, global_http_cache,
root_permissions_container,
} }
} }
@ -463,7 +493,7 @@ impl ModuleGraphBuilder {
.content .content
.packages .packages
.jsr .jsr
.get(&package_nv.to_string()) .get(package_nv)
.map(|s| LoaderChecksum::new(s.integrity.clone())) .map(|s| LoaderChecksum::new(s.integrity.clone()))
} }
@ -477,7 +507,7 @@ impl ModuleGraphBuilder {
self self
.0 .0
.lock() .lock()
.insert_package(package_nv.to_string(), checksum.into_string()); .insert_package(package_nv.clone(), checksum.into_string());
} }
} }
@ -535,7 +565,12 @@ impl ModuleGraphBuilder {
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
// ensure an "npm install" is done if the user has explicitly // ensure an "npm install" is done if the user has explicitly
// opted into using a node_modules directory // opted into using a node_modules directory
if self.options.node_modules_dir_enablement() == Some(true) { if self
.options
.node_modules_dir()?
.map(|m| m.uses_node_modules_dir())
.unwrap_or(false)
{
if let Some(npm_resolver) = self.npm_resolver.as_managed() { if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?; npm_resolver.ensure_top_level_package_json_install().await?;
} }
@ -547,28 +582,19 @@ impl ModuleGraphBuilder {
// populate the information from the lockfile // populate the information from the lockfile
if let Some(lockfile) = &self.lockfile { if let Some(lockfile) = &self.lockfile {
let lockfile = lockfile.lock(); let lockfile = lockfile.lock();
for (from, to) in &lockfile.content.redirects { graph.fill_from_lockfile(FillFromLockfileOptions {
if let Ok(from) = ModuleSpecifier::parse(from) { redirects: lockfile
if let Ok(to) = ModuleSpecifier::parse(to) { .content
if !matches!(from.scheme(), "file" | "npm" | "jsr") { .redirects
graph.redirects.insert(from, to); .iter()
} .map(|(from, to)| (from.as_str(), to.as_str())),
} package_specifiers: lockfile
} .content
} .packages
for (key, value) in &lockfile.content.packages.specifiers { .specifiers
if let Some(key) = key .iter()
.strip_prefix("jsr:") .map(|(dep, id)| (dep, id.as_str())),
.and_then(|key| PackageReq::from_str(key).ok()) });
{
if let Some(value) = value
.strip_prefix("jsr:")
.and_then(|value| PackageNv::from_str(value).ok())
{
graph.packages.add_nv(key, value);
}
}
}
} }
} }
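The hand-written loops over lockfile redirects and package specifiers are replaced by a single call that takes borrowed `(from, to)` pairs. A std-only sketch of handing the lockfile data over in that shape (types here are illustrative, not deno_graph's `FillFromLockfileOptions`):

```rust
use std::collections::BTreeMap;

// Borrowed pairs, in the shape the graph fill call expects.
struct FillOptions<'a> {
    redirects: Vec<(&'a str, &'a str)>,
    package_specifiers: Vec<(&'a str, &'a str)>,
}

fn fill_options<'a>(
    redirects: &'a BTreeMap<String, String>,
    specifiers: &'a BTreeMap<String, String>,
) -> FillOptions<'a> {
    FillOptions {
        redirects: redirects
            .iter()
            .map(|(from, to)| (from.as_str(), to.as_str()))
            .collect(),
        package_specifiers: specifiers
            .iter()
            .map(|(dep, id)| (dep.as_str(), id.as_str()))
            .collect(),
    }
}

fn main() {
    let redirects = BTreeMap::from([(
        "https://deno.land/x/a/mod.ts".to_string(),
        "https://deno.land/x/a@1.0.0/mod.ts".to_string(),
    )]);
    let specifiers =
        BTreeMap::from([("jsr:@std/path@1".to_string(), "1.0.2".to_string())]);
    let options = fill_options(&redirects, &specifiers);
    assert_eq!(options.redirects.len(), 1);
    assert_eq!(options.package_specifiers.len(), 1);
}
```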
@ -576,6 +602,12 @@ impl ModuleGraphBuilder {
let initial_package_deps_len = graph.packages.package_deps_sum(); let initial_package_deps_len = graph.packages.package_deps_sum();
let initial_package_mappings_len = graph.packages.mappings().len(); let initial_package_mappings_len = graph.packages.mappings().len();
if roots.iter().any(|r| r.scheme() == "npm")
&& self.npm_resolver.as_byonm().is_some()
{
bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead");
}
graph.build(roots, loader, options).await; graph.build(roots, loader, options).await;
let has_redirects_changed = graph.redirects.len() != initial_redirects_len; let has_redirects_changed = graph.redirects.len() != initial_redirects_len;
@ -603,16 +635,15 @@ impl ModuleGraphBuilder {
if has_jsr_package_mappings_changed { if has_jsr_package_mappings_changed {
for (from, to) in graph.packages.mappings() { for (from, to) in graph.packages.mappings() {
lockfile.insert_package_specifier( lockfile.insert_package_specifier(
format!("jsr:{}", from), JsrDepPackageReq::jsr(from.clone()),
format!("jsr:{}", to), to.version.to_string(),
); );
} }
} }
// jsr packages // jsr packages
if has_jsr_package_deps_changed { if has_jsr_package_deps_changed {
for (name, deps) in graph.packages.packages_with_deps() { for (nv, deps) in graph.packages.packages_with_deps() {
lockfile lockfile.add_package_deps(nv, deps.cloned());
.add_package_deps(&name.to_string(), deps.map(|s| s.to_string()));
} }
} }
} }
@ -660,7 +691,7 @@ impl ModuleGraphBuilder {
/// Creates the default loader used for creating a graph. /// Creates the default loader used for creating a graph.
pub fn create_graph_loader(&self) -> cache::FetchCacher { pub fn create_graph_loader(&self) -> cache::FetchCacher {
self.create_fetch_cacher(PermissionsContainer::allow_all()) self.create_fetch_cacher(self.root_permissions_container.clone())
} }
pub fn create_fetch_cacher( pub fn create_fetch_cacher(
@ -668,13 +699,21 @@ impl ModuleGraphBuilder {
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> cache::FetchCacher { ) -> cache::FetchCacher {
cache::FetchCacher::new( cache::FetchCacher::new(
self.emit_cache.clone(), self.esm_or_cjs_checker.clone(),
self.file_fetcher.clone(), self.file_fetcher.clone(),
self.options.resolve_file_header_overrides(),
self.global_http_cache.clone(), self.global_http_cache.clone(),
self.node_resolver.clone(),
self.npm_resolver.clone(), self.npm_resolver.clone(),
self.module_info_cache.clone(), self.module_info_cache.clone(),
cache::FetchCacherOptions {
file_header_overrides: self.options.resolve_file_header_overrides(),
permissions, permissions,
is_deno_publish: matches!(
self.options.sub_command(),
crate::args::DenoSubcommand::Publish { .. }
),
unstable_detect_cjs: self.options.unstable_detect_cjs(),
},
) )
} }
@ -698,42 +737,41 @@ impl ModuleGraphBuilder {
&self.fs, &self.fs,
roots, roots,
GraphValidOptions { GraphValidOptions {
is_vendoring: false, kind: if self.options.type_check_mode().is_true() {
follow_type_only: self.options.type_check_mode().is_true(), GraphKind::All
} else {
GraphKind::CodeOnly
},
check_js: self.options.check_js(), check_js: self.options.check_js(),
exit_lockfile_errors: true, exit_integrity_errors: true,
}, },
) )
} }
} }
pub fn error_for_any_npm_specifier(
graph: &ModuleGraph,
) -> Result<(), AnyError> {
for module in graph.modules() {
match module {
Module::Npm(module) => {
bail!("npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: {}", module.specifier)
}
Module::Node(module) => {
bail!("Node specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: node:{}", module.module_name)
}
Module::Js(_) | Module::Json(_) | Module::External(_) => {}
}
}
Ok(())
}
/// Adds more explanatory information to a resolution error. /// Adds more explanatory information to a resolution error.
pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String { pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
let mut message = format!("{error}"); let mut message = format_deno_graph_error(error);
if let Some(specifier) = get_resolution_error_bare_node_specifier(error) { let maybe_hint = if let Some(specifier) =
get_resolution_error_bare_node_specifier(error)
{
if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS { if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
message.push_str(&format!( Some(format!("If you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")."))
"\nIf you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")." } else {
)); None
} }
} else {
get_import_prefix_missing_error(error).map(|specifier| {
format!(
"If you want to use a JSR or npm package, try running `deno add jsr:{}` or `deno add npm:{}`",
specifier, specifier
)
})
};
if let Some(hint) = maybe_hint {
message.push_str(&format!("\n {} {}", colors::cyan("hint:"), hint));
} }
message message
@ -746,8 +784,8 @@ fn enhanced_sloppy_imports_error_message(
match error { match error {
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => { | ModuleError::Missing(specifier, _) => {
let additional_message = SloppyImportsResolver::new(fs.clone()) let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
.resolve(specifier, ResolutionMode::Execution)? .resolve(specifier, SloppyImportsResolutionMode::Execution)?
.as_suggestion_message(); .as_suggestion_message();
Some(format!( Some(format!(
"{} {} or run with --unstable-sloppy-imports", "{} {} or run with --unstable-sloppy-imports",
@ -759,7 +797,7 @@ fn enhanced_sloppy_imports_error_message(
} }
} }
fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> { fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
match err { match err {
ModuleError::LoadingErr( ModuleError::LoadingErr(
specifier, specifier,
@ -803,7 +841,7 @@ fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> {
"This could be caused by:\n", "This could be caused by:\n",
" * the lock file may be corrupt\n", " * the lock file may be corrupt\n",
" * the source itself may be corrupt\n\n", " * the source itself may be corrupt\n\n",
"Use the --lock-write flag to regenerate the lockfile or --reload to reload the source code from the server." "Investigate the lockfile; delete it to regenerate the lockfile or --reload to reload the source code from the server."
), ),
package_nv, package_nv,
checksum_err.actual, checksum_err.actual,
@ -824,7 +862,7 @@ fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> {
"This could be caused by:\n", "This could be caused by:\n",
" * the lock file may be corrupt\n", " * the lock file may be corrupt\n",
" * the source itself may be corrupt\n\n", " * the source itself may be corrupt\n\n",
"Use the --lock-write flag to regenerate the lockfile or --reload to reload the source code from the server." "Investigate the lockfile; delete it to regenerate the lockfile or --reload to reload the source code from the server."
), ),
specifier, specifier,
checksum_err.actual, checksum_err.actual,
@ -868,6 +906,50 @@ fn get_resolution_error_bare_specifier(
} }
} }
fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> {
let mut maybe_specifier = None;
if let ResolutionError::InvalidSpecifier {
error: SpecifierError::ImportPrefixMissing { specifier, .. },
range,
} = error
{
if range.specifier.scheme() == "file" {
maybe_specifier = Some(specifier);
}
} else if let ResolutionError::ResolverError { error, range, .. } = error {
if range.specifier.scheme() == "file" {
match error.as_ref() {
ResolveError::Specifier(specifier_error) => {
if let SpecifierError::ImportPrefixMissing { specifier, .. } =
specifier_error
{
maybe_specifier = Some(specifier);
}
}
ResolveError::Other(other_error) => {
if let Some(SpecifierError::ImportPrefixMissing {
specifier, ..
}) = other_error.downcast_ref::<SpecifierError>()
{
maybe_specifier = Some(specifier);
}
}
}
}
}
// NOTE(bartlomieju): For now, return None if a specifier contains a dot or a space. This is because
// suggesting to `deno add bad-module.ts` makes no sense and is worse than not providing
// a suggestion at all. This should be improved further in the future
if let Some(specifier) = maybe_specifier {
if specifier.contains('.') || specifier.contains(' ') {
return None;
}
}
maybe_specifier.map(|s| s.as_str())
}
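The helper above only suggests `deno add` for bare specifiers that plausibly name a package; anything containing a dot or a space is skipped. A reduced version of that filter and the hint it feeds:

```rust
// Suggest `deno add` only for bare specifiers that look like package names.
fn import_prefix_missing_hint(specifier: &str) -> Option<String> {
    if specifier.contains('.') || specifier.contains(' ') {
        return None;
    }
    Some(format!(
        "If you want to use a JSR or npm package, try running \
         `deno add jsr:{specifier}` or `deno add npm:{specifier}`"
    ))
}

fn main() {
    assert!(import_prefix_missing_hint("chalk").is_some());
    assert!(import_prefix_missing_hint("bad-module.ts").is_none());
}
```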
/// Gets if any of the specified root's "file:" dependents are in the /// Gets if any of the specified root's "file:" dependents are in the
/// provided changed set. /// provided changed set.
pub fn has_graph_root_local_dependent_changed( pub fn has_graph_root_local_dependent_changed(
@ -879,13 +961,13 @@ pub fn has_graph_root_local_dependent_changed(
std::iter::once(root), std::iter::once(root),
deno_graph::WalkOptions { deno_graph::WalkOptions {
follow_dynamic: true, follow_dynamic: true,
follow_type_only: true, kind: GraphKind::All,
prefer_fast_check_graph: true, prefer_fast_check_graph: true,
check_js: true, check_js: true,
}, },
); );
while let Some((s, _)) = dependent_specifiers.next() { while let Some((s, _)) = dependent_specifiers.next() {
if let Ok(path) = specifier_to_file_path(s) { if let Ok(path) = url_to_file_path(s) {
if let Ok(path) = canonicalize_path(&path) { if let Ok(path) = canonicalize_path(&path) {
if canonicalized_changed_paths.contains(&path) { if canonicalized_changed_paths.contains(&path) {
return true; return true;
@ -1022,6 +1104,49 @@ impl deno_graph::source::JsrUrlProvider for CliJsrUrlProvider {
} }
} }
// todo(dsherret): We should change ModuleError to use thiserror so that
// we don't need to do this.
fn format_deno_graph_error(err: &dyn Error) -> String {
use std::fmt::Write;
let mut message = format!("{}", err);
let mut maybe_source = err.source();
if maybe_source.is_some() {
let mut past_message = message.clone();
let mut count = 0;
let mut display_count = 0;
while let Some(source) = maybe_source {
let current_message = format!("{}", source);
maybe_source = source.source();
// sometimes an error might be repeated due to
// being boxed multiple times in another AnyError
if current_message != past_message {
write!(message, "\n {}: ", display_count,).unwrap();
for (i, line) in current_message.lines().enumerate() {
if i > 0 {
write!(message, "\n {}", line).unwrap();
} else {
write!(message, "{}", line).unwrap();
}
}
display_count += 1;
}
if count > 8 {
write!(message, "\n {}: ...", count).unwrap();
break;
}
past_message = current_message;
count += 1;
}
}
message
}
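`format_deno_graph_error` walks the standard `Error::source()` chain, indents each distinct cause, skips consecutive duplicates caused by nested boxing, and caps the depth. A self-contained version of the same idea with a small custom error type:

```rust
use std::error::Error;
use std::fmt::{self, Write};

// Walk the source chain: print each distinct cause indented, skip consecutive
// duplicates, and stop after a fixed number of levels.
fn format_error_chain(err: &dyn Error) -> String {
    let mut message = format!("{err}");
    let mut past = message.clone();
    let mut source = err.source();
    let mut shown = 0;
    let mut depth = 0;
    while let Some(cause) = source {
        let current = format!("{cause}");
        source = cause.source();
        if current != past {
            write!(message, "\n  {shown}: {current}").unwrap();
            shown += 1;
        }
        if depth > 8 {
            message.push_str("\n  ...");
            break;
        }
        past = current;
        depth += 1;
    }
    message
}

#[derive(Debug)]
struct Wrapped {
    message: String,
    source: Option<Box<dyn Error>>,
}

impl fmt::Display for Wrapped {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.message)
    }
}

impl Error for Wrapped {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        self.source.as_deref()
    }
}

fn main() {
    let err = Wrapped {
        message: "failed to load module".to_string(),
        source: Some(Box::new(Wrapped {
            message: "checksum mismatch".to_string(),
            source: None,
        })),
    };
    println!("{}", format_error_chain(&err));
}
```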
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::sync::Arc; use std::sync::Arc;

View file

@ -19,6 +19,7 @@ use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions; use deno_runtime::deno_fetch::CreateHttpClientOptions;
use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_tls::RootCertStoreProvider;
use http::header;
use http::header::HeaderName; use http::header::HeaderName;
use http::header::HeaderValue; use http::header::HeaderValue;
use http::header::ACCEPT; use http::header::ACCEPT;
@ -204,6 +205,7 @@ pub struct FetchOnceArgs<'a> {
pub maybe_accept: Option<String>, pub maybe_accept: Option<String>,
pub maybe_etag: Option<String>, pub maybe_etag: Option<String>,
pub maybe_auth_token: Option<AuthToken>, pub maybe_auth_token: Option<AuthToken>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_progress_guard: Option<&'a UpdateGuard>, pub maybe_progress_guard: Option<&'a UpdateGuard>,
} }
@ -382,6 +384,8 @@ impl HttpClient {
request request
.headers_mut() .headers_mut()
.insert(AUTHORIZATION, authorization_val); .insert(AUTHORIZATION, authorization_val);
} else if let Some((header, value)) = args.maybe_auth {
request.headers_mut().insert(header, value);
} }
if let Some(accept) = args.maybe_accept { if let Some(accept) = args.maybe_accept {
let accepts_val = HeaderValue::from_str(&accept)?; let accepts_val = HeaderValue::from_str(&accept)?;
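
The new `maybe_auth` field above lets a caller supply an arbitrary header pair that is applied only when no bearer token is present. A small sketch of that fallback using the `http` crate's header types; the `x-jsr-auth` header name and `apply_auth` helper are assumptions for illustration, not part of the change:

```rust
use http::header::{HeaderMap, HeaderName, HeaderValue, AUTHORIZATION};

// Prefer an explicit Authorization value; otherwise fall back to a caller-supplied
// (name, value) pair, mirroring the maybe_auth branch added in the hunk above.
fn apply_auth(
    headers: &mut HeaderMap,
    auth_token: Option<HeaderValue>,
    maybe_auth: Option<(HeaderName, HeaderValue)>,
) {
    if let Some(token) = auth_token {
        headers.insert(AUTHORIZATION, token);
    } else if let Some((name, value)) = maybe_auth {
        headers.insert(name, value);
    }
}

fn main() {
    let mut headers = HeaderMap::new();
    let custom = (
        HeaderName::from_static("x-jsr-auth"),
        HeaderValue::from_static("token-123"),
    );
    apply_auth(&mut headers, None, Some(custom));
    assert!(headers.contains_key("x-jsr-auth"));
    println!("{headers:?}");
}
```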
@ -470,14 +474,22 @@ impl HttpClient {
} }
} }
pub async fn download_with_progress( pub async fn download_with_progress_and_retries(
&self, &self,
url: Url, url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>, maybe_header: Option<(HeaderName, HeaderValue)>,
progress_guard: &UpdateGuard, progress_guard: &UpdateGuard,
) -> Result<Option<Vec<u8>>, DownloadError> { ) -> Result<Option<Vec<u8>>, DownloadError> {
self crate::util::retry::retry(
.download_inner(url, maybe_header, Some(progress_guard)) || {
self.download_inner(
url.clone(),
maybe_header.clone(),
Some(progress_guard),
)
},
|e| matches!(e, DownloadError::BadResponse(_) | DownloadError::Fetch(_)),
)
.await .await
} }
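
The renamed `download_with_progress_and_retries` wraps the download in a retry helper that re-runs the request only for error kinds named in a predicate. A simplified synchronous sketch of that retry-with-predicate pattern; the `FetchError` type, attempt count, and backoff are assumptions, not the crate's actual `util::retry` implementation:

```rust
use std::thread::sleep;
use std::time::Duration;

// Hypothetical error type standing in for DownloadError in this sketch.
#[allow(dead_code)]
#[derive(Debug)]
enum FetchError {
    BadResponse(u16),
    Network(String),
    NotFound,
}

// Retry `op` up to `max_attempts` times, but only while `is_retriable`
// says the failure is transient; back off a little between attempts.
fn retry<T, F, P>(mut op: F, is_retriable: P, max_attempts: u32) -> Result<T, FetchError>
where
    F: FnMut() -> Result<T, FetchError>,
    P: Fn(&FetchError) -> bool,
{
    let mut attempt = 0;
    loop {
        match op() {
            Ok(value) => return Ok(value),
            Err(err) if attempt + 1 < max_attempts && is_retriable(&err) => {
                attempt += 1;
                sleep(Duration::from_millis(100 * u64::from(attempt)));
            }
            Err(err) => return Err(err),
        }
    }
}

fn main() {
    let mut calls = 0;
    let result = retry(
        || {
            calls += 1;
            if calls < 3 {
                Err(FetchError::Network("connection reset".into()))
            } else {
                Ok("payload")
            }
        },
        // Mirror the diff's predicate: transient failures retry, NotFound does not.
        |e| matches!(e, FetchError::BadResponse(_) | FetchError::Network(_)),
        5,
    );
    println!("{result:?} after {calls} calls");
}
```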
@ -784,6 +796,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -810,6 +823,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -837,6 +851,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -858,6 +873,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -877,6 +893,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -906,6 +923,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, _)) = result { if let Ok(FetchOnceResult::Code(body, _)) = result {
@ -931,6 +949,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Redirect(url, _)) = result { if let Ok(FetchOnceResult::Redirect(url, _)) = result {
@ -966,6 +985,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1013,6 +1033,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1075,6 +1096,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1128,6 +1150,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1169,6 +1192,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1191,6 +1215,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -1225,6 +1250,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1254,6 +1280,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert!(result.is_err()); assert!(result.is_err());
@ -1275,6 +1302,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1298,6 +1326,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;

View file

@ -104,12 +104,12 @@ function bench(
} }
if (optionsOrFn.fn != undefined) { if (optionsOrFn.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, bench function is already provided as the third argument.", "Unexpected 'fn' field in options, bench function is already provided as the third argument",
); );
} }
if (optionsOrFn.name != undefined) { if (optionsOrFn.name != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'name' field in options, bench name is already provided as the first argument.", "Unexpected 'name' field in options, bench name is already provided as the first argument",
); );
} }
benchDesc = { benchDesc = {
@ -141,7 +141,7 @@ function bench(
fn = optionsOrFn; fn = optionsOrFn;
if (nameOrFnOrOptions.fn != undefined) { if (nameOrFnOrOptions.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, bench function is already provided as the second argument.", "Unexpected 'fn' field in options, bench function is already provided as the second argument",
); );
} }
name = nameOrFnOrOptions.name ?? fn.name; name = nameOrFnOrOptions.name ?? fn.name;
@ -150,7 +150,7 @@ function bench(
!nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function" !nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function"
) { ) {
throw new TypeError( throw new TypeError(
"Expected 'fn' field in the first argument to be a bench function.", "Expected 'fn' field in the first argument to be a bench function",
); );
} }
fn = nameOrFnOrOptions.fn; fn = nameOrFnOrOptions.fn;
@ -385,12 +385,12 @@ function createBenchContext(desc) {
start() { start() {
if (currentBenchId !== desc.id) { if (currentBenchId !== desc.id) {
throw new TypeError( throw new TypeError(
"The benchmark which this context belongs to is not being executed.", "The benchmark which this context belongs to is not being executed",
); );
} }
if (currentBenchUserExplicitStart != null) { if (currentBenchUserExplicitStart != null) {
throw new TypeError( throw new TypeError(
"BenchContext::start() has already been invoked.", "BenchContext::start() has already been invoked",
); );
} }
currentBenchUserExplicitStart = benchNow(); currentBenchUserExplicitStart = benchNow();
@ -399,11 +399,11 @@ function createBenchContext(desc) {
const end = benchNow(); const end = benchNow();
if (currentBenchId !== desc.id) { if (currentBenchId !== desc.id) {
throw new TypeError( throw new TypeError(
"The benchmark which this context belongs to is not being executed.", "The benchmark which this context belongs to is not being executed",
); );
} }
if (currentBenchUserExplicitEnd != null) { if (currentBenchUserExplicitEnd != null) {
throw new TypeError("BenchContext::end() has already been invoked."); throw new TypeError("BenchContext::end() has already been invoked");
} }
currentBenchUserExplicitEnd = end; currentBenchUserExplicitEnd = end;
}, },

View file

@ -113,7 +113,7 @@ function assertExit(fn, isTest) {
throw new Error( throw new Error(
`${ `${
isTest ? "Test case" : "Bench" isTest ? "Test case" : "Bench"
} finished with exit code set to ${exitCode}.`, } finished with exit code set to ${exitCode}`,
); );
} }
if (innerResult) { if (innerResult) {
@ -242,12 +242,12 @@ function testInner(
} }
if (optionsOrFn.fn != undefined) { if (optionsOrFn.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, test function is already provided as the third argument.", "Unexpected 'fn' field in options, test function is already provided as the third argument",
); );
} }
if (optionsOrFn.name != undefined) { if (optionsOrFn.name != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'name' field in options, test name is already provided as the first argument.", "Unexpected 'name' field in options, test name is already provided as the first argument",
); );
} }
testDesc = { testDesc = {
@ -279,7 +279,7 @@ function testInner(
fn = optionsOrFn; fn = optionsOrFn;
if (nameOrFnOrOptions.fn != undefined) { if (nameOrFnOrOptions.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, test function is already provided as the second argument.", "Unexpected 'fn' field in options, test function is already provided as the second argument",
); );
} }
name = nameOrFnOrOptions.name ?? fn.name; name = nameOrFnOrOptions.name ?? fn.name;
@ -288,7 +288,7 @@ function testInner(
!nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function" !nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function"
) { ) {
throw new TypeError( throw new TypeError(
"Expected 'fn' field in the first argument to be a test function.", "Expected 'fn' field in the first argument to be a test function",
); );
} }
fn = nameOrFnOrOptions.fn; fn = nameOrFnOrOptions.fn;
@ -426,7 +426,7 @@ function createTestContext(desc) {
let stepDesc; let stepDesc;
if (typeof nameOrFnOrOptions === "string") { if (typeof nameOrFnOrOptions === "string") {
if (typeof maybeFn !== "function") { if (typeof maybeFn !== "function") {
throw new TypeError("Expected function for second argument."); throw new TypeError("Expected function for second argument");
} }
stepDesc = { stepDesc = {
name: nameOrFnOrOptions, name: nameOrFnOrOptions,
@ -434,7 +434,7 @@ function createTestContext(desc) {
}; };
} else if (typeof nameOrFnOrOptions === "function") { } else if (typeof nameOrFnOrOptions === "function") {
if (!nameOrFnOrOptions.name) { if (!nameOrFnOrOptions.name) {
throw new TypeError("The step function must have a name."); throw new TypeError("The step function must have a name");
} }
if (maybeFn != undefined) { if (maybeFn != undefined) {
throw new TypeError( throw new TypeError(
@ -449,7 +449,7 @@ function createTestContext(desc) {
stepDesc = nameOrFnOrOptions; stepDesc = nameOrFnOrOptions;
} else { } else {
throw new TypeError( throw new TypeError(
"Expected a test definition or name and function.", "Expected a test definition or name and function",
); );
} }
stepDesc.ignore ??= false; stepDesc.ignore ??= false;

View file

@ -6,7 +6,6 @@ use dashmap::DashMap;
use deno_core::serde_json; use deno_core::serde_json;
use deno_graph::packages::JsrPackageInfo; use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageVersionInfo; use deno_graph::packages::JsrPackageVersionInfo;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use std::sync::Arc; use std::sync::Arc;
@ -68,10 +67,7 @@ impl JsrFetchResolver {
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher.fetch_bypass_permissions(&meta_url).await.ok()
.fetch(&meta_url, &PermissionsContainer::allow_all())
.await
.ok()
}) })
.await .await
.ok()??; .ok()??;
@ -96,10 +92,7 @@ impl JsrFetchResolver {
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher.fetch_bypass_permissions(&meta_url).await.ok()
.fetch(&meta_url, &PermissionsContainer::allow_all())
.await
.ok()
}) })
.await .await
.ok()??; .ok()??;

View file

@ -2,20 +2,22 @@
use super::diagnostics::DenoDiagnostic; use super::diagnostics::DenoDiagnostic;
use super::diagnostics::DiagnosticSource; use super::diagnostics::DiagnosticSource;
use super::documents::Document;
use super::documents::Documents; use super::documents::Documents;
use super::language_server; use super::language_server;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::tsc; use super::tsc;
use super::urls::url_to_uri;
use crate::args::jsr_url; use crate::args::jsr_url;
use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use deno_config::workspace::MappedResolution;
use deno_lint::diagnostic::LintDiagnosticRange; use deno_lint::diagnostic::LintDiagnosticRange;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_ast::SourceRange; use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned; use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo; use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde::Deserialize; use deno_core::serde::Deserialize;
@ -23,6 +25,7 @@ use deno_core::serde::Serialize;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PathClean; use deno_runtime::deno_node::PathClean;
use deno_semver::jsr::JsrPackageNvReference; use deno_semver::jsr::JsrPackageNvReference;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
@ -36,10 +39,12 @@ use import_map::ImportMap;
use node_resolver::NpmResolver; use node_resolver::NpmResolver;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use std::borrow::Cow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::path::Path; use std::path::Path;
use text_lines::LineAndColumnIndex;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::Position; use tower_lsp::lsp_types::Position;
use tower_lsp::lsp_types::Range; use tower_lsp::lsp_types::Range;
@ -224,6 +229,7 @@ pub struct TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
file_referrer: ModuleSpecifier,
} }
impl<'a> TsResponseImportMapper<'a> { impl<'a> TsResponseImportMapper<'a> {
@ -231,11 +237,13 @@ impl<'a> TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
file_referrer: &ModuleSpecifier,
) -> Self { ) -> Self {
Self { Self {
documents, documents,
maybe_import_map, maybe_import_map,
resolver, resolver,
file_referrer: file_referrer.clone(),
} }
} }
@ -256,8 +264,6 @@ impl<'a> TsResponseImportMapper<'a> {
} }
} }
let file_referrer = self.documents.get_file_referrer(referrer);
if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str()) if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str())
{ {
let mut segments = jsr_path.split('/'); let mut segments = jsr_path.split('/');
@ -272,7 +278,7 @@ impl<'a> TsResponseImportMapper<'a> {
let export = self.resolver.jsr_lookup_export_for_path( let export = self.resolver.jsr_lookup_export_for_path(
&nv, &nv,
&path, &path,
file_referrer.as_deref(), Some(&self.file_referrer),
)?; )?;
let sub_path = (export != ".").then_some(export); let sub_path = (export != ".").then_some(export);
let mut req = None; let mut req = None;
@ -298,7 +304,7 @@ impl<'a> TsResponseImportMapper<'a> {
req = req.or_else(|| { req = req.or_else(|| {
self self
.resolver .resolver
.jsr_lookup_req_for_nv(&nv, file_referrer.as_deref()) .jsr_lookup_req_for_nv(&nv, Some(&self.file_referrer))
}); });
let spec_str = if let Some(req) = req { let spec_str = if let Some(req) = req {
let req_ref = PackageReqReference { req, sub_path }; let req_ref = PackageReqReference { req, sub_path };
@ -328,7 +334,7 @@ impl<'a> TsResponseImportMapper<'a> {
if let Some(npm_resolver) = self if let Some(npm_resolver) = self
.resolver .resolver
.maybe_managed_npm_resolver(file_referrer.as_deref()) .maybe_managed_npm_resolver(Some(&self.file_referrer))
{ {
if npm_resolver.in_npm_package(specifier) { if npm_resolver.in_npm_package(specifier) {
if let Ok(Some(pkg_id)) = if let Ok(Some(pkg_id)) =
@ -400,7 +406,7 @@ impl<'a> TsResponseImportMapper<'a> {
.flatten()?; .flatten()?;
let root_folder = package_json.path.parent()?; let root_folder = package_json.path.parent()?;
let specifier_path = specifier_to_file_path(specifier).ok()?; let specifier_path = url_to_file_path(specifier).ok()?;
let mut search_paths = vec![specifier_path.clone()]; let mut search_paths = vec![specifier_path.clone()];
// TypeScript will provide a .js extension for quick fixes, so do // TypeScript will provide a .js extension for quick fixes, so do
// a search for the .d.ts file instead // a search for the .d.ts file instead
@ -464,6 +470,26 @@ impl<'a> TsResponseImportMapper<'a> {
} }
None None
} }
pub fn is_valid_import(
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
) -> bool {
self
.resolver
.as_graph_resolver(Some(&self.file_referrer))
.resolve(
specifier_text,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
deno_graph::source::ResolutionMode::Types,
)
.is_ok()
}
} }
fn try_reverse_map_package_json_exports( fn try_reverse_map_package_json_exports(
@ -572,29 +598,25 @@ pub fn fix_ts_import_changes(
/// Fix tsc import code actions so that the module specifier is correct for /// Fix tsc import code actions so that the module specifier is correct for
/// resolution by Deno (includes the extension). /// resolution by Deno (includes the extension).
fn fix_ts_import_action( fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
action: &tsc::CodeFixAction, action: &'a tsc::CodeFixAction,
import_mapper: &TsResponseImportMapper, import_mapper: &TsResponseImportMapper,
) -> Result<tsc::CodeFixAction, AnyError> { ) -> Option<Cow<'a, tsc::CodeFixAction>> {
if matches!( if !matches!(
action.fix_name.as_str(), action.fix_name.as_str(),
"import" | "fixMissingFunctionDeclaration" "import" | "fixMissingFunctionDeclaration"
) { ) {
let change = action return Some(Cow::Borrowed(action));
.changes }
.first() let specifier = (|| {
.ok_or_else(|| anyhow!("Unexpected action changes."))?; let text_change = action.changes.first()?.text_changes.first()?;
let text_change = change let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?;
.text_changes Some(captures.get(1)?.as_str())
.first() })();
.ok_or_else(|| anyhow!("Missing text change."))?; let Some(specifier) = specifier else {
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text) return Some(Cow::Borrowed(action));
{ };
let specifier = captures
.get(1)
.ok_or_else(|| anyhow!("Missing capture."))?
.as_str();
if let Some(new_specifier) = if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer) import_mapper.check_unresolved_specifier(specifier, referrer)
{ {
@ -619,19 +641,19 @@ fn fix_ts_import_action(
}) })
.collect(); .collect();
return Ok(tsc::CodeFixAction { Some(Cow::Owned(tsc::CodeFixAction {
description, description,
changes, changes,
commands: None, commands: None,
fix_name: action.fix_name.clone(), fix_name: action.fix_name.clone(),
fix_id: None, fix_id: None,
fix_all_description: None, fix_all_description: None,
}); }))
} else if !import_mapper.is_valid_import(specifier, referrer) {
None
} else {
Some(Cow::Borrowed(action))
} }
}
}
Ok(action.clone())
} }
/// Determines if two TypeScript diagnostic codes are effectively equivalent. /// Determines if two TypeScript diagnostic codes are effectively equivalent.
@ -750,10 +772,11 @@ impl CodeActionCollection {
.as_ref() .as_ref()
.and_then(|d| serde_json::from_value::<Vec<DataQuickFix>>(d.clone()).ok()) .and_then(|d| serde_json::from_value::<Vec<DataQuickFix>>(d.clone()).ok())
{ {
let uri = url_to_uri(specifier)?;
for quick_fix in data_quick_fixes { for quick_fix in data_quick_fixes {
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert( changes.insert(
specifier.clone(), uri.clone(),
quick_fix quick_fix
.changes .changes
.into_iter() .into_iter()
@ -795,6 +818,7 @@ impl CodeActionCollection {
maybe_text_info: Option<&SourceTextInfo>, maybe_text_info: Option<&SourceTextInfo>,
maybe_parsed_source: Option<&deno_ast::ParsedSource>, maybe_parsed_source: Option<&deno_ast::ParsedSource>,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let uri = url_to_uri(specifier)?;
let code = diagnostic let code = diagnostic
.code .code
.as_ref() .as_ref()
@ -811,7 +835,7 @@ impl CodeActionCollection {
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert( changes.insert(
specifier.clone(), uri.clone(),
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: prepend_whitespace( new_text: prepend_whitespace(
format!("// deno-lint-ignore {code}\n"), format!("// deno-lint-ignore {code}\n"),
@ -892,7 +916,7 @@ impl CodeActionCollection {
} }
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert(specifier.clone(), vec![lsp::TextEdit { new_text, range }]); changes.insert(uri.clone(), vec![lsp::TextEdit { new_text, range }]);
let ignore_file_action = lsp::CodeAction { let ignore_file_action = lsp::CodeAction {
title: format!("Disable {code} for the entire file"), title: format!("Disable {code} for the entire file"),
kind: Some(lsp::CodeActionKind::QUICKFIX), kind: Some(lsp::CodeActionKind::QUICKFIX),
@ -913,7 +937,7 @@ impl CodeActionCollection {
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert( changes.insert(
specifier.clone(), uri,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: "// deno-lint-ignore-file\n".to_string(), new_text: "// deno-lint-ignore-file\n".to_string(),
range: lsp::Range { range: lsp::Range {
@ -970,11 +994,13 @@ impl CodeActionCollection {
"The action returned from TypeScript is unsupported.", "The action returned from TypeScript is unsupported.",
)); ));
} }
let action = fix_ts_import_action( let Some(action) = fix_ts_import_action(
specifier, specifier,
action, action,
&language_server.get_ts_response_import_mapper(specifier), &language_server.get_ts_response_import_mapper(specifier),
)?; ) else {
return Ok(());
};
let edit = ts_changes_to_edit(&action.changes, language_server)?; let edit = ts_changes_to_edit(&action.changes, language_server)?;
let code_action = lsp::CodeAction { let code_action = lsp::CodeAction {
title: action.description.clone(), title: action.description.clone(),
@ -994,7 +1020,7 @@ impl CodeActionCollection {
}); });
self self
.actions .actions
.push(CodeActionKind::Tsc(code_action, action.clone())); .push(CodeActionKind::Tsc(code_action, action.as_ref().clone()));
if let Some(fix_id) = &action.fix_id { if let Some(fix_id) = &action.fix_id {
if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) = if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) =
@ -1148,6 +1174,191 @@ impl CodeActionCollection {
..Default::default() ..Default::default()
})); }));
} }
pub async fn add_source_actions(
&mut self,
document: &Document,
range: &lsp::Range,
language_server: &language_server::Inner,
) {
fn import_start_from_specifier(
document: &Document,
import: &deno_graph::Import,
) -> Option<LineAndColumnIndex> {
// find the top level statement that contains the specifier
let parsed_source = document.maybe_parsed_source()?.as_ref().ok()?;
let text_info = parsed_source.text_info_lazy();
let specifier_range = SourceRange::new(
text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.start.line,
column_index: import.specifier_range.start.character,
}),
text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.end.line,
column_index: import.specifier_range.end.character,
}),
);
match parsed_source.program_ref() {
deno_ast::swc::ast::Program::Module(module) => module
.body
.iter()
.find(|i| i.range().contains(&specifier_range))
.map(|i| text_info.line_and_column_index(i.range().start)),
deno_ast::swc::ast::Program::Script(_) => None,
}
}
async fn deno_types_for_npm_action(
document: &Document,
range: &lsp::Range,
language_server: &language_server::Inner,
) -> Option<lsp::CodeAction> {
let (dep_key, dependency, _) =
document.get_maybe_dependency(&range.end)?;
if dependency.maybe_deno_types_specifier.is_some() {
return None;
}
if dependency.maybe_code.maybe_specifier().is_none()
&& dependency.maybe_type.maybe_specifier().is_none()
{
// We're using byonm and the package is not cached.
return None;
}
let position = deno_graph::Position::new(
range.end.line as usize,
range.end.character as usize,
);
let import_start = dependency.imports.iter().find_map(|i| {
if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") {
return None;
}
if !i.specifier_range.includes(&position) {
return None;
}
import_start_from_specifier(document, i)
})?;
let referrer = document.specifier();
let file_referrer = document.file_referrer();
let config_data = language_server
.config
.tree
.data_for_specifier(file_referrer?)?;
let workspace_resolver = config_data.resolver.clone();
let npm_ref = if let Ok(resolution) =
workspace_resolver.resolve(&dep_key, document.specifier())
{
let specifier = match resolution {
MappedResolution::Normal { specifier, .. }
| MappedResolution::ImportMap { specifier, .. } => specifier,
_ => {
return None;
}
};
NpmPackageReqReference::from_specifier(&specifier).ok()?
} else {
// Only resolve bare package.json deps for byonm.
if !config_data.byonm {
return None;
}
if !language_server
.resolver
.is_bare_package_json_dep(&dep_key, referrer)
{
return None;
}
NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()?
};
let package_name = &npm_ref.req().name;
if package_name.starts_with("@types/") {
return None;
}
let managed_npm_resolver = language_server
.resolver
.maybe_managed_npm_resolver(file_referrer);
if let Some(npm_resolver) = managed_npm_resolver {
if !npm_resolver.is_pkg_req_folder_cached(npm_ref.req()) {
return None;
}
}
if language_server
.resolver
.npm_to_file_url(&npm_ref, document.specifier(), file_referrer)
.is_some()
{
// The package import has types.
return None;
}
let types_package_name = format!("@types/{package_name}");
let types_package_version = language_server
.npm_search_api
.versions(&types_package_name)
.await
.ok()
.and_then(|versions| versions.first().cloned())?;
let types_specifier_text =
if let Some(npm_resolver) = managed_npm_resolver {
let mut specifier_text = if let Some(req) =
npm_resolver.top_package_req_for_name(&types_package_name)
{
format!("npm:{req}")
} else {
format!("npm:{}@^{}", &types_package_name, types_package_version)
};
let specifier = ModuleSpecifier::parse(&specifier_text).ok()?;
if let Some(file_referrer) = file_referrer {
if let Some(text) = language_server
.get_ts_response_import_mapper(file_referrer)
.check_specifier(&specifier, referrer)
{
specifier_text = text;
}
}
specifier_text
} else {
types_package_name.clone()
};
let uri = language_server
.url_map
.specifier_to_uri(referrer, file_referrer)
.ok()?;
let position = lsp::Position {
line: import_start.line_index as u32,
character: import_start.column_index as u32,
};
let new_text = format!(
"{}// @deno-types=\"{}\"\n",
if position.character == 0 { "" } else { "\n" },
&types_specifier_text
);
let text_edit = lsp::TextEdit {
range: lsp::Range {
start: position,
end: position,
},
new_text,
};
Some(lsp::CodeAction {
title: format!(
"Add @deno-types directive for \"{}\"",
&types_specifier_text
),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: None,
edit: Some(lsp::WorkspaceEdit {
changes: Some([(uri, vec![text_edit])].into_iter().collect()),
..Default::default()
}),
..Default::default()
})
}
if let Some(action) =
deno_types_for_npm_action(document, range, language_server).await
{
self.actions.push(CodeActionKind::Deno(action));
}
}
} }
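
The `add_source_actions` code above offers a quick fix that inserts an `// @deno-types="..."` directive on the line before the resolved import. A plain-string sketch of that insertion, assuming a hypothetical `add_deno_types_directive` helper and example specifier; the real implementation computes the position from the parsed AST and emits an LSP text edit instead:

```rust
// Insert a `// @deno-types=...` directive immediately above the import at
// `import_line` (0-based). Purely illustrative; positions really come from the AST.
fn add_deno_types_directive(source: &str, import_line: usize, types_specifier: &str) -> String {
    let mut out = String::with_capacity(source.len() + 64);
    for (i, line) in source.lines().enumerate() {
        if i == import_line {
            out.push_str(&format!("// @deno-types=\"{}\"\n", types_specifier));
        }
        out.push_str(line);
        out.push('\n');
    }
    out
}

fn main() {
    let source = "import express from \"npm:express\";\nconsole.log(express);\n";
    let patched = add_deno_types_directive(source, 0, "npm:@types/express@^4");
    print!("{patched}");
}
```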
/// Prepend the whitespace characters found at the start of line_content to content. /// Prepend the whitespace characters found at the start of line_content to content.

View file

@ -7,26 +7,16 @@ use crate::cache::LocalLspHttpCache;
use crate::lsp::config::Config; use crate::lsp::config::Config;
use crate::lsp::logging::lsp_log; use crate::lsp::logging::lsp_log;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use std::time::SystemTime; use std::time::SystemTime;
/// In the LSP, we disallow the cache from automatically copying from
/// the global cache to the local cache for technical reasons.
///
/// 1. We need to verify the checksums from the lockfile are correct when
/// moving from the global to the local cache.
/// 2. We need to verify the checksums for JSR https specifiers match what
/// is found in the package's manifest.
pub const LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY: deno_cache_dir::GlobalToLocalCopy =
deno_cache_dir::GlobalToLocalCopy::Disallow;
pub fn calculate_fs_version( pub fn calculate_fs_version(
cache: &LspCache, cache: &LspCache,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -34,7 +24,7 @@ pub fn calculate_fs_version(
) -> Option<String> { ) -> Option<String> {
match specifier.scheme() { match specifier.scheme() {
"npm" | "node" | "data" | "blob" => None, "npm" | "node" | "data" | "blob" => None,
"file" => specifier_to_file_path(specifier) "file" => url_to_file_path(specifier)
.ok() .ok()
.and_then(|path| calculate_fs_version_at_path(&path)), .and_then(|path| calculate_fs_version_at_path(&path)),
_ => calculate_fs_version_in_cache(cache, specifier, file_referrer), _ => calculate_fs_version_in_cache(cache, specifier, file_referrer),
@ -92,7 +82,7 @@ impl Default for LspCache {
impl LspCache { impl LspCache {
pub fn new(global_cache_url: Option<Url>) -> Self { pub fn new(global_cache_url: Option<Url>) -> Self {
let global_cache_path = global_cache_url.and_then(|s| { let global_cache_path = global_cache_url.and_then(|s| {
specifier_to_file_path(&s) url_to_file_path(&s)
.inspect(|p| { .inspect(|p| {
lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy()); lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy());
}) })
@ -104,7 +94,7 @@ impl LspCache {
let deno_dir = DenoDir::new(global_cache_path) let deno_dir = DenoDir::new(global_cache_path)
.expect("should be infallible with absolute custom root"); .expect("should be infallible with absolute custom root");
let global = Arc::new(GlobalHttpCache::new( let global = Arc::new(GlobalHttpCache::new(
deno_dir.deps_folder_path(), deno_dir.remote_folder_path(),
crate::cache::RealDenoCacheEnv, crate::cache::RealDenoCacheEnv,
)); ));
Self { Self {
@ -175,7 +165,7 @@ impl LspCache {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Option<ModuleSpecifier> { ) -> Option<ModuleSpecifier> {
let path = specifier_to_file_path(specifier).ok()?; let path = url_to_file_path(specifier).ok()?;
let vendor = self let vendor = self
.vendors_by_scope .vendors_by_scope
.iter() .iter()
@ -186,7 +176,7 @@ impl LspCache {
} }
pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool { pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool {
if let Ok(path) = specifier_to_file_path(specifier) { if let Ok(path) = url_to_file_path(specifier) {
if !path.starts_with(&self.deno_dir().root) { if !path.starts_with(&self.deno_dir().root) {
return true; return true;
} }

View file

@ -147,12 +147,14 @@ pub fn server_capabilities(
moniker_provider: None, moniker_provider: None,
experimental: Some(json!({ experimental: Some(json!({
"denoConfigTasks": true, "denoConfigTasks": true,
"testingApi":true, "testingApi": true,
"didRefreshDenoConfigurationTreeNotifications": true,
})), })),
inlay_hint_provider: Some(OneOf::Left(true)), inlay_hint_provider: Some(OneOf::Left(true)),
position_encoding: None, position_encoding: None,
// TODO(nayeemrmn): Support pull-based diagnostics.
diagnostic_provider: None, diagnostic_provider: None,
inline_value_provider: None, inline_value_provider: None,
inline_completion_provider: None,
notebook_document_sync: None,
} }
} }

View file

@ -8,6 +8,7 @@ use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
use lsp_types::Uri;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::ConfigurationItem; use tower_lsp::lsp_types::ConfigurationItem;
@ -17,7 +18,6 @@ use super::config::WorkspaceSettings;
use super::config::SETTINGS_SECTION; use super::config::SETTINGS_SECTION;
use super::lsp_custom; use super::lsp_custom;
use super::testing::lsp_custom as testing_lsp_custom; use super::testing::lsp_custom as testing_lsp_custom;
use super::urls::LspClientUrl;
#[derive(Debug)] #[derive(Debug)]
pub enum TestingNotification { pub enum TestingNotification {
@ -52,14 +52,11 @@ impl Client {
pub async fn publish_diagnostics( pub async fn publish_diagnostics(
&self, &self,
uri: LspClientUrl, uri: Uri,
diags: Vec<lsp::Diagnostic>, diags: Vec<lsp::Diagnostic>,
version: Option<i32>, version: Option<i32>,
) { ) {
self self.0.publish_diagnostics(uri, diags, version).await;
.0
.publish_diagnostics(uri.into_url(), diags, version)
.await;
} }
pub fn send_registry_state_notification( pub fn send_registry_state_notification(
@ -95,6 +92,19 @@ impl Client {
}); });
} }
pub fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
// do on a task in case the caller currently is in the lsp lock
let client = self.0.clone();
spawn(async move {
client
.send_did_refresh_deno_configuration_tree_notification(params)
.await;
});
}
pub fn send_did_change_deno_configuration_notification( pub fn send_did_change_deno_configuration_notification(
&self, &self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams, params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -149,7 +159,7 @@ impl OutsideLockClient {
pub async fn workspace_configuration( pub async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> { ) -> Result<Vec<WorkspaceSettings>, AnyError> {
self.0.workspace_configuration(scopes).await self.0.workspace_configuration(scopes).await
} }
@ -159,7 +169,7 @@ impl OutsideLockClient {
trait ClientTrait: Send + Sync { trait ClientTrait: Send + Sync {
async fn publish_diagnostics( async fn publish_diagnostics(
&self, &self,
uri: lsp::Url, uri: lsp::Uri,
diagnostics: Vec<lsp::Diagnostic>, diagnostics: Vec<lsp::Diagnostic>,
version: Option<i32>, version: Option<i32>,
); );
@ -172,6 +182,10 @@ trait ClientTrait: Send + Sync {
params: lsp_custom::DiagnosticBatchNotificationParams, params: lsp_custom::DiagnosticBatchNotificationParams,
); );
async fn send_test_notification(&self, params: TestingNotification); async fn send_test_notification(&self, params: TestingNotification);
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
);
async fn send_did_change_deno_configuration_notification( async fn send_did_change_deno_configuration_notification(
&self, &self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams, params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -182,7 +196,7 @@ trait ClientTrait: Send + Sync {
); );
async fn workspace_configuration( async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError>; ) -> Result<Vec<WorkspaceSettings>, AnyError>;
async fn show_message(&self, message_type: lsp::MessageType, text: String); async fn show_message(&self, message_type: lsp::MessageType, text: String);
async fn register_capability( async fn register_capability(
@ -198,7 +212,7 @@ struct TowerClient(tower_lsp::Client);
impl ClientTrait for TowerClient { impl ClientTrait for TowerClient {
async fn publish_diagnostics( async fn publish_diagnostics(
&self, &self,
uri: lsp::Url, uri: lsp::Uri,
diagnostics: Vec<lsp::Diagnostic>, diagnostics: Vec<lsp::Diagnostic>,
version: Option<i32>, version: Option<i32>,
) { ) {
@ -252,6 +266,18 @@ impl ClientTrait for TowerClient {
} }
} }
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
self
.0
.send_notification::<lsp_custom::DidRefreshDenoConfigurationTreeNotification>(
params,
)
.await
}
async fn send_did_change_deno_configuration_notification( async fn send_did_change_deno_configuration_notification(
&self, &self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams, params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -276,7 +302,7 @@ impl ClientTrait for TowerClient {
async fn workspace_configuration( async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> { ) -> Result<Vec<WorkspaceSettings>, AnyError> {
let config_response = self let config_response = self
.0 .0
@ -349,7 +375,7 @@ struct ReplClient;
impl ClientTrait for ReplClient { impl ClientTrait for ReplClient {
async fn publish_diagnostics( async fn publish_diagnostics(
&self, &self,
_uri: lsp::Url, _uri: lsp::Uri,
_diagnostics: Vec<lsp::Diagnostic>, _diagnostics: Vec<lsp::Diagnostic>,
_version: Option<i32>, _version: Option<i32>,
) { ) {
@ -369,6 +395,12 @@ impl ClientTrait for ReplClient {
async fn send_test_notification(&self, _params: TestingNotification) {} async fn send_test_notification(&self, _params: TestingNotification) {}
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
_params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
}
async fn send_did_change_deno_configuration_notification( async fn send_did_change_deno_configuration_notification(
&self, &self,
_params: lsp_custom::DidChangeDenoConfigurationNotificationParams, _params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -383,7 +415,7 @@ impl ClientTrait for ReplClient {
async fn workspace_configuration( async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> { ) -> Result<Vec<WorkspaceSettings>, AnyError> {
Ok(vec![get_repl_workspace_settings(); scopes.len()]) Ok(vec![get_repl_workspace_settings(); scopes.len()])
} }

View file

@ -19,7 +19,6 @@ use crate::util::path::relative_specifier;
use deno_graph::source::ResolutionMode; use deno_graph::source::ResolutionMode;
use deno_graph::Range; use deno_graph::Range;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES; use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_ast::LineAndColumnIndex; use deno_ast::LineAndColumnIndex;
use deno_ast::SourceTextInfo; use deno_ast::SourceTextInfo;
@ -30,6 +29,7 @@ use deno_core::serde::Serialize;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::url::Position; use deno_core::url::Position;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use import_map::ImportMap; use import_map::ImportMap;
@ -200,15 +200,11 @@ pub async fn get_import_completions(
{ {
// completions for import map specifiers // completions for import map specifiers
Some(lsp::CompletionResponse::List(completion_list)) Some(lsp::CompletionResponse::List(completion_list))
} else if text.starts_with("./") } else if let Some(completion_list) =
|| text.starts_with("../") get_local_completions(specifier, &text, &range, resolver)
|| text.starts_with('/')
{ {
// completions for local relative modules // completions for local relative modules
Some(lsp::CompletionResponse::List(CompletionList { Some(lsp::CompletionResponse::List(completion_list))
is_incomplete: false,
items: get_local_completions(specifier, &text, &range, resolver)?,
}))
} else if !text.is_empty() { } else if !text.is_empty() {
// completion of modules from a module registry or cache // completion of modules from a module registry or cache
check_auto_config_registry( check_auto_config_registry(
@ -249,7 +245,7 @@ pub async fn get_import_completions(
.collect(); .collect();
let mut is_incomplete = false; let mut is_incomplete = false;
if let Some(import_map) = maybe_import_map { if let Some(import_map) = maybe_import_map {
items.extend(get_base_import_map_completions(import_map)); items.extend(get_base_import_map_completions(import_map, specifier));
} }
if let Some(origin_items) = if let Some(origin_items) =
module_registries.get_origin_completions(&text, &range) module_registries.get_origin_completions(&text, &range)
@ -268,20 +264,20 @@ pub async fn get_import_completions(
/// map as completion items. /// map as completion items.
fn get_base_import_map_completions( fn get_base_import_map_completions(
import_map: &ImportMap, import_map: &ImportMap,
referrer: &ModuleSpecifier,
) -> Vec<lsp::CompletionItem> { ) -> Vec<lsp::CompletionItem> {
import_map import_map
.imports() .entries_for_referrer(referrer)
.keys() .map(|entry| {
.map(|key| {
// for some strange reason, keys that start with `/` get stored in the // for some strange reason, keys that start with `/` get stored in the
// import map as `file:///`, and so when we pull the keys out, we need to // import map as `file:///`, and so when we pull the keys out, we need to
// change the behavior // change the behavior
let mut label = if key.starts_with("file://") { let mut label = if entry.key.starts_with("file://") {
FILE_PROTO_RE.replace(key, "").to_string() FILE_PROTO_RE.replace(entry.key, "").to_string()
} else { } else {
key.to_string() entry.key.to_string()
}; };
let kind = if key.ends_with('/') { let kind = if entry.key.ends_with('/') {
label.pop(); label.pop();
Some(lsp::CompletionItemKind::FOLDER) Some(lsp::CompletionItemKind::FOLDER)
} else { } else {
@ -363,15 +359,15 @@ fn get_local_completions(
text: &str, text: &str,
range: &lsp::Range, range: &lsp::Range,
resolver: &LspResolver, resolver: &LspResolver,
) -> Option<Vec<lsp::CompletionItem>> { ) -> Option<CompletionList> {
if base.scheme() != "file" { if base.scheme() != "file" {
return None; return None;
} }
let parent = base.join(text).ok()?.join(".").ok()?; let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
let resolved_parent = resolver let resolved_parent = resolver
.as_graph_resolver(Some(base)) .as_graph_resolver(Some(base))
.resolve( .resolve(
parent.as_str(), parent,
&Range { &Range {
specifier: base.clone(), specifier: base.clone(),
start: deno_graph::Position::zeroed(), start: deno_graph::Position::zeroed(),
@ -380,14 +376,11 @@ fn get_local_completions(
ResolutionMode::Execution, ResolutionMode::Execution,
) )
.ok()?; .ok()?;
let resolved_parent_path = specifier_to_file_path(&resolved_parent).ok()?; let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
let raw_parent =
&text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
if resolved_parent_path.is_dir() { if resolved_parent_path.is_dir() {
let cwd = std::env::current_dir().ok()?; let cwd = std::env::current_dir().ok()?;
let items = std::fs::read_dir(resolved_parent_path).ok()?; let entries = std::fs::read_dir(resolved_parent_path).ok()?;
Some( let items = entries
items
.filter_map(|de| { .filter_map(|de| {
let de = de.ok()?; let de = de.ok()?;
let label = de.path().file_name()?.to_string_lossy().to_string(); let label = de.path().file_name()?.to_string_lossy().to_string();
@ -395,7 +388,7 @@ fn get_local_completions(
if entry_specifier == *base { if entry_specifier == *base {
return None; return None;
} }
let full_text = format!("{raw_parent}{label}"); let full_text = format!("{parent}{label}");
let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range, range: *range,
new_text: full_text.clone(), new_text: full_text.clone(),
@ -435,8 +428,11 @@ fn get_local_completions(
_ => None, _ => None,
} }
}) })
.collect(), .collect();
) Some(CompletionList {
is_incomplete: false,
items,
})
} else { } else {
None None
} }
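
The reworked `get_local_completions` above derives the parent directory from the typed text up to the last `/` and lists that directory's entries as completion items. A rough filesystem-only sketch of that lookup, with a hypothetical `local_completions` helper standing in for the resolver-aware version:

```rust
use std::path::Path;

// Given the importing file's path and the text typed so far (e.g. "./comp"),
// list entries of the parent directory that could complete the specifier.
fn local_completions(importer: &Path, typed: &str) -> Vec<String> {
    // Keep everything up to and including the last '/', mirroring the diff's
    // `&text[..last_slash + 1]` parent extraction.
    let parent_text = match typed.rfind('/') {
        Some(idx) => &typed[..=idx],
        None => return Vec::new(),
    };
    let dir = importer.parent().unwrap_or(Path::new(".")).join(parent_text);
    let Ok(entries) = std::fs::read_dir(dir) else {
        return Vec::new();
    };
    entries
        .filter_map(|entry| {
            let entry = entry.ok()?;
            let name = entry.file_name().to_string_lossy().into_owned();
            Some(format!("{parent_text}{name}"))
        })
        .collect()
}

fn main() {
    for item in local_completions(Path::new("main.ts"), "./") {
        println!("{item}");
    }
}
```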
@ -838,7 +834,7 @@ mod tests {
fs_sources: &[(&str, &str)], fs_sources: &[(&str, &str)],
) -> Documents { ) -> Documents {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
let mut documents = Documents::default(); let mut documents = Documents::default();
documents.update_config( documents.update_config(
&Default::default(), &Default::default(),
@ -859,7 +855,7 @@ mod tests {
.set(&specifier, HashMap::default(), source.as_bytes()) .set(&specifier, HashMap::default(), source.as_bytes())
.expect("could not cache file"); .expect("could not cache file");
let document = documents let document = documents
.get_or_load(&specifier, Some(&temp_dir.uri().join("$").unwrap())); .get_or_load(&specifier, Some(&temp_dir.url().join("$").unwrap()));
assert!(document.is_some(), "source could not be setup"); assert!(document.is_some(), "source could not be setup");
} }
documents documents
@ -921,11 +917,11 @@ mod tests {
}, },
}, },
&Default::default(), &Default::default(),
); )
assert!(actual.is_some()); .unwrap();
let actual = actual.unwrap(); assert!(!actual.is_incomplete);
assert_eq!(actual.len(), 3); assert_eq!(actual.items.len(), 3);
for item in actual { for item in actual.items {
match item.text_edit { match item.text_edit {
Some(lsp::CompletionTextEdit::Edit(text_edit)) => { Some(lsp::CompletionTextEdit::Edit(text_edit)) => {
assert!(["./b", "./f.mjs", "./g.json"] assert!(["./b", "./f.mjs", "./g.json"]

View file

@ -5,6 +5,7 @@ use deno_config::deno_json::DenoJsonCache;
use deno_config::deno_json::FmtConfig; use deno_config::deno_json::FmtConfig;
use deno_config::deno_json::FmtOptionsConfig; use deno_config::deno_json::FmtOptionsConfig;
use deno_config::deno_json::LintConfig; use deno_config::deno_json::LintConfig;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::deno_json::TestConfig; use deno_config::deno_json::TestConfig;
use deno_config::deno_json::TsConfig; use deno_config::deno_json::TsConfig;
use deno_config::fs::DenoConfigFs; use deno_config::fs::DenoConfigFs;
@ -30,35 +31,39 @@ use deno_core::serde::Serialize;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::serde_json::Value; use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_lint::linter::LintConfig as DenoLintConfig; use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache; use deno_package_json::PackageJsonCache;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::fs_util::specifier_to_file_path;
use indexmap::IndexSet; use indexmap::IndexSet;
use lsp::Url;
use lsp_types::ClientCapabilities; use lsp_types::ClientCapabilities;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Deref;
use std::ops::DerefMut;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
use super::logging::lsp_log; use super::logging::lsp_log;
use super::lsp_custom;
use super::urls::url_to_uri;
use crate::args::discover_npmrc_from_workspace; use crate::args::discover_npmrc_from_workspace;
use crate::args::has_flag_env_var; use crate::args::has_flag_env_var;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::CliLockfileReadFromPathOptions;
use crate::args::ConfigFile; use crate::args::ConfigFile;
use crate::args::LintFlags; use crate::args::LintFlags;
use crate::args::LintOptions; use crate::args::LintOptions;
use crate::args::DENO_FUTURE;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use crate::resolver::SloppyImportsResolver; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions; use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider; use crate::tools::lint::LintRuleProvider;
@ -70,6 +75,54 @@ fn is_true() -> bool {
true true
} }
/// Wrapper that defaults if it fails to deserialize. Good for individual
/// settings.
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct SafeValue<T> {
inner: T,
}
impl<'de, T: Default + for<'de2> Deserialize<'de2>> Deserialize<'de>
for SafeValue<T>
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
Ok(Self {
inner: Deserialize::deserialize(deserializer).unwrap_or_default(),
})
}
}
impl<T: Serialize> Serialize for SafeValue<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
self.inner.serialize(serializer)
}
}
impl<T> Deref for SafeValue<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<T> DerefMut for SafeValue<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.inner
}
}
impl<T> SafeValue<T> {
pub fn as_deref(&self) -> &T {
&self.inner
}
}
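
`SafeValue<T>` above makes a single setting fall back to its default when it fails to deserialize, instead of rejecting the whole configuration object. A self-contained sketch of the same serde pattern, renamed `Lenient` here with a made-up `Settings` struct; assumes `serde` with the `derive` feature plus `serde_json`:

```rust
use serde::{Deserialize, Deserializer};

// Wrapper that falls back to T::default() instead of failing the whole
// settings object when one field has an unexpected shape.
#[derive(Debug, Default, PartialEq)]
struct Lenient<T>(T);

impl<'de, T> Deserialize<'de> for Lenient<T>
where
    T: Default + Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Swallow the error and keep the default, like SafeValue in the hunk above.
        Ok(Lenient(T::deserialize(deserializer).unwrap_or_default()))
    }
}

#[derive(Debug, Deserialize)]
struct Settings {
    #[serde(default)]
    unstable: Lenient<Vec<String>>,
    #[serde(default)]
    lint: bool,
}

fn main() {
    // `unstable` is now a list of strings; an older client may still send a boolean.
    let value = serde_json::json!({ "unstable": true, "lint": true });
    let settings: Settings = serde_json::from_value(value).unwrap();
    assert_eq!(settings.unstable, Lenient(Vec::new()));
    assert!(settings.lint);
    println!("{settings:?}");
}
```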
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CodeLensSettings { pub struct CodeLensSettings {
@ -386,6 +439,8 @@ pub struct LanguagePreferences {
pub use_aliases_for_renames: bool, pub use_aliases_for_renames: bool,
#[serde(default)] #[serde(default)]
pub quote_style: QuoteStyle, pub quote_style: QuoteStyle,
#[serde(default)]
pub prefer_type_only_auto_imports: bool,
} }
impl Default for LanguagePreferences { impl Default for LanguagePreferences {
@ -396,6 +451,7 @@ impl Default for LanguagePreferences {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: Default::default(), quote_style: Default::default(),
prefer_type_only_auto_imports: false,
} }
} }
} }
@ -538,7 +594,7 @@ pub struct WorkspaceSettings {
pub unsafely_ignore_certificate_errors: Option<Vec<String>>, pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
#[serde(default)] #[serde(default)]
pub unstable: bool, pub unstable: SafeValue<Vec<String>>,
#[serde(default)] #[serde(default)]
pub javascript: LanguageWorkspaceSettings, pub javascript: LanguageWorkspaceSettings,
@ -568,7 +624,7 @@ impl Default for WorkspaceSettings {
testing: Default::default(), testing: Default::default(),
tls_certificate: None, tls_certificate: None,
unsafely_ignore_certificate_errors: None, unsafely_ignore_certificate_errors: None,
unstable: false, unstable: Default::default(),
javascript: Default::default(), javascript: Default::default(),
typescript: Default::default(), typescript: Default::default(),
} }
@ -752,7 +808,7 @@ impl Settings {
/// Returns `None` if the value should be deferred to the presence of a /// Returns `None` if the value should be deferred to the presence of a
/// `deno.json` file. /// `deno.json` file.
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option<bool> { pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option<bool> {
let Ok(path) = specifier_to_file_path(specifier) else { let Ok(path) = url_to_file_path(specifier) else {
// Non-file URLs are not disabled by these settings. // Non-file URLs are not disabled by these settings.
return Some(true); return Some(true);
}; };
@ -761,7 +817,7 @@ impl Settings {
let mut disable_paths = vec![]; let mut disable_paths = vec![];
let mut enable_paths = None; let mut enable_paths = None;
if let Some(folder_uri) = folder_uri { if let Some(folder_uri) = folder_uri {
if let Ok(folder_path) = specifier_to_file_path(folder_uri) { if let Ok(folder_path) = url_to_file_path(folder_uri) {
disable_paths = settings disable_paths = settings
.disable_paths .disable_paths
.iter() .iter()
@ -798,12 +854,12 @@ impl Settings {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> (&WorkspaceSettings, Option<&ModuleSpecifier>) { ) -> (&WorkspaceSettings, Option<&ModuleSpecifier>) {
let Ok(path) = specifier_to_file_path(specifier) else { let Ok(path) = url_to_file_path(specifier) else {
return (&self.unscoped, self.first_folder.as_ref()); return (&self.unscoped, self.first_folder.as_ref());
}; };
for (folder_uri, settings) in self.by_workspace_folder.iter().rev() { for (folder_uri, settings) in self.by_workspace_folder.iter().rev() {
if let Some(settings) = settings { if let Some(settings) = settings {
let Ok(folder_path) = specifier_to_file_path(folder_uri) else { let Ok(folder_path) = url_to_file_path(folder_uri) else {
continue; continue;
}; };
if path.starts_with(folder_path) { if path.starts_with(folder_path) {
@ -844,14 +900,17 @@ pub struct Config {
impl Config { impl Config {
#[cfg(test)] #[cfg(test)]
pub fn new_with_roots(root_uris: impl IntoIterator<Item = Url>) -> Self { pub fn new_with_roots(root_urls: impl IntoIterator<Item = Url>) -> Self {
use super::urls::url_to_uri;
let mut config = Self::default(); let mut config = Self::default();
let mut folders = vec![]; let mut folders = vec![];
for root_uri in root_uris { for root_url in root_urls {
let name = root_uri.path_segments().and_then(|s| s.last()); let root_uri = url_to_uri(&root_url).unwrap();
let name = root_url.path_segments().and_then(|s| s.last());
let name = name.unwrap_or_default().to_string(); let name = name.unwrap_or_default().to_string();
folders.push(( folders.push((
root_uri.clone(), root_url,
lsp::WorkspaceFolder { lsp::WorkspaceFolder {
uri: root_uri, uri: root_uri,
name, name,
@ -1077,11 +1136,11 @@ impl Default for LspTsConfig {
"module": "esnext", "module": "esnext",
"moduleDetection": "force", "moduleDetection": "force",
"noEmit": true, "noEmit": true,
"noImplicitOverride": true,
"resolveJsonModule": true, "resolveJsonModule": true,
"strict": true, "strict": true,
"target": "esnext", "target": "esnext",
"useDefineForClassFields": true, "useDefineForClassFields": true,
"useUnknownInCatchVariables": false,
"jsx": "react", "jsx": "react",
"jsxFactory": "React.createElement", "jsxFactory": "React.createElement",
"jsxFragmentFactory": "React.Fragment", "jsxFragmentFactory": "React.Fragment",
@ -1129,7 +1188,7 @@ pub struct ConfigData {
pub lockfile: Option<Arc<CliLockfile>>, pub lockfile: Option<Arc<CliLockfile>>,
pub npmrc: Option<Arc<ResolvedNpmRc>>, pub npmrc: Option<Arc<ResolvedNpmRc>>,
pub resolver: Arc<WorkspaceResolver>, pub resolver: Arc<WorkspaceResolver>,
pub sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>, pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub import_map_from_settings: Option<ModuleSpecifier>, pub import_map_from_settings: Option<ModuleSpecifier>,
watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>, watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>,
} }
@ -1384,11 +1443,12 @@ impl ConfigData {
} }
} }
let byonm = std::env::var("DENO_UNSTABLE_BYONM").is_ok() let node_modules_dir =
|| member_dir.workspace.has_unstable("byonm") member_dir.workspace.node_modules_dir().unwrap_or_default();
|| (*DENO_FUTURE let byonm = match node_modules_dir {
&& member_dir.workspace.package_jsons().next().is_some() Some(mode) => mode == NodeModulesDirMode::Manual,
&& member_dir.workspace.node_modules_dir().is_none()); None => member_dir.workspace.root_pkg_json().is_some(),
};
if byonm { if byonm {
lsp_log!(" Enabled 'bring your own node_modules'."); lsp_log!(" Enabled 'bring your own node_modules'.");
} }
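The byonm ("bring your own node_modules") decision above no longer keys off `DENO_UNSTABLE_BYONM` or the `byonm` unstable flag; it follows the resolved `nodeModulesDir` mode and falls back to the presence of a root package.json. A stand-in sketch mirroring that logic (a simplified stand-in for `NodeModulesDirMode`, not the real type):

```rust
// Stand-in sketch of the new byonm decision: an explicit
// `nodeModulesDir: "manual"` means byonm; when unset, a root package.json
// implies it.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Eq)]
enum NodeModulesDirMode {
    Auto,
    Manual,
    None,
}

fn is_byonm(mode: Option<NodeModulesDirMode>, has_root_pkg_json: bool) -> bool {
    match mode {
        Some(mode) => mode == NodeModulesDirMode::Manual,
        None => has_root_pkg_json,
    }
}

fn main() {
    assert!(is_byonm(Some(NodeModulesDirMode::Manual), false));
    assert!(!is_byonm(Some(NodeModulesDirMode::Auto), true));
    assert!(is_byonm(None, true));
    assert!(!is_byonm(None, false));
}
```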
@ -1398,9 +1458,10 @@ impl ConfigData {
// Mark the import map as a watched file // Mark the import map as a watched file
if let Some(import_map_specifier) = member_dir if let Some(import_map_specifier) = member_dir
.workspace .workspace
.to_import_map_specifier() .to_import_map_path()
.ok() .ok()
.flatten() .flatten()
.and_then(|path| Url::from_file_path(path).ok())
{ {
add_watched_file( add_watched_file(
import_map_specifier.clone(), import_map_specifier.clone(),
@ -1456,13 +1517,12 @@ impl ConfigData {
ConfigWatchedFileType::ImportMap, ConfigWatchedFileType::ImportMap,
); );
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let fetch_result = deno_core::unsync::spawn({ let fetch_result =
deno_core::unsync::spawn({
let file_fetcher = file_fetcher.cloned().unwrap(); let file_fetcher = file_fetcher.cloned().unwrap();
let import_map_url = import_map_url.clone(); let import_map_url = import_map_url.clone();
async move { async move {
file_fetcher file_fetcher.fetch_bypass_permissions(&import_map_url).await
.fetch(&import_map_url, &PermissionsContainer::allow_all())
.await
} }
}) })
.await .await
@ -1490,29 +1550,15 @@ impl ConfigData {
None None
} }
}; };
let resolver = deno_core::unsync::spawn({ let resolver = member_dir
let workspace = member_dir.workspace.clone(); .workspace
let file_fetcher = file_fetcher.cloned();
async move {
workspace
.create_resolver( .create_resolver(
CreateResolverOptions { CreateResolverOptions {
pkg_json_dep_resolution, pkg_json_dep_resolution,
specified_import_map, specified_import_map,
}, },
move |specifier| { |path| Ok(std::fs::read_to_string(path)?),
let specifier = specifier.clone();
let file_fetcher = file_fetcher.clone().unwrap();
async move {
let file = file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all())
.await?
.into_text_decoded()?;
Ok(file.source.to_string())
}
},
) )
.await
.inspect_err(|err| { .inspect_err(|err| {
lsp_warn!( lsp_warn!(
" Failed to load resolver: {}", " Failed to load resolver: {}",
@ -1520,10 +1566,6 @@ impl ConfigData {
); );
}) })
.ok() .ok()
}
})
.await
.unwrap()
.unwrap_or_else(|| { .unwrap_or_else(|| {
// create a dummy resolver // create a dummy resolver
WorkspaceResolver::new_raw( WorkspaceResolver::new_raw(
@ -1549,9 +1591,11 @@ impl ConfigData {
.is_ok() .is_ok()
|| member_dir.workspace.has_unstable("sloppy-imports"); || member_dir.workspace.has_unstable("sloppy-imports");
let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { let sloppy_imports_resolver = unstable_sloppy_imports.then(|| {
Arc::new(SloppyImportsResolver::new_without_stat_cache(Arc::new( Arc::new(CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new_without_stat_cache(Arc::new(
deno_runtime::deno_fs::RealFs, deno_runtime::deno_fs::RealFs,
))) )),
))
}); });
let resolver = Arc::new(resolver); let resolver = Arc::new(resolver);
let lint_rule_provider = LintRuleProvider::new( let lint_rule_provider = LintRuleProvider::new(
@ -1677,23 +1721,28 @@ impl ConfigTree {
.unwrap_or_else(|| Arc::new(FmtConfig::new_with_base(PathBuf::from("/")))) .unwrap_or_else(|| Arc::new(FmtConfig::new_with_base(PathBuf::from("/"))))
} }
/// Returns (scope_uri, type). /// Returns (scope_url, type).
pub fn watched_file_type( pub fn watched_file_type(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Option<(&ModuleSpecifier, ConfigWatchedFileType)> { ) -> Option<(&ModuleSpecifier, ConfigWatchedFileType)> {
for (scope_uri, data) in self.scopes.iter() { for (scope_url, data) in self.scopes.iter() {
if let Some(typ) = data.watched_files.get(specifier) { if let Some(typ) = data.watched_files.get(specifier) {
return Some((scope_uri, *typ)); return Some((scope_url, *typ));
} }
} }
None None
} }
pub fn is_watched_file(&self, specifier: &ModuleSpecifier) -> bool { pub fn is_watched_file(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.path().ends_with("/deno.json") let path = specifier.path();
|| specifier.path().ends_with("/deno.jsonc") if path.ends_with("/deno.json")
|| specifier.path().ends_with("/package.json") || path.ends_with("/deno.jsonc")
|| path.ends_with("/package.json")
|| path.ends_with("/node_modules/.package-lock.json")
|| path.ends_with("/node_modules/.yarn-integrity.json")
|| path.ends_with("/node_modules/.modules.yaml")
|| path.ends_with("/node_modules/.deno/.setup-cache.bin")
{ {
return true; return true;
} }
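`is_watched_file` now also matches a handful of marker files under `node_modules/`, so installs run outside the editor invalidate LSP state; the per-package-manager attributions in the comments below are my reading, not stated in the diff. A self-contained sketch of the suffix check:

```rust
// Sketch of the suffix check above: config files plus the node_modules
// marker files written by the various package managers.
fn is_watched_config_path(path: &str) -> bool {
    const SUFFIXES: &[&str] = &[
        "/deno.json",
        "/deno.jsonc",
        "/package.json",
        "/node_modules/.package-lock.json",     // npm
        "/node_modules/.yarn-integrity.json",   // yarn
        "/node_modules/.modules.yaml",          // pnpm
        "/node_modules/.deno/.setup-cache.bin", // deno-managed node_modules
    ];
    SUFFIXES.iter().any(|suffix| path.ends_with(suffix))
}

fn main() {
    assert!(is_watched_config_path("/proj/node_modules/.modules.yaml"));
    assert!(is_watched_config_path("/proj/deno.jsonc"));
    assert!(!is_watched_config_path("/proj/src/mod.ts"));
}
```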
@ -1703,6 +1752,46 @@ impl ConfigTree {
.any(|data| data.watched_files.contains_key(specifier)) .any(|data| data.watched_files.contains_key(specifier))
} }
pub fn to_did_refresh_params(
&self,
) -> lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams {
let data = self
.scopes
.values()
.filter_map(|data| {
let workspace_root_scope_uri =
Some(data.member_dir.workspace.root_dir())
.filter(|s| *s != data.member_dir.dir_url())
.and_then(|s| url_to_uri(s).ok());
Some(lsp_custom::DenoConfigurationData {
scope_uri: url_to_uri(&data.scope).ok()?,
deno_json: data.maybe_deno_json().and_then(|c| {
if workspace_root_scope_uri.is_some()
&& Some(&c.specifier)
== data
.member_dir
.workspace
.root_deno_json()
.map(|c| &c.specifier)
{
return None;
}
Some(lsp::TextDocumentIdentifier {
uri: url_to_uri(&c.specifier).ok()?,
})
}),
package_json: data.maybe_pkg_json().and_then(|p| {
Some(lsp::TextDocumentIdentifier {
uri: url_to_uri(&p.specifier()).ok()?,
})
}),
workspace_root_scope_uri,
})
})
.collect();
lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams { data }
}
pub async fn refresh( pub async fn refresh(
&mut self, &mut self,
settings: &Settings, settings: &Settings,
@ -1727,7 +1816,7 @@ impl ConfigTree {
let config_file_path = (|| { let config_file_path = (|| {
let config_setting = ws_settings.config.as_ref()?; let config_setting = ws_settings.config.as_ref()?;
let config_uri = folder_uri.join(config_setting).ok()?; let config_uri = folder_uri.join(config_setting).ok()?;
specifier_to_file_path(&config_uri).ok() url_to_file_path(&config_uri).ok()
})(); })();
if config_file_path.is_some() || ws_settings.import_map.is_some() { if config_file_path.is_some() || ws_settings.import_map.is_some() {
scopes.insert( scopes.insert(
@ -1804,7 +1893,7 @@ impl ConfigTree {
let scope = config_file.specifier.join(".").unwrap(); let scope = config_file.specifier.join(".").unwrap();
let json_text = serde_json::to_string(&config_file.json).unwrap(); let json_text = serde_json::to_string(&config_file.json).unwrap();
let test_fs = deno_runtime::deno_fs::InMemoryFs::default(); let test_fs = deno_runtime::deno_fs::InMemoryFs::default();
let config_path = specifier_to_file_path(&config_file.specifier).unwrap(); let config_path = url_to_file_path(&config_file.specifier).unwrap();
test_fs.setup_text_files(vec![( test_fs.setup_text_files(vec![(
config_path.to_string_lossy().to_string(), config_path.to_string_lossy().to_string(),
json_text, json_text,
@ -1862,13 +1951,17 @@ fn resolve_node_modules_dir(
// `nodeModulesDir: true` setting in the deno.json file. This is to // `nodeModulesDir: true` setting in the deno.json file. This is to
// reduce the chance of modifying someone's node_modules directory // reduce the chance of modifying someone's node_modules directory
// without them having asked us to do so. // without them having asked us to do so.
let explicitly_disabled = workspace.node_modules_dir() == Some(false); let node_modules_mode = workspace.node_modules_dir().ok().flatten();
let explicitly_disabled = node_modules_mode == Some(NodeModulesDirMode::None);
if explicitly_disabled { if explicitly_disabled {
return None; return None;
} }
let enabled = byonm let enabled = byonm
|| workspace.node_modules_dir() == Some(true) || node_modules_mode
.map(|m| m.uses_node_modules_dir())
.unwrap_or(false)
|| workspace.vendor_dir_path().is_some(); || workspace.vendor_dir_path().is_some();
if !enabled { if !enabled {
return None; return None;
} }
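Relatedly, `resolve_node_modules_dir` consults the same mode: an explicit `none` disables a local `node_modules` directory even when byonm or a vendor dir would otherwise enable it. A compact sketch of that gate, again with simplified stand-in types:

```rust
// Sketch of the enable/disable gate in the hunk above (stand-in types).
#[derive(Clone, Copy, PartialEq, Eq)]
enum NodeModulesDirMode {
    Auto,
    Manual,
    None,
}

impl NodeModulesDirMode {
    fn uses_node_modules_dir(self) -> bool {
        matches!(self, NodeModulesDirMode::Auto | NodeModulesDirMode::Manual)
    }
}

fn node_modules_dir_enabled(
    mode: Option<NodeModulesDirMode>,
    byonm: bool,
    has_vendor_dir: bool,
) -> bool {
    if mode == Some(NodeModulesDirMode::None) {
        return false; // explicitly disabled in deno.json
    }
    byonm
        || mode.map(|m| m.uses_node_modules_dir()).unwrap_or(false)
        || has_vendor_dir
}

fn main() {
    assert!(!node_modules_dir_enabled(Some(NodeModulesDirMode::None), true, true));
    assert!(node_modules_dir_enabled(Some(NodeModulesDirMode::Auto), false, false));
    assert!(node_modules_dir_enabled(Some(NodeModulesDirMode::Manual), false, false));
    assert!(node_modules_dir_enabled(None, false, true));
    assert!(!node_modules_dir_enabled(None, false, false));
}
```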
@ -1884,7 +1977,11 @@ fn resolve_lockfile_from_path(
lockfile_path: PathBuf, lockfile_path: PathBuf,
frozen: bool, frozen: bool,
) -> Option<CliLockfile> { ) -> Option<CliLockfile> {
match CliLockfile::read_from_path(lockfile_path, frozen) { match CliLockfile::read_from_path(CliLockfileReadFromPathOptions {
file_path: lockfile_path,
frozen,
skip_write: false,
}) {
Ok(value) => { Ok(value) => {
if value.filename.exists() { if value.filename.exists() {
if let Ok(specifier) = ModuleSpecifier::from_file_path(&value.filename) if let Ok(specifier) = ModuleSpecifier::from_file_path(&value.filename)
@ -2129,7 +2226,7 @@ mod tests {
}, },
tls_certificate: None, tls_certificate: None,
unsafely_ignore_certificate_errors: None, unsafely_ignore_certificate_errors: None,
unstable: false, unstable: Default::default(),
javascript: LanguageWorkspaceSettings { javascript: LanguageWorkspaceSettings {
inlay_hints: InlayHintsSettings { inlay_hints: InlayHintsSettings {
parameter_names: InlayHintsParamNamesOptions { parameter_names: InlayHintsParamNamesOptions {
@ -2157,6 +2254,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,
@ -2202,6 +2300,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,

View file

@ -12,14 +12,15 @@ use super::language_server::StateSnapshot;
use super::performance::Performance; use super::performance::Performance;
use super::tsc; use super::tsc;
use super::tsc::TsServer; use super::tsc::TsServer;
use super::urls::LspClientUrl; use super::urls::uri_parse_unencoded;
use super::urls::url_to_uri;
use super::urls::LspUrlMap; use super::urls::LspUrlMap;
use crate::graph_util; use crate::graph_util;
use crate::graph_util::enhanced_resolution_error_message; use crate::graph_util::enhanced_resolution_error_message;
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams; use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
use crate::resolver::SloppyImportsResolution; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions; use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider; use crate::tools::lint::LintRuleProvider;
@ -37,12 +38,14 @@ use deno_core::serde_json::json;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking; use deno_core::unsync::spawn_blocking;
use deno_core::unsync::JoinHandle; use deno_core::unsync::JoinHandle;
use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError; use deno_graph::source::ResolveError;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use deno_graph::SpecifierError; use deno_graph::SpecifierError;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime; use deno_runtime::tokio_util::create_basic_runtime;
@ -160,15 +163,14 @@ impl DiagnosticsPublisher {
.state .state
.update(&record.specifier, version, &all_specifier_diagnostics); .update(&record.specifier, version, &all_specifier_diagnostics);
let file_referrer = documents.get_file_referrer(&record.specifier); let file_referrer = documents.get_file_referrer(&record.specifier);
let Ok(uri) =
url_map.specifier_to_uri(&record.specifier, file_referrer.as_deref())
else {
continue;
};
self self
.client .client
.publish_diagnostics( .publish_diagnostics(uri, all_specifier_diagnostics, version)
url_map
.normalize_specifier(&record.specifier, file_referrer.as_deref())
.unwrap_or(LspClientUrl::new(record.specifier)),
all_specifier_diagnostics,
version,
)
.await; .await;
messages_sent += 1; messages_sent += 1;
} }
@ -191,15 +193,14 @@ impl DiagnosticsPublisher {
// clear out any diagnostics for this specifier // clear out any diagnostics for this specifier
self.state.update(specifier, removed_value.version, &[]); self.state.update(specifier, removed_value.version, &[]);
let file_referrer = documents.get_file_referrer(specifier); let file_referrer = documents.get_file_referrer(specifier);
let Ok(uri) =
url_map.specifier_to_uri(specifier, file_referrer.as_deref())
else {
continue;
};
self self
.client .client
.publish_diagnostics( .publish_diagnostics(uri, Vec::new(), removed_value.version)
url_map
.normalize_specifier(specifier, file_referrer.as_deref())
.unwrap_or_else(|_| LspClientUrl::new(specifier.clone())),
Vec::new(),
removed_value.version,
)
.await; .await;
messages_sent += 1; messages_sent += 1;
} }
@ -337,9 +338,9 @@ impl DiagnosticsState {
if diagnostic.code if diagnostic.code
== Some(lsp::NumberOrString::String("no-cache".to_string())) == Some(lsp::NumberOrString::String("no-cache".to_string()))
|| diagnostic.code || diagnostic.code
== Some(lsp::NumberOrString::String("no-cache-jsr".to_string())) == Some(lsp::NumberOrString::String("not-installed-jsr".to_string()))
|| diagnostic.code || diagnostic.code
== Some(lsp::NumberOrString::String("no-cache-npm".to_string())) == Some(lsp::NumberOrString::String("not-installed-npm".to_string()))
{ {
no_cache_diagnostics.push(diagnostic.clone()); no_cache_diagnostics.push(diagnostic.clone());
} }
@ -737,7 +738,7 @@ fn to_lsp_related_information(
if let (Some(file_name), Some(start), Some(end)) = if let (Some(file_name), Some(start), Some(end)) =
(&ri.file_name, &ri.start, &ri.end) (&ri.file_name, &ri.start, &ri.end)
{ {
let uri = lsp::Url::parse(file_name).unwrap(); let uri = uri_parse_unencoded(file_name).unwrap();
Some(lsp::DiagnosticRelatedInformation { Some(lsp::DiagnosticRelatedInformation {
location: lsp::Location { location: lsp::Location {
uri, uri,
@ -991,9 +992,9 @@ pub enum DenoDiagnostic {
/// A remote module was not found in the cache. /// A remote module was not found in the cache.
NoCache(ModuleSpecifier), NoCache(ModuleSpecifier),
/// A remote jsr package reference was not found in the cache. /// A remote jsr package reference was not found in the cache.
NoCacheJsr(PackageReq, ModuleSpecifier), NotInstalledJsr(PackageReq, ModuleSpecifier),
/// A remote npm package reference was not found in the cache. /// A remote npm package reference was not found in the cache.
NoCacheNpm(PackageReq, ModuleSpecifier), NotInstalledNpm(PackageReq, ModuleSpecifier),
/// A local module was not found on the local file system. /// A local module was not found on the local file system.
NoLocal(ModuleSpecifier), NoLocal(ModuleSpecifier),
/// The specifier resolved to a remote specifier that was redirected to /// The specifier resolved to a remote specifier that was redirected to
@ -1018,8 +1019,8 @@ impl DenoDiagnostic {
Self::InvalidAttributeType(_) => "invalid-attribute-type", Self::InvalidAttributeType(_) => "invalid-attribute-type",
Self::NoAttributeType => "no-attribute-type", Self::NoAttributeType => "no-attribute-type",
Self::NoCache(_) => "no-cache", Self::NoCache(_) => "no-cache",
Self::NoCacheJsr(_, _) => "no-cache-jsr", Self::NotInstalledJsr(_, _) => "not-installed-jsr",
Self::NoCacheNpm(_, _) => "no-cache-npm", Self::NotInstalledNpm(_, _) => "not-installed-npm",
Self::NoLocal(_) => "no-local", Self::NoLocal(_) => "no-local",
Self::Redirect { .. } => "redirect", Self::Redirect { .. } => "redirect",
Self::ResolutionError(err) => { Self::ResolutionError(err) => {
@ -1070,7 +1071,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!("\"{to}\""), new_text: format!("\"{to}\""),
range: diagnostic.range, range: diagnostic.range,
@ -1087,7 +1088,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: " with { type: \"json\" }".to_string(), new_text: " with { type: \"json\" }".to_string(),
range: lsp::Range { range: lsp::Range {
@ -1100,17 +1101,22 @@ impl DenoDiagnostic {
}), }),
..Default::default() ..Default::default()
}, },
"no-cache" | "no-cache-jsr" | "no-cache-npm" => { "no-cache" | "not-installed-jsr" | "not-installed-npm" => {
let data = diagnostic let data = diagnostic
.data .data
.clone() .clone()
.ok_or_else(|| anyhow!("Diagnostic is missing data"))?; .ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
let data: DiagnosticDataSpecifier = serde_json::from_value(data)?; let data: DiagnosticDataSpecifier = serde_json::from_value(data)?;
let title = if matches!(
code.as_str(),
"not-installed-jsr" | "not-installed-npm"
) {
format!("Install \"{}\" and its dependencies.", data.specifier)
} else {
format!("Cache \"{}\" and its dependencies.", data.specifier)
};
lsp::CodeAction { lsp::CodeAction {
title: format!( title,
"Cache \"{}\" and its dependencies.",
data.specifier
),
kind: Some(lsp::CodeActionKind::QUICKFIX), kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
command: Some(lsp::Command { command: Some(lsp::Command {
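With the diagnostic codes renamed, the quick-fix title is now picked per code: the `not-installed-*` codes produce an "Install …" action while the rest keep the "Cache …" wording. A small self-contained sketch of that selection:

```rust
// Sketch of the title selection in the hunk above.
fn quick_fix_title(code: &str, specifier: &str) -> String {
    if matches!(code, "not-installed-jsr" | "not-installed-npm") {
        format!("Install \"{specifier}\" and its dependencies.")
    } else {
        format!("Cache \"{specifier}\" and its dependencies.")
    }
}

fn main() {
    assert_eq!(
        quick_fix_title("not-installed-npm", "npm:chalk@5"),
        "Install \"npm:chalk@5\" and its dependencies."
    );
    assert_eq!(
        quick_fix_title("no-cache", "https://deno.land/x/mod.ts"),
        "Cache \"https://deno.land/x/mod.ts\" and its dependencies."
    );
}
```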
@ -1133,7 +1139,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!( new_text: format!(
"\"{}\"", "\"{}\"",
@ -1159,7 +1165,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!( new_text: format!(
"\"{}\"", "\"{}\"",
@ -1185,7 +1191,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!("\"node:{}\"", data.specifier), new_text: format!("\"node:{}\"", data.specifier),
range: diagnostic.range, range: diagnostic.range,
@ -1216,8 +1222,8 @@ impl DenoDiagnostic {
match code.as_str() { match code.as_str() {
"import-map-remap" "import-map-remap"
| "no-cache" | "no-cache"
| "no-cache-jsr" | "not-installed-jsr"
| "no-cache-npm" | "not-installed-npm"
| "no-attribute-type" | "no-attribute-type"
| "redirect" | "redirect"
| "import-node-prefix-missing" => true, | "import-node-prefix-missing" => true,
@ -1255,10 +1261,12 @@ impl DenoDiagnostic {
Self::InvalidAttributeType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an attribute type of \"json\". Instead got \"{assert_type}\"."), None), Self::InvalidAttributeType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an attribute type of \"json\". Instead got \"{assert_type}\"."), None),
Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None), Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None),
Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))), Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))),
Self::NoCacheJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing jsr package: {}", pkg_req), Some(json!({ "specifier": specifier }))), Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NoCacheNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: {}", pkg_req), Some(json!({ "specifier": specifier }))), Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => { Self::NoLocal(specifier) => {
let maybe_sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution); let maybe_sloppy_resolution = CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs))
).resolve(specifier, SloppyImportsResolutionMode::Execution);
let data = maybe_sloppy_resolution.as_ref().map(|res| { let data = maybe_sloppy_resolution.as_ref().map(|res| {
json!({ json!({
"specifier": specifier, "specifier": specifier,
@ -1303,10 +1311,7 @@ impl DenoDiagnostic {
} }
} }
fn specifier_text_for_redirected( fn specifier_text_for_redirected(redirect: &Url, referrer: &Url) -> String {
redirect: &lsp::Url,
referrer: &lsp::Url,
) -> String {
if redirect.scheme() == "file" && referrer.scheme() == "file" { if redirect.scheme() == "file" && referrer.scheme() == "file" {
// use a relative specifier when it's going to a file url // use a relative specifier when it's going to a file url
relative_specifier(redirect, referrer) relative_specifier(redirect, referrer)
@ -1315,7 +1320,7 @@ fn specifier_text_for_redirected(
} }
} }
fn relative_specifier(specifier: &lsp::Url, referrer: &lsp::Url) -> String { fn relative_specifier(specifier: &Url, referrer: &Url) -> String {
match referrer.make_relative(specifier) { match referrer.make_relative(specifier) {
Some(relative) => { Some(relative) => {
if relative.starts_with('.') { if relative.starts_with('.') {
@ -1410,7 +1415,8 @@ fn diagnose_resolution(
JsrPackageReqReference::from_specifier(specifier) JsrPackageReqReference::from_specifier(specifier)
{ {
let req = pkg_ref.into_inner().req; let req = pkg_ref.into_inner().req;
diagnostics.push(DenoDiagnostic::NoCacheJsr(req, specifier.clone())); diagnostics
.push(DenoDiagnostic::NotInstalledJsr(req, specifier.clone()));
} else if let Ok(pkg_ref) = } else if let Ok(pkg_ref) =
NpmPackageReqReference::from_specifier(specifier) NpmPackageReqReference::from_specifier(specifier)
{ {
@ -1419,7 +1425,7 @@ fn diagnose_resolution(
let req = pkg_ref.into_inner().req; let req = pkg_ref.into_inner().req;
if !npm_resolver.is_pkg_req_folder_cached(&req) { if !npm_resolver.is_pkg_req_folder_cached(&req) {
diagnostics diagnostics
.push(DenoDiagnostic::NoCacheNpm(req, specifier.clone())); .push(DenoDiagnostic::NotInstalledNpm(req, specifier.clone()));
} }
} }
} else if let Some(module_name) = specifier.as_str().strip_prefix("node:") } else if let Some(module_name) = specifier.as_str().strip_prefix("node:")
@ -1445,7 +1451,7 @@ fn diagnose_resolution(
// check that a @types/node package exists in the resolver // check that a @types/node package exists in the resolver
let types_node_req = PackageReq::from_str("@types/node").unwrap(); let types_node_req = PackageReq::from_str("@types/node").unwrap();
if !npm_resolver.is_pkg_req_folder_cached(&types_node_req) { if !npm_resolver.is_pkg_req_folder_cached(&types_node_req) {
diagnostics.push(DenoDiagnostic::NoCacheNpm( diagnostics.push(DenoDiagnostic::NotInstalledNpm(
types_node_req, types_node_req,
ModuleSpecifier::parse("npm:@types/node").unwrap(), ModuleSpecifier::parse("npm:@types/node").unwrap(),
)); ));
@ -1493,7 +1499,11 @@ fn diagnose_dependency(
.data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer))
.and_then(|d| d.resolver.maybe_import_map()); .and_then(|d| d.resolver.maybe_import_map());
if let Some(import_map) = import_map { if let Some(import_map) = import_map {
if let Resolution::Ok(resolved) = &dependency.maybe_code { let resolved = dependency
.maybe_code
.ok()
.or_else(|| dependency.maybe_type.ok());
if let Some(resolved) = resolved {
if let Some(to) = import_map.lookup(&resolved.specifier, referrer) { if let Some(to) = import_map.lookup(&resolved.specifier, referrer) {
if dependency_key != to { if dependency_key != to {
diagnostics.push( diagnostics.push(
@ -1511,17 +1521,19 @@ fn diagnose_dependency(
let import_ranges: Vec<_> = dependency let import_ranges: Vec<_> = dependency
.imports .imports
.iter() .iter()
.map(|i| documents::to_lsp_range(&i.range)) .map(|i| documents::to_lsp_range(&i.specifier_range))
.collect(); .collect();
// TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has // TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has
// a different specifier and therefore needs a separate call to // a different specifier and therefore needs a separate call to
// `diagnose_resolution()`. It would be much cleaner if that were modelled as // `diagnose_resolution()`. It would be much cleaner if that were modelled as
// a separate dependency: https://github.com/denoland/deno_graph/issues/247. // a separate dependency: https://github.com/denoland/deno_graph/issues/247.
let is_types_deno_types = !dependency.maybe_type.is_none() let is_types_deno_types = !dependency.maybe_type.is_none()
&& !dependency && !dependency.imports.iter().any(|i| {
.imports dependency
.iter() .maybe_type
.any(|i| dependency.maybe_type.includes(&i.range.start).is_some()); .includes(&i.specifier_range.start)
.is_some()
});
diagnostics.extend( diagnostics.extend(
diagnose_resolution( diagnose_resolution(
@ -1634,7 +1646,8 @@ mod tests {
use test_util::TempDir; use test_util::TempDir;
fn mock_config() -> Config { fn mock_config() -> Config {
let root_uri = resolve_url("file:///").unwrap(); let root_url = resolve_url("file:///").unwrap();
let root_uri = url_to_uri(&root_url).unwrap();
Config { Config {
settings: Arc::new(Settings { settings: Arc::new(Settings {
unscoped: Arc::new(WorkspaceSettings { unscoped: Arc::new(WorkspaceSettings {
@ -1645,7 +1658,7 @@ mod tests {
..Default::default() ..Default::default()
}), }),
workspace_folders: Arc::new(vec![( workspace_folders: Arc::new(vec![(
root_uri.clone(), root_url,
lsp::WorkspaceFolder { lsp::WorkspaceFolder {
uri: root_uri, uri: root_uri,
name: "".to_string(), name: "".to_string(),
@ -1660,7 +1673,7 @@ mod tests {
maybe_import_map: Option<(&str, &str)>, maybe_import_map: Option<(&str, &str)>,
) -> (TempDir, StateSnapshot) { ) -> (TempDir, StateSnapshot) {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let root_uri = temp_dir.uri(); let root_uri = temp_dir.url();
let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap())); let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap()));
let mut config = Config::new_with_roots([root_uri.clone()]); let mut config = Config::new_with_roots([root_uri.clone()]);
if let Some((relative_path, json_string)) = maybe_import_map { if let Some((relative_path, json_string)) = maybe_import_map {
@ -1827,7 +1840,7 @@ let c: number = "a";
assert_eq!(actual.len(), 2); assert_eq!(actual.len(), 2);
for record in actual { for record in actual {
let relative_specifier = let relative_specifier =
temp_dir.uri().make_relative(&record.specifier).unwrap(); temp_dir.url().make_relative(&record.specifier).unwrap();
match relative_specifier.as_str() { match relative_specifier.as_str() {
"std/assert/mod.ts" => { "std/assert/mod.ts" => {
assert_eq!(json!(record.versioned.diagnostics), json!([])) assert_eq!(json!(record.versioned.diagnostics), json!([]))
@ -2046,7 +2059,7 @@ let c: number = "a";
"source": "deno", "source": "deno",
"message": format!( "message": format!(
"Unable to load a local module: {}🦕.ts\nPlease check the file path.", "Unable to load a local module: {}🦕.ts\nPlease check the file path.",
temp_dir.uri(), temp_dir.url(),
), ),
} }
]) ])

View file

@ -2,7 +2,6 @@
use super::cache::calculate_fs_version; use super::cache::calculate_fs_version;
use super::cache::LspCache; use super::cache::LspCache;
use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY;
use super::config::Config; use super::config::Config;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::testing::TestCollector; use super::testing::TestCollector;
@ -12,7 +11,6 @@ use super::tsc;
use super::tsc::AssetDocument; use super::tsc::AssetDocument;
use crate::graph_util::CliJsrUrlProvider; use crate::graph_util::CliJsrUrlProvider;
use deno_runtime::fs_util::specifier_to_file_path;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::swc::visit::VisitWith; use deno_ast::swc::visit::VisitWith;
@ -28,6 +26,7 @@ use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode; use deno_graph::source::ResolutionMode;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
@ -61,6 +60,9 @@ pub enum LanguageId {
Json, Json,
JsonC, JsonC,
Markdown, Markdown,
Html,
Css,
Yaml,
Unknown, Unknown,
} }
@ -74,6 +76,9 @@ impl LanguageId {
LanguageId::Json => Some("json"), LanguageId::Json => Some("json"),
LanguageId::JsonC => Some("jsonc"), LanguageId::JsonC => Some("jsonc"),
LanguageId::Markdown => Some("md"), LanguageId::Markdown => Some("md"),
LanguageId::Html => Some("html"),
LanguageId::Css => Some("css"),
LanguageId::Yaml => Some("yaml"),
LanguageId::Unknown => None, LanguageId::Unknown => None,
} }
} }
@ -86,6 +91,9 @@ impl LanguageId {
LanguageId::Tsx => Some("text/tsx"), LanguageId::Tsx => Some("text/tsx"),
LanguageId::Json | LanguageId::JsonC => Some("application/json"), LanguageId::Json | LanguageId::JsonC => Some("application/json"),
LanguageId::Markdown => Some("text/markdown"), LanguageId::Markdown => Some("text/markdown"),
LanguageId::Html => Some("text/html"),
LanguageId::Css => Some("text/css"),
LanguageId::Yaml => Some("application/yaml"),
LanguageId::Unknown => None, LanguageId::Unknown => None,
} }
} }
@ -110,6 +118,9 @@ impl FromStr for LanguageId {
"json" => Ok(Self::Json), "json" => Ok(Self::Json),
"jsonc" => Ok(Self::JsonC), "jsonc" => Ok(Self::JsonC),
"markdown" => Ok(Self::Markdown), "markdown" => Ok(Self::Markdown),
"html" => Ok(Self::Html),
"css" => Ok(Self::Css),
"yaml" => Ok(Self::Yaml),
_ => Ok(Self::Unknown), _ => Ok(Self::Unknown),
} }
} }
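`LanguageId` learns about HTML, CSS and YAML documents here, mapping each to an extension and a content type. A condensed sketch of the new mappings:

```rust
// Condensed sketch of the new LanguageId entries and their mappings.
#[derive(Debug, PartialEq, Eq)]
enum LanguageIdSketch {
    Html,
    Css,
    Yaml,
    Unknown,
}

impl LanguageIdSketch {
    fn from_str(s: &str) -> Self {
        match s {
            "html" => Self::Html,
            "css" => Self::Css,
            "yaml" => Self::Yaml,
            _ => Self::Unknown,
        }
    }

    fn as_content_type(&self) -> Option<&'static str> {
        match self {
            Self::Html => Some("text/html"),
            Self::Css => Some("text/css"),
            Self::Yaml => Some("application/yaml"),
            Self::Unknown => None,
        }
    }
}

fn main() {
    let id = LanguageIdSketch::from_str("yaml");
    assert_eq!(id, LanguageIdSketch::Yaml);
    assert_eq!(id.as_content_type(), Some("application/yaml"));
}
```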
@ -838,7 +849,7 @@ impl FileSystemDocuments {
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Option<Arc<Document>> { ) -> Option<Arc<Document>> {
let doc = if specifier.scheme() == "file" { let doc = if specifier.scheme() == "file" {
let path = specifier_to_file_path(specifier).ok()?; let path = url_to_file_path(specifier).ok()?;
let bytes = fs::read(path).ok()?; let bytes = fs::read(path).ok()?;
let content = let content =
deno_graph::source::decode_owned_source(specifier, bytes, None).ok()?; deno_graph::source::decode_owned_source(specifier, bytes, None).ok()?;
@ -872,22 +883,19 @@ impl FileSystemDocuments {
} else { } else {
let http_cache = cache.for_specifier(file_referrer); let http_cache = cache.for_specifier(file_referrer);
let cache_key = http_cache.cache_item_key(specifier).ok()?; let cache_key = http_cache.cache_item_key(specifier).ok()?;
let bytes = http_cache let cached_file = http_cache.get(&cache_key, None).ok()??;
.read_file_bytes(&cache_key, None, LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY)
.ok()??;
let specifier_headers = http_cache.read_headers(&cache_key).ok()??;
let (_, maybe_charset) = let (_, maybe_charset) =
deno_graph::source::resolve_media_type_and_charset_from_headers( deno_graph::source::resolve_media_type_and_charset_from_headers(
specifier, specifier,
Some(&specifier_headers), Some(&cached_file.metadata.headers),
); );
let content = deno_graph::source::decode_owned_source( let content = deno_graph::source::decode_owned_source(
specifier, specifier,
bytes, cached_file.content,
maybe_charset, maybe_charset,
) )
.ok()?; .ok()?;
let maybe_headers = Some(specifier_headers); let maybe_headers = Some(cached_file.metadata.headers);
Document::new( Document::new(
specifier.clone(), specifier.clone(),
content.into(), content.into(),
@ -1128,7 +1136,7 @@ impl Documents {
return true; return true;
} }
if specifier.scheme() == "file" { if specifier.scheme() == "file" {
return specifier_to_file_path(&specifier) return url_to_file_path(&specifier)
.map(|p| p.is_file()) .map(|p| p.is_file())
.unwrap_or(false); .unwrap_or(false);
} }
@ -1243,7 +1251,7 @@ impl Documents {
/// tsc when type checking. /// tsc when type checking.
pub fn resolve( pub fn resolve(
&self, &self,
specifiers: &[String], raw_specifiers: &[String],
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Vec<Option<(ModuleSpecifier, MediaType)>> { ) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
@ -1254,16 +1262,16 @@ impl Documents {
.or(file_referrer); .or(file_referrer);
let dependencies = document.as_ref().map(|d| d.dependencies()); let dependencies = document.as_ref().map(|d| d.dependencies());
let mut results = Vec::new(); let mut results = Vec::new();
for specifier in specifiers { for raw_specifier in raw_specifiers {
if specifier.starts_with("asset:") { if raw_specifier.starts_with("asset:") {
if let Ok(specifier) = ModuleSpecifier::parse(specifier) { if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) {
let media_type = MediaType::from_specifier(&specifier); let media_type = MediaType::from_specifier(&specifier);
results.push(Some((specifier, media_type))); results.push(Some((specifier, media_type)));
} else { } else {
results.push(None); results.push(None);
} }
} else if let Some(dep) = } else if let Some(dep) =
dependencies.as_ref().and_then(|d| d.get(specifier)) dependencies.as_ref().and_then(|d| d.get(raw_specifier))
{ {
if let Some(specifier) = dep.maybe_type.maybe_specifier() { if let Some(specifier) = dep.maybe_type.maybe_specifier() {
results.push(self.resolve_dependency( results.push(self.resolve_dependency(
@ -1282,7 +1290,7 @@ impl Documents {
} }
} else if let Ok(specifier) = } else if let Ok(specifier) =
self.resolver.as_graph_resolver(file_referrer).resolve( self.resolver.as_graph_resolver(file_referrer).resolve(
specifier, raw_specifier,
&deno_graph::Range { &deno_graph::Range {
specifier: referrer.clone(), specifier: referrer.clone(),
start: deno_graph::Position::zeroed(), start: deno_graph::Position::zeroed(),
@ -1317,7 +1325,7 @@ impl Documents {
let fs_docs = &self.file_system_docs; let fs_docs = &self.file_system_docs;
// Clean up non-existent documents. // Clean up non-existent documents.
fs_docs.docs.retain(|specifier, _| { fs_docs.docs.retain(|specifier, _| {
let Ok(path) = specifier_to_file_path(specifier) else { let Ok(path) = url_to_file_path(specifier) else {
// Remove non-file schemed docs (deps). They may not be dependencies // Remove non-file schemed docs (deps). They may not be dependencies
// anymore after updating resolvers. // anymore after updating resolvers.
return false; return false;
@ -1414,11 +1422,9 @@ impl Documents {
if let Some(lockfile) = config_data.lockfile.as_ref() { if let Some(lockfile) = config_data.lockfile.as_ref() {
let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default(); let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
let lockfile = lockfile.lock(); let lockfile = lockfile.lock();
for key in lockfile.content.packages.specifiers.keys() { for dep_req in lockfile.content.packages.specifiers.keys() {
if let Some(key) = key.strip_prefix("npm:") { if dep_req.kind == deno_semver::package::PackageKind::Npm {
if let Ok(req) = PackageReq::from_str(key) { reqs.insert(dep_req.req.clone());
reqs.insert(req);
}
} }
} }
} }
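The lockfile's `packages.specifiers` map is now keyed by typed dependency requirements instead of `npm:`/`jsr:`-prefixed strings, so collecting npm requirements becomes a kind check rather than prefix stripping. A stand-in sketch of that filtering (hypothetical types in place of `deno_semver`'s):

```rust
// Stand-in sketch: collecting npm requirements from typed lockfile keys
// instead of parsing "npm:"-prefixed strings.
use std::collections::BTreeSet;

#[derive(Clone, Copy, PartialEq, Eq)]
enum PackageKind {
    Npm,
    Jsr,
}

struct DepReq {
    kind: PackageKind,
    req: String, // stands in for deno_semver's PackageReq
}

fn npm_reqs(specifier_keys: &[DepReq]) -> BTreeSet<String> {
    specifier_keys
        .iter()
        .filter(|dep| dep.kind == PackageKind::Npm)
        .map(|dep| dep.req.clone())
        .collect()
}

fn main() {
    let keys = vec![
        DepReq { kind: PackageKind::Npm, req: "chalk@5".into() },
        DepReq { kind: PackageKind::Jsr, req: "@std/path@1".into() },
    ];
    assert_eq!(npm_reqs(&keys).len(), 1);
}
```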
@ -1518,12 +1524,16 @@ impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> {
fn cache_module_info( fn cache_module_info(
&self, &self,
specifier: &deno_ast::ModuleSpecifier, specifier: &deno_ast::ModuleSpecifier,
media_type: MediaType,
source: &Arc<[u8]>, source: &Arc<[u8]>,
module_info: &deno_graph::ModuleInfo, module_info: &deno_graph::ModuleInfo,
) { ) {
self self.inner_loader.cache_module_info(
.inner_loader specifier,
.cache_module_info(specifier, source, module_info) media_type,
source,
module_info,
)
} }
} }
@ -1607,7 +1617,7 @@ mod tests {
async fn setup() -> (Documents, LspCache, TempDir) { async fn setup() -> (Documents, LspCache, TempDir) {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
temp_dir.create_dir_all(".deno_dir"); temp_dir.create_dir_all(".deno_dir");
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
let config = Config::default(); let config = Config::default();
let resolver = let resolver =
Arc::new(LspResolver::from_config(&config, &cache, None).await); Arc::new(LspResolver::from_config(&config, &cache, None).await);
@ -1690,7 +1700,7 @@ console.log(b, "hello deno");
// but we'll guard against it anyway // but we'll guard against it anyway
let (mut documents, _, temp_dir) = setup().await; let (mut documents, _, temp_dir) = setup().await;
let file_path = temp_dir.path().join("file.ts"); let file_path = temp_dir.path().join("file.ts");
let file_specifier = temp_dir.uri().join("file.ts").unwrap(); let file_specifier = temp_dir.url().join("file.ts").unwrap();
file_path.write(""); file_path.write("");
// open the document // open the document
@ -1718,18 +1728,18 @@ console.log(b, "hello deno");
let (mut documents, cache, temp_dir) = setup().await; let (mut documents, cache, temp_dir) = setup().await;
let file1_path = temp_dir.path().join("file1.ts"); let file1_path = temp_dir.path().join("file1.ts");
let file1_specifier = temp_dir.uri().join("file1.ts").unwrap(); let file1_specifier = temp_dir.url().join("file1.ts").unwrap();
fs::write(&file1_path, "").unwrap(); fs::write(&file1_path, "").unwrap();
let file2_path = temp_dir.path().join("file2.ts"); let file2_path = temp_dir.path().join("file2.ts");
let file2_specifier = temp_dir.uri().join("file2.ts").unwrap(); let file2_specifier = temp_dir.url().join("file2.ts").unwrap();
fs::write(&file2_path, "").unwrap(); fs::write(&file2_path, "").unwrap();
let file3_path = temp_dir.path().join("file3.ts"); let file3_path = temp_dir.path().join("file3.ts");
let file3_specifier = temp_dir.uri().join("file3.ts").unwrap(); let file3_specifier = temp_dir.url().join("file3.ts").unwrap();
fs::write(&file3_path, "").unwrap(); fs::write(&file3_path, "").unwrap();
let mut config = Config::new_with_roots([temp_dir.uri()]); let mut config = Config::new_with_roots([temp_dir.url()]);
let workspace_settings = let workspace_settings =
serde_json::from_str(r#"{ "enable": true }"#).unwrap(); serde_json::from_str(r#"{ "enable": true }"#).unwrap();
config.set_workspace_settings(workspace_settings, vec![]); config.set_workspace_settings(workspace_settings, vec![]);

View file

@ -14,7 +14,6 @@ use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageInfoVersion; use deno_graph::packages::JsrPackageInfoVersion;
use deno_graph::packages::JsrPackageVersionInfo; use deno_graph::packages::JsrPackageVersionInfo;
use deno_graph::ModuleSpecifier; use deno_graph::ModuleSpecifier;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
@ -92,20 +91,23 @@ impl JsrCacheResolver {
} }
} }
if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) { if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) {
for (req_url, nv_url) in &lockfile.lock().content.packages.specifiers { for (dep_req, version) in &lockfile.lock().content.packages.specifiers {
let Some(req) = req_url.strip_prefix("jsr:") else { let req = match dep_req.kind {
deno_semver::package::PackageKind::Jsr => &dep_req.req,
deno_semver::package::PackageKind::Npm => {
continue;
}
};
let Ok(version) = Version::parse_standard(version) else {
continue; continue;
}; };
let Some(nv) = nv_url.strip_prefix("jsr:") else { nv_by_req.insert(
continue; req.clone(),
}; Some(PackageNv {
let Ok(req) = PackageReq::from_str(req) else { name: req.name.clone(),
continue; version,
}; }),
let Ok(nv) = PackageNv::from_str(nv) else { );
continue;
};
nv_by_req.insert(req, Some(nv));
} }
} }
Self { Self {
@ -258,12 +260,9 @@ fn read_cached_url(
cache: &Arc<dyn HttpCache>, cache: &Arc<dyn HttpCache>,
) -> Option<Vec<u8>> { ) -> Option<Vec<u8>> {
cache cache
.read_file_bytes( .get(&cache.cache_item_key(url).ok()?, None)
&cache.cache_item_key(url).ok()?,
None,
deno_cache_dir::GlobalToLocalCopy::Disallow,
)
.ok()? .ok()?
.map(|f| f.content)
} }
#[derive(Debug)] #[derive(Debug)]
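Both this file and documents.rs above move from separate `read_file_bytes` / `read_headers` calls to a single `HttpCache::get` that returns an entry holding content and metadata together. A stand-in sketch of consuming such an entry (hypothetical types, not the real `deno_cache_dir` API):

```rust
// Stand-in sketch: one cache lookup yields both bytes and headers, instead of
// two separate reads that could disagree.
use std::collections::HashMap;

struct CachedEntryMetadata {
    headers: HashMap<String, String>,
}

struct CachedEntry {
    content: Vec<u8>,
    metadata: CachedEntryMetadata,
}

fn charset_and_body(entry: CachedEntry) -> (Option<String>, Vec<u8>) {
    let charset = entry
        .metadata
        .headers
        .get("content-type")
        .and_then(|ct| ct.split("charset=").nth(1))
        .map(|c| c.trim().to_string());
    (charset, entry.content)
}

fn main() {
    let mut headers = HashMap::new();
    headers.insert(
        "content-type".to_string(),
        "application/typescript; charset=utf-8".to_string(),
    );
    let entry = CachedEntry {
        content: b"export {};".to_vec(),
        metadata: CachedEntryMetadata { headers },
    };
    let (charset, body) = charset_and_body(entry);
    assert_eq!(charset.as_deref(), Some("utf-8"));
    assert_eq!(body, b"export {};");
}
```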
@ -311,7 +310,7 @@ impl PackageSearchApi for CliJsrSearchApi {
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher
.fetch(&search_url, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&search_url)
.await? .await?
.into_text_decoded() .into_text_decoded()
}) })

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use base64::Engine;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDiscoverOptions; use deno_config::workspace::WorkspaceDiscoverOptions;
@ -12,9 +11,11 @@ use deno_core::serde_json::json;
use deno_core::serde_json::Value; use deno_core::serde_json::Value;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
use deno_core::url; use deno_core::url;
use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
@ -31,6 +32,7 @@ use std::collections::VecDeque;
use std::env; use std::env;
use std::fmt::Write as _; use std::fmt::Write as _;
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::mpsc::unbounded_channel; use tokio::sync::mpsc::unbounded_channel;
use tokio::sync::mpsc::UnboundedReceiver; use tokio::sync::mpsc::UnboundedReceiver;
@ -85,6 +87,8 @@ use super::tsc::ChangeKind;
use super::tsc::GetCompletionDetailsArgs; use super::tsc::GetCompletionDetailsArgs;
use super::tsc::TsServer; use super::tsc::TsServer;
use super::urls; use super::urls;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::create_default_npmrc; use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store; use crate::args::get_root_cert_store;
use crate::args::has_flag_env_var; use crate::args::has_flag_env_var;
@ -92,6 +96,7 @@ use crate::args::CaData;
use crate::args::CacheSetting; use crate::args::CacheSetting;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::Flags; use crate::args::Flags;
use crate::args::InternalFlags;
use crate::args::UnstableFmtOptions; use crate::args::UnstableFmtOptions;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
@ -109,7 +114,6 @@ use crate::util::fs::remove_dir_all_if_exists;
use crate::util::path::is_importable_ext; use crate::util::path::is_importable_ext;
use crate::util::path::to_percent_decoded_str; use crate::util::path::to_percent_decoded_str;
use crate::util::sync::AsyncFlag; use crate::util::sync::AsyncFlag;
use deno_runtime::fs_util::specifier_to_file_path;
struct LspRootCertStoreProvider(RootCertStore); struct LspRootCertStoreProvider(RootCertStore);
@ -204,11 +208,11 @@ pub struct Inner {
module_registry: ModuleRegistry, module_registry: ModuleRegistry,
/// A lazily create "server" for handling test run requests. /// A lazily create "server" for handling test run requests.
maybe_testing_server: Option<testing::TestServer>, maybe_testing_server: Option<testing::TestServer>,
npm_search_api: CliNpmSearchApi, pub npm_search_api: CliNpmSearchApi,
project_version: usize, project_version: usize,
/// A collection of measurements which instrument the performance of the LSP. /// A collection of measurements which instrument the performance of the LSP.
performance: Arc<Performance>, performance: Arc<Performance>,
resolver: Arc<LspResolver>, pub resolver: Arc<LspResolver>,
task_queue: LanguageServerTaskQueue, task_queue: LanguageServerTaskQueue,
/// A memoized version of fixable diagnostic codes retrieved from TypeScript. /// A memoized version of fixable diagnostic codes retrieved from TypeScript.
ts_fixable_diagnostics: Vec<String>, ts_fixable_diagnostics: Vec<String>,
@ -237,7 +241,7 @@ impl LanguageServer {
} }
} }
/// Similar to `deno cache` on the command line, where modules will be cached /// Similar to `deno install --entrypoint` on the command line, where modules will be cached
/// in the Deno cache, including any of their dependencies. /// in the Deno cache, including any of their dependencies.
pub async fn cache( pub async fn cache(
&self, &self,
@ -271,10 +275,9 @@ impl LanguageServer {
factory.fs(), factory.fs(),
&roots, &roots,
graph_util::GraphValidOptions { graph_util::GraphValidOptions {
is_vendoring: false, kind: GraphKind::All,
follow_type_only: true,
check_js: false, check_js: false,
exit_lockfile_errors: false, exit_integrity_errors: false,
}, },
)?; )?;
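Graph validation for the LSP cache command now takes a `GraphKind` and an `exit_integrity_errors` flag in place of the old `is_vendoring` / `follow_type_only` booleans. A stand-in sketch of the reshaped options as used at this call site (not the real `graph_util` types):

```rust
// Stand-in sketch of the reshaped options (hypothetical types).
#[allow(dead_code)]
#[derive(Debug, Clone, Copy)]
enum GraphKind {
    All, // code + types; replaces follow_type_only: true
    CodeOnly,
    TypesOnly,
}

#[derive(Debug)]
struct GraphValidOptionsSketch {
    kind: GraphKind,
    check_js: bool,
    exit_integrity_errors: bool, // was exit_lockfile_errors
}

fn main() {
    // Shape of the call in the hunk above.
    let opts = GraphValidOptionsSketch {
        kind: GraphKind::All,
        check_js: false,
        exit_integrity_errors: false,
    };
    println!("{opts:?}");
}
```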
@ -624,7 +627,7 @@ impl Inner {
let maybe_root_path = self let maybe_root_path = self
.config .config
.root_uri() .root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok()); .and_then(|uri| url_to_file_path(uri).ok());
let root_cert_store = get_root_cert_store( let root_cert_store = get_root_cert_store(
maybe_root_path, maybe_root_path,
workspace_settings.certificate_stores.clone(), workspace_settings.certificate_stores.clone(),
@ -720,7 +723,9 @@ impl Inner {
.into_iter() .into_iter()
.map(|folder| { .map(|folder| {
( (
self.url_map.normalize_url(&folder.uri, LspUrlKind::Folder), self
.url_map
.uri_to_specifier(&folder.uri, LspUrlKind::Folder),
folder, folder,
) )
}) })
@ -728,14 +733,17 @@ impl Inner {
} }
// rootUri is deprecated by the LSP spec. If it's specified, merge it into // rootUri is deprecated by the LSP spec. If it's specified, merge it into
// workspace_folders. // workspace_folders.
#[allow(deprecated)]
if let Some(root_uri) = params.root_uri { if let Some(root_uri) = params.root_uri {
if !workspace_folders.iter().any(|(_, f)| f.uri == root_uri) { if !workspace_folders.iter().any(|(_, f)| f.uri == root_uri) {
let name = root_uri.path_segments().and_then(|s| s.last()); let root_url =
self.url_map.uri_to_specifier(&root_uri, LspUrlKind::Folder);
let name = root_url.path_segments().and_then(|s| s.last());
let name = name.unwrap_or_default().to_string(); let name = name.unwrap_or_default().to_string();
workspace_folders.insert( workspace_folders.insert(
0, 0,
( (
self.url_map.normalize_url(&root_uri, LspUrlKind::Folder), root_url,
WorkspaceFolder { WorkspaceFolder {
uri: root_uri, uri: root_uri,
name, name,
@ -795,7 +803,7 @@ impl Inner {
let mut roots = config let mut roots = config
.workspace_folders .workspace_folders
.iter() .iter()
.filter_map(|p| specifier_to_file_path(&p.0).ok()) .filter_map(|p| url_to_file_path(&p.0).ok())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
roots.sort(); roots.sort();
let roots = roots let roots = roots
@ -955,32 +963,43 @@ impl Inner {
.tree .tree
.refresh(&self.config.settings, &self.workspace_files, &file_fetcher) .refresh(&self.config.settings, &self.workspace_files, &file_fetcher)
.await; .await;
self
.client
.send_did_refresh_deno_configuration_tree_notification(
self.config.tree.to_did_refresh_params(),
);
for config_file in self.config.tree.config_files() { for config_file in self.config.tree.config_files() {
if let Ok((compiler_options, _)) = config_file.to_compiler_options() { (|| {
if let Some(compiler_options_obj) = compiler_options.as_object() { let compiler_options = config_file.to_compiler_options().ok()?.options;
if let Some(jsx_import_source) = let jsx_import_source = compiler_options.get("jsxImportSource")?;
compiler_options_obj.get("jsxImportSource") let jsx_import_source = jsx_import_source.as_str()?.to_string();
{
if let Some(jsx_import_source) = jsx_import_source.as_str() {
let specifiers = vec![Url::parse(&format!(
"data:application/typescript;base64,{}",
base64::engine::general_purpose::STANDARD
.encode(format!("import '{jsx_import_source}/jsx-runtime';"))
))
.unwrap()];
let referrer = config_file.specifier.clone(); let referrer = config_file.specifier.clone();
let specifier = format!("{jsx_import_source}/jsx-runtime");
self.task_queue.queue_task(Box::new(|ls: LanguageServer| { self.task_queue.queue_task(Box::new(|ls: LanguageServer| {
spawn(async move { spawn(async move {
if let Err(err) = ls.cache(specifiers, referrer, false).await let specifier = {
{ let inner = ls.inner.read().await;
let resolver = inner.resolver.as_graph_resolver(Some(&referrer));
let Ok(specifier) = resolver.resolve(
&specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
deno_graph::source::ResolutionMode::Types,
) else {
return;
};
specifier
};
if let Err(err) = ls.cache(vec![specifier], referrer, false).await {
lsp_warn!("{:#}", err); lsp_warn!("{:#}", err);
} }
}); });
})); }));
} Some(())
} })();
}
}
} }
} }
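Previously the LSP primed the JSX runtime by synthesizing a base64 `data:` module whose only content was `import '{jsxImportSource}/jsx-runtime'`; now it resolves that bare specifier through the workspace's graph resolver in `Types` resolution mode and caches the resolved URL. A small sketch contrasting the two specifier forms; the base64 construction mirrors the removed code, while the resolver call itself is Deno-internal and not reproduced:

```rust
// Sketch: the removed approach wrapped the import in a data: URL; the new one
// resolves "<jsxImportSource>/jsx-runtime" itself and caches the result.
use base64::Engine;

fn main() {
    let jsx_import_source = "react";

    // Old: a synthetic data: module whose only job was to import the runtime.
    let data_url = format!(
        "data:application/typescript;base64,{}",
        base64::engine::general_purpose::STANDARD
            .encode(format!("import '{jsx_import_source}/jsx-runtime';"))
    );
    println!("old cache root: {data_url}");

    // New: the bare specifier handed to the workspace resolver before caching.
    let specifier = format!("{jsx_import_source}/jsx-runtime");
    println!("new specifier to resolve: {specifier}");
}
```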
@ -1012,7 +1031,10 @@ impl Inner {
async fn did_open(&mut self, params: DidOpenTextDocumentParams) { async fn did_open(&mut self, params: DidOpenTextDocumentParams) {
let mark = self.performance.mark_with_args("lsp.did_open", &params); let mark = self.performance.mark_with_args("lsp.did_open", &params);
if params.text_document.uri.scheme() == "deno" { let Some(scheme) = params.text_document.uri.scheme() else {
return;
};
if scheme.as_str() == "deno" {
// we can ignore virtual text documents opening, as they don't need to // we can ignore virtual text documents opening, as they don't need to
// be tracked in memory, as they are static assets that won't change // be tracked in memory, as they are static assets that won't change
// already managed by the language service // already managed by the language service
@ -1031,16 +1053,14 @@ impl Inner {
lsp_warn!( lsp_warn!(
"Unsupported language id \"{}\" received for document \"{}\".", "Unsupported language id \"{}\" received for document \"{}\".",
params.text_document.language_id, params.text_document.language_id,
params.text_document.uri params.text_document.uri.as_str()
); );
} }
let file_referrer = (self let file_referrer = Some(uri_to_url(&params.text_document.uri))
.documents .filter(|s| self.documents.is_valid_file_referrer(s));
.is_valid_file_referrer(&params.text_document.uri))
.then(|| params.text_document.uri.clone());
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
let document = self.documents.open( let document = self.documents.open(
specifier.clone(), specifier.clone(),
params.text_document.version, params.text_document.version,
@ -1062,7 +1082,7 @@ impl Inner {
let mark = self.performance.mark_with_args("lsp.did_change", &params); let mark = self.performance.mark_with_args("lsp.did_change", &params);
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
match self.documents.change( match self.documents.change(
&specifier, &specifier,
params.text_document.version, params.text_document.version,
@ -1099,7 +1119,7 @@ impl Inner {
let _mark = self.performance.measure_scope("lsp.did_save"); let _mark = self.performance.measure_scope("lsp.did_save");
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
self.documents.save(&specifier); self.documents.save(&specifier);
if !self if !self
.config .config
@ -1110,7 +1130,7 @@ impl Inner {
{ {
return; return;
} }
match specifier_to_file_path(&specifier) { match url_to_file_path(&specifier) {
Ok(path) if is_importable_ext(&path) => {} Ok(path) if is_importable_ext(&path) => {}
_ => return, _ => return,
} }
@ -1134,8 +1154,10 @@ impl Inner {
async fn did_close(&mut self, params: DidCloseTextDocumentParams) { async fn did_close(&mut self, params: DidCloseTextDocumentParams) {
let mark = self.performance.mark_with_args("lsp.did_close", &params); let mark = self.performance.mark_with_args("lsp.did_close", &params);
self.diagnostics_state.clear(&params.text_document.uri); let Some(scheme) = params.text_document.uri.scheme() else {
if params.text_document.uri.scheme() == "deno" { return;
};
if scheme.as_str() == "deno" {
// we can ignore virtual text documents closing, as they don't need to // we can ignore virtual text documents closing, as they don't need to
// be tracked in memory, as they are static assets that won't change // be tracked in memory, as they are static assets that won't change
// already managed by the language service // already managed by the language service
@ -1143,7 +1165,8 @@ impl Inner {
} }
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
self.diagnostics_state.clear(&specifier);
if self.is_diagnosable(&specifier) { if self.is_diagnosable(&specifier) {
self.refresh_npm_specifiers().await; self.refresh_npm_specifiers().await;
self.diagnostics_server.invalidate(&[specifier.clone()]); self.diagnostics_server.invalidate(&[specifier.clone()]);
@ -1196,7 +1219,7 @@ impl Inner {
let changes = params let changes = params
.changes .changes
.into_iter() .into_iter()
.map(|e| (self.url_map.normalize_url(&e.uri, LspUrlKind::File), e)) .map(|e| (self.url_map.uri_to_specifier(&e.uri, LspUrlKind::File), e))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if changes if changes
.iter() .iter()
@ -1215,7 +1238,7 @@ impl Inner {
_ => return None, _ => return None,
}; };
Some(lsp_custom::DenoConfigurationChangeEvent { Some(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: t.0.clone(), scope_uri: url_to_uri(t.0).ok()?,
file_uri: e.uri.clone(), file_uri: e.uri.clone(),
typ: lsp_custom::DenoConfigurationChangeType::from_file_change_type( typ: lsp_custom::DenoConfigurationChangeType::from_file_change_type(
e.typ, e.typ,
@ -1250,7 +1273,7 @@ impl Inner {
_ => return None, _ => return None,
}; };
Some(lsp_custom::DenoConfigurationChangeEvent { Some(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: t.0.clone(), scope_uri: url_to_uri(t.0).ok()?,
file_uri: e.uri.clone(), file_uri: e.uri.clone(),
typ: lsp_custom::DenoConfigurationChangeType::from_file_change_type( typ: lsp_custom::DenoConfigurationChangeType::from_file_change_type(
e.typ, e.typ,
@ -1276,7 +1299,7 @@ impl Inner {
) -> LspResult<Option<DocumentSymbolResponse>> { ) -> LspResult<Option<DocumentSymbolResponse>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
{ {
@ -1316,13 +1339,11 @@ impl Inner {
&self, &self,
params: DocumentFormattingParams, params: DocumentFormattingParams,
) -> LspResult<Option<Vec<TextEdit>>> { ) -> LspResult<Option<Vec<TextEdit>>> {
let file_referrer = (self let file_referrer = Some(uri_to_url(&params.text_document.uri))
.documents .filter(|s| self.documents.is_valid_file_referrer(s));
.is_valid_file_referrer(&params.text_document.uri))
.then(|| params.text_document.uri.clone());
let mut specifier = self let mut specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
// skip formatting any files ignored by the config file // skip formatting any files ignored by the config file
if !self if !self
.config .config
@ -1343,11 +1364,11 @@ impl Inner {
// counterparts, but for formatting we want to favour the file URL. // counterparts, but for formatting we want to favour the file URL.
// TODO(nayeemrmn): Implement `Document::file_resource_path()` or similar. // TODO(nayeemrmn): Implement `Document::file_resource_path()` or similar.
if specifier.scheme() != "file" if specifier.scheme() != "file"
&& params.text_document.uri.scheme() == "file" && params.text_document.uri.scheme().map(|s| s.as_str()) == Some("file")
{ {
specifier = params.text_document.uri.clone(); specifier = uri_to_url(&params.text_document.uri);
} }
let file_path = specifier_to_file_path(&specifier).map_err(|err| { let file_path = url_to_file_path(&specifier).map_err(|err| {
error!("{:#}", err); error!("{:#}", err);
LspError::invalid_request() LspError::invalid_request()
})?; })?;
@ -1369,18 +1390,9 @@ impl Inner {
.data_for_specifier(&specifier) .data_for_specifier(&specifier)
.map(|d| &d.member_dir.workspace); .map(|d| &d.member_dir.workspace);
let unstable_options = UnstableFmtOptions { let unstable_options = UnstableFmtOptions {
css: maybe_workspace
.map(|w| w.has_unstable("fmt-css"))
.unwrap_or(false),
html: maybe_workspace
.map(|w| w.has_unstable("fmt-html"))
.unwrap_or(false),
component: maybe_workspace component: maybe_workspace
.map(|w| w.has_unstable("fmt-component")) .map(|w| w.has_unstable("fmt-component"))
.unwrap_or(false), .unwrap_or(false),
yaml: maybe_workspace
.map(|w| w.has_unstable("fmt-yaml"))
.unwrap_or(false),
}; };
let document = document.clone(); let document = document.clone();
move || { move || {
@ -1404,6 +1416,7 @@ impl Inner {
document.content(), document.content(),
&fmt_options, &fmt_options,
&unstable_options, &unstable_options,
None,
) )
} }
}; };
@ -1437,7 +1450,7 @@ impl Inner {
} }
async fn hover(&self, params: HoverParams) -> LspResult<Option<Hover>> { async fn hover(&self, params: HoverParams) -> LspResult<Option<Hover>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position_params.text_document.uri, &params.text_document_position_params.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -1570,7 +1583,7 @@ impl Inner {
) -> LspResult<Option<CodeActionResponse>> { ) -> LspResult<Option<CodeActionResponse>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
{ {
@ -1605,8 +1618,8 @@ impl Inner {
None => false, None => false,
}) })
.collect(); .collect();
if !fixable_diagnostics.is_empty() {
let mut code_actions = CodeActionCollection::default(); let mut code_actions = CodeActionCollection::default();
if !fixable_diagnostics.is_empty() {
let file_diagnostics = self let file_diagnostics = self
.diagnostics_server .diagnostics_server
.get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version()); .get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version());
@ -1667,9 +1680,9 @@ impl Inner {
if diagnostic.code if diagnostic.code
== Some(NumberOrString::String("no-cache".to_string())) == Some(NumberOrString::String("no-cache".to_string()))
|| diagnostic.code || diagnostic.code
== Some(NumberOrString::String("no-cache-jsr".to_string())) == Some(NumberOrString::String("not-installed-jsr".to_string()))
|| diagnostic.code || diagnostic.code
== Some(NumberOrString::String("no-cache-npm".to_string())) == Some(NumberOrString::String("not-installed-npm".to_string()))
{ {
includes_no_cache = true; includes_no_cache = true;
} }
@ -1714,9 +1727,14 @@ impl Inner {
.add_cache_all_action(&specifier, no_cache_diagnostics.to_owned()); .add_cache_all_action(&specifier, no_cache_diagnostics.to_owned());
} }
} }
}
if let Some(document) = asset_or_doc.document() {
code_actions
.add_source_actions(document, &params.range, self)
.await;
}
code_actions.set_preferred_fixes(); code_actions.set_preferred_fixes();
all_actions.extend(code_actions.get_response()); all_actions.extend(code_actions.get_response());
}
// Refactor // Refactor
let only = params let only = params
@ -1905,6 +1923,7 @@ impl Inner {
// as the import map is an implementation detail // as the import map is an implementation detail
.and_then(|d| d.resolver.maybe_import_map()), .and_then(|d| d.resolver.maybe_import_map()),
self.resolver.as_ref(), self.resolver.as_ref(),
file_referrer,
) )
} }
@ -1914,7 +1933,7 @@ impl Inner {
) -> LspResult<Option<Vec<CodeLens>>> { ) -> LspResult<Option<Vec<CodeLens>>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
{ {
@ -2000,7 +2019,7 @@ impl Inner {
&self, &self,
params: DocumentHighlightParams, params: DocumentHighlightParams,
) -> LspResult<Option<Vec<DocumentHighlight>>> { ) -> LspResult<Option<Vec<DocumentHighlight>>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position_params.text_document.uri, &params.text_document_position_params.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2044,7 +2063,7 @@ impl Inner {
&self, &self,
params: ReferenceParams, params: ReferenceParams,
) -> LspResult<Option<Vec<Location>>> { ) -> LspResult<Option<Vec<Location>>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position.text_document.uri, &params.text_document_position.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2100,7 +2119,7 @@ impl Inner {
&self, &self,
params: GotoDefinitionParams, params: GotoDefinitionParams,
) -> LspResult<Option<GotoDefinitionResponse>> { ) -> LspResult<Option<GotoDefinitionResponse>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position_params.text_document.uri, &params.text_document_position_params.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2139,7 +2158,7 @@ impl Inner {
&self, &self,
params: GotoTypeDefinitionParams, params: GotoTypeDefinitionParams,
) -> LspResult<Option<GotoTypeDefinitionResponse>> { ) -> LspResult<Option<GotoTypeDefinitionResponse>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position_params.text_document.uri, &params.text_document_position_params.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2185,7 +2204,7 @@ impl Inner {
&self, &self,
params: CompletionParams, params: CompletionParams,
) -> LspResult<Option<CompletionResponse>> { ) -> LspResult<Option<CompletionResponse>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position.text_document.uri, &params.text_document_position.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2374,7 +2393,7 @@ impl Inner {
&self, &self,
params: GotoImplementationParams, params: GotoImplementationParams,
) -> LspResult<Option<GotoImplementationResponse>> { ) -> LspResult<Option<GotoImplementationResponse>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position_params.text_document.uri, &params.text_document_position_params.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2425,7 +2444,7 @@ impl Inner {
) -> LspResult<Option<Vec<FoldingRange>>> { ) -> LspResult<Option<Vec<FoldingRange>>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
{ {
@ -2472,7 +2491,7 @@ impl Inner {
) -> LspResult<Option<Vec<CallHierarchyIncomingCall>>> { ) -> LspResult<Option<Vec<CallHierarchyIncomingCall>>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.item.uri, LspUrlKind::File); .uri_to_specifier(&params.item.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
{ {
@ -2501,7 +2520,7 @@ impl Inner {
let maybe_root_path_owned = self let maybe_root_path_owned = self
.config .config
.root_uri() .root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok()); .and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyIncomingCall>::new(); let mut resolved_items = Vec::<CallHierarchyIncomingCall>::new();
for item in incoming_calls.iter() { for item in incoming_calls.iter() {
if let Some(resolved) = item.try_resolve_call_hierarchy_incoming_call( if let Some(resolved) = item.try_resolve_call_hierarchy_incoming_call(
@ -2521,7 +2540,7 @@ impl Inner {
) -> LspResult<Option<Vec<CallHierarchyOutgoingCall>>> { ) -> LspResult<Option<Vec<CallHierarchyOutgoingCall>>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.item.uri, LspUrlKind::File); .uri_to_specifier(&params.item.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
{ {
@ -2547,7 +2566,7 @@ impl Inner {
let maybe_root_path_owned = self let maybe_root_path_owned = self
.config .config
.root_uri() .root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok()); .and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyOutgoingCall>::new(); let mut resolved_items = Vec::<CallHierarchyOutgoingCall>::new();
for item in outgoing_calls.iter() { for item in outgoing_calls.iter() {
if let Some(resolved) = item.try_resolve_call_hierarchy_outgoing_call( if let Some(resolved) = item.try_resolve_call_hierarchy_outgoing_call(
@ -2566,7 +2585,7 @@ impl Inner {
&self, &self,
params: CallHierarchyPrepareParams, params: CallHierarchyPrepareParams,
) -> LspResult<Option<Vec<CallHierarchyItem>>> { ) -> LspResult<Option<Vec<CallHierarchyItem>>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position_params.text_document.uri, &params.text_document_position_params.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2596,7 +2615,7 @@ impl Inner {
let maybe_root_path_owned = self let maybe_root_path_owned = self
.config .config
.root_uri() .root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok()); .and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyItem>::new(); let mut resolved_items = Vec::<CallHierarchyItem>::new();
match one_or_many { match one_or_many {
tsc::OneOrMany::One(item) => { tsc::OneOrMany::One(item) => {
@ -2630,7 +2649,7 @@ impl Inner {
&self, &self,
params: RenameParams, params: RenameParams,
) -> LspResult<Option<WorkspaceEdit>> { ) -> LspResult<Option<WorkspaceEdit>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position.text_document.uri, &params.text_document_position.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2679,7 +2698,7 @@ impl Inner {
) -> LspResult<Option<Vec<SelectionRange>>> { ) -> LspResult<Option<Vec<SelectionRange>>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
{ {
@ -2717,7 +2736,7 @@ impl Inner {
) -> LspResult<Option<SemanticTokensResult>> { ) -> LspResult<Option<SemanticTokensResult>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) { if !self.is_diagnosable(&specifier) {
return Ok(None); return Ok(None);
} }
@ -2770,7 +2789,7 @@ impl Inner {
) -> LspResult<Option<SemanticTokensRangeResult>> { ) -> LspResult<Option<SemanticTokensRangeResult>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) { if !self.is_diagnosable(&specifier) {
return Ok(None); return Ok(None);
} }
@ -2819,7 +2838,7 @@ impl Inner {
&self, &self,
params: SignatureHelpParams, params: SignatureHelpParams,
) -> LspResult<Option<SignatureHelp>> { ) -> LspResult<Option<SignatureHelp>> {
let specifier = self.url_map.normalize_url( let specifier = self.url_map.uri_to_specifier(
&params.text_document_position_params.text_document.uri, &params.text_document_position_params.text_document.uri,
LspUrlKind::File, LspUrlKind::File,
); );
@ -2873,8 +2892,8 @@ impl Inner {
) -> LspResult<Option<WorkspaceEdit>> { ) -> LspResult<Option<WorkspaceEdit>> {
let mut changes = vec![]; let mut changes = vec![];
for rename in params.files { for rename in params.files {
let old_specifier = self.url_map.normalize_url( let old_specifier = self.url_map.uri_to_specifier(
&resolve_url(&rename.old_uri).unwrap(), &Uri::from_str(&rename.old_uri).unwrap(),
LspUrlKind::File, LspUrlKind::File,
); );
let options = self let options = self
@ -2899,8 +2918,8 @@ impl Inner {
.get_edits_for_file_rename( .get_edits_for_file_rename(
self.snapshot(), self.snapshot(),
old_specifier, old_specifier,
self.url_map.normalize_url( self.url_map.uri_to_specifier(
&resolve_url(&rename.new_uri).unwrap(), &Uri::from_str(&rename.new_uri).unwrap(),
LspUrlKind::File, LspUrlKind::File,
), ),
format_code_settings, format_code_settings,
@ -3498,24 +3517,31 @@ impl Inner {
} }
let mut config_events = vec![]; let mut config_events = vec![];
for (scope_uri, config_data) in self.config.tree.data_by_scope().iter() { for (scope_url, config_data) in self.config.tree.data_by_scope().iter() {
let Ok(scope_uri) = url_to_uri(scope_url) else {
continue;
};
if let Some(config_file) = config_data.maybe_deno_json() { if let Some(config_file) = config_data.maybe_deno_json() {
if let Ok(file_uri) = url_to_uri(&config_file.specifier) {
config_events.push(lsp_custom::DenoConfigurationChangeEvent { config_events.push(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: scope_uri.clone(), scope_uri: scope_uri.clone(),
file_uri: config_file.specifier.clone(), file_uri,
typ: lsp_custom::DenoConfigurationChangeType::Added, typ: lsp_custom::DenoConfigurationChangeType::Added,
configuration_type: lsp_custom::DenoConfigurationType::DenoJson, configuration_type: lsp_custom::DenoConfigurationType::DenoJson,
}); });
} }
}
if let Some(package_json) = config_data.maybe_pkg_json() { if let Some(package_json) = config_data.maybe_pkg_json() {
if let Ok(file_uri) = url_to_uri(&package_json.specifier()) {
config_events.push(lsp_custom::DenoConfigurationChangeEvent { config_events.push(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: scope_uri.clone(), scope_uri,
file_uri: package_json.specifier(), file_uri,
typ: lsp_custom::DenoConfigurationChangeType::Added, typ: lsp_custom::DenoConfigurationChangeType::Added,
configuration_type: lsp_custom::DenoConfigurationType::PackageJson, configuration_type: lsp_custom::DenoConfigurationType::PackageJson,
}); });
} }
} }
}
if !config_events.is_empty() { if !config_events.is_empty() {
self.client.send_did_change_deno_configuration_notification( self.client.send_did_change_deno_configuration_notification(
lsp_custom::DidChangeDenoConfigurationNotificationParams { lsp_custom::DidChangeDenoConfigurationNotificationParams {
@ -3533,19 +3559,22 @@ impl Inner {
force_global_cache: bool, force_global_cache: bool,
) -> Result<PrepareCacheResult, AnyError> { ) -> Result<PrepareCacheResult, AnyError> {
let config_data = self.config.tree.data_for_specifier(&referrer); let config_data = self.config.tree.data_for_specifier(&referrer);
let byonm = config_data.map(|d| d.byonm).unwrap_or(false);
let mut roots = if !specifiers.is_empty() { let mut roots = if !specifiers.is_empty() {
specifiers specifiers
} else { } else {
vec![referrer.clone()] vec![referrer.clone()]
}; };
// always include the npm packages since resolution of one npm package if byonm {
// might affect the resolution of other npm packages roots.retain(|s| s.scheme() != "npm");
if let Some(npm_reqs) = self } else if let Some(npm_reqs) = self
.documents .documents
.npm_reqs_by_scope() .npm_reqs_by_scope()
.get(&config_data.map(|d| d.scope.as_ref().clone())) .get(&config_data.map(|d| d.scope.as_ref().clone()))
{ {
// always include the npm packages since resolution of one npm package
// might affect the resolution of other npm packages
roots.extend( roots.extend(
npm_reqs npm_reqs
.iter() .iter()
@ -3583,7 +3612,10 @@ impl Inner {
}; };
let cli_options = CliOptions::new( let cli_options = CliOptions::new(
Arc::new(Flags { Arc::new(Flags {
internal: InternalFlags {
cache_path: Some(self.cache.deno_dir().root.clone()), cache_path: Some(self.cache.deno_dir().root.clone()),
..Default::default()
},
ca_stores: workspace_settings.certificate_stores.clone(), ca_stores: workspace_settings.certificate_stores.clone(),
ca_data: workspace_settings.tls_certificate.clone().map(CaData::File), ca_data: workspace_settings.tls_certificate.clone().map(CaData::File),
unsafely_ignore_certificate_errors: workspace_settings unsafely_ignore_certificate_errors: workspace_settings
@ -3594,13 +3626,13 @@ impl Inner {
.as_ref() .as_ref()
.map(|url| url.to_string()) .map(|url| url.to_string())
}), }),
node_modules_dir: Some(
config_data
.and_then(|d| d.node_modules_dir.as_ref())
.is_some(),
),
// bit of a hack to force the lsp to cache the @types/node package // bit of a hack to force the lsp to cache the @types/node package
type_check_mode: crate::args::TypeCheckMode::Local, type_check_mode: crate::args::TypeCheckMode::Local,
permissions: crate::args::PermissionFlags {
// allow remote import permissions in the lsp for now
allow_import: Some(vec![]),
..Default::default()
},
..Default::default() ..Default::default()
}), }),
initial_cwd, initial_cwd,
@ -3640,7 +3672,9 @@ impl Inner {
.into_iter() .into_iter()
.map(|folder| { .map(|folder| {
( (
self.url_map.normalize_url(&folder.uri, LspUrlKind::Folder), self
.url_map
.uri_to_specifier(&folder.uri, LspUrlKind::Folder),
folder, folder,
) )
}) })
@ -3716,7 +3750,8 @@ impl Inner {
result.push(TaskDefinition { result.push(TaskDefinition {
name: name.clone(), name: name.clone(),
command: command.to_string(), command: command.to_string(),
source_uri: config_file.specifier.clone(), source_uri: url_to_uri(&config_file.specifier)
.map_err(|_| LspError::internal_error())?,
}); });
} }
}; };
@ -3727,7 +3762,8 @@ impl Inner {
result.push(TaskDefinition { result.push(TaskDefinition {
name: name.clone(), name: name.clone(),
command: command.clone(), command: command.clone(),
source_uri: package_json.specifier(), source_uri: url_to_uri(&package_json.specifier())
.map_err(|_| LspError::internal_error())?,
}); });
} }
} }
@ -3742,7 +3778,7 @@ impl Inner {
) -> LspResult<Option<Vec<InlayHint>>> { ) -> LspResult<Option<Vec<InlayHint>>> {
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
if !self.is_diagnosable(&specifier) if !self.is_diagnosable(&specifier)
|| !self.config.specifier_enabled(&specifier) || !self.config.specifier_enabled(&specifier)
|| !self.config.enabled_inlay_hints_for_specifier(&specifier) || !self.config.enabled_inlay_hints_for_specifier(&specifier)
@ -3776,7 +3812,7 @@ impl Inner {
let maybe_inlay_hints = maybe_inlay_hints.map(|hints| { let maybe_inlay_hints = maybe_inlay_hints.map(|hints| {
hints hints
.iter() .iter()
.map(|hint| hint.to_lsp(line_index.clone())) .map(|hint| hint.to_lsp(line_index.clone(), self))
.collect() .collect()
}); });
self.performance.measure(mark); self.performance.measure(mark);
@ -3805,7 +3841,7 @@ impl Inner {
.mark_with_args("lsp.virtual_text_document", &params); .mark_with_args("lsp.virtual_text_document", &params);
let specifier = self let specifier = self
.url_map .url_map
.normalize_url(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
let contents = if specifier.scheme() == "deno" let contents = if specifier.scheme() == "deno"
&& specifier.path() == "/status.md" && specifier.path() == "/status.md"
{ {
@ -3844,7 +3880,11 @@ impl Inner {
</details> </details>
"#, "#,
serde_json::to_string_pretty(&workspace_settings).unwrap(), serde_json::to_string_pretty(&workspace_settings)
.inspect_err(|e| {
dbg!(e);
})
.unwrap(),
documents_specifiers.len(), documents_specifiers.len(),
documents_specifiers documents_specifiers
.into_iter() .into_iter()
@ -3957,11 +3997,11 @@ mod tests {
temp_dir.write("root4_parent/root4/main.ts", ""); // yes, enabled temp_dir.write("root4_parent/root4/main.ts", ""); // yes, enabled
let mut config = Config::new_with_roots(vec![ let mut config = Config::new_with_roots(vec![
temp_dir.uri().join("root1/").unwrap(), temp_dir.url().join("root1/").unwrap(),
temp_dir.uri().join("root2/").unwrap(), temp_dir.url().join("root2/").unwrap(),
temp_dir.uri().join("root2/root2.1/").unwrap(), temp_dir.url().join("root2/root2.1/").unwrap(),
temp_dir.uri().join("root3/").unwrap(), temp_dir.url().join("root3/").unwrap(),
temp_dir.uri().join("root4_parent/root4/").unwrap(), temp_dir.url().join("root4_parent/root4/").unwrap(),
]); ]);
config.set_client_capabilities(ClientCapabilities { config.set_client_capabilities(ClientCapabilities {
workspace: Some(Default::default()), workspace: Some(Default::default()),
@ -3971,14 +4011,14 @@ mod tests {
Default::default(), Default::default(),
vec![ vec![
( (
temp_dir.uri().join("root1/").unwrap(), temp_dir.url().join("root1/").unwrap(),
WorkspaceSettings { WorkspaceSettings {
enable: Some(true), enable: Some(true),
..Default::default() ..Default::default()
}, },
), ),
( (
temp_dir.uri().join("root2/").unwrap(), temp_dir.url().join("root2/").unwrap(),
WorkspaceSettings { WorkspaceSettings {
enable: Some(true), enable: Some(true),
enable_paths: Some(vec![ enable_paths: Some(vec![
@ -3990,21 +4030,21 @@ mod tests {
}, },
), ),
( (
temp_dir.uri().join("root2/root2.1/").unwrap(), temp_dir.url().join("root2/root2.1/").unwrap(),
WorkspaceSettings { WorkspaceSettings {
enable: Some(true), enable: Some(true),
..Default::default() ..Default::default()
}, },
), ),
( (
temp_dir.uri().join("root3/").unwrap(), temp_dir.url().join("root3/").unwrap(),
WorkspaceSettings { WorkspaceSettings {
enable: Some(false), enable: Some(false),
..Default::default() ..Default::default()
}, },
), ),
( (
temp_dir.uri().join("root4_parent/root4/").unwrap(), temp_dir.url().join("root4_parent/root4/").unwrap(),
WorkspaceSettings { WorkspaceSettings {
enable: Some(true), enable: Some(true),
..Default::default() ..Default::default()
@ -4018,22 +4058,22 @@ mod tests {
assert_eq!( assert_eq!(
json!(workspace_files), json!(workspace_files),
json!([ json!([
temp_dir.uri().join("root4_parent/deno.json").unwrap(), temp_dir.url().join("root4_parent/deno.json").unwrap(),
temp_dir.uri().join("root1/mod0.ts").unwrap(), temp_dir.url().join("root1/mod0.ts").unwrap(),
temp_dir.uri().join("root1/mod1.js").unwrap(), temp_dir.url().join("root1/mod1.js").unwrap(),
temp_dir.uri().join("root1/mod2.tsx").unwrap(), temp_dir.url().join("root1/mod2.tsx").unwrap(),
temp_dir.uri().join("root1/mod3.d.ts").unwrap(), temp_dir.url().join("root1/mod3.d.ts").unwrap(),
temp_dir.uri().join("root1/mod4.jsx").unwrap(), temp_dir.url().join("root1/mod4.jsx").unwrap(),
temp_dir.uri().join("root1/mod5.mjs").unwrap(), temp_dir.url().join("root1/mod5.mjs").unwrap(),
temp_dir.uri().join("root1/mod6.mts").unwrap(), temp_dir.url().join("root1/mod6.mts").unwrap(),
temp_dir.uri().join("root1/mod7.d.mts").unwrap(), temp_dir.url().join("root1/mod7.d.mts").unwrap(),
temp_dir.uri().join("root1/mod8.json").unwrap(), temp_dir.url().join("root1/mod8.json").unwrap(),
temp_dir.uri().join("root1/mod9.jsonc").unwrap(), temp_dir.url().join("root1/mod9.jsonc").unwrap(),
temp_dir.uri().join("root2/file1.ts").unwrap(), temp_dir.url().join("root2/file1.ts").unwrap(),
temp_dir.uri().join("root4_parent/root4/main.ts").unwrap(), temp_dir.url().join("root4_parent/root4/main.ts").unwrap(),
temp_dir.uri().join("root1/folder/mod.ts").unwrap(), temp_dir.url().join("root1/folder/mod.ts").unwrap(),
temp_dir.uri().join("root2/folder/main.ts").unwrap(), temp_dir.url().join("root2/folder/main.ts").unwrap(),
temp_dir.uri().join("root2/root2.1/main.ts").unwrap(), temp_dir.url().join("root2/root2.1/main.ts").unwrap(),
]) ])
); );
} }
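// ---------------------------------------------------------------------------
// [Editorial sketch — not part of this commit.] A recurring pattern in the diff
// above is converting between the client-facing LSP `Uri` and the internal
// `Url`/`ModuleSpecifier`. A minimal round-trip, with the helper signatures
// assumed from their call sites above (`uri_to_url` infallible, `url_to_uri`
// fallible); exact import paths are assumptions.
fn example_uri_round_trip(uri: &lsp::Uri) -> Option<lsp::Uri> {
  // Uri -> Url: used above when deriving file referrers and formatting paths.
  let url = uri_to_url(uri);
  // Url -> Uri: used above when sending config/task data back to the client.
  url_to_uri(&url).ok()
}
// ---------------------------------------------------------------------------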

View file

@ -17,7 +17,7 @@ pub struct TaskDefinition {
// TODO(nayeemrmn): Rename this to `command` in vscode_deno. // TODO(nayeemrmn): Rename this to `command` in vscode_deno.
#[serde(rename = "detail")] #[serde(rename = "detail")]
pub command: String, pub command: String,
pub source_uri: lsp::Url, pub source_uri: lsp::Uri,
} }
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
@ -46,6 +46,30 @@ pub struct DiagnosticBatchNotificationParams {
pub messages_len: usize, pub messages_len: usize,
} }
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DenoConfigurationData {
pub scope_uri: lsp::Uri,
pub workspace_root_scope_uri: Option<lsp::Uri>,
pub deno_json: Option<lsp::TextDocumentIdentifier>,
pub package_json: Option<lsp::TextDocumentIdentifier>,
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DidRefreshDenoConfigurationTreeNotificationParams {
pub data: Vec<DenoConfigurationData>,
}
pub enum DidRefreshDenoConfigurationTreeNotification {}
impl lsp::notification::Notification
for DidRefreshDenoConfigurationTreeNotification
{
type Params = DidRefreshDenoConfigurationTreeNotificationParams;
const METHOD: &'static str = "deno/didRefreshDenoConfigurationTree";
}
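// ---------------------------------------------------------------------------
// [Editorial sketch — not part of this commit.] Rough shape of a payload for
// the new `deno/didRefreshDenoConfigurationTree` notification defined above,
// assuming the structs above are in scope and that `lsp::Uri` parses via
// `FromStr` as it does elsewhere in this diff. The paths are hypothetical.
fn example_refresh_tree_params() -> DidRefreshDenoConfigurationTreeNotificationParams {
  use std::str::FromStr;
  DidRefreshDenoConfigurationTreeNotificationParams {
    data: vec![DenoConfigurationData {
      // Hypothetical workspace scope; any file:// URL works here.
      scope_uri: lsp::Uri::from_str("file:///project/").unwrap(),
      workspace_root_scope_uri: None,
      deno_json: Some(lsp::TextDocumentIdentifier {
        uri: lsp::Uri::from_str("file:///project/deno.json").unwrap(),
      }),
      package_json: None,
    }],
  }
}
// With `#[serde(rename_all = "camelCase")]`, this serializes roughly as
// {"data":[{"scopeUri":"file:///project/","workspaceRootScopeUri":null,
//           "denoJson":{"uri":"file:///project/deno.json"},"packageJson":null}]}.
// ---------------------------------------------------------------------------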
#[derive(Debug, Eq, Hash, PartialEq, Copy, Clone, Deserialize, Serialize)] #[derive(Debug, Eq, Hash, PartialEq, Copy, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub enum DenoConfigurationChangeType { pub enum DenoConfigurationChangeType {
@ -75,8 +99,8 @@ pub enum DenoConfigurationType {
#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct DenoConfigurationChangeEvent { pub struct DenoConfigurationChangeEvent {
pub scope_uri: lsp::Url, pub scope_uri: lsp::Uri,
pub file_uri: lsp::Url, pub file_uri: lsp::Uri,
#[serde(rename = "type")] #[serde(rename = "type")]
pub typ: DenoConfigurationChangeType, pub typ: DenoConfigurationChangeType,
pub configuration_type: DenoConfigurationType, pub configuration_type: DenoConfigurationType,
@ -88,13 +112,15 @@ pub struct DidChangeDenoConfigurationNotificationParams {
pub changes: Vec<DenoConfigurationChangeEvent>, pub changes: Vec<DenoConfigurationChangeEvent>,
} }
// TODO(nayeemrmn): This is being replaced by
// `DidRefreshDenoConfigurationTreeNotification` for Deno > v2.0.0. Remove it
// soon.
pub enum DidChangeDenoConfigurationNotification {} pub enum DidChangeDenoConfigurationNotification {}
impl lsp::notification::Notification impl lsp::notification::Notification
for DidChangeDenoConfigurationNotification for DidChangeDenoConfigurationNotification
{ {
type Params = DidChangeDenoConfigurationNotificationParams; type Params = DidChangeDenoConfigurationNotificationParams;
const METHOD: &'static str = "deno/didChangeDenoConfiguration"; const METHOD: &'static str = "deno/didChangeDenoConfiguration";
} }
@ -102,7 +128,6 @@ pub enum DidUpgradeCheckNotification {}
impl lsp::notification::Notification for DidUpgradeCheckNotification { impl lsp::notification::Notification for DidUpgradeCheckNotification {
type Params = DidUpgradeCheckNotificationParams; type Params = DidUpgradeCheckNotificationParams;
const METHOD: &'static str = "deno/didUpgradeCheck"; const METHOD: &'static str = "deno/didUpgradeCheck";
} }
@ -125,6 +150,5 @@ pub enum DiagnosticBatchNotification {}
impl lsp::notification::Notification for DiagnosticBatchNotification { impl lsp::notification::Notification for DiagnosticBatchNotification {
type Params = DiagnosticBatchNotificationParams; type Params = DiagnosticBatchNotificationParams;
const METHOD: &'static str = "deno/internalTestDiagnosticBatch"; const METHOD: &'static str = "deno/internalTestDiagnosticBatch";
} }

View file

@ -4,7 +4,7 @@ use dashmap::DashMap;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_npm::npm_rc::NpmRc;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::Version; use deno_semver::Version;
use serde::Deserialize; use serde::Deserialize;
@ -26,7 +26,10 @@ pub struct CliNpmSearchApi {
impl CliNpmSearchApi { impl CliNpmSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
let resolver = NpmFetchResolver::new(file_fetcher.clone()); let resolver = NpmFetchResolver::new(
file_fetcher.clone(),
Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
);
Self { Self {
file_fetcher, file_fetcher,
resolver, resolver,
@ -55,7 +58,7 @@ impl PackageSearchApi for CliNpmSearchApi {
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher
.fetch(&search_url, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&search_url)
.await? .await?
.into_text_decoded() .into_text_decoded()
}) })

View file

@ -16,6 +16,7 @@ use crate::args::CacheSetting;
use crate::cache::GlobalHttpCache; use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache; use crate::cache::HttpCache;
use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
@ -30,7 +31,6 @@ use deno_core::url::Position;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::Dependency; use deno_graph::Dependency;
use deno_runtime::deno_permissions::PermissionsContainer;
use log::error; use log::error;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use std::borrow::Cow; use std::borrow::Cow;
@ -481,7 +481,8 @@ impl ModuleRegistry {
file_fetcher file_fetcher
.fetch_with_options(FetchOptions { .fetch_with_options(FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: &PermissionsContainer::allow_all(), permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"), maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None, maybe_cache_setting: None,
}) })
@ -584,7 +585,7 @@ impl ModuleRegistry {
let file = deno_core::unsync::spawn({ let file = deno_core::unsync::spawn({
async move { async move {
file_fetcher file_fetcher
.fetch(&endpoint, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&endpoint)
.await .await
.ok()? .ok()?
.into_text_decoded() .into_text_decoded()
@ -983,7 +984,7 @@ impl ModuleRegistry {
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&specifier)
.await .await
.ok()? .ok()?
.into_text_decoded() .into_text_decoded()
@ -1049,7 +1050,7 @@ impl ModuleRegistry {
let specifier = specifier.clone(); let specifier = specifier.clone();
async move { async move {
file_fetcher file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&specifier)
.await .await
.map_err(|err| { .map_err(|err| {
error!( error!(
@ -1095,7 +1096,7 @@ impl ModuleRegistry {
let specifier = specifier.clone(); let specifier = specifier.clone();
async move { async move {
file_fetcher file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&specifier)
.await .await
.map_err(|err| { .map_err(|err| {
error!( error!(

View file

@ -8,6 +8,7 @@ use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use lsp_types::Uri;
use tower_lsp::lsp_types::ClientCapabilities; use tower_lsp::lsp_types::ClientCapabilities;
use tower_lsp::lsp_types::ClientInfo; use tower_lsp::lsp_types::ClientInfo;
use tower_lsp::lsp_types::CompletionContext; use tower_lsp::lsp_types::CompletionContext;
@ -40,6 +41,8 @@ use super::config::LanguageWorkspaceSettings;
use super::config::ObjectLiteralMethodSnippets; use super::config::ObjectLiteralMethodSnippets;
use super::config::TestingSettings; use super::config::TestingSettings;
use super::config::WorkspaceSettings; use super::config::WorkspaceSettings;
use super::urls::uri_parse_unencoded;
use super::urls::url_to_uri;
#[derive(Debug)] #[derive(Debug)]
pub struct ReplCompletionItem { pub struct ReplCompletionItem {
@ -73,7 +76,7 @@ impl ReplLanguageServer {
.initialize(InitializeParams { .initialize(InitializeParams {
process_id: None, process_id: None,
root_path: None, root_path: None,
root_uri: Some(cwd_uri.clone()), root_uri: Some(url_to_uri(&cwd_uri).unwrap()),
initialization_options: Some( initialization_options: Some(
serde_json::to_value(get_repl_workspace_settings()).unwrap(), serde_json::to_value(get_repl_workspace_settings()).unwrap(),
), ),
@ -84,6 +87,7 @@ impl ReplLanguageServer {
general: None, general: None,
experimental: None, experimental: None,
offset_encoding: None, offset_encoding: None,
notebook_document: None,
}, },
trace: None, trace: None,
workspace_folders: None, workspace_folders: None,
@ -92,6 +96,7 @@ impl ReplLanguageServer {
version: None, version: None,
}), }),
locale: None, locale: None,
work_done_progress_params: Default::default(),
}) })
.await?; .await?;
@ -133,7 +138,7 @@ impl ReplLanguageServer {
.completion(CompletionParams { .completion(CompletionParams {
text_document_position: TextDocumentPositionParams { text_document_position: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { text_document: TextDocumentIdentifier {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
}, },
position: Position { position: Position {
line: line_and_column.line_index as u32, line: line_and_column.line_index as u32,
@ -208,7 +213,7 @@ impl ReplLanguageServer {
.language_server .language_server
.did_change(DidChangeTextDocumentParams { .did_change(DidChangeTextDocumentParams {
text_document: VersionedTextDocumentIdentifier { text_document: VersionedTextDocumentIdentifier {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
version: self.document_version, version: self.document_version,
}, },
content_changes: vec![TextDocumentContentChangeEvent { content_changes: vec![TextDocumentContentChangeEvent {
@ -233,7 +238,7 @@ impl ReplLanguageServer {
.language_server .language_server
.did_close(DidCloseTextDocumentParams { .did_close(DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier { text_document: TextDocumentIdentifier {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
}, },
}) })
.await; .await;
@ -248,7 +253,7 @@ impl ReplLanguageServer {
.language_server .language_server
.did_open(DidOpenTextDocumentParams { .did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem { text_document: TextDocumentItem {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
language_id: "typescript".to_string(), language_id: "typescript".to_string(),
version: self.document_version, version: self.document_version,
text: format!("{}{}", self.document_text, self.pending_text), text: format!("{}{}", self.document_text, self.pending_text),
@ -257,8 +262,9 @@ impl ReplLanguageServer {
.await; .await;
} }
fn get_document_specifier(&self) -> ModuleSpecifier { fn get_document_uri(&self) -> Uri {
self.cwd_uri.join("$deno$repl.ts").unwrap() uri_parse_unencoded(self.cwd_uri.join("$deno$repl.ts").unwrap().as_str())
.unwrap()
} }
} }
@ -306,7 +312,7 @@ pub fn get_repl_workspace_settings() -> WorkspaceSettings {
document_preload_limit: 0, // don't pre-load any modules as it's expensive and not useful for the repl document_preload_limit: 0, // don't pre-load any modules as it's expensive and not useful for the repl
tls_certificate: None, tls_certificate: None,
unsafely_ignore_certificate_errors: None, unsafely_ignore_certificate_errors: None,
unstable: false, unstable: Default::default(),
suggest: DenoCompletionSettings { suggest: DenoCompletionSettings {
imports: ImportCompletionSettings { imports: ImportCompletionSettings {
auto_discover: false, auto_discover: false,

View file

@ -1,28 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::PackageJsonInstallDepsProvider;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_cache_dir::HttpCache; use deno_cache_dir::HttpCache;
@ -33,10 +10,10 @@ use deno_graph::source::Resolver;
use deno_graph::GraphImport; use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier; use deno_graph::ModuleSpecifier;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
@ -55,6 +32,30 @@ use std::sync::Arc;
use super::cache::LspCache; use super::cache::LspCache;
use super::jsr::JsrCacheResolver; use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct LspScopeResolver { struct LspScopeResolver {
@ -327,11 +328,11 @@ impl LspResolver {
) -> Option<(ModuleSpecifier, MediaType)> { ) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?; let node_resolver = resolver.node_resolver.as_ref()?;
Some(NodeResolution::into_specifier_and_media_type( Some(NodeResolution::into_specifier_and_media_type(Some(
node_resolver node_resolver
.resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) .resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types)
.ok(), .ok()?,
)) )))
} }
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool { pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
@ -372,6 +373,26 @@ impl LspResolver {
Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1) Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1)
} }
pub fn is_bare_package_json_dep(
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
) -> bool {
let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else {
return false;
};
node_resolver
.resolve_if_for_npm_pkg(
specifier_text,
referrer,
NodeResolutionMode::Types,
)
.ok()
.flatten()
.is_some()
}
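// ---------------------------------------------------------------------------
// [Editorial sketch — not part of this commit.] Possible call-site shape for
// the new `is_bare_package_json_dep` helper above: deciding whether a bare
// specifier such as "chalk" should be left to package.json/npm resolution
// rather than reported as an unresolved import. The function name below is
// hypothetical; the types are the ones used in this file.
fn example_should_defer_to_package_json(
  resolver: &LspResolver,
  specifier_text: &str,
  referrer: &ModuleSpecifier,
) -> bool {
  // `true` means a caller could skip an "unresolved import" style diagnostic
  // and let the npm/byonm machinery handle the specifier instead.
  resolver.is_bare_package_json_dep(specifier_text, referrer)
}
// ---------------------------------------------------------------------------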
pub fn get_closest_package_json( pub fn get_closest_package_json(
&self, &self,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
@ -439,11 +460,11 @@ async fn create_npm_resolver(
) -> Option<Arc<dyn CliNpmResolver>> { ) -> Option<Arc<dyn CliNpmResolver>> {
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false); let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false);
let options = if enable_byonm { let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: Arc::new(deno_fs::RealFs), fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)),
root_node_modules_dir: config_data.and_then(|config_data| { root_node_modules_dir: config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| { config_data.node_modules_dir.clone().or_else(|| {
specifier_to_file_path(&config_data.scope) url_to_file_path(&config_data.scope)
.ok() .ok()
.map(|p| p.join("node_modules/")) .map(|p| p.join("node_modules/"))
}) })
@ -474,9 +495,7 @@ async fn create_npm_resolver(
maybe_node_modules_path: config_data maybe_node_modules_path: config_data
.and_then(|d| d.node_modules_dir.clone()), .and_then(|d| d.node_modules_dir.clone()),
// only used for top level install, so we can ignore this // only used for top level install, so we can ignore this
package_json_deps_provider: Arc::new( npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()),
PackageJsonInstallDepsProvider::empty(),
),
npmrc: config_data npmrc: config_data
.and_then(|d| d.npmrc.clone()) .and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc), .unwrap_or_else(create_default_npmrc),

View file

@ -5,10 +5,12 @@ use super::lsp_custom::TestData;
use crate::lsp::client::TestingNotification; use crate::lsp::client::TestingNotification;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use crate::lsp::urls::url_to_uri;
use crate::tools::test::TestDescription; use crate::tools::test::TestDescription;
use crate::tools::test::TestStepDescription; use crate::tools::test::TestStepDescription;
use crate::util::checksum; use crate::util::checksum;
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use lsp::Range; use lsp::Range;
use std::collections::HashMap; use std::collections::HashMap;
@ -143,11 +145,12 @@ impl TestModule {
pub fn as_replace_notification( pub fn as_replace_notification(
&self, &self,
maybe_root_uri: Option<&ModuleSpecifier>, maybe_root_uri: Option<&ModuleSpecifier>,
) -> TestingNotification { ) -> Result<TestingNotification, AnyError> {
let label = self.label(maybe_root_uri); let label = self.label(maybe_root_uri);
TestingNotification::Module(lsp_custom::TestModuleNotificationParams { Ok(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: self.specifier.clone(), uri: url_to_uri(&self.specifier)?,
}, },
kind: lsp_custom::TestModuleNotificationKind::Replace, kind: lsp_custom::TestModuleNotificationKind::Replace,
label, label,
@ -157,7 +160,8 @@ impl TestModule {
.filter(|(_, def)| def.parent_id.is_none()) .filter(|(_, def)| def.parent_id.is_none())
.map(|(id, _)| self.get_test_data(id)) .map(|(id, _)| self.get_test_data(id))
.collect(), .collect(),
}) },
))
} }
pub fn label(&self, maybe_root_uri: Option<&ModuleSpecifier>) -> String { pub fn label(&self, maybe_root_uri: Option<&ModuleSpecifier>) -> String {

View file

@ -12,6 +12,9 @@ use crate::lsp::client::Client;
use crate::lsp::client::TestingNotification; use crate::lsp::client::TestingNotification;
use crate::lsp::config; use crate::lsp::config;
use crate::lsp::logging::lsp_log; use crate::lsp::logging::lsp_log;
use crate::lsp::urls::uri_parse_unencoded;
use crate::lsp::urls::uri_to_url;
use crate::lsp::urls::url_to_uri;
use crate::tools::test; use crate::tools::test;
use crate::tools::test::create_test_event_channel; use crate::tools::test::create_test_event_channel;
use crate::tools::test::FailFastTracker; use crate::tools::test::FailFastTracker;
@ -28,8 +31,10 @@ use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking; use deno_core::unsync::spawn_blocking;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::tokio_util::create_and_run_current_thread; use deno_runtime::tokio_util::create_and_run_current_thread;
use indexmap::IndexMap; use indexmap::IndexMap;
use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
@ -53,12 +58,12 @@ fn as_queue_and_filters(
if let Some(include) = &params.include { if let Some(include) = &params.include {
for item in include { for item in include {
if let Some((test_definitions, _)) = tests.get(&item.text_document.uri) { let url = uri_to_url(&item.text_document.uri);
queue.insert(item.text_document.uri.clone()); if let Some((test_definitions, _)) = tests.get(&url) {
queue.insert(url.clone());
if let Some(id) = &item.id { if let Some(id) = &item.id {
if let Some(test) = test_definitions.get(id) { if let Some(test) = test_definitions.get(id) {
let filter = let filter = filters.entry(url).or_default();
filters.entry(item.text_document.uri.clone()).or_default();
if let Some(include) = filter.include.as_mut() { if let Some(include) = filter.include.as_mut() {
include.insert(test.id.clone(), test.clone()); include.insert(test.id.clone(), test.clone());
} else { } else {
@ -75,19 +80,19 @@ fn as_queue_and_filters(
} }
for item in &params.exclude { for item in &params.exclude {
if let Some((test_definitions, _)) = tests.get(&item.text_document.uri) { let url = uri_to_url(&item.text_document.uri);
if let Some((test_definitions, _)) = tests.get(&url) {
if let Some(id) = &item.id { if let Some(id) = &item.id {
// there is no way to exclude a test step // there is no way to exclude a test step
if item.step_id.is_none() { if item.step_id.is_none() {
if let Some(test) = test_definitions.get(id) { if let Some(test) = test_definitions.get(id) {
let filter = let filter = filters.entry(url.clone()).or_default();
filters.entry(item.text_document.uri.clone()).or_default();
filter.exclude.insert(test.id.clone(), test.clone()); filter.exclude.insert(test.id.clone(), test.clone());
} }
} }
} else { } else {
// the entire test module is excluded // the entire test module is excluded
queue.remove(&item.text_document.uri); queue.remove(&url);
} }
} }
} }
@ -182,7 +187,7 @@ impl TestRun {
self self
.queue .queue
.iter() .iter()
.map(|s| { .filter_map(|s| {
let ids = if let Some((test_module, _)) = tests.get(s) { let ids = if let Some((test_module, _)) = tests.get(s) {
if let Some(filter) = self.filters.get(s) { if let Some(filter) = self.filters.get(s) {
filter.as_ids(test_module) filter.as_ids(test_module)
@ -192,10 +197,12 @@ impl TestRun {
} else { } else {
Vec::new() Vec::new()
}; };
lsp_custom::EnqueuedTestModule { Some(lsp_custom::EnqueuedTestModule {
text_document: lsp::TextDocumentIdentifier { uri: s.clone() }, text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(s).ok()?,
},
ids, ids,
} })
}) })
.collect() .collect()
} }
@ -213,25 +220,22 @@ impl TestRun {
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let args = self.get_args(); let args = self.get_args();
lsp_log!("Executing test run with arguments: {}", args.join(" ")); lsp_log!("Executing test run with arguments: {}", args.join(" "));
let flags = let flags = Arc::new(flags_from_vec(
Arc::new(flags_from_vec(args.into_iter().map(From::from).collect())?); args.into_iter().map(|s| From::from(s.as_ref())).collect(),
)?);
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
// Various test files should not share the same permissions in terms of // Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one // `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable. // file would have impact on other files, which is undesirable.
let permissions = let permission_desc_parser = factory.permission_desc_parser()?.clone();
Permissions::from_options(&cli_options.permissions_options()?)?; let permissions = Permissions::from_options(
permission_desc_parser.as_ref(),
&cli_options.permissions_options(),
)?;
let main_graph_container = factory.main_module_graph_container().await?; let main_graph_container = factory.main_module_graph_container().await?;
test::check_specifiers( main_graph_container
factory.file_fetcher()?, .check_specifiers(&self.queue.iter().cloned().collect::<Vec<_>>(), None)
main_graph_container,
self
.queue
.iter()
.map(|s| (s.clone(), test::TestMode::Executable))
.collect(),
)
.await?; .await?;
let (concurrent_jobs, fail_fast) = let (concurrent_jobs, fail_fast) =
@ -269,7 +273,10 @@ impl TestRun {
let join_handles = queue.into_iter().map(move |specifier| { let join_handles = queue.into_iter().map(move |specifier| {
let specifier = specifier.clone(); let specifier = specifier.clone();
let worker_factory = worker_factory.clone(); let worker_factory = worker_factory.clone();
let permissions = permissions.clone(); let permissions_container = PermissionsContainer::new(
permission_desc_parser.clone(),
permissions.clone(),
);
let worker_sender = test_event_sender_factory.worker(); let worker_sender = test_event_sender_factory.worker();
let fail_fast_tracker = fail_fast_tracker.clone(); let fail_fast_tracker = fail_fast_tracker.clone();
let lsp_filter = self.filters.get(&specifier); let lsp_filter = self.filters.get(&specifier);
@ -298,7 +305,7 @@ impl TestRun {
// channel. // channel.
create_and_run_current_thread(test::test_specifier( create_and_run_current_thread(test::test_specifier(
worker_factory, worker_factory,
permissions, permissions_container,
specifier, specifier,
worker_sender, worker_sender,
fail_fast_tracker, fail_fast_tracker,
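Each spawned worker above receives a PermissionsContainer built from a clone of the base permission set, so granting or revoking a permission in one test file cannot leak into another. A rough sketch of that isolation using std threads and a hypothetical permission set of plain strings:

    use std::collections::HashSet;
    use std::thread;

    fn main() {
        // base permissions computed once from the CLI flags
        let base: HashSet<&'static str> = ["read", "net"].into_iter().collect();

        let handles: Vec<_> = (0..3)
            .map(|i| {
                // every worker receives an independent clone ...
                let mut permissions = base.clone();
                thread::spawn(move || {
                    if i == 0 {
                        // ... so revoking here only affects this worker
                        permissions.remove("net");
                    }
                    (i, permissions.len())
                })
            })
            .collect();

        for handle in handles {
            let (i, len) = handle.join().unwrap();
            println!("worker {i} ended with {len} permission(s)");
        }
        assert_eq!(base.len(), 2); // the shared base is untouched
    }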
@ -446,37 +453,42 @@ impl TestRun {
Ok(()) Ok(())
} }
fn get_args(&self) -> Vec<&str> { fn get_args(&self) -> Vec<Cow<str>> {
let mut args = vec!["deno", "test"]; let mut args = vec![Cow::Borrowed("deno"), Cow::Borrowed("test")];
args.extend( args.extend(
self self
.workspace_settings .workspace_settings
.testing .testing
.args .args
.iter() .iter()
.map(|s| s.as_str()), .map(|s| Cow::Borrowed(s.as_str())),
); );
args.push("--trace-leaks"); args.push(Cow::Borrowed("--trace-leaks"));
if self.workspace_settings.unstable && !args.contains(&"--unstable") { for unstable_feature in self.workspace_settings.unstable.as_deref() {
args.push("--unstable"); let flag = format!("--unstable-{unstable_feature}");
if !args.contains(&Cow::Borrowed(&flag)) {
args.push(Cow::Owned(flag));
}
} }
if let Some(config) = &self.workspace_settings.config { if let Some(config) = &self.workspace_settings.config {
if !args.contains(&"--config") && !args.contains(&"-c") { if !args.contains(&Cow::Borrowed("--config"))
args.push("--config"); && !args.contains(&Cow::Borrowed("-c"))
args.push(config.as_str()); {
args.push(Cow::Borrowed("--config"));
args.push(Cow::Borrowed(config.as_str()));
} }
} }
if let Some(import_map) = &self.workspace_settings.import_map { if let Some(import_map) = &self.workspace_settings.import_map {
if !args.contains(&"--import-map") { if !args.contains(&Cow::Borrowed("--import-map")) {
args.push("--import-map"); args.push(Cow::Borrowed("--import-map"));
args.push(import_map.as_str()); args.push(Cow::Borrowed(import_map.as_str()));
} }
} }
if self.kind == lsp_custom::TestRunKind::Debug if self.kind == lsp_custom::TestRunKind::Debug
&& !args.contains(&"--inspect") && !args.contains(&Cow::Borrowed("--inspect"))
&& !args.contains(&"--inspect-brk") && !args.contains(&Cow::Borrowed("--inspect-brk"))
{ {
args.push("--inspect"); args.push(Cow::Borrowed("--inspect"));
} }
args args
} }
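get_args now returns Vec<Cow<str>> so the mostly static flag list keeps borrowing string literals while the per-feature --unstable-* flags are owned strings, and duplicates are still caught with a plain contains check. A compact sketch of that pattern (the flag names are illustrative, not the full CLI surface):

    use std::borrow::Cow;

    fn build_args<'a>(
        unstable_features: &[&str],
        config: Option<&'a str>,
    ) -> Vec<Cow<'a, str>> {
        let mut args = vec![Cow::Borrowed("deno"), Cow::Borrowed("test")];
        for feature in unstable_features {
            let flag = format!("--unstable-{feature}");
            // Cow<str> compares by string content, so `contains` dedupes
            // borrowed and owned flags alike.
            if !args.contains(&Cow::Owned(flag.clone())) {
                args.push(Cow::Owned(flag));
            }
        }
        if let Some(config) = config {
            if !args.contains(&Cow::Borrowed("--config")) {
                args.push(Cow::Borrowed("--config"));
                args.push(Cow::Borrowed(config));
            }
        }
        args
    }

    fn main() {
        let args = build_args(&["sloppy-imports"], Some("deno.json"));
        println!("{}", args.join(" "));
    }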
@ -523,7 +535,7 @@ impl LspTestDescription {
&self, &self,
tests: &IndexMap<usize, LspTestDescription>, tests: &IndexMap<usize, LspTestDescription>,
) -> lsp_custom::TestIdentifier { ) -> lsp_custom::TestIdentifier {
let uri = ModuleSpecifier::parse(&self.location().file_name).unwrap(); let uri = uri_parse_unencoded(&self.location().file_name).unwrap();
let static_id = self.static_id(); let static_id = self.static_id();
let mut root_desc = self; let mut root_desc = self;
while let Some(parent_id) = root_desc.parent_id() { while let Some(parent_id) = root_desc.parent_id() {
@ -587,6 +599,9 @@ impl LspTestReporter {
let (test_module, _) = files let (test_module, _) = files
.entry(specifier.clone()) .entry(specifier.clone())
.or_insert_with(|| (TestModule::new(specifier), "1".to_string())); .or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
let Ok(uri) = url_to_uri(&test_module.specifier) else {
return;
};
let (static_id, is_new) = test_module.register_dynamic(desc); let (static_id, is_new) = test_module.register_dynamic(desc);
self.tests.insert( self.tests.insert(
desc.id, desc.id,
@ -597,9 +612,7 @@ impl LspTestReporter {
.client .client
.send_test_notification(TestingNotification::Module( .send_test_notification(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams { lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier { uri },
uri: test_module.specifier.clone(),
},
kind: lsp_custom::TestModuleNotificationKind::Insert, kind: lsp_custom::TestModuleNotificationKind::Insert,
label: test_module.label(self.maybe_root_uri.as_ref()), label: test_module.label(self.maybe_root_uri.as_ref()),
tests: vec![test_module.get_test_data(&static_id)], tests: vec![test_module.get_test_data(&static_id)],
@ -697,6 +710,9 @@ impl LspTestReporter {
let (test_module, _) = files let (test_module, _) = files
.entry(specifier.clone()) .entry(specifier.clone())
.or_insert_with(|| (TestModule::new(specifier), "1".to_string())); .or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
let Ok(uri) = url_to_uri(&test_module.specifier) else {
return;
};
let (static_id, is_new) = test_module.register_step_dynamic( let (static_id, is_new) = test_module.register_step_dynamic(
desc, desc,
self.tests.get(&desc.parent_id).unwrap().static_id(), self.tests.get(&desc.parent_id).unwrap().static_id(),
@ -710,9 +726,7 @@ impl LspTestReporter {
.client .client
.send_test_notification(TestingNotification::Module( .send_test_notification(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams { lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier { uri },
uri: test_module.specifier.clone(),
},
kind: lsp_custom::TestModuleNotificationKind::Insert, kind: lsp_custom::TestModuleNotificationKind::Insert,
label: test_module.label(self.maybe_root_uri.as_ref()), label: test_module.label(self.maybe_root_uri.as_ref()),
tests: vec![test_module.get_test_data(&static_id)], tests: vec![test_module.get_test_data(&static_id)],
@ -796,14 +810,14 @@ mod tests {
include: Some(vec![ include: Some(vec![
lsp_custom::TestIdentifier { lsp_custom::TestIdentifier {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: specifier.clone(), uri: url_to_uri(&specifier).unwrap(),
}, },
id: None, id: None,
step_id: None, step_id: None,
}, },
lsp_custom::TestIdentifier { lsp_custom::TestIdentifier {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: non_test_specifier.clone(), uri: url_to_uri(&non_test_specifier).unwrap(),
}, },
id: None, id: None,
step_id: None, step_id: None,
@ -811,7 +825,7 @@ mod tests {
]), ]),
exclude: vec![lsp_custom::TestIdentifier { exclude: vec![lsp_custom::TestIdentifier {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: specifier.clone(), uri: url_to_uri(&specifier).unwrap(),
}, },
id: Some( id: Some(
"69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f" "69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f"
View file
@ -10,6 +10,7 @@ use crate::lsp::config;
use crate::lsp::documents::DocumentsFilter; use crate::lsp::documents::DocumentsFilter;
use crate::lsp::language_server::StateSnapshot; use crate::lsp::language_server::StateSnapshot;
use crate::lsp::performance::Performance; use crate::lsp::performance::Performance;
use crate::lsp::urls::url_to_uri;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
@ -26,12 +27,16 @@ use tower_lsp::jsonrpc::Error as LspError;
use tower_lsp::jsonrpc::Result as LspResult; use tower_lsp::jsonrpc::Result as LspResult;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
fn as_delete_notification(uri: ModuleSpecifier) -> TestingNotification { fn as_delete_notification(
TestingNotification::DeleteModule( url: &ModuleSpecifier,
) -> Result<TestingNotification, AnyError> {
Ok(TestingNotification::DeleteModule(
lsp_custom::TestModuleDeleteNotificationParams { lsp_custom::TestModuleDeleteNotificationParams {
text_document: lsp::TextDocumentIdentifier { uri }, text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(url)?,
}, },
) },
))
} }
pub type TestServerTests = pub type TestServerTests =
@ -123,20 +128,24 @@ impl TestServer {
.map(|tm| tm.as_ref().clone()) .map(|tm| tm.as_ref().clone())
.unwrap_or_else(|| TestModule::new(specifier.clone())); .unwrap_or_else(|| TestModule::new(specifier.clone()));
if !test_module.is_empty() { if !test_module.is_empty() {
client.send_test_notification( if let Ok(params) =
test_module.as_replace_notification(mru.as_ref()), test_module.as_replace_notification(mru.as_ref())
); {
client.send_test_notification(params);
}
} else if !was_empty { } else if !was_empty {
client.send_test_notification(as_delete_notification( if let Ok(params) = as_delete_notification(specifier) {
specifier.clone(), client.send_test_notification(params);
)); }
} }
tests tests
.insert(specifier.clone(), (test_module, script_version)); .insert(specifier.clone(), (test_module, script_version));
} }
} }
for key in keys { for key in &keys {
client.send_test_notification(as_delete_notification(key)); if let Ok(params) = as_delete_notification(key) {
client.send_test_notification(params);
}
} }
performance.measure(mark); performance.measure(mark);
} }
View file
@ -19,8 +19,10 @@ use super::refactor::EXTRACT_TYPE;
use super::semantic_tokens; use super::semantic_tokens;
use super::semantic_tokens::SemanticTokensBuilder; use super::semantic_tokens::SemanticTokensBuilder;
use super::text::LineIndex; use super::text::LineIndex;
use super::urls::LspClientUrl; use super::urls::uri_to_url;
use super::urls::url_to_uri;
use super::urls::INVALID_SPECIFIER; use super::urls::INVALID_SPECIFIER;
use super::urls::INVALID_URI;
use crate::args::jsr_url; use crate::args::jsr_url;
use crate::args::FmtOptionsConfig; use crate::args::FmtOptionsConfig;
@ -37,7 +39,6 @@ use deno_core::convert::ToV8;
use deno_core::error::StdAnyError; use deno_core::error::StdAnyError;
use deno_core::futures::stream::FuturesOrdered; use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_runtime::fs_util::specifier_to_file_path;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::MediaType; use deno_ast::MediaType;
@ -61,6 +62,7 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState; use deno_core::OpState;
use deno_core::PollEventLoopOptions; use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions; use deno_core::RuntimeOptions;
use deno_path_util::url_to_file_path;
use deno_runtime::inspector_server::InspectorServer; use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::tokio_util::create_basic_runtime; use deno_runtime::tokio_util::create_basic_runtime;
use indexmap::IndexMap; use indexmap::IndexMap;
@ -2046,7 +2048,7 @@ impl DocumentSpan {
let file_referrer = target_asset_or_doc.file_referrer(); let file_referrer = target_asset_or_doc.file_referrer();
let target_uri = language_server let target_uri = language_server
.url_map .url_map
.normalize_specifier(&target_specifier, file_referrer) .specifier_to_uri(&target_specifier, file_referrer)
.ok()?; .ok()?;
let (target_range, target_selection_range) = let (target_range, target_selection_range) =
if let Some(context_span) = &self.context_span { if let Some(context_span) = &self.context_span {
@ -2071,7 +2073,7 @@ impl DocumentSpan {
}; };
let link = lsp::LocationLink { let link = lsp::LocationLink {
origin_selection_range, origin_selection_range,
target_uri: target_uri.into_url(), target_uri,
target_range, target_range,
target_selection_range, target_selection_range,
}; };
@ -2091,11 +2093,11 @@ impl DocumentSpan {
let line_index = asset_or_doc.line_index(); let line_index = asset_or_doc.line_index();
let range = self.text_span.to_range(line_index); let range = self.text_span.to_range(line_index);
let file_referrer = asset_or_doc.file_referrer(); let file_referrer = asset_or_doc.file_referrer();
let mut target = language_server let target_uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer) .specifier_to_uri(&specifier, file_referrer)
.ok()? .ok()?;
.into_url(); let mut target = uri_to_url(&target_uri);
target.set_fragment(Some(&format!( target.set_fragment(Some(&format!(
"L{},{}", "L{},{}",
range.start.line + 1, range.start.line + 1,
@ -2154,13 +2156,10 @@ impl NavigateToItem {
let file_referrer = asset_or_doc.file_referrer(); let file_referrer = asset_or_doc.file_referrer();
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer) .specifier_to_uri(&specifier, file_referrer)
.ok()?; .ok()?;
let range = self.text_span.to_range(line_index); let range = self.text_span.to_range(line_index);
let location = lsp::Location { let location = lsp::Location { uri, range };
uri: uri.into_url(),
range,
};
let mut tags: Option<Vec<lsp::SymbolTag>> = None; let mut tags: Option<Vec<lsp::SymbolTag>> = None;
let kind_modifiers = parse_kind_modifier(&self.kind_modifiers); let kind_modifiers = parse_kind_modifier(&self.kind_modifiers);
@ -2183,6 +2182,50 @@ impl NavigateToItem {
} }
} }
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintDisplayPart {
pub text: String,
pub span: Option<TextSpan>,
pub file: Option<String>,
}
impl InlayHintDisplayPart {
pub fn to_lsp(
&self,
language_server: &language_server::Inner,
) -> lsp::InlayHintLabelPart {
let location = self.file.as_ref().map(|f| {
let specifier =
resolve_url(f).unwrap_or_else(|_| INVALID_SPECIFIER.clone());
let file_referrer =
language_server.documents.get_file_referrer(&specifier);
let uri = language_server
.url_map
.specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| INVALID_URI.clone());
let range = self
.span
.as_ref()
.and_then(|s| {
let asset_or_doc =
language_server.get_asset_or_document(&specifier).ok()?;
Some(s.to_range(asset_or_doc.line_index()))
})
.unwrap_or_else(|| {
lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0))
});
lsp::Location { uri, range }
});
lsp::InlayHintLabelPart {
value: self.text.clone(),
tooltip: None,
location,
command: None,
}
}
}
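InlayHintDisplayPart turns a flat hint string into structured label parts, each optionally carrying a source location the editor can jump to. The lsp_types shapes are approximated below with hand-rolled structs so the sketch stays dependency-free; all names are stand-ins:

    // Simplified stand-ins for lsp_types::InlayHintLabelPart / Location.
    #[derive(Debug)]
    struct Location {
        uri: String,
        line: u32,
    }

    #[derive(Debug)]
    struct LabelPart {
        value: String,
        location: Option<Location>,
    }

    #[derive(Debug)]
    enum InlayHintLabel {
        Text(String),
        Parts(Vec<LabelPart>),
    }

    fn to_label(text: &str, parts: Option<Vec<LabelPart>>) -> InlayHintLabel {
        // Prefer structured parts when the server provided them, otherwise
        // fall back to the plain string label.
        match parts {
            Some(parts) => InlayHintLabel::Parts(parts),
            None => InlayHintLabel::Text(text.to_string()),
        }
    }

    fn main() {
        let parts = vec![
            LabelPart { value: ": ".into(), location: None },
            LabelPart {
                value: "Person".into(),
                location: Some(Location { uri: "file:///types.ts".into(), line: 3 }),
            },
        ];
        println!("{:?}", to_label(": Person", Some(parts)));
        println!("{:?}", to_label(": number", None));
    }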
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
pub enum InlayHintKind { pub enum InlayHintKind {
Type, Type,
@ -2204,6 +2247,7 @@ impl InlayHintKind {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct InlayHint { pub struct InlayHint {
pub text: String, pub text: String,
pub display_parts: Option<Vec<InlayHintDisplayPart>>,
pub position: u32, pub position: u32,
pub kind: InlayHintKind, pub kind: InlayHintKind,
pub whitespace_before: Option<bool>, pub whitespace_before: Option<bool>,
@ -2211,10 +2255,23 @@ pub struct InlayHint {
} }
impl InlayHint { impl InlayHint {
pub fn to_lsp(&self, line_index: Arc<LineIndex>) -> lsp::InlayHint { pub fn to_lsp(
&self,
line_index: Arc<LineIndex>,
language_server: &language_server::Inner,
) -> lsp::InlayHint {
lsp::InlayHint { lsp::InlayHint {
position: line_index.position_tsc(self.position.into()), position: line_index.position_tsc(self.position.into()),
label: lsp::InlayHintLabel::String(self.text.clone()), label: if let Some(display_parts) = &self.display_parts {
lsp::InlayHintLabel::LabelParts(
display_parts
.iter()
.map(|p| p.to_lsp(language_server))
.collect(),
)
} else {
lsp::InlayHintLabel::String(self.text.clone())
},
kind: self.kind.to_lsp(), kind: self.kind.to_lsp(),
padding_left: self.whitespace_before, padding_left: self.whitespace_before,
padding_right: self.whitespace_after, padding_right: self.whitespace_after,
@ -2413,12 +2470,10 @@ impl ImplementationLocation {
let file_referrer = language_server.documents.get_file_referrer(&specifier); let file_referrer = language_server.documents.get_file_referrer(&specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref()) .specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| { .unwrap_or_else(|_| INVALID_URI.clone());
LspClientUrl::new(ModuleSpecifier::parse("deno://invalid").unwrap())
});
lsp::Location { lsp::Location {
uri: uri.into_url(), uri,
range: self.document_span.text_span.to_range(line_index), range: self.document_span.text_span.to_range(line_index),
} }
} }
@ -2474,7 +2529,7 @@ impl RenameLocations {
language_server.documents.get_file_referrer(&specifier); language_server.documents.get_file_referrer(&specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref())?; .specifier_to_uri(&specifier, file_referrer.as_deref())?;
let asset_or_doc = language_server.get_asset_or_document(&specifier)?; let asset_or_doc = language_server.get_asset_or_document(&specifier)?;
// ensure TextDocumentEdit for `location.file_name`. // ensure TextDocumentEdit for `location.file_name`.
@ -2483,7 +2538,7 @@ impl RenameLocations {
uri.clone(), uri.clone(),
lsp::TextDocumentEdit { lsp::TextDocumentEdit {
text_document: lsp::OptionalVersionedTextDocumentIdentifier { text_document: lsp::OptionalVersionedTextDocumentIdentifier {
uri: uri.as_url().clone(), uri: uri.clone(),
version: asset_or_doc.document_lsp_version(), version: asset_or_doc.document_lsp_version(),
}, },
edits: edits:
@ -2685,7 +2740,7 @@ impl FileTextChanges {
.collect(); .collect();
Ok(lsp::TextDocumentEdit { Ok(lsp::TextDocumentEdit {
text_document: lsp::OptionalVersionedTextDocumentIdentifier { text_document: lsp::OptionalVersionedTextDocumentIdentifier {
uri: specifier, uri: url_to_uri(&specifier)?,
version: asset_or_doc.document_lsp_version(), version: asset_or_doc.document_lsp_version(),
}, },
edits, edits,
@ -2712,7 +2767,7 @@ impl FileTextChanges {
if self.is_new_file.unwrap_or(false) { if self.is_new_file.unwrap_or(false) {
ops.push(lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create( ops.push(lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(
lsp::CreateFile { lsp::CreateFile {
uri: specifier.clone(), uri: url_to_uri(&specifier)?,
options: Some(lsp::CreateFileOptions { options: Some(lsp::CreateFileOptions {
ignore_if_exists: Some(true), ignore_if_exists: Some(true),
overwrite: None, overwrite: None,
@ -2729,7 +2784,7 @@ impl FileTextChanges {
.collect(); .collect();
ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
text_document: lsp::OptionalVersionedTextDocumentIdentifier { text_document: lsp::OptionalVersionedTextDocumentIdentifier {
uri: specifier, uri: url_to_uri(&specifier)?,
version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()), version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()),
}, },
edits, edits,
@ -3127,10 +3182,10 @@ impl ReferenceEntry {
let file_referrer = language_server.documents.get_file_referrer(&specifier); let file_referrer = language_server.documents.get_file_referrer(&specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref()) .specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| LspClientUrl::new(INVALID_SPECIFIER.clone())); .unwrap_or_else(|_| INVALID_URI.clone());
lsp::Location { lsp::Location {
uri: uri.into_url(), uri,
range: self.document_span.text_span.to_range(line_index), range: self.document_span.text_span.to_range(line_index),
} }
} }
@ -3188,12 +3243,13 @@ impl CallHierarchyItem {
.get_file_referrer(&target_specifier); .get_file_referrer(&target_specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&target_specifier, file_referrer.as_deref()) .specifier_to_uri(&target_specifier, file_referrer.as_deref())
.unwrap_or_else(|_| LspClientUrl::new(INVALID_SPECIFIER.clone())); .unwrap_or_else(|_| INVALID_URI.clone());
let use_file_name = self.is_source_file_item(); let use_file_name = self.is_source_file_item();
let maybe_file_path = if uri.as_url().scheme() == "file" { let maybe_file_path = if uri.scheme().is_some_and(|s| s.as_str() == "file")
specifier_to_file_path(uri.as_url()).ok() {
url_to_file_path(&uri_to_url(&uri)).ok()
} else { } else {
None None
}; };
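The call-hierarchy code above only attempts a URL-to-path conversion when the scheme is file:. With the url crate the same guard looks roughly like this; treat it as a sketch rather than the exact helper used in the diff:

    use url::Url;

    fn maybe_file_path(raw: &str) -> Option<std::path::PathBuf> {
        let url = Url::parse(raw).ok()?;
        // `to_file_path` is only meaningful for file: URLs; other schemes
        // (http:, deno:, data:) fall through to None.
        if url.scheme() == "file" {
            url.to_file_path().ok()
        } else {
            None
        }
    }

    fn main() {
        println!("{:?}", maybe_file_path("file:///tmp/mod.ts"));
        println!("{:?}", maybe_file_path("https://deno.land/x/mod.ts"));
    }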
@ -3237,7 +3293,7 @@ impl CallHierarchyItem {
lsp::CallHierarchyItem { lsp::CallHierarchyItem {
name, name,
tags, tags,
uri: uri.into_url(), uri,
detail: Some(detail), detail: Some(detail),
kind: self.kind.clone().into(), kind: self.kind.clone().into(),
range: self.span.to_range(line_index.clone()), range: self.span.to_range(line_index.clone()),
@ -3941,7 +3997,7 @@ pub struct OutliningSpan {
kind: OutliningSpanKind, kind: OutliningSpanKind,
} }
const FOLD_END_PAIR_CHARACTERS: &[u8] = &[b'}', b']', b')', b'`']; const FOLD_END_PAIR_CHARACTERS: &[u8] = b"}])`";
impl OutliningSpan { impl OutliningSpan {
pub fn to_folding_range( pub fn to_folding_range(
@ -4894,6 +4950,10 @@ pub struct UserPreferences {
pub allow_rename_of_import_path: Option<bool>, pub allow_rename_of_import_path: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub auto_import_file_exclude_patterns: Option<Vec<String>>, pub auto_import_file_exclude_patterns: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub interactive_inlay_hints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub prefer_type_only_auto_imports: Option<bool>,
} }
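The new preference fields rely on serde's skip_serializing_if so that unset options are omitted from the JSON sent to the TypeScript host rather than serialized as null. A minimal sketch with serde and serde_json, trimmed down to two fields for illustration:

    use serde::Serialize;

    #[derive(Serialize)]
    #[serde(rename_all = "camelCase")]
    struct UserPreferences {
        #[serde(skip_serializing_if = "Option::is_none")]
        interactive_inlay_hints: Option<bool>,
        #[serde(skip_serializing_if = "Option::is_none")]
        prefer_type_only_auto_imports: Option<bool>,
    }

    fn main() {
        let prefs = UserPreferences {
            interactive_inlay_hints: Some(true),
            prefer_type_only_auto_imports: None,
        };
        // The `None` field disappears entirely from the payload.
        println!("{}", serde_json::to_string(&prefs).unwrap());
        // -> {"interactiveInlayHints":true}
    }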
impl UserPreferences { impl UserPreferences {
@ -4911,6 +4971,7 @@ impl UserPreferences {
include_completions_with_snippet_text: Some( include_completions_with_snippet_text: Some(
config.snippet_support_capable(), config.snippet_support_capable(),
), ),
interactive_inlay_hints: Some(true),
provide_refactor_not_applicable_reason: Some(true), provide_refactor_not_applicable_reason: Some(true),
quote_preference: Some(fmt_config.into()), quote_preference: Some(fmt_config.into()),
use_label_details_in_completion_entries: Some(true), use_label_details_in_completion_entries: Some(true),
@ -5015,6 +5076,9 @@ impl UserPreferences {
} else { } else {
Some(language_settings.preferences.quote_style) Some(language_settings.preferences.quote_style)
}, },
prefer_type_only_auto_imports: Some(
language_settings.preferences.prefer_type_only_auto_imports,
),
..base_preferences ..base_preferences
} }
} }
@ -5398,7 +5462,7 @@ mod tests {
sources: &[(&str, &str, i32, LanguageId)], sources: &[(&str, &str, i32, LanguageId)],
) -> (TempDir, TsServer, Arc<StateSnapshot>, LspCache) { ) -> (TempDir, TsServer, Arc<StateSnapshot>, LspCache) {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
let mut config = Config::default(); let mut config = Config::default();
config config
.tree .tree
@ -5408,7 +5472,7 @@ mod tests {
"compilerOptions": ts_config, "compilerOptions": ts_config,
}) })
.to_string(), .to_string(),
temp_dir.uri().join("deno.json").unwrap(), temp_dir.url().join("deno.json").unwrap(),
&Default::default(), &Default::default(),
) )
.unwrap(), .unwrap(),
@ -5419,7 +5483,7 @@ mod tests {
let mut documents = Documents::default(); let mut documents = Documents::default();
documents.update_config(&config, &resolver, &cache, &Default::default()); documents.update_config(&config, &resolver, &cache, &Default::default());
for (relative_specifier, source, version, language_id) in sources { for (relative_specifier, source, version, language_id) in sources {
let specifier = temp_dir.uri().join(relative_specifier).unwrap(); let specifier = temp_dir.url().join(relative_specifier).unwrap();
documents.open(specifier, *version, *language_id, (*source).into(), None); documents.open(specifier, *version, *language_id, (*source).into(), None);
} }
let snapshot = Arc::new(StateSnapshot { let snapshot = Arc::new(StateSnapshot {
@ -5477,7 +5541,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"noEmit": true, "noEmit": true,
"lib": [], "lib": [],
}), }),
@ -5489,7 +5552,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5523,7 +5586,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"jsx": "react", "jsx": "react",
"lib": ["esnext", "dom", "deno.ns"], "lib": ["esnext", "dom", "deno.ns"],
"noEmit": true, "noEmit": true,
@ -5536,7 +5598,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5549,7 +5611,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5567,7 +5628,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5580,7 +5641,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5594,7 +5654,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5626,7 +5686,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5644,7 +5703,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5657,7 +5716,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5678,7 +5736,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5724,7 +5782,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5736,7 +5793,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5802,7 +5859,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, cache) = setup( let (temp_dir, ts_server, snapshot, cache) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5829,7 +5885,7 @@ mod tests {
b"export const b = \"b\";\n", b"export const b = \"b\";\n",
) )
.unwrap(); .unwrap();
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics( .get_diagnostics(
snapshot.clone(), snapshot.clone(),
@ -5879,7 +5935,7 @@ mod tests {
[(&specifier_dep, ChangeKind::Opened)], [(&specifier_dep, ChangeKind::Opened)],
None, None,
); );
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics( .get_diagnostics(
snapshot.clone(), snapshot.clone(),
@ -5944,14 +6000,13 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
&[("a.ts", fixture, 1, LanguageId::TypeScript)], &[("a.ts", fixture, 1, LanguageId::TypeScript)],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let info = ts_server let info = ts_server
.get_completions( .get_completions(
snapshot.clone(), snapshot.clone(),
@ -5966,7 +6021,7 @@ mod tests {
trigger_kind: None, trigger_kind: None,
}, },
Default::default(), Default::default(),
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap(); .unwrap();
@ -5983,7 +6038,7 @@ mod tests {
preferences: None, preferences: None,
data: None, data: None,
}, },
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap() .unwrap()
@ -6095,7 +6150,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -6105,7 +6159,7 @@ mod tests {
], ],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let fmt_options_config = FmtOptionsConfig { let fmt_options_config = FmtOptionsConfig {
semi_colons: Some(false), semi_colons: Some(false),
single_quote: Some(true), single_quote: Some(true),
@ -6126,7 +6180,7 @@ mod tests {
..Default::default() ..Default::default()
}, },
FormatCodeSettings::from(&fmt_options_config), FormatCodeSettings::from(&fmt_options_config),
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap(); .unwrap();
@ -6152,7 +6206,7 @@ mod tests {
}), }),
data: entry.data.clone(), data: entry.data.clone(),
}, },
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap() .unwrap()
@ -6204,7 +6258,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -6217,8 +6270,8 @@ mod tests {
let changes = ts_server let changes = ts_server
.get_edits_for_file_rename( .get_edits_for_file_rename(
snapshot, snapshot,
temp_dir.uri().join("b.ts").unwrap(), temp_dir.url().join("b.ts").unwrap(),
temp_dir.uri().join("🦕.ts").unwrap(), temp_dir.url().join("🦕.ts").unwrap(),
FormatCodeSettings::default(), FormatCodeSettings::default(),
UserPreferences::default(), UserPreferences::default(),
) )
@ -6227,7 +6280,7 @@ mod tests {
assert_eq!( assert_eq!(
changes, changes,
vec![FileTextChanges { vec![FileTextChanges {
file_name: temp_dir.uri().join("a.ts").unwrap().to_string(), file_name: temp_dir.url().join("a.ts").unwrap().to_string(),
text_changes: vec![TextChange { text_changes: vec![TextChange {
span: TextSpan { span: TextSpan {
start: 8, start: 8,
@ -6275,7 +6328,6 @@ mod tests {
let (temp_dir, _, snapshot, _) = setup( let (temp_dir, _, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -6286,7 +6338,7 @@ mod tests {
let resolved = op_resolve_inner( let resolved = op_resolve_inner(
&mut state, &mut state,
ResolveArgs { ResolveArgs {
base: temp_dir.uri().join("a.ts").unwrap().to_string(), base: temp_dir.url().join("a.ts").unwrap().to_string(),
is_base_cjs: false, is_base_cjs: false,
specifiers: vec!["./b.ts".to_string()], specifiers: vec!["./b.ts".to_string()],
}, },
@ -6295,7 +6347,7 @@ mod tests {
assert_eq!( assert_eq!(
resolved, resolved,
vec![Some(( vec![Some((
temp_dir.uri().join("b.ts").unwrap().to_string(), temp_dir.url().join("b.ts").unwrap().to_string(),
MediaType::TypeScript.as_ts_extension().to_string() MediaType::TypeScript.as_ts_extension().to_string()
))] ))]
); );
View file
@ -6,17 +6,25 @@ use deno_core::parking_lot::Mutex;
use deno_core::url::Position; use deno_core::url::Position;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use lsp_types::Uri;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use std::collections::HashMap; use std::collections::HashMap;
use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use super::cache::LspCache; use super::cache::LspCache;
use super::logging::lsp_warn;
/// Used in situations where a default URL needs to be used where otherwise a /// Used in situations where a default URL needs to be used where otherwise a
/// panic is undesired. /// panic is undesired.
pub static INVALID_SPECIFIER: Lazy<ModuleSpecifier> = pub static INVALID_SPECIFIER: Lazy<ModuleSpecifier> =
Lazy::new(|| ModuleSpecifier::parse("deno://invalid").unwrap()); Lazy::new(|| ModuleSpecifier::parse("deno://invalid").unwrap());
/// Used in situations where a default URL needs to be used where otherwise a
/// panic is undesired.
pub static INVALID_URI: Lazy<Uri> =
Lazy::new(|| Uri::from_str("deno://invalid").unwrap());
/// Matches the `encodeURIComponent()` encoding from JavaScript, which matches /// Matches the `encodeURIComponent()` encoding from JavaScript, which matches
/// the component percent encoding set. /// the component percent encoding set.
/// ///
@ -47,6 +55,25 @@ const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS
.add(b'+') .add(b'+')
.add(b','); .add(b',');
/// Characters that may be left unencoded in a `Url` path but not valid in a
/// `Uri` path.
const URL_TO_URI_PATH: &percent_encoding::AsciiSet =
&percent_encoding::CONTROLS
.add(b'[')
.add(b']')
.add(b'^')
.add(b'|');
/// Characters that may be left unencoded in a `Url` query but not valid in a
/// `Uri` query.
const URL_TO_URI_QUERY: &percent_encoding::AsciiSet =
&URL_TO_URI_PATH.add(b'\\').add(b'`').add(b'{').add(b'}');
/// Characters that may be left unencoded in a `Url` fragment but not valid in
/// a `Uri` fragment.
const URL_TO_URI_FRAGMENT: &percent_encoding::AsciiSet =
&URL_TO_URI_PATH.add(b'#').add(b'\\').add(b'{').add(b'}');
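The AsciiSets above enumerate characters that url::Url leaves unencoded but that the stricter Uri grammar rejects, so they get percent-encoded during conversion. A small sketch of applying the path set with the percent-encoding crate:

    use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};

    // Characters allowed raw in a Url path but not in a Uri path.
    const URL_TO_URI_PATH: &AsciiSet =
        &CONTROLS.add(b'[').add(b']').add(b'^').add(b'|');

    fn main() {
        let path = "/types/array[0]^raw|part";
        let encoded = utf8_percent_encode(path, URL_TO_URI_PATH).to_string();
        assert_eq!(encoded, "/types/array%5B0%5D%5Eraw%7Cpart");
        println!("{encoded}");
    }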
fn hash_data_specifier(specifier: &ModuleSpecifier) -> String { fn hash_data_specifier(specifier: &ModuleSpecifier) -> String {
let mut file_name_str = specifier.path().to_string(); let mut file_name_str = specifier.path().to_string();
if let Some(query) = specifier.query() { if let Some(query) = specifier.query() {
@ -56,7 +83,7 @@ fn hash_data_specifier(specifier: &ModuleSpecifier) -> String {
crate::util::checksum::gen(&[file_name_str.as_bytes()]) crate::util::checksum::gen(&[file_name_str.as_bytes()])
} }
fn to_deno_url(specifier: &Url) -> String { fn to_deno_uri(specifier: &Url) -> String {
let mut string = String::with_capacity(specifier.as_str().len() + 6); let mut string = String::with_capacity(specifier.as_str().len() + 6);
string.push_str("deno:/"); string.push_str("deno:/");
string.push_str(specifier.scheme()); string.push_str(specifier.scheme());
@ -93,58 +120,62 @@ fn from_deno_url(url: &Url) -> Option<Url> {
Url::parse(&string).ok() Url::parse(&string).ok()
} }
/// This exists to make it a little bit harder to accidentally use a `Url`
/// in the wrong place where a client url should be used.
#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct LspClientUrl(Url);
impl LspClientUrl {
pub fn new(url: Url) -> Self {
Self(url)
}
pub fn as_url(&self) -> &Url {
&self.0
}
pub fn into_url(self) -> Url {
self.0
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
}
impl std::fmt::Display for LspClientUrl {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
struct LspUrlMapInner { struct LspUrlMapInner {
specifier_to_url: HashMap<ModuleSpecifier, LspClientUrl>, specifier_to_uri: HashMap<ModuleSpecifier, Uri>,
url_to_specifier: HashMap<Url, ModuleSpecifier>, uri_to_specifier: HashMap<Uri, ModuleSpecifier>,
} }
impl LspUrlMapInner { impl LspUrlMapInner {
fn put(&mut self, specifier: ModuleSpecifier, url: LspClientUrl) { fn put(&mut self, specifier: ModuleSpecifier, uri: Uri) {
self self.uri_to_specifier.insert(uri.clone(), specifier.clone());
.url_to_specifier self.specifier_to_uri.insert(specifier, uri);
.insert(url.as_url().clone(), specifier.clone());
self.specifier_to_url.insert(specifier, url);
} }
fn get_url(&self, specifier: &ModuleSpecifier) -> Option<&LspClientUrl> { fn get_uri(&self, specifier: &ModuleSpecifier) -> Option<&Uri> {
self.specifier_to_url.get(specifier) self.specifier_to_uri.get(specifier)
} }
fn get_specifier(&self, url: &Url) -> Option<&ModuleSpecifier> { fn get_specifier(&self, uri: &Uri) -> Option<&ModuleSpecifier> {
self.url_to_specifier.get(url) self.uri_to_specifier.get(uri)
} }
} }
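LspUrlMapInner is a simple bidirectional map: every specifier/URI pair is inserted into both HashMaps so lookups stay O(1) in either direction. A dependency-free sketch of the same structure with String keys on both sides:

    use std::collections::HashMap;

    #[derive(Default)]
    struct BiMap {
        specifier_to_uri: HashMap<String, String>,
        uri_to_specifier: HashMap<String, String>,
    }

    impl BiMap {
        fn put(&mut self, specifier: String, uri: String) {
            // Insert into both directions so either key can be looked up later.
            self.uri_to_specifier.insert(uri.clone(), specifier.clone());
            self.specifier_to_uri.insert(specifier, uri);
        }

        fn get_uri(&self, specifier: &str) -> Option<&String> {
            self.specifier_to_uri.get(specifier)
        }

        fn get_specifier(&self, uri: &str) -> Option<&String> {
            self.uri_to_specifier.get(uri)
        }
    }

    fn main() {
        let mut map = BiMap::default();
        map.put(
            "https://deno.land/x/pkg@1.0.0/mod.ts".into(),
            "deno:/https/deno.land/x/pkg%401.0.0/mod.ts".into(),
        );
        println!("{:?}", map.get_uri("https://deno.land/x/pkg@1.0.0/mod.ts"));
        println!("{:?}", map.get_specifier("deno:/https/deno.land/x/pkg%401.0.0/mod.ts"));
    }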
pub fn uri_parse_unencoded(s: &str) -> Result<Uri, AnyError> {
url_to_uri(&Url::parse(s)?)
}
pub fn url_to_uri(url: &Url) -> Result<Uri, AnyError> {
let components = deno_core::url::quirks::internal_components(url);
let mut input = String::with_capacity(url.as_str().len());
input.push_str(&url.as_str()[..components.path_start as usize]);
input.push_str(
&percent_encoding::utf8_percent_encode(url.path(), URL_TO_URI_PATH)
.to_string(),
);
if let Some(query) = url.query() {
input.push('?');
input.push_str(
&percent_encoding::utf8_percent_encode(query, URL_TO_URI_QUERY)
.to_string(),
);
}
if let Some(fragment) = url.fragment() {
input.push('#');
input.push_str(
&percent_encoding::utf8_percent_encode(fragment, URL_TO_URI_FRAGMENT)
.to_string(),
);
}
Ok(Uri::from_str(&input).inspect_err(|err| {
lsp_warn!("Could not convert URL \"{url}\" to URI: {err}")
})?)
}
pub fn uri_to_url(uri: &Uri) -> Url {
Url::parse(uri.as_str()).unwrap()
}
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum LspUrlKind { pub enum LspUrlKind {
File, File,
@ -167,24 +198,24 @@ impl LspUrlMap {
/// Normalize a specifier that is used internally within Deno (or tsc) to a /// Normalize a specifier that is used internally within Deno (or tsc) to a
/// URL that can be handled as a "virtual" document by an LSP client. /// URL that can be handled as a "virtual" document by an LSP client.
pub fn normalize_specifier( pub fn specifier_to_uri(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Result<LspClientUrl, AnyError> { ) -> Result<Uri, AnyError> {
if let Some(file_url) = if let Some(file_url) =
self.cache.vendored_specifier(specifier, file_referrer) self.cache.vendored_specifier(specifier, file_referrer)
{ {
return Ok(LspClientUrl(file_url)); return url_to_uri(&file_url);
} }
let mut inner = self.inner.lock(); let mut inner = self.inner.lock();
if let Some(url) = inner.get_url(specifier).cloned() { if let Some(uri) = inner.get_uri(specifier).cloned() {
Ok(url) Ok(uri)
} else { } else {
let url = if specifier.scheme() == "file" { let uri = if specifier.scheme() == "file" {
LspClientUrl(specifier.clone()) url_to_uri(specifier)?
} else { } else {
let specifier_str = if specifier.scheme() == "asset" { let uri_str = if specifier.scheme() == "asset" {
format!("deno:/asset{}", specifier.path()) format!("deno:/asset{}", specifier.path())
} else if specifier.scheme() == "data" { } else if specifier.scheme() == "data" {
let data_url = deno_graph::source::RawDataUrl::parse(specifier)?; let data_url = deno_graph::source::RawDataUrl::parse(specifier)?;
@ -200,13 +231,13 @@ impl LspUrlMap {
extension extension
) )
} else { } else {
to_deno_url(specifier) to_deno_uri(specifier)
}; };
let url = LspClientUrl(Url::parse(&specifier_str)?); let uri = uri_parse_unencoded(&uri_str)?;
inner.put(specifier.clone(), url.clone()); inner.put(specifier.clone(), uri.clone());
url uri
}; };
Ok(url) Ok(uri)
} }
} }
@ -218,12 +249,17 @@ impl LspUrlMap {
/// Note: Sometimes the url provided by the client may not have a trailing slash, /// Note: Sometimes the url provided by the client may not have a trailing slash,
/// so we need to force it to in the mapping and need to explicitly state whether /// so we need to force it to in the mapping and need to explicitly state whether

/// this is a file or directory url. /// this is a file or directory url.
pub fn normalize_url(&self, url: &Url, kind: LspUrlKind) -> ModuleSpecifier { pub fn uri_to_specifier(
if let Some(remote_url) = self.cache.unvendored_specifier(url) { &self,
uri: &Uri,
kind: LspUrlKind,
) -> ModuleSpecifier {
let url = uri_to_url(uri);
if let Some(remote_url) = self.cache.unvendored_specifier(&url) {
return remote_url; return remote_url;
} }
let mut inner = self.inner.lock(); let mut inner = self.inner.lock();
if let Some(specifier) = inner.get_specifier(url).cloned() { if let Some(specifier) = inner.get_specifier(uri).cloned() {
return specifier; return specifier;
} }
let mut specifier = None; let mut specifier = None;
@ -234,13 +270,13 @@ impl LspUrlMap {
LspUrlKind::File => Url::from_file_path(path).unwrap(), LspUrlKind::File => Url::from_file_path(path).unwrap(),
}); });
} }
} else if let Some(s) = file_like_to_file_specifier(url) { } else if let Some(s) = file_like_to_file_specifier(&url) {
specifier = Some(s); specifier = Some(s);
} else if let Some(s) = from_deno_url(url) { } else if let Some(s) = from_deno_url(&url) {
specifier = Some(s); specifier = Some(s);
} }
let specifier = specifier.unwrap_or_else(|| url.clone()); let specifier = specifier.unwrap_or_else(|| url.clone());
inner.put(specifier.clone(), LspClientUrl(url.clone())); inner.put(specifier.clone(), uri.clone());
specifier specifier
} }
} }
@ -288,15 +324,14 @@ mod tests {
fn test_lsp_url_map() { fn test_lsp_url_map() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap(); let fixture = resolve_url("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = assert_eq!(
Url::parse("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap(); actual_uri.as_str(),
assert_eq!(actual_url.as_url(), &expected_url); "deno:/https/deno.land/x/pkg%401.0.0/mod.ts"
);
let actual_specifier = let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -304,18 +339,14 @@ mod tests {
fn test_lsp_url_reverse() { fn test_lsp_url_reverse() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = let fixture =
resolve_url("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap(); Uri::from_str("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap();
let actual_specifier = map.normalize_url(&fixture, LspUrlKind::File); let actual_specifier = map.uri_to_specifier(&fixture, LspUrlKind::File);
let expected_specifier = let expected_specifier =
Url::parse("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap(); Url::parse("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap();
assert_eq!(&actual_specifier, &expected_specifier); assert_eq!(&actual_specifier, &expected_specifier);
let actual_url = map let actual_uri = map.specifier_to_uri(&actual_specifier, None).unwrap();
.normalize_specifier(&actual_specifier, None) assert_eq!(actual_uri, fixture);
.unwrap()
.as_url()
.clone();
assert_eq!(actual_url, fixture);
} }
#[test] #[test]
@ -323,14 +354,11 @@ mod tests {
// Test fix for #9741 - not properly encoding certain URLs // Test fix for #9741 - not properly encoding certain URLs
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("https://cdn.skypack.dev/-/postcss@v8.2.9-E4SktPp9c0AtxrJHp8iV/dist=es2020,mode=types/lib/postcss.d.ts").unwrap(); let fixture = resolve_url("https://cdn.skypack.dev/-/postcss@v8.2.9-E4SktPp9c0AtxrJHp8iV/dist=es2020,mode=types/lib/postcss.d.ts").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = Url::parse("deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts").unwrap(); assert_eq!(actual_uri.as_str(), "deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts");
assert_eq!(actual_url.as_url(), &expected_url); let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
let actual_specifier =
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -338,14 +366,13 @@ mod tests {
fn test_lsp_url_map_data() { fn test_lsp_url_map_data() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap(); let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = Url::parse("deno:/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37/data_url.ts").unwrap(); let expected_url = Url::parse("deno:/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37/data_url.ts").unwrap();
assert_eq!(actual_url.as_url(), &expected_url); assert_eq!(&uri_to_url(&actual_uri), &expected_url);
let actual_specifier = let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -353,15 +380,11 @@ mod tests {
fn test_lsp_url_map_host_with_port() { fn test_lsp_url_map_host_with_port() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("http://localhost:8000/mod.ts").unwrap(); let fixture = resolve_url("http://localhost:8000/mod.ts").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = assert_eq!(actual_uri.as_str(), "deno:/http/localhost%3A8000/mod.ts");
Url::parse("deno:/http/localhost%3A8000/mod.ts").unwrap(); let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
assert_eq!(actual_url.as_url(), &expected_url);
let actual_specifier =
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -369,11 +392,11 @@ mod tests {
#[test] #[test]
fn test_normalize_windows_path() { fn test_normalize_windows_path() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url( let fixture = Uri::from_str(
"file:///c%3A/Users/deno/Desktop/file%20with%20spaces%20in%20name.txt", "file:///c%3A/Users/deno/Desktop/file%20with%20spaces%20in%20name.txt",
) )
.unwrap(); .unwrap();
let actual = map.normalize_url(&fixture, LspUrlKind::File); let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
let expected = let expected =
Url::parse("file:///C:/Users/deno/Desktop/file with spaces in name.txt") Url::parse("file:///C:/Users/deno/Desktop/file with spaces in name.txt")
.unwrap(); .unwrap();
@ -384,11 +407,11 @@ mod tests {
#[test] #[test]
fn test_normalize_percent_encoded_path() { fn test_normalize_percent_encoded_path() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url( let fixture = Uri::from_str(
"file:///Users/deno/Desktop/file%20with%20spaces%20in%20name.txt", "file:///Users/deno/Desktop/file%20with%20spaces%20in%20name.txt",
) )
.unwrap(); .unwrap();
let actual = map.normalize_url(&fixture, LspUrlKind::File); let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
let expected = let expected =
Url::parse("file:///Users/deno/Desktop/file with spaces in name.txt") Url::parse("file:///Users/deno/Desktop/file with spaces in name.txt")
.unwrap(); .unwrap();
@ -398,9 +421,9 @@ mod tests {
#[test] #[test]
fn test_normalize_deno_status() { fn test_normalize_deno_status() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("deno:/status.md").unwrap(); let fixture = Uri::from_str("deno:/status.md").unwrap();
let actual = map.normalize_url(&fixture, LspUrlKind::File); let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
assert_eq!(actual, fixture); assert_eq!(actual.as_str(), fixture.as_str());
} }
#[test] #[test]
View file
@ -15,7 +15,6 @@ mod js;
mod jsr; mod jsr;
mod lsp; mod lsp;
mod module_loader; mod module_loader;
mod napi;
mod node; mod node;
mod npm; mod npm;
mod ops; mod ops;
@ -32,13 +31,12 @@ mod worker;
use crate::args::flags_from_vec; use crate::args::flags_from_vec;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::Flags; use crate::args::Flags;
use crate::args::DENO_FUTURE;
use crate::graph_container::ModuleGraphContainer;
use crate::util::display; use crate::util::display;
use crate::util::v8::get_v8_flags_from_env; use crate::util::v8::get_v8_flags_from_env;
use crate::util::v8::init_v8_flags; use crate::util::v8::init_v8_flags;
use args::TaskFlags; use args::TaskFlags;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS; pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
@ -52,13 +50,20 @@ use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_terminal::colors; use deno_terminal::colors;
use factory::CliFactory; use factory::CliFactory;
use npm::ResolvePkgFolderFromDenoReqError;
use standalone::MODULE_NOT_FOUND; use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;
use std::env; use std::env;
use std::future::Future; use std::future::Future;
use std::io::IsTerminal;
use std::ops::Deref; use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
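The dhat-heap block registers dhat's tracking allocator as the global allocator only when the binary is built with that feature, so normal builds pay nothing. A sketch of the usual dhat-rs wiring, assuming a dhat dependency behind an optional dhat-heap feature; the profiler call follows dhat's documented pattern rather than anything shown in this diff:

    // Cargo.toml (assumed): dhat = "0.3" behind an optional `dhat-heap` feature.
    #[cfg(feature = "dhat-heap")]
    #[global_allocator]
    static ALLOC: dhat::Alloc = dhat::Alloc;

    fn main() {
        // The profiler writes dhat-heap.json on drop when the feature is on.
        #[cfg(feature = "dhat-heap")]
        let _profiler = dhat::Profiler::new_heap();

        let data: Vec<u64> = (0..1_000).collect();
        println!("allocated {} elements", data.len());
    }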
/// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type. /// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type.
trait SubcommandOutput { trait SubcommandOutput {
fn output(self) -> Result<i32, AnyError>; fn output(self) -> Result<i32, AnyError>;
@ -110,9 +115,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::bench::run_benchmarks(flags, bench_flags).await tools::bench::run_benchmarks(flags, bench_flags).await
} }
}), }),
DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async { DenoSubcommand::Bundle => exit_with_message("⚠️ `deno bundle` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations", 1),
tools::bundle::bundle(flags, bundle_flags).await
}),
DenoSubcommand::Doc(doc_flags) => { DenoSubcommand::Doc(doc_flags) => {
spawn_subcommand(async { tools::doc::doc(flags, doc_flags).await }) spawn_subcommand(async { tools::doc::doc(flags, doc_flags).await })
} }
@ -120,22 +123,10 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::run::eval_command(flags, eval_flags).await tools::run::eval_command(flags, eval_flags).await
}), }),
DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move { DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags); tools::installer::install_from_entrypoints(flags, &cache_flags.files).await
let emitter = factory.emitter()?;
let main_graph_container =
factory.main_module_graph_container().await?;
main_graph_container
.load_and_type_check_files(&cache_flags.files)
.await?;
emitter.cache_module_emits(&main_graph_container.graph()).await
}), }),
DenoSubcommand::Check(check_flags) => spawn_subcommand(async move { DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags); tools::check::check(flags, check_flags).await
let main_graph_container =
factory.main_module_graph_container().await?;
main_graph_container
.load_and_type_check_files(&check_flags.files)
.await
}), }),
DenoSubcommand::Clean => spawn_subcommand(async move { DenoSubcommand::Clean => spawn_subcommand(async move {
tools::clean::clean() tools::clean::clean()
@ -171,9 +162,21 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::jupyter::kernel(flags, jupyter_flags).await tools::jupyter::kernel(flags, jupyter_flags).await
}), }),
DenoSubcommand::Uninstall(uninstall_flags) => spawn_subcommand(async { DenoSubcommand::Uninstall(uninstall_flags) => spawn_subcommand(async {
tools::installer::uninstall(uninstall_flags) tools::installer::uninstall(flags, uninstall_flags).await
}),
DenoSubcommand::Lsp => spawn_subcommand(async {
if std::io::stderr().is_terminal() {
log::warn!(
"{} command is intended to be run by text editors and IDEs and shouldn't be run manually.
Visit https://docs.deno.com/runtime/getting_started/setup_your_environment/ for instructions
on how to set up your favorite text editor.
Press Ctrl+C to exit.
", colors::cyan("deno lsp"));
}
lsp::start().await
}), }),
DenoSubcommand::Lsp => spawn_subcommand(async { lsp::start().await }),
DenoSubcommand::Lint(lint_flags) => spawn_subcommand(async { DenoSubcommand::Lint(lint_flags) => spawn_subcommand(async {
if lint_flags.rules { if lint_flags.rules {
tools::lint::print_rules_list( tools::lint::print_rules_list(
@ -196,7 +199,23 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
match result { match result {
Ok(v) => Ok(v), Ok(v) => Ok(v),
Err(script_err) => { Err(script_err) => {
if script_err.to_string().starts_with(MODULE_NOT_FOUND) { if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = script_err.downcast_ref::<ResolvePkgFolderFromDenoReqError>() {
if flags.node_modules_dir.is_none() {
let mut flags = flags.deref().clone();
let watch = match &flags.subcommand {
DenoSubcommand::Run(run_flags) => run_flags.watch.clone(),
_ => unreachable!(),
};
flags.node_modules_dir = Some(deno_config::deno_json::NodeModulesDirMode::None);
// use the current lockfile, but don't write it out
if flags.frozen_lockfile.is_none() {
flags.internal.lockfile_skip_write = true;
}
return tools::run::run_script(WorkerExecutionMode::Run, Arc::new(flags), watch).await;
}
}
let script_err_msg = script_err.to_string();
if script_err_msg.starts_with(MODULE_NOT_FOUND) || script_err_msg.starts_with(UNSUPPORTED_SCHEME) {
if run_flags.bare { if run_flags.bare {
let mut cmd = args::clap_root(); let mut cmd = args::clap_root();
cmd.build(); cmd.build();
@ -218,9 +237,10 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
let task_flags = TaskFlags { let task_flags = TaskFlags {
cwd: None, cwd: None,
task: Some(run_flags.script.clone()), task: Some(run_flags.script.clone()),
is_run: true,
}; };
new_flags.subcommand = DenoSubcommand::Task(task_flags.clone()); new_flags.subcommand = DenoSubcommand::Task(task_flags.clone());
let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone(), true).await; let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone()).await;
match result { match result {
Ok(v) => Ok(v), Ok(v) => Ok(v),
Err(_) => { Err(_) => {
@ -240,7 +260,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::serve::serve(flags, serve_flags).await tools::serve::serve(flags, serve_flags).await
}), }),
DenoSubcommand::Task(task_flags) => spawn_subcommand(async { DenoSubcommand::Task(task_flags) => spawn_subcommand(async {
tools::task::execute_script(flags, task_flags, false).await tools::task::execute_script(flags, task_flags).await
}), }),
DenoSubcommand::Test(test_flags) => { DenoSubcommand::Test(test_flags) => {
spawn_subcommand(async { spawn_subcommand(async {
@ -283,14 +303,26 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
"This deno was built without the \"upgrade\" feature. Please upgrade using the installation method originally used to install Deno.", "This deno was built without the \"upgrade\" feature. Please upgrade using the installation method originally used to install Deno.",
1, 1,
), ),
DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async { DenoSubcommand::Vendor => exit_with_message("⚠️ `deno vendor` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations", 1),
tools::vendor::vendor(flags, vendor_flags).await
}),
DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async { DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
tools::registry::publish(flags, publish_flags).await tools::registry::publish(flags, publish_flags).await
}), }),
DenoSubcommand::Help(help_flags) => spawn_subcommand(async move { DenoSubcommand::Help(help_flags) => spawn_subcommand(async move {
display::write_to_stdout_ignore_sigpipe(help_flags.help.ansi().to_string().as_bytes()) use std::io::Write;
let mut stream = anstream::AutoStream::new(std::io::stdout(), if colors::use_color() {
anstream::ColorChoice::Auto
} else {
anstream::ColorChoice::Never
});
match stream.write_all(help_flags.help.ansi().to_string().as_bytes()) {
Ok(()) => Ok(()),
Err(e) => match e.kind() {
std::io::ErrorKind::BrokenPipe => Ok(()),
_ => Err(e),
},
}
}), }),
}; };
@ -357,19 +389,10 @@ pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
std::process::exit(70); std::process::exit(70);
} }
// TODO(bartlomieju): remove when `--unstable` flag is removed.
#[allow(clippy::print_stderr)]
pub(crate) fn unstable_warn_cb(feature: &str, api_name: &str) {
eprintln!(
"⚠️ {}",
colors::yellow(format!(
"The `{}` API was used with `--unstable` flag. The `--unstable` flag is deprecated and will be removed in Deno 2.0. Use granular `--unstable-{}` instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags",
api_name, feature
))
);
}
pub fn main() { pub fn main() {
#[cfg(feature = "dhat-heap")]
let profiler = dhat::Profiler::new_heap();
setup_panic_hook(); setup_panic_hook();
util::unix::raise_fd_limit(); util::unix::raise_fd_limit();
@ -390,7 +413,12 @@ pub fn main() {
run_subcommand(Arc::new(flags)).await run_subcommand(Arc::new(flags)).await
}; };
match create_and_run_current_thread_with_maybe_metrics(future) { let result = create_and_run_current_thread_with_maybe_metrics(future);
#[cfg(feature = "dhat-heap")]
drop(profiler);
match result {
Ok(exit_code) => std::process::exit(exit_code), Ok(exit_code) => std::process::exit(exit_code),
Err(err) => exit_for_error(err), Err(err) => exit_for_error(err),
} }
@ -411,25 +439,14 @@ fn resolve_flags_and_init(
Err(err) => exit_for_error(AnyError::from(err)), Err(err) => exit_for_error(AnyError::from(err)),
}; };
// TODO(bartlomieju): remove when `--unstable` flag is removed. // TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled { if flags.unstable_config.legacy_flag_enabled {
#[allow(clippy::print_stderr)] log::warn!(
if matches!(flags.subcommand, DenoSubcommand::Check(_)) {
// can't use log crate because that's not setup yet
eprintln!(
"⚠️ {}", "⚠️ {}",
colors::yellow( colors::yellow(
"The `--unstable` flag is not needed for `deno check` anymore." "The `--unstable` flag has been removed in Deno 2.0. Use granular `--unstable-*` flags instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags"
) )
); );
} else {
eprintln!(
"⚠️ {}",
colors::yellow(
"The `--unstable` flag is deprecated and will be removed in Deno 2.0. Use granular `--unstable-*` flags instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags"
)
);
}
} }
let default_v8_flags = match flags.subcommand { let default_v8_flags = match flags.subcommand {
@ -437,30 +454,19 @@ fn resolve_flags_and_init(
// https://github.com/microsoft/vscode/blob/48d4ba271686e8072fc6674137415bc80d936bc7/extensions/typescript-language-features/src/configuration/configuration.ts#L213-L214 // https://github.com/microsoft/vscode/blob/48d4ba271686e8072fc6674137415bc80d936bc7/extensions/typescript-language-features/src/configuration/configuration.ts#L213-L214
DenoSubcommand::Lsp => vec!["--max-old-space-size=3072".to_string()], DenoSubcommand::Lsp => vec!["--max-old-space-size=3072".to_string()],
_ => { _ => {
if *DENO_FUTURE {
// TODO(bartlomieju): I think this can be removed as it's handled by `deno_core` // TODO(bartlomieju): I think this can be removed as it's handled by `deno_core`
// and its settings. // and its settings.
// deno_ast removes TypeScript `assert` keywords, so this flag only affects JavaScript // deno_ast removes TypeScript `assert` keywords, so this flag only affects JavaScript
// TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast // TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast
vec!["--no-harmony-import-assertions".to_string()] vec!["--no-harmony-import-assertions".to_string()]
} else {
vec![
// TODO(bartlomieju): I think this can be removed as it's handled by `deno_core`
// and its settings.
// If we're still in v1.X version we want to support import assertions.
// V8 12.6 unshipped the support by default, so force it by passing a
// flag.
"--harmony-import-assertions".to_string(),
// Verify with DENO_FUTURE for now.
"--no-maglev".to_string(),
]
}
} }
}; };
init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env()); init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env());
// TODO(bartlomieju): remove last argument in Deno 2. // TODO(bartlomieju): remove last argument once Deploy no longer needs it
deno_core::JsRuntime::init_platform(None, !*DENO_FUTURE); deno_core::JsRuntime::init_platform(
None, /* import assertions enabled */ false,
);
util::logger::init(flags.log_level); util::logger::init(flags.log_level);
Ok(flags) Ok(flags)

View file

@ -88,11 +88,10 @@ fn main() {
let standalone = standalone::extract_standalone(Cow::Owned(args)); let standalone = standalone::extract_standalone(Cow::Owned(args));
let future = async move { let future = async move {
match standalone { match standalone {
Ok(Some(future)) => { Ok(Some(data)) => {
let (metadata, eszip) = future.await?; util::logger::init(data.metadata.log_level);
util::logger::init(metadata.log_level); load_env_vars(&data.metadata.env_vars_from_env_file);
load_env_vars(&metadata.env_vars_from_env_file); let exit_code = standalone::run(data).await?;
let exit_code = standalone::run(eszip, metadata).await?;
std::process::exit(exit_code); std::process::exit(exit_code);
} }
Ok(None) => Ok(()), Ok(None) => Ok(()),

View file

@ -2,7 +2,6 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::HashSet;
use std::path::PathBuf; use std::path::PathBuf;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
@ -18,13 +17,13 @@ use crate::cache::CodeCache;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use crate::emit::Emitter; use crate::emit::Emitter;
use crate::factory::CliFactory;
use crate::graph_container::MainModuleGraphContainer; use crate::graph_container::MainModuleGraphContainer;
use crate::graph_container::ModuleGraphContainer; use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit; use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions; use crate::graph_util::CreateGraphOptions;
use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphBuilder;
use crate::node; use crate::node;
use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolver;
use crate::resolver::CliNodeResolver; use crate::resolver::CliNodeResolver;
use crate::resolver::ModuleCodeStringSource; use crate::resolver::ModuleCodeStringSource;
@ -45,7 +44,6 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::future::FutureExt; use deno_core::futures::future::FutureExt;
use deno_core::futures::Future; use deno_core::futures::Future;
use deno_core::parking_lot::Mutex;
use deno_core::resolve_url; use deno_core::resolve_url;
use deno_core::ModuleCodeString; use deno_core::ModuleCodeString;
use deno_core::ModuleLoader; use deno_core::ModuleLoader;
@ -70,54 +68,6 @@ use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
let npm_resolver = factory.npm_resolver().await?;
let cli_options = factory.cli_options()?;
if let Some(npm_resolver) = npm_resolver.as_managed() {
if !npm_resolver.ensure_top_level_package_json_install().await? {
if let Some(lockfile) = cli_options.maybe_lockfile() {
lockfile.error_if_changed()?;
}
npm_resolver.cache_packages().await?;
}
}
// cache as many entries in the import map as we can
let resolver = factory.workspace_resolver().await?;
if let Some(import_map) = resolver.maybe_import_map() {
let roots = import_map
.imports()
.entries()
.filter_map(|entry| {
if entry.key.ends_with('/') {
None
} else {
entry.value.cloned()
}
})
.collect::<Vec<_>>();
let mut graph_permit = factory
.main_module_graph_container()
.await?
.acquire_update_permit()
.await;
let graph = graph_permit.graph_mut();
factory
.module_load_preparer()
.await?
.prepare_module_load(
graph,
&roots,
false,
factory.cli_options()?.ts_type_lib_window(),
deno_runtime::deno_permissions::PermissionsContainer::allow_all(),
)
.await?;
}
Ok(())
}
pub struct ModuleLoadPreparer { pub struct ModuleLoadPreparer {
options: Arc<CliOptions>, options: Arc<CliOptions>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
@ -156,11 +106,32 @@ impl ModuleLoadPreparer {
is_dynamic: bool, is_dynamic: bool,
lib: TsTypeLib, lib: TsTypeLib,
permissions: PermissionsContainer, permissions: PermissionsContainer,
ext_overwrite: Option<&String>,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
log::debug!("Preparing module load."); log::debug!("Preparing module load.");
let _pb_clear_guard = self.progress_bar.clear_guard(); let _pb_clear_guard = self.progress_bar.clear_guard();
let mut cache = self.module_graph_builder.create_fetch_cacher(permissions); let mut cache = self.module_graph_builder.create_fetch_cacher(permissions);
if let Some(ext) = ext_overwrite {
let maybe_content_type = match ext.as_str() {
"ts" => Some("text/typescript"),
"tsx" => Some("text/tsx"),
"js" => Some("text/javascript"),
"jsx" => Some("text/jsx"),
_ => None,
};
if let Some(content_type) = maybe_content_type {
for root in roots {
cache.file_header_overrides.insert(
root.clone(),
std::collections::HashMap::from([(
"content-type".to_string(),
content_type.to_string(),
)]),
);
}
}
}
log::debug!("Building module graph."); log::debug!("Building module graph.");
let has_type_checked = !graph.roots.is_empty(); let has_type_checked = !graph.roots.is_empty();
@ -233,6 +204,7 @@ struct SharedCliModuleLoaderState {
main_module_graph_container: Arc<MainModuleGraphContainer>, main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader, npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>, resolver: Arc<CliGraphResolver>,
@ -251,6 +223,7 @@ impl CliModuleLoaderFactory {
main_module_graph_container: Arc<MainModuleGraphContainer>, main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader, npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>, resolver: Arc<CliGraphResolver>,
@ -271,6 +244,7 @@ impl CliModuleLoaderFactory {
main_module_graph_container, main_module_graph_container,
module_load_preparer, module_load_preparer,
node_resolver, node_resolver,
npm_resolver,
npm_module_loader, npm_module_loader,
parsed_source_cache, parsed_source_cache,
resolver, resolver,
@ -282,18 +256,19 @@ impl CliModuleLoaderFactory {
&self, &self,
graph_container: TGraphContainer, graph_container: TGraphContainer,
lib: TsTypeLib, lib: TsTypeLib,
root_permissions: PermissionsContainer, is_worker: bool,
dynamic_permissions: PermissionsContainer, parent_permissions: PermissionsContainer,
permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> ModuleLoaderAndSourceMapGetter {
let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
lib, lib,
root_permissions, is_worker,
dynamic_permissions, parent_permissions,
permissions,
graph_container, graph_container,
emitter: self.shared.emitter.clone(), emitter: self.shared.emitter.clone(),
parsed_source_cache: self.shared.parsed_source_cache.clone(), parsed_source_cache: self.shared.parsed_source_cache.clone(),
shared: self.shared.clone(), shared: self.shared.clone(),
prevent_v8_code_cache: Default::default(),
}))); })));
ModuleLoaderAndSourceMapGetter { ModuleLoaderAndSourceMapGetter {
module_loader: loader, module_loader: loader,
@ -305,20 +280,20 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
fn create_for_main( fn create_for_main(
&self, &self,
root_permissions: PermissionsContainer, root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> ModuleLoaderAndSourceMapGetter {
self.create_with_lib( self.create_with_lib(
(*self.shared.main_module_graph_container).clone(), (*self.shared.main_module_graph_container).clone(),
self.shared.lib_window, self.shared.lib_window,
/* is worker */ false,
root_permissions.clone(),
root_permissions, root_permissions,
dynamic_permissions,
) )
} }
fn create_for_worker( fn create_for_worker(
&self, &self,
root_permissions: PermissionsContainer, parent_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> ModuleLoaderAndSourceMapGetter {
self.create_with_lib( self.create_with_lib(
// create a fresh module graph for the worker // create a fresh module graph for the worker
@ -326,29 +301,25 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
self.shared.graph_kind, self.shared.graph_kind,
))), ))),
self.shared.lib_worker, self.shared.lib_worker,
root_permissions, /* is worker */ true,
dynamic_permissions, parent_permissions,
permissions,
) )
} }
} }
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> { struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
lib: TsTypeLib, lib: TsTypeLib,
is_worker: bool,
/// The initial set of permissions used to resolve the static imports in the /// The initial set of permissions used to resolve the static imports in the
/// worker. These are "allow all" for main worker, and parent thread /// worker. These are "allow all" for main worker, and parent thread
/// permissions for Web Worker. /// permissions for Web Worker.
root_permissions: PermissionsContainer, parent_permissions: PermissionsContainer,
/// Permissions used to resolve dynamic imports, these get passed as permissions: PermissionsContainer,
/// "root permissions" for Web Worker.
dynamic_permissions: PermissionsContainer,
shared: Arc<SharedCliModuleLoaderState>, shared: Arc<SharedCliModuleLoaderState>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
graph_container: TGraphContainer, graph_container: TGraphContainer,
// NOTE(bartlomieju): this is temporary, for deprecated import assertions.
// Should be removed in Deno 2.
// Modules stored here should not be V8 code-cached.
prevent_v8_code_cache: Arc<Mutex<HashSet<String>>>,
} }
impl<TGraphContainer: ModuleGraphContainer> impl<TGraphContainer: ModuleGraphContainer>
@ -360,15 +331,23 @@ impl<TGraphContainer: ModuleGraphContainer>
maybe_referrer: Option<&ModuleSpecifier>, maybe_referrer: Option<&ModuleSpecifier>,
requested_module_type: RequestedModuleType, requested_module_type: RequestedModuleType,
) -> Result<ModuleSource, AnyError> { ) -> Result<ModuleSource, AnyError> {
let code_source = if let Some(result) = self let code_source = match self.load_prepared_module(specifier).await? {
Some(code_source) => code_source,
None => {
if self.shared.npm_module_loader.if_in_npm_package(specifier) {
self
.shared .shared
.npm_module_loader .npm_module_loader
.load_if_in_npm_package(specifier, maybe_referrer) .load(specifier, maybe_referrer)
.await .await?
{
result?
} else { } else {
self.load_prepared_module(specifier, maybe_referrer).await? let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
return Err(anyhow!(msg));
}
}
}; };
let code = if self.shared.is_inspecting { let code = if self.shared.is_inspecting {
// we need the code with the source map in order for // we need the code with the source map in order for
@ -450,7 +429,7 @@ impl<TGraphContainer: ModuleGraphContainer>
fn inner_resolve( fn inner_resolve(
&self, &self,
specifier: &str, raw_specifier: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
) -> Result<ModuleSpecifier, AnyError> { ) -> Result<ModuleSpecifier, AnyError> {
if self.shared.node_resolver.in_npm_package(referrer) { if self.shared.node_resolver.in_npm_package(referrer) {
@ -458,7 +437,7 @@ impl<TGraphContainer: ModuleGraphContainer>
self self
.shared .shared
.node_resolver .node_resolver
.resolve(specifier, referrer, NodeResolutionMode::Execution)? .resolve(raw_specifier, referrer, NodeResolutionMode::Execution)?
.into_url(), .into_url(),
); );
} }
@ -467,7 +446,7 @@ impl<TGraphContainer: ModuleGraphContainer>
let resolution = match graph.get(referrer) { let resolution = match graph.get(referrer) {
Some(Module::Js(module)) => module Some(Module::Js(module)) => module
.dependencies .dependencies
.get(specifier) .get(raw_specifier)
.map(|d| &d.maybe_code) .map(|d| &d.maybe_code)
.unwrap_or(&Resolution::None), .unwrap_or(&Resolution::None),
_ => &Resolution::None, _ => &Resolution::None,
@ -482,7 +461,7 @@ impl<TGraphContainer: ModuleGraphContainer>
)); ));
} }
Resolution::None => Cow::Owned(self.shared.resolver.resolve( Resolution::None => Cow::Owned(self.shared.resolver.resolve(
specifier, raw_specifier,
&deno_graph::Range { &deno_graph::Range {
specifier: referrer.clone(), specifier: referrer.clone(),
start: deno_graph::Position::zeroed(), start: deno_graph::Position::zeroed(),
@ -511,7 +490,6 @@ impl<TGraphContainer: ModuleGraphContainer>
Some(Module::Npm(module)) => { Some(Module::Npm(module)) => {
let package_folder = self let package_folder = self
.shared .shared
.node_resolver
.npm_resolver .npm_resolver
.as_managed() .as_managed()
.unwrap() // byonm won't create a Module::Npm .unwrap() // byonm won't create a Module::Npm
@ -544,17 +522,12 @@ impl<TGraphContainer: ModuleGraphContainer>
async fn load_prepared_module( async fn load_prepared_module(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>, ) -> Result<Option<ModuleCodeStringSource>, AnyError> {
) -> Result<ModuleCodeStringSource, AnyError> {
// Note: keep this in sync with the sync version below // Note: keep this in sync with the sync version below
let graph = self.graph_container.graph(); let graph = self.graph_container.graph();
match self.load_prepared_module_or_defer_emit( match self.load_prepared_module_or_defer_emit(&graph, specifier)? {
&graph, Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)),
specifier, Some(CodeOrDeferredEmit::DeferredEmit {
maybe_referrer,
) {
Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source),
Ok(CodeOrDeferredEmit::DeferredEmit {
specifier, specifier,
media_type, media_type,
source, source,
@ -567,30 +540,26 @@ impl<TGraphContainer: ModuleGraphContainer>
// at this point, we no longer need the parsed source in memory, so free it // at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
Ok(ModuleCodeStringSource { Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result), // note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type, media_type,
}) }))
} }
Err(err) => Err(err), None => Ok(None),
} }
} }
fn load_prepared_module_sync( fn load_prepared_module_sync(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>, ) -> Result<Option<ModuleCodeStringSource>, AnyError> {
) -> Result<ModuleCodeStringSource, AnyError> {
// Note: keep this in sync with the async version above // Note: keep this in sync with the async version above
let graph = self.graph_container.graph(); let graph = self.graph_container.graph();
match self.load_prepared_module_or_defer_emit( match self.load_prepared_module_or_defer_emit(&graph, specifier)? {
&graph, Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)),
specifier, Some(CodeOrDeferredEmit::DeferredEmit {
maybe_referrer,
) {
Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source),
Ok(CodeOrDeferredEmit::DeferredEmit {
specifier, specifier,
media_type, media_type,
source, source,
@ -602,13 +571,14 @@ impl<TGraphContainer: ModuleGraphContainer>
// at this point, we no longer need the parsed source in memory, so free it // at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
Ok(ModuleCodeStringSource { Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result), // note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type, media_type,
}) }))
} }
Err(err) => Err(err), None => Ok(None),
} }
} }
@ -616,8 +586,7 @@ impl<TGraphContainer: ModuleGraphContainer>
&self, &self,
graph: &'graph ModuleGraph, graph: &'graph ModuleGraph,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>, ) -> Result<Option<CodeOrDeferredEmit<'graph>>, AnyError> {
) -> Result<CodeOrDeferredEmit<'graph>, AnyError> {
if specifier.scheme() == "node" { if specifier.scheme() == "node" {
// Node built-in modules should be handled internally. // Node built-in modules should be handled internally.
unreachable!("Deno bug. {} was misconfigured internally.", specifier); unreachable!("Deno bug. {} was misconfigured internally.", specifier);
@ -629,11 +598,11 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type, media_type,
specifier, specifier,
.. ..
})) => Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource { })) => Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code: ModuleSourceCode::String(source.clone().into()), code: ModuleSourceCode::String(source.clone().into()),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type: *media_type, media_type: *media_type,
})), }))),
Some(deno_graph::Module::Js(JsModule { Some(deno_graph::Module::Js(JsModule {
source, source,
media_type, media_type,
@ -654,11 +623,11 @@ impl<TGraphContainer: ModuleGraphContainer>
| MediaType::Cts | MediaType::Cts
| MediaType::Jsx | MediaType::Jsx
| MediaType::Tsx => { | MediaType::Tsx => {
return Ok(CodeOrDeferredEmit::DeferredEmit { return Ok(Some(CodeOrDeferredEmit::DeferredEmit {
specifier, specifier,
media_type: *media_type, media_type: *media_type,
source, source,
}); }));
} }
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {media_type} for {specifier}") panic!("Unexpected media type {media_type} for {specifier}")
@ -668,24 +637,18 @@ impl<TGraphContainer: ModuleGraphContainer>
// at this point, we no longer need the parsed source in memory, so free it // at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource { Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code: ModuleSourceCode::String(code), code: ModuleSourceCode::String(code),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type: *media_type, media_type: *media_type,
})) })))
} }
Some( Some(
deno_graph::Module::External(_) deno_graph::Module::External(_)
| deno_graph::Module::Node(_) | deno_graph::Module::Node(_)
| deno_graph::Module::Npm(_), | deno_graph::Module::Npm(_),
) )
| None => { | None => Ok(None),
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
Err(anyhow!(msg))
}
} }
} }
} }
@ -804,11 +767,12 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
} }
} }
let root_permissions = if is_dynamic { let permissions = if is_dynamic {
inner.dynamic_permissions.clone() inner.permissions.clone()
} else { } else {
inner.root_permissions.clone() inner.parent_permissions.clone()
}; };
let is_dynamic = is_dynamic || inner.is_worker; // consider workers as dynamic for permissions
let lib = inner.lib; let lib = inner.lib;
let mut update_permit = graph_container.acquire_update_permit().await; let mut update_permit = graph_container.acquire_update_permit().await;
let graph = update_permit.graph_mut(); let graph = update_permit.graph_mut();
@ -818,7 +782,8 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
&[specifier], &[specifier],
is_dynamic, is_dynamic,
lib, lib,
root_permissions, permissions,
None,
) )
.await?; .await?;
update_permit.commit(); update_permit.commit();
@ -834,14 +799,6 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
code_cache: &[u8], code_cache: &[u8],
) -> Pin<Box<dyn Future<Output = ()>>> { ) -> Pin<Box<dyn Future<Output = ()>>> {
if let Some(cache) = self.0.shared.code_cache.as_ref() { if let Some(cache) = self.0.shared.code_cache.as_ref() {
if self
.0
.prevent_v8_code_cache
.lock()
.contains(specifier.as_str())
{
return std::future::ready(()).boxed_local();
}
// This log line is also used by tests. // This log line is also used by tests.
log::debug!( log::debug!(
"Updating V8 code cache for ES module: {specifier}, [{source_hash:?}]" "Updating V8 code cache for ES module: {specifier}, [{source_hash:?}]"
@ -856,19 +813,6 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
std::future::ready(()).boxed_local() std::future::ready(()).boxed_local()
} }
fn purge_and_prevent_code_cache(&self, specifier: &str) {
if let Some(cache) = self.0.shared.code_cache.as_ref() {
// This log line is also used by tests.
log::debug!("Remove V8 code cache for ES module: {specifier}");
cache.remove_code_cache(specifier);
self
.0
.prevent_v8_code_cache
.lock()
.insert(specifier.to_string());
}
}
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> { fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
let specifier = resolve_url(file_name).ok()?; let specifier = resolve_url(file_name).ok()?;
match specifier.scheme() { match specifier.scheme() {
@ -877,7 +821,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
"wasm" | "file" | "http" | "https" | "data" | "blob" => (), "wasm" | "file" | "http" | "https" | "data" | "blob" => (),
_ => return None, _ => return None,
} }
let source = self.0.load_prepared_module_sync(&specifier, None).ok()?; let source = self.0.load_prepared_module_sync(&specifier).ok()??;
source_map_from_code(source.code.as_bytes()) source_map_from_code(source.code.as_bytes())
} }

View file

@ -1,114 +0,0 @@
# napi
This directory contains source for Deno's Node-API implementation. It depends on
`napi_sym` and `deno_napi`.
Files are generally organized the same way as in Node.js's implementation to make
it easier to ensure compatibility.
## Adding a new function
Add the symbol name to
[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json).
```diff
{
"symbols": [
...
"napi_get_undefined",
- "napi_get_null"
+ "napi_get_null",
+ "napi_get_boolean"
]
}
```
Determine where to place the implementation. `napi_get_boolean` is related to JS
values, so we will place it in `js_native_api.rs`. If it is not clear where a
function belongs, just create a new module file.
See [`napi_sym`](../napi_sym/) for writing the implementation:
```rust
#[napi_sym::napi_sym]
pub fn napi_get_boolean(
env: *mut Env,
value: bool,
result: *mut napi_value,
) -> Result {
// ...
Ok(())
}
```
Update the generated symbol lists using the script:
```
deno run --allow-write tools/napi/generate_symbols_lists.js
```
Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to the Node.js
test suite for Node-API.
```js
// tests/napi/boolean_test.js
import { assertEquals, loadTestLibrary } from "./common.js";
const lib = loadTestLibrary();
Deno.test("napi get boolean", function () {
assertEquals(lib.test_boolean(true), true);
assertEquals(lib.test_boolean(false), false);
});
```
```rust
// tests/napi/src/boolean.rs
use napi_sys::Status::napi_ok;
use napi_sys::ValueType::napi_boolean;
use napi_sys::*;
extern "C" fn test_boolean(
env: napi_env,
info: napi_callback_info,
) -> napi_value {
let (args, argc, _) = crate::get_callback_info!(env, info, 1);
assert_eq!(argc, 1);
let mut ty = -1;
assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok);
assert_eq!(ty, napi_boolean);
// Use napi_get_boolean here to produce the value returned to JS;
// a filled-in version is sketched after this block...
let value: napi_value = std::ptr::null_mut();

value
}
pub fn init(env: napi_env, exports: napi_value) {
let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)];
unsafe {
napi_define_properties(env, exports, properties.len(), properties.as_ptr())
};
}
```
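
For reference, here is one way the placeholder above could be filled in, using only plain
`napi_sys` calls (`napi_get_value_bool` to read the incoming JS boolean and `napi_get_boolean`
to produce the value handed back) and the same `get_callback_info!` helper as the snippet above.
This is a sketch of the test body, not necessarily the exact code that landed in the repository:

```rust
// tests/napi/src/boolean.rs (sketch): round-trip the argument through
// napi_get_boolean and return the resulting napi_value to JS.
use napi_sys::Status::napi_ok;
use napi_sys::ValueType::napi_boolean;
use napi_sys::*;

extern "C" fn test_boolean(
  env: napi_env,
  info: napi_callback_info,
) -> napi_value {
  let (args, argc, _) = crate::get_callback_info!(env, info, 1);
  assert_eq!(argc, 1);

  let mut ty = -1;
  assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok);
  assert_eq!(ty, napi_boolean);

  // Read the Rust bool out of the incoming napi_value...
  let mut input = false;
  assert!(unsafe { napi_get_value_bool(env, args[0], &mut input) } == napi_ok);

  // ...then ask the runtime for its canonical true/false value.
  let mut value: napi_value = std::ptr::null_mut();
  assert!(unsafe { napi_get_boolean(env, input, &mut value) } == napi_ok);

  value
}
```

The JS test above then simply asserts that `lib.test_boolean(true)` and `lib.test_boolean(false)`
come back unchanged.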
```diff
// tests/napi/src/lib.rs
+ mod boolean;
...
#[no_mangle]
unsafe extern "C" fn napi_register_module_v1(
env: napi_env,
exports: napi_value,
) -> napi_value {
...
+ boolean::init(env, exports);
exports
}
```
Run the test using `cargo test -p tests/napi`.

View file

@ -1,20 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
#![allow(unused_mut)]
#![allow(non_camel_case_types)]
#![allow(clippy::undocumented_unsafe_blocks)]
//! Symbols to be exported are now defined in this JSON file.
//! The `#[napi_sym]` macro checks for missing entries and panics.
//!
//! `./tools/napi/generate_symbols_lists.js` is used to generate `cli/exports.def`, the export
//! file consumed by the Windows linker (LINK), which is also checked into git.
//!
//! To add a new napi function:
//! 1. Place `#[napi_sym]` on top of your implementation.
//! 2. Add the function's identifier to this JSON list.
//! 3. Finally, run `tools/napi/generate_symbols_lists.js` to update `cli/napi/generated_symbol_exports_list_*.def`.
pub mod js_native_api;
pub mod node_api;
pub mod util;

View file

@ -5,6 +5,7 @@ use std::sync::Arc;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_graph::ParsedSourceStore;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
@ -16,6 +17,8 @@ use serde::Serialize;
use crate::cache::CacheDBHash; use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache; use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::resolver::CliNodeResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
pub type CliNodeCodeTranslator = pub type CliNodeCodeTranslator =
@ -54,11 +57,23 @@ pub enum CliCjsAnalysis {
pub struct CliCjsCodeAnalyzer { pub struct CliCjsCodeAnalyzer {
cache: NodeAnalysisCache, cache: NodeAnalysisCache,
fs: deno_fs::FileSystemRc, fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
} }
impl CliCjsCodeAnalyzer { impl CliCjsCodeAnalyzer {
pub fn new(cache: NodeAnalysisCache, fs: deno_fs::FileSystemRc) -> Self { pub fn new(
Self { cache, fs } cache: NodeAnalysisCache,
fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
) -> Self {
Self {
cache,
fs,
node_resolver,
parsed_source_cache,
}
} }
async fn inner_cjs_analysis( async fn inner_cjs_analysis(
@ -73,7 +88,7 @@ impl CliCjsCodeAnalyzer {
return Ok(analysis); return Ok(analysis);
} }
let media_type = MediaType::from_specifier(specifier); let mut media_type = MediaType::from_specifier(specifier);
if media_type == MediaType::Json { if media_type == MediaType::Json {
return Ok(CliCjsAnalysis::Cjs { return Ok(CliCjsAnalysis::Cjs {
exports: vec![], exports: vec![],
@ -81,17 +96,41 @@ impl CliCjsCodeAnalyzer {
}); });
} }
if media_type == MediaType::JavaScript {
if let Some(package_json) =
self.node_resolver.get_closest_package_json(specifier)?
{
match package_json.typ.as_str() {
"commonjs" => {
media_type = MediaType::Cjs;
}
"module" => {
media_type = MediaType::Mjs;
}
_ => {}
}
}
}
let maybe_parsed_source = self
.parsed_source_cache
.as_ref()
.and_then(|c| c.remove_parsed_source(specifier));
let analysis = deno_core::unsync::spawn_blocking({ let analysis = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone(); let specifier = specifier.clone();
let source: Arc<str> = source.into(); let source: Arc<str> = source.into();
move || -> Result<_, deno_ast::ParseDiagnostic> { move || -> Result<_, deno_ast::ParseDiagnostic> {
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { let parsed_source =
maybe_parsed_source.map(Ok).unwrap_or_else(|| {
deno_ast::parse_program(deno_ast::ParseParams {
specifier, specifier,
text: source, text: source,
media_type, media_type,
capture_tokens: true, capture_tokens: true,
scope_analysis: false, scope_analysis: false,
maybe_syntax: None, maybe_syntax: None,
})
})?; })?;
if parsed_source.is_script() { if parsed_source.is_script() {
let analysis = parsed_source.analyze_cjs(); let analysis = parsed_source.analyze_cjs();
@ -99,6 +138,13 @@ impl CliCjsCodeAnalyzer {
exports: analysis.exports, exports: analysis.exports,
reexports: analysis.reexports, reexports: analysis.reexports,
}) })
} else if media_type == MediaType::Cjs {
// FIXME: `deno_ast` should internally handle MediaType::Cjs implying that
// the result must never be Esm
Ok(CliCjsAnalysis::Cjs {
exports: vec![],
reexports: vec![],
})
} else { } else {
Ok(CliCjsAnalysis::Esm) Ok(CliCjsAnalysis::Esm)
} }
@ -125,10 +171,23 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
let source = match source { let source = match source {
Some(source) => source, Some(source) => source,
None => { None => {
self if let Ok(path) = specifier.to_file_path() {
.fs if let Ok(source_from_file) =
.read_text_file_lossy_async(specifier.to_file_path().unwrap(), None) self.fs.read_text_file_lossy_async(path, None).await
.await? {
source_from_file
} else {
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],
reexports: vec![],
}));
}
} else {
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],
reexports: vec![],
}));
}
} }
}; };
let analysis = self.inner_cjs_analysis(specifier, &source).await?; let analysis = self.inner_cjs_analysis(specifier, &source).await?;

View file

@ -5,317 +5,99 @@ use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_package_json::PackageJsonDepValue; use deno_core::url::Url;
use deno_runtime::deno_fs::FileSystem; use deno_resolver::npm::ByonmNpmResolver;
use deno_runtime::deno_node::DenoPkgJsonFsAdapter; use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_runtime::deno_node::PackageJson;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageJsonLoadError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::load_pkg_json;
use node_resolver::NpmResolver; use node_resolver::NpmResolver;
use crate::args::NpmProcessState; use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind; use crate::args::NpmProcessStateKind;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; use crate::resolver::CliDenoResolverFs;
use deno_runtime::fs_util::specifier_to_file_path;
use super::CliNpmResolver; use super::CliNpmResolver;
use super::InnerCliNpmResolverRef; use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError;
pub struct CliNpmResolverByonmCreateOptions { pub type CliByonmNpmResolverCreateOptions =
pub fs: Arc<dyn FileSystem>, ByonmNpmResolverCreateOptions<CliDenoResolverFs>;
// todo(dsherret): investigate removing this pub type CliByonmNpmResolver = ByonmNpmResolver<CliDenoResolverFs>;
pub root_node_modules_dir: Option<PathBuf>,
}
pub fn create_byonm_npm_resolver(
options: CliNpmResolverByonmCreateOptions,
) -> Arc<dyn CliNpmResolver> {
Arc::new(ByonmCliNpmResolver {
fs: options.fs,
root_node_modules_dir: options.root_node_modules_dir,
})
}
// todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple.
#[derive(Debug)] #[derive(Debug)]
pub struct ByonmCliNpmResolver { struct CliByonmWrapper(Arc<CliByonmNpmResolver>);
fs: Arc<dyn FileSystem>,
root_node_modules_dir: Option<PathBuf>,
}
impl ByonmCliNpmResolver { impl NodeRequireResolver for CliByonmWrapper {
fn load_pkg_json( fn ensure_read_permission<'a>(
&self,
path: &Path,
) -> Result<Option<Arc<PackageJson>>, PackageJsonLoadError> {
load_pkg_json(&DenoPkgJsonFsAdapter(self.fs.as_ref()), path)
}
}
impl ByonmCliNpmResolver {
/// Finds the ancestor package.json that contains the specified dependency.
pub fn find_ancestor_package_json_with_dep(
&self,
dep_name: &str,
referrer: &ModuleSpecifier,
) -> Option<Arc<PackageJson>> {
let referrer_path = referrer.to_file_path().ok()?;
let mut current_folder = referrer_path.parent()?;
loop {
let pkg_json_path = current_folder.join("package.json");
if let Ok(Some(pkg_json)) = self.load_pkg_json(&pkg_json_path) {
if let Some(deps) = &pkg_json.dependencies {
if deps.contains_key(dep_name) {
return Some(pkg_json);
}
}
if let Some(deps) = &pkg_json.dev_dependencies {
if deps.contains_key(dep_name) {
return Some(pkg_json);
}
}
}
if let Some(parent) = current_folder.parent() {
current_folder = parent;
} else {
return None;
}
}
}
fn resolve_pkg_json_and_alias_for_req(
&self,
req: &PackageReq,
referrer: &ModuleSpecifier,
) -> Result<(Arc<PackageJson>, String), AnyError> {
fn resolve_alias_from_pkg_json(
req: &PackageReq,
pkg_json: &PackageJson,
) -> Option<String> {
let deps = pkg_json.resolve_local_package_json_deps();
for (key, value) in deps {
if let Ok(value) = value {
match value {
PackageJsonDepValue::Req(dep_req) => {
if dep_req.name == req.name
&& dep_req.version_req.intersects(&req.version_req)
{
return Some(key);
}
}
PackageJsonDepValue::Workspace(_workspace) => {
if key == req.name && req.version_req.tag() == Some("workspace") {
return Some(key);
}
}
}
}
}
None
}
// attempt to resolve the npm specifier from the referrer's package.json,
if let Ok(file_path) = specifier_to_file_path(referrer) {
let mut current_path = file_path.as_path();
while let Some(dir_path) = current_path.parent() {
let package_json_path = dir_path.join("package.json");
if let Some(pkg_json) = self.load_pkg_json(&package_json_path)? {
if let Some(alias) =
resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
return Ok((pkg_json, alias));
}
}
current_path = dir_path;
}
}
// otherwise, fall back to the project's package.json
if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
let root_pkg_json_path =
root_node_modules_dir.parent().unwrap().join("package.json");
if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? {
if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
return Ok((pkg_json, alias));
}
}
}
bail!(
concat!(
"Could not find a matching package for 'npm:{}' in a package.json file. ",
"You must specify this as a package.json dependency when the ",
"node_modules folder is not managed by Deno.",
),
req,
);
}
}
impl NpmResolver for ByonmCliNpmResolver {
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError> {
fn inner(
fs: &dyn FileSystem,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError> {
let maybe_referrer_file = specifier_to_file_path(referrer).ok();
let maybe_start_folder =
maybe_referrer_file.as_ref().and_then(|f| f.parent());
if let Some(start_folder) = maybe_start_folder {
for current_folder in start_folder.ancestors() {
let node_modules_folder = if current_folder.ends_with("node_modules")
{
Cow::Borrowed(current_folder)
} else {
Cow::Owned(current_folder.join("node_modules"))
};
let sub_dir = join_package_name(&node_modules_folder, name);
if fs.is_dir_sync(&sub_dir) {
return Ok(sub_dir);
}
}
}
Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
)
}
let path = inner(&*self.fs, name, referrer)?;
self.fs.realpath_sync(&path).map_err(|err| {
PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err.into_io_error(),
}
.into()
})
}
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
specifier.scheme() == "file"
&& specifier
.path()
.to_ascii_lowercase()
.contains("/node_modules/")
}
}
impl NodeRequireResolver for ByonmCliNpmResolver {
fn ensure_read_permission(
&self, &self,
permissions: &mut dyn NodePermissions, permissions: &mut dyn NodePermissions,
path: &Path, path: &'a Path,
) -> Result<(), AnyError> { ) -> Result<Cow<'a, Path>, AnyError> {
if !path if !path
.components() .components()
.any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules") .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
{ {
permissions.check_read(path)?; permissions.check_read_path(path)
} else {
Ok(Cow::Borrowed(path))
} }
Ok(())
} }
} }
impl NpmProcessStateProvider for ByonmCliNpmResolver { impl NpmProcessStateProvider for CliByonmWrapper {
fn get_npm_process_state(&self) -> String { fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState { serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm, kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self local_node_modules_path: self
.root_node_modules_dir .0
.as_ref() .root_node_modules_dir()
.map(|p| p.to_string_lossy().to_string()), .map(|p| p.to_string_lossy().to_string()),
}) })
.unwrap() .unwrap()
} }
} }
impl CliNpmResolver for ByonmCliNpmResolver { impl CliNpmResolver for CliByonmNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> { fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
self self
} }
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> { fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
self Arc::new(CliByonmWrapper(self))
} }
fn into_process_state_provider( fn into_process_state_provider(
self: Arc<Self>, self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> { ) -> Arc<dyn NpmProcessStateProvider> {
self Arc::new(CliByonmWrapper(self))
} }
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> { fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
Arc::new(Self { Arc::new(self.clone())
fs: self.fs.clone(),
root_node_modules_dir: self.root_node_modules_dir.clone(),
})
} }
fn as_inner(&self) -> InnerCliNpmResolverRef { fn as_inner(&self) -> InnerCliNpmResolverRef {
InnerCliNpmResolverRef::Byonm(self) InnerCliNpmResolverRef::Byonm(self)
} }
fn root_node_modules_path(&self) -> Option<&PathBuf> { fn root_node_modules_path(&self) -> Option<&Path> {
self.root_node_modules_dir.as_ref() self.root_node_modules_dir()
} }
fn resolve_pkg_folder_from_deno_module_req( fn resolve_pkg_folder_from_deno_module_req(
&self, &self,
req: &PackageReq, req: &PackageReq,
referrer: &ModuleSpecifier, referrer: &Url,
) -> Result<PathBuf, AnyError> { ) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
// resolve the pkg json and alias ByonmNpmResolver::resolve_pkg_folder_from_deno_module_req(
let (pkg_json, alias) = self, req, referrer,
self.resolve_pkg_json_and_alias_for_req(req, referrer)?; )
// now try node resolution .map_err(ResolvePkgFolderFromDenoReqError::Byonm)
for ancestor in pkg_json.path.parent().unwrap().ancestors() {
let node_modules_folder = ancestor.join("node_modules");
let sub_dir = join_package_name(&node_modules_folder, &alias);
if self.fs.is_dir_sync(&sub_dir) {
return Ok(canonicalize_path_maybe_not_exists_with_fs(
&sub_dir,
self.fs.as_ref(),
)?);
}
}
bail!(
concat!(
"Could not find \"{}\" in a node_modules folder. ",
"Deno expects the node_modules/ directory to be up to date. ",
"Did you forget to run `{}`?"
),
alias,
if *crate::args::DENO_FUTURE {
"deno install"
} else {
"npm install"
}
);
} }
fn check_state_hash(&self) -> Option<u64> { fn check_state_hash(&self) -> Option<u64> {
@ -324,12 +106,3 @@ impl CliNpmResolver for ByonmCliNpmResolver {
None None
} }
} }
fn join_package_name(path: &Path, package_name: &str) -> PathBuf {
let mut path = path.to_path_buf();
// ensure backslashes are used on windows
for part in package_name.split('/') {
path = path.join(part);
}
path
}

View file

@ -1,295 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::Path;
use std::path::PathBuf;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use crate::util::fs::canonicalize_path;
use crate::util::path::root_url_to_safe_local_dirname;
/// The global cache directory of npm packages.
#[derive(Clone, Debug)]
pub struct NpmCacheDir {
root_dir: PathBuf,
// cached url representation of the root directory
root_dir_url: Url,
// A list of all registry that were discovered via `.npmrc` files
// turned into a safe directory names.
known_registries_dirnames: Vec<String>,
}
impl NpmCacheDir {
pub fn new(root_dir: PathBuf, known_registries_urls: Vec<Url>) -> Self {
fn try_get_canonicalized_root_dir(
root_dir: &Path,
) -> Result<PathBuf, AnyError> {
if !root_dir.exists() {
std::fs::create_dir_all(root_dir)
.with_context(|| format!("Error creating {}", root_dir.display()))?;
}
Ok(canonicalize_path(root_dir)?)
}
// this may fail on readonly file systems, so just ignore if so
let root_dir =
try_get_canonicalized_root_dir(&root_dir).unwrap_or(root_dir);
let root_dir_url = Url::from_directory_path(&root_dir).unwrap();
let known_registries_dirnames: Vec<_> = known_registries_urls
.into_iter()
.map(|url| {
root_url_to_safe_local_dirname(&url)
.to_string_lossy()
.replace('\\', "/")
})
.collect();
Self {
root_dir,
root_dir_url,
known_registries_dirnames,
}
}
pub fn root_dir(&self) -> &Path {
&self.root_dir
}
pub fn root_dir_url(&self) -> &Url {
&self.root_dir_url
}
pub fn package_folder_for_id(
&self,
folder_id: &NpmPackageCacheFolderId,
registry_url: &Url,
) -> PathBuf {
if folder_id.copy_index == 0 {
self.package_folder_for_nv(&folder_id.nv, registry_url)
} else {
self
.package_name_folder(&folder_id.nv.name, registry_url)
.join(format!("{}_{}", folder_id.nv.version, folder_id.copy_index))
}
}
pub fn package_folder_for_nv(
&self,
package: &PackageNv,
registry_url: &Url,
) -> PathBuf {
self
.package_name_folder(&package.name, registry_url)
.join(package.version.to_string())
}
pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
let mut dir = self.registry_folder(registry_url);
if name.to_lowercase() != name {
let encoded_name = mixed_case_package_name_encode(name);
// Using the encoded directory may have a collision with an actual package name
// so prefix it with an underscore since npm packages can't start with that
dir.join(format!("_{encoded_name}"))
} else {
// ensure backslashes are used on windows
for part in name.split('/') {
dir = dir.join(part);
}
dir
}
}
fn registry_folder(&self, registry_url: &Url) -> PathBuf {
self
.root_dir
.join(root_url_to_safe_local_dirname(registry_url))
}
pub fn resolve_package_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Option<NpmPackageCacheFolderId> {
let mut maybe_relative_url = None;
// Iterate through known registries and try to get a match.
for registry_dirname in &self.known_registries_dirnames {
let registry_root_dir = self
.root_dir_url
.join(&format!("{}/", registry_dirname))
// this not succeeding indicates a fatal issue, so unwrap
.unwrap();
let Some(relative_url) = registry_root_dir.make_relative(specifier)
else {
continue;
};
if relative_url.starts_with("../") {
continue;
}
maybe_relative_url = Some(relative_url);
break;
}
let mut relative_url = maybe_relative_url?;
// base32 decode the url if it starts with an underscore
// * Ex. _{base32(package_name)}/
if let Some(end_url) = relative_url.strip_prefix('_') {
let mut parts = end_url
.split('/')
.map(ToOwned::to_owned)
.collect::<Vec<_>>();
match mixed_case_package_name_decode(&parts[0]) {
Some(part) => {
parts[0] = part;
}
None => return None,
}
relative_url = parts.join("/");
}
// examples:
// * chalk/5.0.1/
// * @types/chalk/5.0.1/
// * some-package/5.0.1_1/ -- where the `_1` (/_\d+/) is a copy of the folder for peer deps
let is_scoped_package = relative_url.starts_with('@');
let mut parts = relative_url
.split('/')
.enumerate()
.take(if is_scoped_package { 3 } else { 2 })
.map(|(_, part)| part)
.collect::<Vec<_>>();
if parts.len() < 2 {
return None;
}
let version_part = parts.pop().unwrap();
let name = parts.join("/");
let (version, copy_index) =
if let Some((version, copy_count)) = version_part.split_once('_') {
(version, copy_count.parse::<u8>().ok()?)
} else {
(version_part, 0)
};
Some(NpmPackageCacheFolderId {
nv: PackageNv {
name,
version: Version::parse_from_npm(version).ok()?,
},
copy_index,
})
}
pub fn get_cache_location(&self) -> PathBuf {
self.root_dir.clone()
}
}
pub fn mixed_case_package_name_encode(name: &str) -> String {
// use base32 encoding because it's reversible and the character set
// only includes the characters within 0-9 and A-Z so it can be lower cased
base32::encode(
base32::Alphabet::Rfc4648Lower { padding: false },
name.as_bytes(),
)
.to_lowercase()
}
pub fn mixed_case_package_name_decode(name: &str) -> Option<String> {
base32::decode(base32::Alphabet::Rfc4648Lower { padding: false }, name)
.and_then(|b| String::from_utf8(b).ok())
}
#[cfg(test)]
mod test {
use deno_core::url::Url;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use super::NpmCacheDir;
use crate::npm::cache_dir::NpmPackageCacheFolderId;
#[test]
fn should_get_package_folder() {
let deno_dir = crate::cache::DenoDir::new(None).unwrap();
let root_dir = deno_dir.npm_folder_path();
let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
let cache = NpmCacheDir::new(root_dir.clone(), vec![registry_url.clone()]);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("json")
.join("1.2.5"),
);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
},
copy_index: 1,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("json")
.join("1.2.5_1"),
);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("_jjju6tq")
.join("2.1.5"),
);
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
nv: PackageNv {
name: "@types/JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
.join("_ib2hs4dfomxuuu2pjy")
.join("2.1.5"),
);
}
}

View file

@ -40,7 +40,7 @@ pub fn maybe_auth_header_for_npm_registry(
header::AUTHORIZATION, header::AUTHORIZATION,
header::HeaderValue::from_str(&format!( header::HeaderValue::from_str(&format!(
"Basic {}", "Basic {}",
BASE64_STANDARD.encode(&format!( BASE64_STANDARD.encode(format!(
"{}:{}", "{}:{}",
username.unwrap(), username.unwrap(),
password.unwrap() password.unwrap()

View file

@ -8,6 +8,7 @@ use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -18,14 +19,14 @@ use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmPackageInfo;
use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageCacheFolderId;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::Version;
use crate::args::CacheSetting; use crate::args::CacheSetting;
use crate::cache::CACHE_PERM; use crate::cache::CACHE_PERM;
use crate::npm::NpmCacheDir;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::hard_link_dir_recursive; use crate::util::fs::hard_link_dir_recursive;
mod registry_info; pub mod registry_info;
mod tarball; mod tarball;
mod tarball_extract; mod tarball_extract;
@ -87,9 +88,12 @@ impl NpmCache {
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let registry_url = self.npmrc.get_registry_url(&folder_id.nv.name); let registry_url = self.npmrc.get_registry_url(&folder_id.nv.name);
assert_ne!(folder_id.copy_index, 0); assert_ne!(folder_id.copy_index, 0);
let package_folder = self let package_folder = self.cache_dir.package_folder_for_id(
.cache_dir &folder_id.nv.name,
.package_folder_for_id(folder_id, registry_url); &folder_id.nv.version.to_string(),
folder_id.copy_index,
registry_url,
);
if package_folder.exists() if package_folder.exists()
// if this file exists, then the package didn't successfully initialize // if this file exists, then the package didn't successfully initialize
@ -100,9 +104,12 @@ impl NpmCache {
return Ok(()); return Ok(());
} }
let original_package_folder = self let original_package_folder = self.cache_dir.package_folder_for_id(
.cache_dir &folder_id.nv.name,
.package_folder_for_nv(&folder_id.nv, registry_url); &folder_id.nv.version.to_string(),
0, // original copy index
registry_url,
);
// it seems Windows does an "AccessDenied" error when moving a // it seems Windows does an "AccessDenied" error when moving a
// directory with hard links, so that's why this solution is done // directory with hard links, so that's why this solution is done
@ -114,7 +121,12 @@ impl NpmCache {
pub fn package_folder_for_id(&self, id: &NpmPackageCacheFolderId) -> PathBuf { pub fn package_folder_for_id(&self, id: &NpmPackageCacheFolderId) -> PathBuf {
let registry_url = self.npmrc.get_registry_url(&id.nv.name); let registry_url = self.npmrc.get_registry_url(&id.nv.name);
self.cache_dir.package_folder_for_id(id, registry_url) self.cache_dir.package_folder_for_id(
&id.nv.name,
&id.nv.version.to_string(),
id.copy_index,
registry_url,
)
} }
pub fn package_folder_for_nv(&self, package: &PackageNv) -> PathBuf { pub fn package_folder_for_nv(&self, package: &PackageNv) -> PathBuf {
@ -127,7 +139,12 @@ impl NpmCache {
package: &PackageNv, package: &PackageNv,
registry_url: &Url, registry_url: &Url,
) -> PathBuf { ) -> PathBuf {
self.cache_dir.package_folder_for_nv(package, registry_url) self.cache_dir.package_folder_for_id(
&package.name,
&package.version.to_string(),
0, // original copy_index
registry_url,
)
} }
pub fn package_name_folder(&self, name: &str) -> PathBuf { pub fn package_name_folder(&self, name: &str) -> PathBuf {
@ -146,6 +163,15 @@ impl NpmCache {
self self
.cache_dir .cache_dir
.resolve_package_folder_id_from_specifier(specifier) .resolve_package_folder_id_from_specifier(specifier)
.and_then(|cache_id| {
Some(NpmPackageCacheFolderId {
nv: PackageNv {
name: cache_id.name,
version: Version::parse_from_npm(&cache_id.version).ok()?,
},
copy_index: cache_id.copy_index,
})
})
} }
pub fn load_package_info( pub fn load_package_info(

View file

@ -84,7 +84,7 @@ impl RegistryInfoDownloader {
self.load_package_info_inner(name).await.with_context(|| { self.load_package_info_inner(name).await.with_context(|| {
format!( format!(
"Error getting response at {} for package \"{}\"", "Error getting response at {} for package \"{}\"",
self.get_package_url(name), get_package_url(&self.npmrc, name),
name name
) )
}) })
@ -190,7 +190,7 @@ impl RegistryInfoDownloader {
fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture { fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture {
let downloader = self.clone(); let downloader = self.clone();
let package_url = self.get_package_url(name); let package_url = get_package_url(&self.npmrc, name);
let registry_config = self.npmrc.get_registry_config(name); let registry_config = self.npmrc.get_registry_config(name);
let maybe_auth_header = let maybe_auth_header =
match maybe_auth_header_for_npm_registry(registry_config) { match maybe_auth_header_for_npm_registry(registry_config) {
@ -202,10 +202,13 @@ impl RegistryInfoDownloader {
let guard = self.progress_bar.update(package_url.as_str()); let guard = self.progress_bar.update(package_url.as_str());
let name = name.to_string(); let name = name.to_string();
async move { async move {
let maybe_bytes = downloader let client = downloader.http_client_provider.get_or_create()?;
.http_client_provider let maybe_bytes = client
.get_or_create()? .download_with_progress_and_retries(
.download_with_progress(package_url, maybe_auth_header, &guard) package_url,
maybe_auth_header,
&guard,
)
.await?; .await?;
match maybe_bytes { match maybe_bytes {
Some(bytes) => { Some(bytes) => {
@ -236,9 +239,18 @@ impl RegistryInfoDownloader {
.map(|r| r.map_err(Arc::new)) .map(|r| r.map_err(Arc::new))
.boxed_local() .boxed_local()
} }
}
pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url {
let registry_url = npmrc.get_registry_url(name);
// The '/' character in scoped package names "@scope/name" must be
// encoded for older third party registries. Newer registries and
// npm itself support both ways
// - encoded: https://registry.npmjs.org/@rollup%2fplugin-json
// - non-encoded: https://registry.npmjs.org/@rollup/plugin-json
// To support as many third party registries as possible we'll
// always encode the '/' character.
fn get_package_url(&self, name: &str) -> Url {
let registry_url = self.npmrc.get_registry_url(name);
// list of all characters used in npm packages: // list of all characters used in npm packages:
// !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~ // !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~
const ASCII_SET: percent_encoding::AsciiSet = const ASCII_SET: percent_encoding::AsciiSet =
@ -250,11 +262,13 @@ impl RegistryInfoDownloader {
.remove(b'*') .remove(b'*')
.remove(b'-') .remove(b'-')
.remove(b'.') .remove(b'.')
.remove(b'/')
.remove(b'@') .remove(b'@')
.remove(b'_') .remove(b'_')
.remove(b'~'); .remove(b'~');
let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET); let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET);
registry_url.join(&name.to_string()).unwrap() registry_url
} // Ensure that scoped package name percent encoding is lower cased
// to match npm.
.join(&name.to_string().replace("%2F", "%2f"))
.unwrap()
} }
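// Illustrative sketch (not part of this diff): what the new `get_package_url`
// yields for a scoped package. The encode set here is trimmed to the removals
// visible in the hunk and assumes it starts from `NON_ALPHANUMERIC`, as in the
// surrounding code; '/' stays in the set so it is always percent-encoded, then
// lower-cased to match npm.
fn _scoped_package_url_example() {
  use percent_encoding::NON_ALPHANUMERIC;
  const EXAMPLE_SET: percent_encoding::AsciiSet = NON_ALPHANUMERIC
    .remove(b'*')
    .remove(b'-')
    .remove(b'.')
    .remove(b'@')
    .remove(b'_')
    .remove(b'~');
  let registry_url =
    deno_core::url::Url::parse("https://registry.npmjs.org/").unwrap();
  let name = "@rollup/plugin-json";
  let encoded =
    percent_encoding::utf8_percent_encode(name, &EXAMPLE_SET).to_string();
  // the '/' becomes "%2F"; lower-case it to match npm before joining
  let package_url = registry_url
    .join(&encoded.replace("%2F", "%2f"))
    .unwrap();
  assert_eq!(
    package_url.as_str(),
    "https://registry.npmjs.org/@rollup%2fplugin-json"
  );
}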

View file

@ -172,7 +172,7 @@ impl TarballCache {
let guard = tarball_cache.progress_bar.update(&dist.tarball); let guard = tarball_cache.progress_bar.update(&dist.tarball);
let result = tarball_cache.http_client_provider let result = tarball_cache.http_client_provider
.get_or_create()? .get_or_create()?
.download_with_progress(tarball_uri, maybe_auth_header, &guard) .download_with_progress_and_retries(tarball_uri, maybe_auth_header, &guard)
.await; .await;
let maybe_bytes = match result { let maybe_bytes = match result {
Ok(maybe_bytes) => maybe_bytes, Ok(maybe_bytes) => maybe_bytes,

View file

@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -7,6 +8,7 @@ use std::sync::Arc;
use cache::RegistryInfoDownloader; use cache::RegistryInfoDownloader;
use cache::TarballCache; use cache::TarballCache;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
@ -19,10 +21,11 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage; use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_runtime::colors;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveError;
@ -32,9 +35,10 @@ use resolution::AddPkgReqsResult;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::LifecycleScriptsConfig; use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::args::NpmProcessState; use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind; use crate::args::NpmProcessStateKind;
use crate::args::PackageJsonInstallDepsProvider; use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
@ -49,9 +53,9 @@ use self::resolvers::NpmPackageFsResolver;
use super::CliNpmResolver; use super::CliNpmResolver;
use super::InnerCliNpmResolverRef; use super::InnerCliNpmResolverRef;
use super::NpmCacheDir; use super::ResolvePkgFolderFromDenoReqError;
mod cache; pub mod cache;
mod registry; mod registry;
mod resolution; mod resolution;
mod resolvers; mod resolvers;
@ -71,7 +75,7 @@ pub struct CliNpmResolverManagedCreateOptions {
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
pub maybe_node_modules_path: Option<PathBuf>, pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo, pub npm_system_info: NpmSystemInfo,
pub package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>, pub npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
pub npmrc: Arc<ResolvedNpmRc>, pub npmrc: Arc<ResolvedNpmRc>,
pub lifecycle_scripts: LifecycleScriptsConfig, pub lifecycle_scripts: LifecycleScriptsConfig,
} }
@ -97,7 +101,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
npm_api, npm_api,
npm_cache, npm_cache,
options.npmrc, options.npmrc,
options.package_json_deps_provider, options.npm_install_deps_provider,
options.text_only_progress_bar, options.text_only_progress_bar,
options.maybe_node_modules_path, options.maybe_node_modules_path,
options.npm_system_info, options.npm_system_info,
@ -122,7 +126,7 @@ pub async fn create_managed_npm_resolver(
npm_api, npm_api,
npm_cache, npm_cache,
options.npmrc, options.npmrc,
options.package_json_deps_provider, options.npm_install_deps_provider,
options.text_only_progress_bar, options.text_only_progress_bar,
options.maybe_node_modules_path, options.maybe_node_modules_path,
options.npm_system_info, options.npm_system_info,
@ -139,7 +143,7 @@ fn create_inner(
npm_api: Arc<CliNpmRegistryApi>, npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>, npm_cache: Arc<NpmCache>,
npm_rc: Arc<ResolvedNpmRc>, npm_rc: Arc<ResolvedNpmRc>,
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>, npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
text_only_progress_bar: crate::util::progress_bar::ProgressBar, text_only_progress_bar: crate::util::progress_bar::ProgressBar,
node_modules_dir_path: Option<PathBuf>, node_modules_dir_path: Option<PathBuf>,
npm_system_info: NpmSystemInfo, npm_system_info: NpmSystemInfo,
@ -161,7 +165,7 @@ fn create_inner(
let fs_resolver = create_npm_fs_resolver( let fs_resolver = create_npm_fs_resolver(
fs.clone(), fs.clone(),
npm_cache.clone(), npm_cache.clone(),
&package_json_deps_provider, &npm_install_deps_provider,
&text_only_progress_bar, &text_only_progress_bar,
resolution.clone(), resolution.clone(),
tarball_cache.clone(), tarball_cache.clone(),
@ -175,7 +179,7 @@ fn create_inner(
maybe_lockfile, maybe_lockfile,
npm_api, npm_api,
npm_cache, npm_cache,
package_json_deps_provider, npm_install_deps_provider,
resolution, resolution,
tarball_cache, tarball_cache,
text_only_progress_bar, text_only_progress_bar,
@ -187,6 +191,7 @@ fn create_inner(
fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc<NpmCache> { fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc<NpmCache> {
Arc::new(NpmCache::new( Arc::new(NpmCache::new(
NpmCacheDir::new( NpmCacheDir::new(
&DenoCacheEnvFsAdapter(options.fs.as_ref()),
options.npm_global_cache_dir.clone(), options.npm_global_cache_dir.clone(),
options.npmrc.get_all_known_registries_urls(), options.npmrc.get_all_known_registries_urls(),
), ),
@ -261,7 +266,7 @@ pub struct ManagedCliNpmResolver {
maybe_lockfile: Option<Arc<CliLockfile>>, maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>, npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>, npm_cache: Arc<NpmCache>,
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>, npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
resolution: Arc<NpmResolution>, resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>, tarball_cache: Arc<TarballCache>,
text_only_progress_bar: ProgressBar, text_only_progress_bar: ProgressBar,
@ -286,7 +291,7 @@ impl ManagedCliNpmResolver {
maybe_lockfile: Option<Arc<CliLockfile>>, maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>, npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>, npm_cache: Arc<NpmCache>,
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>, npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
resolution: Arc<NpmResolution>, resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>, tarball_cache: Arc<TarballCache>,
text_only_progress_bar: ProgressBar, text_only_progress_bar: ProgressBar,
@ -299,7 +304,7 @@ impl ManagedCliNpmResolver {
maybe_lockfile, maybe_lockfile,
npm_api, npm_api,
npm_cache, npm_cache,
package_json_deps_provider, npm_install_deps_provider,
text_only_progress_bar, text_only_progress_bar,
resolution, resolution,
tarball_cache, tarball_cache,
@ -406,8 +411,7 @@ impl ManagedCliNpmResolver {
} }
} }
if result.dependencies_result.is_ok() { if result.dependencies_result.is_ok() {
result.dependencies_result = result.dependencies_result = self.cache_packages().await;
self.cache_packages().await.map_err(AnyError::from);
} }
result result
@ -427,6 +431,16 @@ impl ManagedCliNpmResolver {
self.resolution.snapshot() self.resolution.snapshot()
} }
pub fn top_package_req_for_name(&self, name: &str) -> Option<PackageReq> {
let package_reqs = self.resolution.package_reqs();
let mut entries = package_reqs
.iter()
.filter(|(_, nv)| nv.name == name)
.collect::<Vec<_>>();
entries.sort_by_key(|(_, nv)| &nv.version);
Some(entries.last()?.0.clone())
}
pub fn serialized_valid_snapshot_for_system( pub fn serialized_valid_snapshot_for_system(
&self, &self,
system_info: &NpmSystemInfo, system_info: &NpmSystemInfo,
@ -466,6 +480,25 @@ impl ManagedCliNpmResolver {
self.resolution.resolve_pkg_id_from_pkg_req(req) self.resolution.resolve_pkg_id_from_pkg_req(req)
} }
pub fn ensure_no_pkg_json_dep_errors(&self) -> Result<(), AnyError> {
for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
match err {
deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => {
return Err(
AnyError::from(err.clone())
.context("Failed to install from package.json"),
);
}
deno_package_json::PackageJsonDepValueParseError::Unsupported {
..
} => {
log::warn!("{} {} in package.json", colors::yellow("Warning"), err)
}
}
}
Ok(())
}
/// Ensures that the top level `package.json` dependencies are installed. /// Ensures that the top level `package.json` dependencies are installed.
/// This may set up the `node_modules` directory. /// This may set up the `node_modules` directory.
/// ///
@ -477,7 +510,8 @@ impl ManagedCliNpmResolver {
if !self.top_level_install_flag.raise() { if !self.top_level_install_flag.raise() {
return Ok(false); // already did this return Ok(false); // already did this
} }
let pkg_json_remote_pkgs = self.package_json_deps_provider.remote_pkgs();
let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
if pkg_json_remote_pkgs.is_empty() { if pkg_json_remote_pkgs.is_empty() {
return Ok(false); return Ok(false);
} }
@ -560,11 +594,11 @@ impl NpmResolver for ManagedCliNpmResolver {
} }
impl NodeRequireResolver for ManagedCliNpmResolver { impl NodeRequireResolver for ManagedCliNpmResolver {
fn ensure_read_permission( fn ensure_read_permission<'a>(
&self, &self,
permissions: &mut dyn NodePermissions, permissions: &mut dyn NodePermissions,
path: &Path, path: &'a Path,
) -> Result<(), AnyError> { ) -> Result<Cow<'a, Path>, AnyError> {
self.fs_resolver.ensure_read_permission(permissions, path) self.fs_resolver.ensure_read_permission(permissions, path)
} }
} }
@ -573,7 +607,7 @@ impl NpmProcessStateProvider for ManagedCliNpmResolver {
fn get_npm_process_state(&self) -> String { fn get_npm_process_state(&self) -> String {
npm_process_state( npm_process_state(
self.resolution.serialized_valid_snapshot(), self.resolution.serialized_valid_snapshot(),
self.fs_resolver.node_modules_path().map(|p| p.as_path()), self.fs_resolver.node_modules_path(),
) )
} }
} }
@ -606,7 +640,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
create_npm_fs_resolver( create_npm_fs_resolver(
self.fs.clone(), self.fs.clone(),
self.npm_cache.clone(), self.npm_cache.clone(),
&self.package_json_deps_provider, &self.npm_install_deps_provider,
&self.text_only_progress_bar, &self.text_only_progress_bar,
npm_resolution.clone(), npm_resolution.clone(),
self.tarball_cache.clone(), self.tarball_cache.clone(),
@ -617,7 +651,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.maybe_lockfile.clone(), self.maybe_lockfile.clone(),
self.npm_api.clone(), self.npm_api.clone(),
self.npm_cache.clone(), self.npm_cache.clone(),
self.package_json_deps_provider.clone(), self.npm_install_deps_provider.clone(),
npm_resolution, npm_resolution,
self.tarball_cache.clone(), self.tarball_cache.clone(),
self.text_only_progress_bar.clone(), self.text_only_progress_bar.clone(),
@ -630,7 +664,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
InnerCliNpmResolverRef::Managed(self) InnerCliNpmResolverRef::Managed(self)
} }
fn root_node_modules_path(&self) -> Option<&PathBuf> { fn root_node_modules_path(&self) -> Option<&Path> {
self.fs_resolver.node_modules_path() self.fs_resolver.node_modules_path()
} }
@ -638,9 +672,13 @@ impl CliNpmResolver for ManagedCliNpmResolver {
&self, &self,
req: &PackageReq, req: &PackageReq,
_referrer: &ModuleSpecifier, _referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> { ) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
let pkg_id = self.resolve_pkg_id_from_pkg_req(req)?; let pkg_id = self
self.resolve_pkg_folder_from_pkg_id(&pkg_id) .resolve_pkg_id_from_pkg_req(req)
.map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?;
self
.resolve_pkg_folder_from_pkg_id(&pkg_id)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
} }
fn check_state_hash(&self) -> Option<u64> { fn check_state_hash(&self) -> Option<u64> {

View file

@ -22,6 +22,7 @@ use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage; use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use deno_semver::VersionReq; use deno_semver::VersionReq;
@ -317,7 +318,7 @@ fn get_npm_pending_resolver(
// WARNING: When bumping this version, check if anything needs to be // WARNING: When bumping this version, check if anything needs to be
// updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js // updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
types_node_version_req: Some( types_node_version_req: Some(
VersionReq::parse_from_npm("18.0.0 - 18.16.19").unwrap(), VersionReq::parse_from_npm("22.0.0 - 22.5.4").unwrap(),
), ),
}, },
) )
@ -329,16 +330,10 @@ fn populate_lockfile_from_snapshot(
) { ) {
let mut lockfile = lockfile.lock(); let mut lockfile = lockfile.lock();
for (package_req, nv) in snapshot.package_reqs() { for (package_req, nv) in snapshot.package_reqs() {
let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
lockfile.insert_package_specifier( lockfile.insert_package_specifier(
format!("npm:{}", package_req), JsrDepPackageReq::npm(package_req.clone()),
format!( format!("{}{}", id.nv.version, id.peer_deps_serialized()),
"npm:{}",
snapshot
.resolve_package_from_deno_module(nv)
.unwrap()
.id
.as_serialized()
),
); );
} }
for package in snapshot.all_packages_for_every_system() { for package in snapshot.all_packages_for_every_system() {

View file

@ -1,5 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
pub mod bin_entries;
pub mod lifecycle_scripts;
use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::io::ErrorKind; use std::io::ErrorKind;
use std::path::Path; use std::path::Path;
@ -30,7 +34,7 @@ pub trait NpmPackageFsResolver: Send + Sync {
fn root_dir_url(&self) -> &Url; fn root_dir_url(&self) -> &Url;
/// The local node_modules folder if it is applicable to the implementation. /// The local node_modules folder if it is applicable to the implementation.
fn node_modules_path(&self) -> Option<&PathBuf>; fn node_modules_path(&self) -> Option<&Path>;
fn maybe_package_folder(&self, package_id: &NpmPackageId) -> Option<PathBuf>; fn maybe_package_folder(&self, package_id: &NpmPackageId) -> Option<PathBuf>;
@ -59,11 +63,12 @@ pub trait NpmPackageFsResolver: Send + Sync {
async fn cache_packages(&self) -> Result<(), AnyError>; async fn cache_packages(&self) -> Result<(), AnyError>;
fn ensure_read_permission( #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a>(
&self, &self,
permissions: &mut dyn NodePermissions, permissions: &mut dyn NodePermissions,
path: &Path, path: &'a Path,
) -> Result<(), AnyError>; ) -> Result<Cow<'a, Path>, AnyError>;
} }
#[derive(Debug)] #[derive(Debug)]
@ -82,11 +87,15 @@ impl RegistryReadPermissionChecker {
} }
} }
pub fn ensure_registry_read_permission( pub fn ensure_registry_read_permission<'a>(
&self, &self,
permissions: &mut dyn NodePermissions, permissions: &mut dyn NodePermissions,
path: &Path, path: &'a Path,
) -> Result<(), AnyError> { ) -> Result<Cow<'a, Path>, AnyError> {
if permissions.query_read_all() {
return Ok(Cow::Borrowed(path)); // skip permissions checks below
}
// allow reading if it's in the node_modules // allow reading if it's in the node_modules
let is_path_in_node_modules = path.starts_with(&self.registry_path) let is_path_in_node_modules = path.starts_with(&self.registry_path)
&& path && path
@ -115,25 +124,26 @@ impl RegistryReadPermissionChecker {
}, },
} }
}; };
let Some(registry_path_canon) = canonicalize(&self.registry_path)? else { if let Some(registry_path_canon) = canonicalize(&self.registry_path)? {
return Ok(()); // not exists, allow reading if let Some(path_canon) = canonicalize(path)? {
};
let Some(path_canon) = canonicalize(path)? else {
return Ok(()); // not exists, allow reading
};
if path_canon.starts_with(registry_path_canon) { if path_canon.starts_with(registry_path_canon) {
return Ok(()); return Ok(Cow::Owned(path_canon));
}
} else if path.starts_with(registry_path_canon)
|| path.starts_with(&self.registry_path)
{
return Ok(Cow::Borrowed(path));
}
} }
} }
permissions.check_read(path) permissions.check_read_path(path)
} }
} }
/// Caches all the packages in parallel. /// Caches all the packages in parallel.
pub async fn cache_packages( pub async fn cache_packages(
packages: Vec<NpmResolutionPackage>, packages: &[NpmResolutionPackage],
tarball_cache: &Arc<TarballCache>, tarball_cache: &Arc<TarballCache>,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let mut futures_unordered = futures::stream::FuturesUnordered::new(); let mut futures_unordered = futures::stream::FuturesUnordered::new();

View file

@ -12,12 +12,12 @@ use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
#[derive(Default)] #[derive(Default)]
pub(super) struct BinEntries { pub struct BinEntries<'a> {
/// Packages that have colliding bin names /// Packages that have colliding bin names
collisions: HashSet<NpmPackageId>, collisions: HashSet<&'a NpmPackageId>,
seen_names: HashMap<String, NpmPackageId>, seen_names: HashMap<&'a str, &'a NpmPackageId>,
/// The bin entries /// The bin entries
entries: Vec<(NpmResolutionPackage, PathBuf)>, entries: Vec<(&'a NpmResolutionPackage, PathBuf)>,
} }
/// Returns the name of the default binary for the given package. /// Returns the name of the default binary for the given package.
@ -31,37 +31,32 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
.map_or(package.id.nv.name.as_str(), |(_, name)| name) .map_or(package.id.nv.name.as_str(), |(_, name)| name)
} }
impl BinEntries { impl<'a> BinEntries<'a> {
pub(super) fn new() -> Self { pub fn new() -> Self {
Self::default() Self::default()
} }
/// Add a new bin entry (package with a bin field) /// Add a new bin entry (package with a bin field)
pub(super) fn add( pub fn add(
&mut self, &mut self,
package: NpmResolutionPackage, package: &'a NpmResolutionPackage,
package_path: PathBuf, package_path: PathBuf,
) { ) {
// check for a new collision, if we haven't already // check for a new collision, if we haven't already
// found one // found one
match package.bin.as_ref().unwrap() { match package.bin.as_ref().unwrap() {
deno_npm::registry::NpmPackageVersionBinEntry::String(_) => { deno_npm::registry::NpmPackageVersionBinEntry::String(_) => {
let bin_name = default_bin_name(&package); let bin_name = default_bin_name(package);
if let Some(other) = self if let Some(other) = self.seen_names.insert(bin_name, &package.id) {
.seen_names self.collisions.insert(&package.id);
.insert(bin_name.to_string(), package.id.clone())
{
self.collisions.insert(package.id.clone());
self.collisions.insert(other); self.collisions.insert(other);
} }
} }
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => { deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
for name in entries.keys() { for name in entries.keys() {
if let Some(other) = if let Some(other) = self.seen_names.insert(name, &package.id) {
self.seen_names.insert(name.to_string(), package.id.clone()) self.collisions.insert(&package.id);
{
self.collisions.insert(package.id.clone());
self.collisions.insert(other); self.collisions.insert(other);
} }
} }
@ -74,7 +69,11 @@ impl BinEntries {
fn for_each_entry( fn for_each_entry(
&mut self, &mut self,
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
mut f: impl FnMut( mut already_seen: impl FnMut(
&Path,
&str, // bin script
) -> Result<(), AnyError>,
mut new: impl FnMut(
&NpmResolutionPackage, &NpmResolutionPackage,
&Path, &Path,
&str, // bin name &str, // bin name
@ -95,18 +94,20 @@ impl BinEntries {
deno_npm::registry::NpmPackageVersionBinEntry::String(script) => { deno_npm::registry::NpmPackageVersionBinEntry::String(script) => {
let name = default_bin_name(package); let name = default_bin_name(package);
if !seen.insert(name) { if !seen.insert(name) {
already_seen(package_path, script)?;
// we already set up a bin entry with this name // we already set up a bin entry with this name
continue; continue;
} }
f(package, package_path, name, script)?; new(package, package_path, name, script)?;
} }
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => { deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
for (name, script) in entries { for (name, script) in entries {
if !seen.insert(name) { if !seen.insert(name) {
already_seen(package_path, script)?;
// we already set up a bin entry with this name // we already set up a bin entry with this name
continue; continue;
} }
f(package, package_path, name, script)?; new(package, package_path, name, script)?;
} }
} }
} }
@ -117,23 +118,27 @@ impl BinEntries {
} }
/// Collect the bin entries into a vec of (name, script path) /// Collect the bin entries into a vec of (name, script path)
pub(super) fn into_bin_files( pub fn into_bin_files(
mut self, mut self,
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
) -> Vec<(String, PathBuf)> { ) -> Vec<(String, PathBuf)> {
let mut bins = Vec::new(); let mut bins = Vec::new();
self self
.for_each_entry(snapshot, |_, package_path, name, script| { .for_each_entry(
snapshot,
|_, _| Ok(()),
|_, package_path, name, script| {
bins.push((name.to_string(), package_path.join(script))); bins.push((name.to_string(), package_path.join(script)));
Ok(()) Ok(())
}) },
)
.unwrap(); .unwrap();
bins bins
} }
/// Finish setting up the bin entries, writing the necessary files /// Finish setting up the bin entries, writing the necessary files
/// to disk. /// to disk.
pub(super) fn finish( pub fn finish(
mut self, mut self,
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path, bin_node_modules_dir_path: &Path,
@ -144,7 +149,17 @@ impl BinEntries {
)?; )?;
} }
self.for_each_entry(snapshot, |package, package_path, name, script| { self.for_each_entry(
snapshot,
|_package_path, _script| {
#[cfg(unix)]
{
let path = _package_path.join(_script);
make_executable_if_exists(&path)?;
}
Ok(())
},
|package, package_path, name, script| {
set_up_bin_entry( set_up_bin_entry(
package, package,
name, name,
@ -152,7 +167,8 @@ impl BinEntries {
package_path, package_path,
bin_node_modules_dir_path, bin_node_modules_dir_path,
) )
})?; },
)?;
Ok(()) Ok(())
} }
@ -162,8 +178,8 @@ impl BinEntries {
// that has a bin entry, then sort them by depth // that has a bin entry, then sort them by depth
fn sort_by_depth( fn sort_by_depth(
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
bin_entries: &mut [(NpmResolutionPackage, PathBuf)], bin_entries: &mut [(&NpmResolutionPackage, PathBuf)],
collisions: &mut HashSet<NpmPackageId>, collisions: &mut HashSet<&NpmPackageId>,
) { ) {
enum Entry<'a> { enum Entry<'a> {
Pkg(&'a NpmPackageId), Pkg(&'a NpmPackageId),
@ -217,7 +233,7 @@ fn sort_by_depth(
}); });
} }
pub(super) fn set_up_bin_entry( pub fn set_up_bin_entry(
package: &NpmResolutionPackage, package: &NpmResolutionPackage,
bin_name: &str, bin_name: &str,
#[allow(unused_variables)] bin_script: &str, #[allow(unused_variables)] bin_script: &str,
@ -259,6 +275,32 @@ fn set_up_bin_shim(
Ok(()) Ok(())
} }
#[cfg(unix)]
/// Make the file at `path` executable if it exists.
/// Returns `true` if the file exists, `false` otherwise.
fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
use std::io;
use std::os::unix::fs::PermissionsExt;
let mut perms = match std::fs::metadata(path) {
Ok(metadata) => metadata.permissions(),
Err(err) => {
if err.kind() == io::ErrorKind::NotFound {
return Ok(false);
}
return Err(err.into());
}
};
if perms.mode() & 0o111 == 0 {
// if the original file is not executable, make it executable
perms.set_mode(perms.mode() | 0o111);
std::fs::set_permissions(path, perms).with_context(|| {
format!("Setting permissions on '{}'", path.display())
})?;
}
Ok(true)
}
#[cfg(unix)] #[cfg(unix)]
fn symlink_bin_entry( fn symlink_bin_entry(
_package: &NpmResolutionPackage, _package: &NpmResolutionPackage,
@ -272,11 +314,10 @@ fn symlink_bin_entry(
let link = bin_node_modules_dir_path.join(bin_name); let link = bin_node_modules_dir_path.join(bin_name);
let original = package_path.join(bin_script); let original = package_path.join(bin_script);
use std::os::unix::fs::PermissionsExt; let found = make_executable_if_exists(&original).with_context(|| {
let mut perms = match std::fs::metadata(&original) { format!("Can't set up '{}' bin at {}", bin_name, original.display())
Ok(metadata) => metadata.permissions(), })?;
Err(err) => { if !found {
if err.kind() == io::ErrorKind::NotFound {
log::warn!( log::warn!(
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.", "{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"), deno_terminal::colors::yellow("Warning"),
@ -286,18 +327,7 @@ fn symlink_bin_entry(
); );
return Ok(()); return Ok(());
} }
return Err(err).with_context(|| {
format!("Can't set up '{}' bin at {}", bin_name, original.display())
});
}
};
if perms.mode() & 0o111 == 0 {
// if the original file is not executable, make it executable
perms.set_mode(perms.mode() | 0o111);
std::fs::set_permissions(&original, perms).with_context(|| {
format!("Setting permissions on '{}'", original.display())
})?;
}
let original_relative = let original_relative =
crate::util::path::relative_path(bin_node_modules_dir_path, &original) crate::util::path::relative_path(bin_node_modules_dir_path, &original)
.unwrap_or(original); .unwrap_or(original);

View file

@ -0,0 +1,368 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use super::bin_entries::BinEntries;
use crate::args::LifecycleScriptsConfig;
use crate::task_runner::TaskStdio;
use crate::util::progress_bar::ProgressBar;
use deno_core::anyhow::Context;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_runtime::deno_io::FromRawIoHandle;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use std::borrow::Cow;
use std::rc::Rc;
use std::path::Path;
use std::path::PathBuf;
use deno_core::error::AnyError;
use deno_npm::NpmResolutionPackage;
pub trait LifecycleScriptsStrategy {
fn can_run_scripts(&self) -> bool {
true
}
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf;
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, PathBuf)],
) -> Result<(), AnyError>;
fn has_warned(&self, package: &NpmResolutionPackage) -> bool;
fn has_run(&self, package: &NpmResolutionPackage) -> bool;
fn did_run_scripts(
&self,
package: &NpmResolutionPackage,
) -> Result<(), AnyError>;
}
pub struct LifecycleScripts<'a> {
packages_with_scripts: Vec<(&'a NpmResolutionPackage, PathBuf)>,
packages_with_scripts_not_run: Vec<(&'a NpmResolutionPackage, PathBuf)>,
config: &'a LifecycleScriptsConfig,
strategy: Box<dyn LifecycleScriptsStrategy + 'a>,
}
impl<'a> LifecycleScripts<'a> {
pub fn new<T: LifecycleScriptsStrategy + 'a>(
config: &'a LifecycleScriptsConfig,
strategy: T,
) -> Self {
Self {
config,
packages_with_scripts: Vec::new(),
packages_with_scripts_not_run: Vec::new(),
strategy: Box::new(strategy),
}
}
}
fn has_lifecycle_scripts(
package: &NpmResolutionPackage,
package_path: &Path,
) -> bool {
if let Some(install) = package.scripts.get("install") {
// default script
if !is_broken_default_install_script(install, package_path) {
return true;
}
}
package.scripts.contains_key("preinstall")
|| package.scripts.contains_key("postinstall")
}
// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
// but it always fails if the package excludes the `binding.gyp` file when they publish.
// (for example, `fsevents` hits this)
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
}
impl<'a> LifecycleScripts<'a> {
fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
if !self.strategy.can_run_scripts() {
return false;
}
use crate::args::PackagesAllowedScripts;
match &self.config.allowed {
PackagesAllowedScripts::All => true,
// TODO: make this more correct
PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
let s = s.strip_prefix("npm:").unwrap_or(s);
s == package_nv.name || s == package_nv.to_string()
}),
PackagesAllowedScripts::None => false,
}
}
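// Illustrative sketch (not part of this diff): how the allow-list match in
// `can_run_scripts` above behaves. Entries may carry an "npm:" prefix and may
// pin a version; `PackageNv` appears to display as "name@version". The package
// names and versions here are made up.
fn _allow_list_match_example() {
  let matches = |entry: &str, name: &str, name_and_version: &str| {
    let s = entry.strip_prefix("npm:").unwrap_or(entry);
    s == name || s == name_and_version
  };
  assert!(matches("npm:esbuild", "esbuild", "esbuild@0.19.0"));
  assert!(matches("esbuild@0.19.0", "esbuild", "esbuild@0.19.0"));
  assert!(!matches("esbuild@0.18.0", "esbuild", "esbuild@0.19.0"));
}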
/// Register a package for running lifecycle scripts, if applicable.
///
/// `package_path` is the path containing the package's code (its root dir).
/// `package_meta_path` is the path to serve as the base directory for lifecycle
/// script-related metadata (e.g. to store whether the scripts have been run already)
pub fn add(
&mut self,
package: &'a NpmResolutionPackage,
package_path: Cow<Path>,
) {
if has_lifecycle_scripts(package, &package_path) {
if self.can_run_scripts(&package.id.nv) {
if !self.strategy.has_run(package) {
self
.packages_with_scripts
.push((package, package_path.into_owned()));
}
} else if !self.strategy.has_run(package)
&& (self.config.explicit_install || !self.strategy.has_warned(package))
{
// Skip adding `esbuild` as it is known that it can work properly without lifecycle script
// being run, and it's also very popular - any project using Vite would raise warnings.
{
let nv = &package.id.nv;
if nv.name == "esbuild"
&& nv.version >= Version::parse_standard("0.18.0").unwrap()
{
return;
}
}
self
.packages_with_scripts_not_run
.push((package, package_path.into_owned()));
}
}
}
pub fn warn_not_run_scripts(&self) -> Result<(), AnyError> {
if !self.packages_with_scripts_not_run.is_empty() {
self
.strategy
.warn_on_scripts_not_run(&self.packages_with_scripts_not_run)?;
}
Ok(())
}
pub async fn finish(
self,
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: Option<&Path>,
progress_bar: &ProgressBar,
) -> Result<(), AnyError> {
self.warn_not_run_scripts()?;
let get_package_path =
|p: &NpmResolutionPackage| self.strategy.package_path(p);
let mut failed_packages = Vec::new();
if !self.packages_with_scripts.is_empty() {
// get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`)
let base =
resolve_baseline_custom_commands(snapshot, packages, get_package_path)?;
let init_cwd = &self.config.initial_cwd;
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
root_node_modules_dir_path,
);
let mut env_vars = crate::task_runner::real_env_vars();
// we want to pass the current state of npm resolution down to the deno subprocess
// (that may be running as part of the script). we do this with an inherited temp file
//
// SAFETY: we are sharing a single temp file across all of the scripts. the file position
// will be shared among these, which is okay since we run only one script at a time.
// However, if we concurrently run scripts in the future we will
// have to have multiple temp files.
let temp_file_fd =
deno_runtime::ops::process::npm_process_state_tempfile(
process_state.as_bytes(),
).context("failed to create npm process state tempfile for running lifecycle scripts")?;
// SAFETY: fd/handle is valid
let _temp_file =
unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
env_vars.insert(
deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME
.to_string(),
(temp_file_fd as usize).to_string(),
);
for (package, package_path) in self.packages_with_scripts {
// add custom commands for binaries from the package's dependencies. this will take precedence over the
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
// correct bin will be used.
let custom_commands = resolve_custom_commands_from_deps(
base.clone(),
package,
snapshot,
get_package_path,
)?;
for script_name in ["preinstall", "install", "postinstall"] {
if let Some(script) = package.scripts.get(script_name) {
if script_name == "install"
&& is_broken_default_install_script(script, &package_path)
{
continue;
}
let _guard = progress_bar.update_with_prompt(
crate::util::progress_bar::ProgressMessagePrompt::Initialize,
&format!("{}: running '{script_name}' script", package.id.nv),
);
let crate::task_runner::TaskResult {
exit_code,
stderr,
stdout,
} = crate::task_runner::run_task(
crate::task_runner::RunTaskOptions {
task_name: script_name,
script,
cwd: &package_path,
env_vars: env_vars.clone(),
custom_commands: custom_commands.clone(),
init_cwd,
argv: &[],
root_node_modules_dir: root_node_modules_dir_path,
stdio: Some(crate::task_runner::TaskIo {
stderr: TaskStdio::piped(),
stdout: TaskStdio::piped(),
}),
},
)
.await?;
let stdout = stdout.unwrap();
let stderr = stderr.unwrap();
if exit_code != 0 {
log::warn!(
"error: script '{}' in '{}' failed with exit code {}{}{}",
script_name,
package.id.nv,
exit_code,
if !stdout.trim_ascii().is_empty() {
format!(
"\nstdout:\n{}\n",
String::from_utf8_lossy(&stdout).trim()
)
} else {
String::new()
},
if !stderr.trim_ascii().is_empty() {
format!(
"\nstderr:\n{}\n",
String::from_utf8_lossy(&stderr).trim()
)
} else {
String::new()
},
);
failed_packages.push(&package.id.nv);
// assume if earlier script fails, later ones will fail too
break;
}
}
}
self.strategy.did_run_scripts(package)?;
}
}
if failed_packages.is_empty() {
Ok(())
} else {
Err(AnyError::msg(format!(
"failed to run scripts for packages: {}",
failed_packages
.iter()
.map(|p| p.to_string())
.collect::<Vec<_>>()
.join(", ")
)))
}
}
}
// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
fn resolve_baseline_custom_commands(
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
custom_commands
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
custom_commands
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
custom_commands
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
custom_commands.insert(
"node-gyp".to_string(),
Rc::new(crate::task_runner::NodeGypCommand),
);
// TODO: this recreates the bin entries which could be redoing some work, but the ones
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
// doing it for packages that are set up already).
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
resolve_custom_commands_from_packages(
custom_commands,
snapshot,
packages,
get_package_path,
)
}
// resolves the custom commands from an iterator of packages
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands
fn resolve_custom_commands_from_packages<
'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>,
>(
mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot,
packages: P,
get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = BinEntries::new();
for package in packages {
let package_path = get_package_path(package);
if package.bin.is_some() {
bin_entries.add(package, package_path);
}
}
let bins = bin_entries.into_bin_files(snapshot);
for (bin_name, script_path) in bins {
commands.insert(
bin_name.clone(),
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
command_name: bin_name,
path: script_path,
}),
);
}
Ok(commands)
}
// resolves the custom commands from the dependencies of a package
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands.
fn resolve_custom_commands_from_deps(
baseline: crate::task_runner::TaskCustomCommands,
package: &NpmResolutionPackage,
snapshot: &NpmResolutionSnapshot,
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
resolve_custom_commands_from_packages(
baseline,
snapshot,
package
.dependencies
.values()
.map(|id| snapshot.package_from_id(id).unwrap()),
get_package_path,
)
}

View file

@ -2,16 +2,19 @@
//! Code for global npm cache resolution. //! Code for global npm cache resolution.
use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use crate::colors;
use async_trait::async_trait; use async_trait::async_trait;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::url::Url; use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodePermissions;
@ -19,10 +22,14 @@ use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageNotFoundError; use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError; use node_resolver::errors::ReferrerNotFoundError;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;
use super::super::cache::NpmCache; use super::super::cache::NpmCache;
use super::super::cache::TarballCache; use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution; use super::super::resolution::NpmResolution;
use super::common::cache_packages; use super::common::cache_packages;
use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
use super::common::NpmPackageFsResolver; use super::common::NpmPackageFsResolver;
use super::common::RegistryReadPermissionChecker; use super::common::RegistryReadPermissionChecker;
@ -34,6 +41,7 @@ pub struct GlobalNpmPackageResolver {
resolution: Arc<NpmResolution>, resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo, system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker, registry_read_permission_checker: RegistryReadPermissionChecker,
lifecycle_scripts: LifecycleScriptsConfig,
} }
impl GlobalNpmPackageResolver { impl GlobalNpmPackageResolver {
@ -43,6 +51,7 @@ impl GlobalNpmPackageResolver {
tarball_cache: Arc<TarballCache>, tarball_cache: Arc<TarballCache>,
resolution: Arc<NpmResolution>, resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo, system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self { ) -> Self {
Self { Self {
registry_read_permission_checker: RegistryReadPermissionChecker::new( registry_read_permission_checker: RegistryReadPermissionChecker::new(
@ -53,6 +62,7 @@ impl GlobalNpmPackageResolver {
tarball_cache, tarball_cache,
resolution, resolution,
system_info, system_info,
lifecycle_scripts,
} }
} }
} }
@ -63,7 +73,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
self.cache.root_dir_url() self.cache.root_dir_url()
} }
fn node_modules_path(&self) -> Option<&PathBuf> { fn node_modules_path(&self) -> Option<&Path> {
None None
} }
@ -149,8 +159,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
let package_partitions = self let package_partitions = self
.resolution .resolution
.all_system_packages_partitioned(&self.system_info); .all_system_packages_partitioned(&self.system_info);
cache_packages(&package_partitions.packages, &self.tarball_cache).await?;
cache_packages(package_partitions.packages, &self.tarball_cache).await?;
// create the copy package folders // create the copy package folders
for copy in package_partitions.copy_packages { for copy in package_partitions.copy_packages {
@ -159,16 +168,103 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
.ensure_copy_package(&copy.get_package_cache_folder_id())?; .ensure_copy_package(&copy.get_package_cache_folder_id())?;
} }
let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new(
&self.lifecycle_scripts,
GlobalLifecycleScripts::new(self, &self.lifecycle_scripts.root_dir),
);
for package in &package_partitions.packages {
let package_folder = self.cache.package_folder_for_nv(&package.id.nv);
lifecycle_scripts.add(package, Cow::Borrowed(&package_folder));
}
lifecycle_scripts.warn_not_run_scripts()?;
Ok(()) Ok(())
} }
fn ensure_read_permission( fn ensure_read_permission<'a>(
&self, &self,
permissions: &mut dyn NodePermissions, permissions: &mut dyn NodePermissions,
path: &Path, path: &'a Path,
) -> Result<(), AnyError> { ) -> Result<Cow<'a, Path>, AnyError> {
self self
.registry_read_permission_checker .registry_read_permission_checker
.ensure_registry_read_permission(permissions, path) .ensure_registry_read_permission(permissions, path)
} }
} }
struct GlobalLifecycleScripts<'a> {
resolver: &'a GlobalNpmPackageResolver,
path_hash: u64,
}
impl<'a> GlobalLifecycleScripts<'a> {
fn new(resolver: &'a GlobalNpmPackageResolver, root_dir: &Path) -> Self {
let mut hasher = FastInsecureHasher::new_without_deno_version();
hasher.write(root_dir.to_string_lossy().as_bytes());
let path_hash = hasher.finish();
Self {
resolver,
path_hash,
}
}
fn warned_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
self
.package_path(package)
.join(format!(".scripts-warned-{}", self.path_hash))
}
}
impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
for GlobalLifecycleScripts<'a>
{
fn can_run_scripts(&self) -> bool {
false
}
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf {
self.resolver.cache.package_folder_for_nv(&package.id.nv)
}
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, PathBuf)],
) -> std::result::Result<(), deno_core::anyhow::Error> {
log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
for (package, _) in packages {
log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv)));
}
log::warn!("");
log::warn!(
"┠─ {}",
colors::italic("This may cause the packages to not work correctly.")
);
log::warn!("┠─ {}", colors::italic("Lifecycle scripts are only supported when using a `node_modules` directory."));
log::warn!(
"┠─ {}",
colors::italic("Enable it in your deno config file:")
);
log::warn!("┖─ {}", colors::bold("\"nodeModulesDir\": \"auto\""));
for (package, _) in packages {
std::fs::write(self.warned_scripts_file(package), "")?;
}
Ok(())
}
fn did_run_scripts(
&self,
_package: &NpmResolutionPackage,
) -> std::result::Result<(), deno_core::anyhow::Error> {
Ok(())
}
fn has_warned(&self, package: &NpmResolutionPackage) -> bool {
self.warned_scripts_file(package).exists()
}
fn has_run(&self, _package: &NpmResolutionPackage) -> bool {
false
}
}

Some files were not shown because too many files have changed in this diff