0
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2025-02-08 07:16:56 -05:00
This commit is contained in:
Divy Srivastava 2024-11-23 09:40:46 +05:30
commit 68507171b2
5948 changed files with 127128 additions and 61035 deletions

View file

@ -1,9 +1,8 @@
FROM mcr.microsoft.com/vscode/devcontainers/rust:1-bullseye FROM mcr.microsoft.com/vscode/devcontainers/rust:1-bullseye
# Install cmake and protobuf-compiler # Install cmake
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y cmake \ && apt-get install -y cmake \
&& apt-get install -y protobuf-compiler \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Install Deno # Install Deno

View file

@ -4,6 +4,7 @@
"include": [ "include": [
"ban-untagged-todo", "ban-untagged-todo",
"camelcase", "camelcase",
"no-console",
"guard-for-in" "guard-for-in"
], ],
"exclude": [ "exclude": [

View file

@ -31,6 +31,8 @@
"cli/tsc/dts/lib.scripthost.d.ts", "cli/tsc/dts/lib.scripthost.d.ts",
"cli/tsc/dts/lib.webworker*.d.ts", "cli/tsc/dts/lib.webworker*.d.ts",
"cli/tsc/dts/typescript.d.ts", "cli/tsc/dts/typescript.d.ts",
"cli/tools/doc/prism.css",
"cli/tools/doc/prism.js",
"ext/websocket/autobahn/reports", "ext/websocket/autobahn/reports",
"gh-pages", "gh-pages",
"target", "target",
@ -39,10 +41,14 @@
"tests/node_compat/runner/TODO.md", "tests/node_compat/runner/TODO.md",
"tests/node_compat/test", "tests/node_compat/test",
"tests/registry/", "tests/registry/",
"tests/specs/bench/default_ts",
"tests/specs/fmt", "tests/specs/fmt",
"tests/specs/lint/bom", "tests/specs/lint/bom",
"tests/specs/lint/default_ts",
"tests/specs/lint/syntax_error_reporting", "tests/specs/lint/syntax_error_reporting",
"tests/specs/publish/no_check_surfaces_syntax_error", "tests/specs/publish/no_check_surfaces_syntax_error",
"tests/specs/run/default_ts",
"tests/specs/test/default_ts",
"tests/testdata/byte_order_mark.ts", "tests/testdata/byte_order_mark.ts",
"tests/testdata/encoding", "tests/testdata/encoding",
"tests/testdata/file_extensions/ts_with_js_extension.js", "tests/testdata/file_extensions/ts_with_js_extension.js",
@ -56,20 +62,23 @@
"tests/testdata/run/byte_order_mark.ts", "tests/testdata/run/byte_order_mark.ts",
"tests/testdata/run/error_syntax_empty_trailing_line.mjs", "tests/testdata/run/error_syntax_empty_trailing_line.mjs",
"tests/testdata/run/inline_js_source_map*", "tests/testdata/run/inline_js_source_map*",
"tests/testdata/test/glob/",
"tests/testdata/test/markdown_windows.md", "tests/testdata/test/markdown_windows.md",
"tests/util/std", "tests/util/std",
"tests/wpt/runner/expectation.json", "tests/wpt/runner/expectation.json",
"tests/wpt/runner/manifest.json", "tests/wpt/runner/manifest.json",
"tests/wpt/suite", "tests/wpt/suite",
"third_party" "third_party",
"tests/specs/run/shebang_with_json_imports_tsc",
"tests/specs/run/shebang_with_json_imports_swc",
"tests/specs/run/ext_flag_takes_precedence_over_extension",
"tests/specs/run/error_syntax_empty_trailing_line/error_syntax_empty_trailing_line.mjs"
], ],
"plugins": [ "plugins": [
"https://plugins.dprint.dev/typescript-0.91.4.wasm", "https://plugins.dprint.dev/typescript-0.93.2.wasm",
"https://plugins.dprint.dev/json-0.19.3.wasm", "https://plugins.dprint.dev/json-0.19.4.wasm",
"https://plugins.dprint.dev/markdown-0.17.1.wasm", "https://plugins.dprint.dev/markdown-0.17.8.wasm",
"https://plugins.dprint.dev/toml-0.6.2.wasm", "https://plugins.dprint.dev/toml-0.6.3.wasm",
"https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0", "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
"https://plugins.dprint.dev/g-plane/pretty_yaml-v0.3.0.wasm" "https://plugins.dprint.dev/g-plane/pretty_yaml-v0.5.0.wasm"
] ]
} }

View file

@ -4,7 +4,6 @@ about: Report an issue found in the Deno CLI.
title: '' title: ''
labels: '' labels: ''
assignees: '' assignees: ''
--- ---
Version: Deno x.x.x Version: Deno x.x.x

View file

@ -4,5 +4,4 @@ about: Suggest a feature for the Deno CLI.
title: '' title: ''
labels: '' labels: ''
assignees: '' assignees: ''
--- ---

View file

@ -2,10 +2,15 @@ name: cargo_publish
on: workflow_dispatch on: workflow_dispatch
# Ensures only one publish is running at a time
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
jobs: jobs:
build: build:
name: cargo publish name: cargo publish
runs-on: ubuntu-20.04-xl runs-on: ubuntu-24.04-xl
timeout-minutes: 90 timeout-minutes: 90
env: env:
@ -28,16 +33,10 @@ jobs:
- uses: dsherret/rust-toolchain-file@v1 - uses: dsherret/rust-toolchain-file@v1
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x
- name: Install protoc
uses: arduino/setup-protoc@v3
with:
version: '21.12'
repo-token: '${{ secrets.GITHUB_TOKEN }}'
- name: Publish - name: Publish
env: env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}

View file

@ -5,15 +5,16 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache. // Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version // Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format. // automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 9; const cacheVersion = 27;
const ubuntuX86Runner = "ubuntu-22.04"; const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl"; const ubuntuX86XlRunner = "ubuntu-24.04-xl";
const ubuntuARMRunner = "ubicloud-standard-16-arm"; const ubuntuARMRunner = "ubicloud-standard-16-arm";
const windowsX86Runner = "windows-2022"; const windowsX86Runner = "windows-2022";
const windowsX86XlRunner = "windows-2022-xl"; const windowsX86XlRunner = "windows-2022-xl";
const macosX86Runner = "macos-13"; const macosX86Runner = "macos-13";
const macosArmRunner = "macos-14"; const macosArmRunner = "macos-14";
const selfHostedMacosArmRunner = "self-hosted";
const Runners = { const Runners = {
linuxX86: { linuxX86: {
@ -40,7 +41,8 @@ const Runners = {
macosArm: { macosArm: {
os: "macos", os: "macos",
arch: "aarch64", arch: "aarch64",
runner: macosArmRunner, runner:
`\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
}, },
windowsX86: { windowsX86: {
os: "windows", os: "windows",
@ -59,7 +61,7 @@ const prCacheKeyPrefix =
`${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`; `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
// Note that you may need to add more version to the `apt-get remove` line below if you change this // Note that you may need to add more version to the `apt-get remove` line below if you change this
const llvmVersion = 18; const llvmVersion = 19;
const installPkgsCommand = const installPkgsCommand =
`sudo apt-get install --no-install-recommends clang-${llvmVersion} lld-${llvmVersion} clang-tools-${llvmVersion} clang-format-${llvmVersion} clang-tidy-${llvmVersion}`; `sudo apt-get install --no-install-recommends clang-${llvmVersion} lld-${llvmVersion} clang-tools-${llvmVersion} clang-format-${llvmVersion} clang-tidy-${llvmVersion}`;
const sysRootStep = { const sysRootStep = {
@ -71,7 +73,7 @@ export DEBIAN_FRONTEND=noninteractive
sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
# Remove older clang before we install # Remove older clang before we install
sudo apt-get -qq remove \ sudo apt-get -qq remove \
'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
# Install clang-XXX, lld-XXX, and debootstrap. # Install clang-XXX, lld-XXX, and debootstrap.
echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" | echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" |
@ -86,7 +88,7 @@ ${installPkgsCommand} || echo 'Failed. Trying again.' && sudo apt-get clean && s
(yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
echo "Decompressing sysroot..." echo "Decompressing sysroot..."
wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
cd / cd /
xzcat /tmp/sysroot.tar.xz | sudo tar -x xzcat /tmp/sysroot.tar.xz | sudo tar -x
sudo mount --rbind /dev /sysroot/dev sudo mount --rbind /dev /sysroot/dev
@ -191,14 +193,9 @@ const installNodeStep = {
uses: "actions/setup-node@v4", uses: "actions/setup-node@v4",
with: { "node-version": 18 }, with: { "node-version": 18 },
}; };
const installProtocStep = {
name: "Install protoc",
uses: "arduino/setup-protoc@v3",
with: { "version": "21.12", "repo-token": "${{ secrets.GITHUB_TOKEN }}" },
};
const installDenoStep = { const installDenoStep = {
name: "Install Deno", name: "Install Deno",
uses: "denoland/setup-deno@v1", uses: "denoland/setup-deno@v2",
with: { "deno-version": "v1.x" }, with: { "deno-version": "v1.x" },
}; };
@ -354,7 +351,7 @@ const ci = {
needs: ["pre_build"], needs: ["pre_build"],
if: "${{ needs.pre_build.outputs.skip_build != 'true' }}", if: "${{ needs.pre_build.outputs.skip_build != 'true' }}",
"runs-on": "${{ matrix.runner }}", "runs-on": "${{ matrix.runner }}",
"timeout-minutes": 150, "timeout-minutes": 180,
defaults: { defaults: {
run: { run: {
// GH actions does not fail fast by default on // GH actions does not fail fast by default on
@ -494,7 +491,6 @@ const ci = {
if: "matrix.job == 'bench' || matrix.job == 'test'", if: "matrix.job == 'bench' || matrix.job == 'test'",
...installNodeStep, ...installNodeStep,
}, },
installProtocStep,
{ {
if: [ if: [
"matrix.profile == 'release' &&", "matrix.profile == 'release' &&",
@ -649,7 +645,7 @@ const ci = {
name: "test_format.js", name: "test_format.js",
if: "matrix.job == 'lint' && matrix.os == 'linux'", if: "matrix.job == 'lint' && matrix.os == 'linux'",
run: run:
"deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check", "deno run --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check",
}, },
{ {
name: "Lint PR title", name: "Lint PR title",
@ -664,7 +660,7 @@ const ci = {
name: "lint.js", name: "lint.js",
if: "matrix.job == 'lint'", if: "matrix.job == 'lint'",
run: run:
"deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js", "deno run --allow-write --allow-read --allow-run --allow-net ./tools/lint.js",
}, },
{ {
name: "jsdoc_checker.js", name: "jsdoc_checker.js",
@ -768,8 +764,10 @@ const ci = {
run: [ run: [
"cd target/release", "cd target/release",
"zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno", "zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno",
"shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
"strip denort", "strip denort",
"zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort", "zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
"shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
"./deno types > lib.deno.d.ts", "./deno types > lib.deno.d.ts",
].join("\n"), ].join("\n"),
}, },
@ -794,8 +792,10 @@ const ci = {
"--entitlements-xml-file=cli/entitlements.plist", "--entitlements-xml-file=cli/entitlements.plist",
"cd target/release", "cd target/release",
"zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno", "zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno",
"shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
"strip denort", "strip denort",
"zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort", "zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort",
"shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
] ]
.join("\n"), .join("\n"),
}, },
@ -810,7 +810,9 @@ const ci = {
shell: "pwsh", shell: "pwsh",
run: [ run: [
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip", "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip",
"Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip", "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip",
"Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
].join("\n"), ].join("\n"),
}, },
{ {
@ -823,6 +825,7 @@ const ci = {
].join("\n"), ].join("\n"),
run: [ run: [
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/', 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
"echo ${{ github.sha }} > canary-latest.txt", "echo ${{ github.sha }} > canary-latest.txt",
'gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt', 'gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt',
].join("\n"), ].join("\n"),
@ -836,7 +839,7 @@ const ci = {
"!startsWith(github.ref, 'refs/tags/')", "!startsWith(github.ref, 'refs/tags/')",
].join("\n"), ].join("\n"),
run: run:
"target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js", "target/release/deno run -A --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js",
}, },
{ {
name: "Test (full, debug)", name: "Test (full, debug)",
@ -889,9 +892,9 @@ const ci = {
DENO_BIN: "./target/debug/deno", DENO_BIN: "./target/debug/deno",
}, },
run: [ run: [
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\", "deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
" ./tests/wpt/wpt.ts setup", " ./tests/wpt/wpt.ts setup",
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\", "deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
' ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"', ' ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"',
].join("\n"), ].join("\n"),
}, },
@ -902,9 +905,9 @@ const ci = {
DENO_BIN: "./target/release/deno", DENO_BIN: "./target/release/deno",
}, },
run: [ run: [
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\", "deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
" ./tests/wpt/wpt.ts setup", " ./tests/wpt/wpt.ts setup",
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\", "deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\\",
" ./tests/wpt/wpt.ts run --quiet --release \\", " ./tests/wpt/wpt.ts run --quiet --release \\",
' --binary="$DENO_BIN" \\', ' --binary="$DENO_BIN" \\',
" --json=wpt.json \\", " --json=wpt.json \\",
@ -968,8 +971,7 @@ const ci = {
"git clone --depth 1 --branch gh-pages \\", "git clone --depth 1 --branch gh-pages \\",
" https://${DENOBOT_PAT}@github.com/denoland/benchmark_data.git \\", " https://${DENOBOT_PAT}@github.com/denoland/benchmark_data.git \\",
" gh-pages", " gh-pages",
"./target/release/deno run --allow-all --unstable \\", "./target/release/deno run --allow-all ./tools/build_benchmark_jsons.js --release",
" ./tools/build_benchmark_jsons.js --release",
"cd gh-pages", "cd gh-pages",
'git config user.email "propelml@gmail.com"', 'git config user.email "propelml@gmail.com"',
'git config user.name "denobot"', 'git config user.name "denobot"',
@ -1005,8 +1007,10 @@ const ci = {
"github.repository == 'denoland/deno' &&", "github.repository == 'denoland/deno' &&",
"startsWith(github.ref, 'refs/tags/')", "startsWith(github.ref, 'refs/tags/')",
].join("\n"), ].join("\n"),
run: run: [
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/', 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
].join("\n"),
}, },
{ {
name: "Upload release to dl.deno.land (windows)", name: "Upload release to dl.deno.land (windows)",
@ -1020,8 +1024,10 @@ const ci = {
env: { env: {
CLOUDSDK_PYTHON: "${{env.pythonLocation}}\\python.exe", CLOUDSDK_PYTHON: "${{env.pythonLocation}}\\python.exe",
}, },
run: run: [
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/', 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
].join("\n"),
}, },
{ {
name: "Create release notes", name: "Create release notes",
@ -1051,15 +1057,25 @@ const ci = {
with: { with: {
files: [ files: [
"target/release/deno-x86_64-pc-windows-msvc.zip", "target/release/deno-x86_64-pc-windows-msvc.zip",
"target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum",
"target/release/denort-x86_64-pc-windows-msvc.zip", "target/release/denort-x86_64-pc-windows-msvc.zip",
"target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum",
"target/release/deno-x86_64-unknown-linux-gnu.zip", "target/release/deno-x86_64-unknown-linux-gnu.zip",
"target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum",
"target/release/denort-x86_64-unknown-linux-gnu.zip", "target/release/denort-x86_64-unknown-linux-gnu.zip",
"target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum",
"target/release/deno-x86_64-apple-darwin.zip", "target/release/deno-x86_64-apple-darwin.zip",
"target/release/deno-x86_64-apple-darwin.zip.sha256sum",
"target/release/denort-x86_64-apple-darwin.zip", "target/release/denort-x86_64-apple-darwin.zip",
"target/release/denort-x86_64-apple-darwin.zip.sha256sum",
"target/release/deno-aarch64-unknown-linux-gnu.zip", "target/release/deno-aarch64-unknown-linux-gnu.zip",
"target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum",
"target/release/denort-aarch64-unknown-linux-gnu.zip", "target/release/denort-aarch64-unknown-linux-gnu.zip",
"target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum",
"target/release/deno-aarch64-apple-darwin.zip", "target/release/deno-aarch64-apple-darwin.zip",
"target/release/deno-aarch64-apple-darwin.zip.sha256sum",
"target/release/denort-aarch64-apple-darwin.zip", "target/release/denort-aarch64-apple-darwin.zip",
"target/release/denort-aarch64-apple-darwin.zip.sha256sum",
"target/release/deno_src.tar.gz", "target/release/deno_src.tar.gz",
"target/release/lib.deno.d.ts", "target/release/lib.deno.d.ts",
].join("\n"), ].join("\n"),
@ -1078,6 +1094,7 @@ const ci = {
"./target", "./target",
"!./target/*/gn_out", "!./target/*/gn_out",
"!./target/*/*.zip", "!./target/*/*.zip",
"!./target/*/*.sha256sum",
"!./target/*/*.tar.gz", "!./target/*/*.tar.gz",
].join("\n"), ].join("\n"),
key: prCacheKeyPrefix + "${{ github.sha }}", key: prCacheKeyPrefix + "${{ github.sha }}",

View file

@ -48,7 +48,7 @@ jobs:
- pre_build - pre_build
if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}' if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
runs-on: '${{ matrix.runner }}' runs-on: '${{ matrix.runner }}'
timeout-minutes: 150 timeout-minutes: 180
defaults: defaults:
run: run:
shell: bash shell: bash
@ -62,18 +62,18 @@ jobs:
profile: debug profile: debug
- os: macos - os: macos
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-13'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || ''macos-13'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
- os: macos - os: macos
arch: aarch64 arch: aarch64
runner: macos-14 runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test job: test
profile: debug profile: debug
- os: macos - os: macos
arch: aarch64 arch: aarch64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-14'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
@ -84,33 +84,33 @@ jobs:
profile: debug profile: debug
- os: windows - os: windows
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}' runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
job: test job: test
profile: release profile: release
use_sysroot: true use_sysroot: true
wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}' wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
job: bench job: bench
profile: release profile: release
use_sysroot: true use_sysroot: true
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: ubuntu-22.04 runner: ubuntu-24.04
job: test job: test
profile: debug profile: debug
use_sysroot: true use_sysroot: true
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: ubuntu-22.04 runner: ubuntu-24.04
job: lint job: lint
profile: debug profile: debug
- os: linux - os: linux
@ -178,7 +178,7 @@ jobs:
if: '!(matrix.skip)' if: '!(matrix.skip)'
- if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')' - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
name: Install Deno name: Install Deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x
- name: Install Python - name: Install Python
@ -199,12 +199,6 @@ jobs:
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 18 node-version: 18
- name: Install protoc
uses: arduino/setup-protoc@v3
with:
version: '21.12'
repo-token: '${{ secrets.GITHUB_TOKEN }}'
if: '!(matrix.skip)'
- if: |- - if: |-
!(matrix.skip) && (matrix.profile == 'release' && !(matrix.skip) && (matrix.profile == 'release' &&
matrix.job == 'test' && matrix.job == 'test' &&
@ -258,22 +252,22 @@ jobs:
# to complete. # to complete.
sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
# Remove older clang before we install # Remove older clang before we install
sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
# Install clang-XXX, lld-XXX, and debootstrap. # Install clang-XXX, lld-XXX, and debootstrap.
echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-18 main" | echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" |
sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-18.list sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-19.list
curl https://apt.llvm.org/llvm-snapshot.gpg.key | curl https://apt.llvm.org/llvm-snapshot.gpg.key |
gpg --dearmor | gpg --dearmor |
sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg
sudo apt-get update sudo apt-get update
# this was unreliable sometimes, so try again if it fails # this was unreliable sometimes, so try again if it fails
sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19
# Fix alternatives # Fix alternatives
(yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
echo "Decompressing sysroot..." echo "Decompressing sysroot..."
wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
cd / cd /
xzcat /tmp/sysroot.tar.xz | sudo tar -x xzcat /tmp/sysroot.tar.xz | sudo tar -x
sudo mount --rbind /dev /sysroot/dev sudo mount --rbind /dev /sysroot/dev
@ -305,8 +299,8 @@ jobs:
CARGO_PROFILE_RELEASE_LTO=false CARGO_PROFILE_RELEASE_LTO=false
RUSTFLAGS<<__1 RUSTFLAGS<<__1
-C linker-plugin-lto=true -C linker-plugin-lto=true
-C linker=clang-18 -C linker=clang-19
-C link-arg=-fuse-ld=lld-18 -C link-arg=-fuse-ld=lld-19
-C link-arg=-ldl -C link-arg=-ldl
-C link-arg=-Wl,--allow-shlib-undefined -C link-arg=-Wl,--allow-shlib-undefined
-C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@ -316,8 +310,8 @@ jobs:
__1 __1
RUSTDOCFLAGS<<__1 RUSTDOCFLAGS<<__1
-C linker-plugin-lto=true -C linker-plugin-lto=true
-C linker=clang-18 -C linker=clang-19
-C link-arg=-fuse-ld=lld-18 -C link-arg=-fuse-ld=lld-19
-C link-arg=-ldl -C link-arg=-ldl
-C link-arg=-Wl,--allow-shlib-undefined -C link-arg=-Wl,--allow-shlib-undefined
-C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@ -325,7 +319,7 @@ jobs:
--cfg tokio_unstable --cfg tokio_unstable
$RUSTFLAGS $RUSTFLAGS
__1 __1
CC=/usr/bin/clang-18 CC=/usr/bin/clang-19
CFLAGS=-flto=thin $CFLAGS CFLAGS=-flto=thin $CFLAGS
" > $GITHUB_ENV " > $GITHUB_ENV
- name: Remove macOS cURL --ipv4 flag - name: Remove macOS cURL --ipv4 flag
@ -367,8 +361,8 @@ jobs:
path: |- path: |-
~/.cargo/registry/index ~/.cargo/registry/index
~/.cargo/registry/cache ~/.cargo/registry/cache
key: '9-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' key: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '9-cargo-home-${{ matrix.os }}-${{ matrix.arch }}' restore-keys: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)' if: '!(matrix.skip)'
- name: Restore cache build output (PR) - name: Restore cache build output (PR)
uses: actions/cache/restore@v4 uses: actions/cache/restore@v4
@ -381,7 +375,7 @@ jobs:
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: never_saved key: never_saved
restore-keys: '9-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' restore-keys: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache - name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))' if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache uses: ./.github/mtime_cache
@ -389,7 +383,7 @@ jobs:
cache-path: ./target cache-path: ./target
- name: test_format.js - name: test_format.js
if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == ''linux'')' if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == ''linux'')'
run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check
- name: Lint PR title - name: Lint PR title
if: '!(matrix.skip) && (matrix.job == ''lint'' && github.event_name == ''pull_request'' && matrix.os == ''linux'')' if: '!(matrix.skip) && (matrix.job == ''lint'' && github.event_name == ''pull_request'' && matrix.os == ''linux'')'
env: env:
@ -397,7 +391,7 @@ jobs:
run: deno run ./tools/verify_pr_title.js "$PR_TITLE" run: deno run ./tools/verify_pr_title.js "$PR_TITLE"
- name: lint.js - name: lint.js
if: '!(matrix.skip) && (matrix.job == ''lint'')' if: '!(matrix.skip) && (matrix.job == ''lint'')'
run: deno run --unstable --allow-write --allow-read --allow-run --allow-net ./tools/lint.js run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/lint.js
- name: jsdoc_checker.js - name: jsdoc_checker.js
if: '!(matrix.skip) && (matrix.job == ''lint'')' if: '!(matrix.skip) && (matrix.job == ''lint'')'
run: deno run --allow-read --allow-env --allow-sys ./tools/jsdoc_checker.js run: deno run --allow-read --allow-env --allow-sys ./tools/jsdoc_checker.js
@ -449,8 +443,10 @@ jobs:
run: |- run: |-
cd target/release cd target/release
zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno
shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
strip denort strip denort
zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
./deno types > lib.deno.d.ts ./deno types > lib.deno.d.ts
- name: Pre-release (mac) - name: Pre-release (mac)
if: |- if: |-
@ -466,8 +462,10 @@ jobs:
rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist
cd target/release cd target/release
zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno
shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum
strip denort strip denort
zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort
shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum
- name: Pre-release (windows) - name: Pre-release (windows)
if: |- if: |-
!(matrix.skip) && (matrix.os == 'windows' && !(matrix.skip) && (matrix.os == 'windows' &&
@ -477,7 +475,9 @@ jobs:
shell: pwsh shell: pwsh
run: |- run: |-
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip
Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
- name: Upload canary to dl.deno.land - name: Upload canary to dl.deno.land
if: |- if: |-
!(matrix.skip) && (matrix.job == 'test' && !(matrix.skip) && (matrix.job == 'test' &&
@ -486,6 +486,7 @@ jobs:
github.ref == 'refs/heads/main') github.ref == 'refs/heads/main')
run: |- run: |-
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/ gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/
echo ${{ github.sha }} > canary-latest.txt echo ${{ github.sha }} > canary-latest.txt
gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt
- name: Autobahn testsuite - name: Autobahn testsuite
@ -494,7 +495,7 @@ jobs:
matrix.job == 'test' && matrix.job == 'test' &&
matrix.profile == 'release' && matrix.profile == 'release' &&
!startsWith(github.ref, 'refs/tags/')) !startsWith(github.ref, 'refs/tags/'))
run: target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js run: target/release/deno run -A --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js
- name: 'Test (full, debug)' - name: 'Test (full, debug)'
if: |- if: |-
!(matrix.skip) && (matrix.job == 'test' && !(matrix.skip) && (matrix.job == 'test' &&
@ -531,18 +532,18 @@ jobs:
env: env:
DENO_BIN: ./target/debug/deno DENO_BIN: ./target/debug/deno
run: |- run: |-
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\ deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts setup ./tests/wpt/wpt.ts setup
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\ deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN" ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"
- name: Run web platform tests (release) - name: Run web platform tests (release)
if: '!(matrix.skip) && (matrix.wpt && matrix.profile == ''release'')' if: '!(matrix.skip) && (matrix.wpt && matrix.profile == ''release'')'
env: env:
DENO_BIN: ./target/release/deno DENO_BIN: ./target/release/deno
run: |- run: |-
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\ deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts setup ./tests/wpt/wpt.ts setup
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\ deno run -A --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts run --quiet --release \ ./tests/wpt/wpt.ts run --quiet --release \
--binary="$DENO_BIN" \ --binary="$DENO_BIN" \
--json=wpt.json \ --json=wpt.json \
@ -590,8 +591,7 @@ jobs:
git clone --depth 1 --branch gh-pages \ git clone --depth 1 --branch gh-pages \
https://${DENOBOT_PAT}@github.com/denoland/benchmark_data.git \ https://${DENOBOT_PAT}@github.com/denoland/benchmark_data.git \
gh-pages gh-pages
./target/release/deno run --allow-all --unstable \ ./target/release/deno run --allow-all ./tools/build_benchmark_jsons.js --release
./tools/build_benchmark_jsons.js --release
cd gh-pages cd gh-pages
git config user.email "propelml@gmail.com" git config user.email "propelml@gmail.com"
git config user.name "denobot" git config user.name "denobot"
@ -616,7 +616,9 @@ jobs:
matrix.profile == 'release' && matrix.profile == 'release' &&
github.repository == 'denoland/deno' && github.repository == 'denoland/deno' &&
startsWith(github.ref, 'refs/tags/')) startsWith(github.ref, 'refs/tags/'))
run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/' run: |-
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
- name: Upload release to dl.deno.land (windows) - name: Upload release to dl.deno.land (windows)
if: |- if: |-
!(matrix.skip) && (matrix.os == 'windows' && !(matrix.skip) && (matrix.os == 'windows' &&
@ -626,7 +628,9 @@ jobs:
startsWith(github.ref, 'refs/tags/')) startsWith(github.ref, 'refs/tags/'))
env: env:
CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe' CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/' run: |-
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
- name: Create release notes - name: Create release notes
if: |- if: |-
!(matrix.skip) && (matrix.job == 'test' && !(matrix.skip) && (matrix.job == 'test' &&
@ -648,15 +652,25 @@ jobs:
with: with:
files: |- files: |-
target/release/deno-x86_64-pc-windows-msvc.zip target/release/deno-x86_64-pc-windows-msvc.zip
target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum
target/release/denort-x86_64-pc-windows-msvc.zip target/release/denort-x86_64-pc-windows-msvc.zip
target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum
target/release/deno-x86_64-unknown-linux-gnu.zip target/release/deno-x86_64-unknown-linux-gnu.zip
target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum
target/release/denort-x86_64-unknown-linux-gnu.zip target/release/denort-x86_64-unknown-linux-gnu.zip
target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum
target/release/deno-x86_64-apple-darwin.zip target/release/deno-x86_64-apple-darwin.zip
target/release/deno-x86_64-apple-darwin.zip.sha256sum
target/release/denort-x86_64-apple-darwin.zip target/release/denort-x86_64-apple-darwin.zip
target/release/denort-x86_64-apple-darwin.zip.sha256sum
target/release/deno-aarch64-unknown-linux-gnu.zip target/release/deno-aarch64-unknown-linux-gnu.zip
target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum
target/release/denort-aarch64-unknown-linux-gnu.zip target/release/denort-aarch64-unknown-linux-gnu.zip
target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum
target/release/deno-aarch64-apple-darwin.zip target/release/deno-aarch64-apple-darwin.zip
target/release/deno-aarch64-apple-darwin.zip.sha256sum
target/release/denort-aarch64-apple-darwin.zip target/release/denort-aarch64-apple-darwin.zip
target/release/denort-aarch64-apple-darwin.zip.sha256sum
target/release/deno_src.tar.gz target/release/deno_src.tar.gz
target/release/lib.deno.d.ts target/release/lib.deno.d.ts
body_path: target/release/release-notes.md body_path: target/release/release-notes.md
@ -669,11 +683,12 @@ jobs:
./target ./target
!./target/*/gn_out !./target/*/gn_out
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.sha256sum
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: '9-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' key: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary: publish-canary:
name: publish canary name: publish canary
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
needs: needs:
- build - build
if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main' if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main'

View file

@ -7,7 +7,7 @@ on:
jobs: jobs:
update-dl-version: update-dl-version:
name: update dl.deno.land version name: update dl.deno.land version
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
if: github.repository == 'denoland/deno' if: github.repository == 'denoland/deno'
steps: steps:
- name: Authenticate with Google Cloud - name: Authenticate with Google Cloud

View file

@ -0,0 +1,62 @@
name: promote_to_release
on:
workflow_dispatch:
inputs:
releaseKind:
description: 'Kind of release'
type: choice
options:
- rc
- lts
required: true
commitHash:
description: Commit to promote to release
required: true
jobs:
promote-to-release:
name: Promote to Release
runs-on: macOS-latest
if: github.repository == 'denoland/deno'
steps:
- name: Clone repository
uses: actions/checkout@v4
with:
token: ${{ secrets.DENOBOT_PAT }}
submodules: recursive
- name: Authenticate with Google Cloud
uses: google-github-actions/auth@v1
with:
project_id: denoland
credentials_json: ${{ secrets.GCP_SA_KEY }}
export_environment_variables: true
create_credentials_file: true
- name: Setup gcloud
uses: google-github-actions/setup-gcloud@v1
with:
project_id: denoland
- name: Install deno
uses: denoland/setup-deno@v2
with:
deno-version: v1.x
- name: Install rust-codesign
run: |-
./tools/install_prebuilt.js rcodesign
echo $GITHUB_WORKSPACE/third_party/prebuilt/mac >> $GITHUB_PATH
- name: Promote to Release
env:
APPLE_CODESIGN_KEY: '${{ secrets.APPLE_CODESIGN_KEY }}'
APPLE_CODESIGN_PASSWORD: '${{ secrets.APPLE_CODESIGN_PASSWORD }}'
run: |
deno run -A ./tools/release/promote_to_release.ts ${{github.event.inputs.releaseKind}} ${{github.event.inputs.commitHash}}
- name: Upload archives to dl.deno.land
run: |
gsutil -h "Cache-Control: public, max-age=3600" cp ./*.zip gs://dl.deno.land/release/$(cat release-${{github.event.inputs.releaseKind}}-latest.txt)/
gsutil -h "Cache-Control: no-cache" cp release-${{github.event.inputs.releaseKind}}-latest.txt gs://dl.deno.land/release-${{github.event.inputs.releaseKind}}-latest.txt

View file

@ -16,7 +16,7 @@ on:
jobs: jobs:
build: build:
name: start release name: start release
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30
env: env:
@ -34,7 +34,7 @@ jobs:
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -16,7 +16,7 @@ on:
jobs: jobs:
build: build:
name: version bump name: version bump
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
timeout-minutes: 90 timeout-minutes: 90
env: env:
@ -39,7 +39,7 @@ jobs:
- uses: dsherret/rust-toolchain-file@v1 - uses: dsherret/rust-toolchain-file@v1
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -20,7 +20,7 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
deno-version: [v1.x, canary] deno-version: [v1.x, canary]
os: [ubuntu-22.04-xl] os: [ubuntu-24.04-xl]
steps: steps:
- name: Clone repository - name: Clone repository
@ -30,7 +30,7 @@ jobs:
persist-credentials: false persist-credentials: false
- name: Setup Deno - name: Setup Deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: ${{ matrix.deno-version }} deno-version: ${{ matrix.deno-version }}
@ -66,9 +66,9 @@ jobs:
- name: Run web platform tests - name: Run web platform tests
shell: bash shell: bash
run: | run: |
deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \ deno run -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
./tests/wpt/wpt.ts setup ./tests/wpt/wpt.ts setup
deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \ deno run -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
./tests/wpt/wpt.ts run \ \ ./tests/wpt/wpt.ts run \ \
--binary=$(which deno) --quiet --release --no-ignore --json=wpt.json --wptreport=wptreport.json --exit-zero --binary=$(which deno) --quiet --release --no-ignore --json=wpt.json --wptreport=wptreport.json --exit-zero

2640
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -5,7 +5,6 @@ resolver = "2"
members = [ members = [
"bench_util", "bench_util",
"cli", "cli",
"cli/napi/sym",
"ext/broadcast_channel", "ext/broadcast_channel",
"ext/cache", "ext/cache",
"ext/canvas", "ext/canvas",
@ -19,15 +18,17 @@ members = [
"ext/io", "ext/io",
"ext/kv", "ext/kv",
"ext/napi", "ext/napi",
"ext/napi/sym",
"ext/net", "ext/net",
"ext/node", "ext/node",
"ext/node_resolver",
"ext/url", "ext/url",
"ext/web", "ext/web",
"ext/webgpu", "ext/webgpu",
"ext/webidl", "ext/webidl",
"ext/websocket", "ext/websocket",
"ext/webstorage", "ext/webstorage",
"resolvers/deno",
"resolvers/node",
"runtime", "runtime",
"runtime/permissions", "runtime/permissions",
"tests", "tests",
@ -44,54 +45,62 @@ license = "MIT"
repository = "https://github.com/denoland/deno" repository = "https://github.com/denoland/deno"
[workspace.dependencies] [workspace.dependencies]
deno_ast = { version = "=0.40.0", features = ["transpiling"] } deno_ast = { version = "=0.43.3", features = ["transpiling"] }
deno_core = { version = "0.299.0" } deno_core = { version = "0.322.0" }
deno_bench_util = { version = "0.157.0", path = "./bench_util" } deno_bench_util = { version = "0.173.0", path = "./bench_util" }
deno_lockfile = "0.20.0" deno_config = { version = "=0.39.2", features = ["workspace", "sync"] }
deno_media_type = { version = "0.1.4", features = ["module_specifier"] } deno_lockfile = "=0.23.1"
deno_permissions = { version = "0.23.0", path = "./runtime/permissions" } deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
deno_runtime = { version = "0.172.0", path = "./runtime" } deno_npm = "=0.25.4"
deno_path_util = "=0.2.1"
deno_permissions = { version = "0.39.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.188.0", path = "./runtime" }
deno_semver = "=0.5.16"
deno_terminal = "0.2.0" deno_terminal = "0.2.0"
napi_sym = { version = "0.93.0", path = "./cli/napi/sym" } napi_sym = { version = "0.109.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" } test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.1" denokv_proto = "0.8.4"
denokv_remote = "0.8.1" denokv_remote = "0.8.4"
# denokv_sqlite brings in bundled sqlite if we don't disable the default features # denokv_sqlite brings in bundled sqlite if we don't disable the default features
denokv_sqlite = { default-features = false, version = "0.8.1" } denokv_sqlite = { default-features = false, version = "0.8.4" }
# exts # exts
deno_broadcast_channel = { version = "0.157.0", path = "./ext/broadcast_channel" } deno_broadcast_channel = { version = "0.173.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.95.0", path = "./ext/cache" } deno_cache = { version = "0.111.0", path = "./ext/cache" }
deno_canvas = { version = "0.32.0", path = "./ext/canvas" } deno_canvas = { version = "0.48.0", path = "./ext/canvas" }
deno_console = { version = "0.163.0", path = "./ext/console" } deno_console = { version = "0.179.0", path = "./ext/console" }
deno_cron = { version = "0.43.0", path = "./ext/cron" } deno_cron = { version = "0.59.0", path = "./ext/cron" }
deno_crypto = { version = "0.177.0", path = "./ext/crypto" } deno_crypto = { version = "0.193.0", path = "./ext/crypto" }
deno_fetch = { version = "0.187.0", path = "./ext/fetch" } deno_fetch = { version = "0.203.0", path = "./ext/fetch" }
deno_ffi = { version = "0.150.0", path = "./ext/ffi" } deno_ffi = { version = "0.166.0", path = "./ext/ffi" }
deno_fs = { version = "0.73.0", path = "./ext/fs" } deno_fs = { version = "0.89.0", path = "./ext/fs" }
deno_http = { version = "0.161.0", path = "./ext/http" } deno_http = { version = "0.177.0", path = "./ext/http" }
deno_io = { version = "0.73.0", path = "./ext/io" } deno_io = { version = "0.89.0", path = "./ext/io" }
deno_kv = { version = "0.71.0", path = "./ext/kv" } deno_kv = { version = "0.87.0", path = "./ext/kv" }
deno_napi = { version = "0.94.0", path = "./ext/napi" } deno_napi = { version = "0.110.0", path = "./ext/napi" }
deno_net = { version = "0.155.0", path = "./ext/net" } deno_net = { version = "0.171.0", path = "./ext/net" }
deno_node = { version = "0.100.0", path = "./ext/node" } deno_node = { version = "0.116.0", path = "./ext/node" }
deno_tls = { version = "0.150.0", path = "./ext/tls" } deno_tls = { version = "0.166.0", path = "./ext/tls" }
deno_url = { version = "0.163.0", path = "./ext/url" } deno_url = { version = "0.179.0", path = "./ext/url" }
deno_web = { version = "0.194.0", path = "./ext/web" } deno_web = { version = "0.210.0", path = "./ext/web" }
deno_webgpu = { version = "0.130.0", path = "./ext/webgpu" } deno_webgpu = { version = "0.146.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.163.0", path = "./ext/webidl" } deno_webidl = { version = "0.179.0", path = "./ext/webidl" }
deno_websocket = { version = "0.168.0", path = "./ext/websocket" } deno_websocket = { version = "0.184.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.158.0", path = "./ext/webstorage" } deno_webstorage = { version = "0.174.0", path = "./ext/webstorage" }
node_resolver = { version = "0.2.0", path = "./ext/node_resolver" }
# resolvers
deno_resolver = { version = "0.11.0", path = "./resolvers/deno" }
node_resolver = { version = "0.18.0", path = "./resolvers/node" }
aes = "=0.8.3" aes = "=0.8.3"
anyhow = "1.0.57" anyhow = "1.0.57"
async-trait = "0.1.73" async-trait = "0.1.73"
base32 = "=0.4.0" base32 = "=0.5.1"
base64 = "0.21.4" base64 = "0.21.7"
bencher = "0.1" bencher = "0.1"
boxed_error = "0.2.2"
brotli = "6.0.0" brotli = "6.0.0"
bytes = "1.4.0" bytes = "1.4.0"
cache_control = "=0.2.0" cache_control = "=0.2.0"
@ -99,23 +108,27 @@ cbc = { version = "=0.1.2", features = ["alloc"] }
# Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS. # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
# Instead use util::time::utc_now() # Instead use util::time::utc_now()
chrono = { version = "0.4", default-features = false, features = ["std", "serde"] } chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
color-print = "0.3.5"
console_static_text = "=0.8.1" console_static_text = "=0.8.1"
dashmap = "5.5.3"
data-encoding = "2.3.3" data-encoding = "2.3.3"
data-url = "=0.3.0" data-url = "=0.3.0"
deno_cache_dir = "=0.10.2" deno_cache_dir = "=0.13.2"
deno_package_json = { version = "=0.1.1", default-features = false } deno_package_json = { version = "0.1.2", default-features = false }
dlopen2 = "0.6.1" dlopen2 = "0.6.1"
ecb = "=0.1.2" ecb = "=0.1.2"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] } elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
encoding_rs = "=0.8.33" encoding_rs = "=0.8.33"
fast-socks5 = "0.9.6"
faster-hex = "0.9" faster-hex = "0.9"
fastwebsockets = { version = "0.6", features = ["upgrade", "unstable-split"] } fastwebsockets = { version = "0.8", features = ["upgrade", "unstable-split"] }
filetime = "0.2.16" filetime = "0.2.16"
flate2 = { version = "1.0.26", default-features = false } flate2 = { version = "1.0.30", default-features = false }
fs3 = "0.5.0" fs3 = "0.5.0"
futures = "0.3.21" futures = "0.3.21"
glob = "0.3.1" glob = "0.3.1"
h2 = "0.4.4" h2 = "0.4.4"
hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
http = "1.0" http = "1.0"
http-body = "1.0" http-body = "1.0"
http-body-util = "0.1.2" http-body-util = "0.1.2"
@ -123,37 +136,37 @@ http_v02 = { package = "http", version = "0.2.9" }
httparse = "1.8.0" httparse = "1.8.0"
hyper = { version = "1.4.1", features = ["full"] } hyper = { version = "1.4.1", features = ["full"] }
hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] } hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
hyper-util = { version = "=0.1.6", features = ["tokio", "client", "client-legacy", "server", "server-auto"] } hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] } hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
indexmap = { version = "2", features = ["serde"] } indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3" ipnet = "2.3"
jsonc-parser = { version = "=0.23.0", features = ["serde"] } jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3" lazy-regex = "3"
libc = "0.2.126" libc = "0.2.126"
libz-sys = { version = "1.1", default-features = false } libz-sys = { version = "1.1.20", default-features = false }
log = "0.4.20" log = { version = "0.4.20", features = ["kv"] }
lsp-types = "=0.94.1" # used by tower-lsp and "proposed" feature is unstable in patch releases lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
memmem = "0.1.1" memmem = "0.1.1"
monch = "=0.5.0" monch = "=0.5.0"
notify = { version = "=5.0.0", default-features = false, features = ["macos_kqueue"] } notify = { version = "=6.1.1", default-features = false, features = ["macos_kqueue"] }
num-bigint = { version = "0.4", features = ["rand"] } num-bigint = { version = "0.4", features = ["rand"] }
once_cell = "1.17.1" once_cell = "1.17.1"
os_pipe = { version = "=1.1.5", features = ["io_safety"] } os_pipe = { version = "=1.1.5", features = ["io_safety"] }
p224 = { version = "0.13.0", features = ["ecdh"] } p224 = { version = "0.13.0", features = ["ecdh"] }
p256 = { version = "0.13.2", features = ["ecdh"] } p256 = { version = "0.13.2", features = ["ecdh", "jwk"] }
p384 = { version = "0.13.0", features = ["ecdh"] } p384 = { version = "0.13.0", features = ["ecdh", "jwk"] }
parking_lot = "0.12.0" parking_lot = "0.12.0"
percent-encoding = "2.3.0" percent-encoding = "2.3.0"
phf = { version = "0.11", features = ["macros"] } phf = { version = "0.11", features = ["macros"] }
pin-project = "1.0.11" # don't pin because they yank crates from cargo pin-project = "1.0.11" # don't pin because they yank crates from cargo
pretty_assertions = "=1.4.0" pretty_assertions = "=1.4.0"
prost = "0.11" prost = "0.13"
prost-build = "0.11" prost-build = "0.13"
rand = "=0.8.5" rand = "=0.8.5"
regex = "^1.7.0" regex = "^1.7.0"
reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955 reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
ring = "^0.17.0" ring = "^0.17.0"
rusqlite = { version = "=0.29.0", features = ["unlock_notify", "bundled"] } rusqlite = { version = "0.32.0", features = ["unlock_notify", "bundled"] }
rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] } rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] }
rustls-pemfile = "2" rustls-pemfile = "2"
rustls-tokio-stream = "=0.3.0" rustls-tokio-stream = "=0.3.0"
@ -161,6 +174,7 @@ rustls-webpki = "0.102"
rustyline = "=13.0.0" rustyline = "=13.0.0"
saffron = "=0.1.0" saffron = "=0.1.0"
scopeguard = "1.2.0" scopeguard = "1.2.0"
sec1 = "0.7"
serde = { version = "1.0.149", features = ["derive"] } serde = { version = "1.0.149", features = ["derive"] }
serde_bytes = "0.11" serde_bytes = "0.11"
serde_json = "1.0.85" serde_json = "1.0.85"
@ -182,18 +196,25 @@ tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring
tokio-socks = "0.5.1" tokio-socks = "0.5.1"
tokio-util = "0.7.4" tokio-util = "0.7.4"
tower = { version = "0.4.13", default-features = false, features = ["util"] } tower = { version = "0.4.13", default-features = false, features = ["util"] }
tower-http = { version = "0.5.2", features = ["decompression-br", "decompression-gzip"] } tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] }
tower-lsp = { version = "=0.20.0", features = ["proposed"] } tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
tower-service = "0.3.2" tower-service = "0.3.2"
twox-hash = "=1.6.3" twox-hash = "=1.6.3"
# Upgrading past 2.4.1 may cause WPT failures url = { version = "2.5", features = ["serde", "expose_internals"] }
url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] } uuid = { version = "1.3.0", features = ["v4"] }
webpki-root-certs = "0.26.5"
webpki-roots = "0.26" webpki-roots = "0.26"
which = "4.2.5" which = "4.2.5"
zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] } yoke = { version = "0.7.4", features = ["derive"] }
zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4" zstd = "=0.12.4"
opentelemetry = "0.27.0"
opentelemetry-http = "0.27.0"
opentelemetry-otlp = { version = "0.27.0", features = ["logs", "http-proto", "http-json"] }
opentelemetry-semantic-conventions = { version = "0.27.0", features = ["semconv_experimental"] }
opentelemetry_sdk = "0.27.0"
# crypto # crypto
hkdf = "0.12.3" hkdf = "0.12.3"
rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node
@ -208,15 +229,14 @@ quote = "1"
syn = { version = "2", features = ["full", "extra-traits"] } syn = { version = "2", features = ["full", "extra-traits"] }
# unix # unix
nix = "=0.26.2" nix = "=0.27.1"
# windows deps # windows deps
junction = "=0.2.0" junction = "=0.2.0"
winapi = "=0.3.9" winapi = "=0.3.9"
windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry"] } windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel"] }
winres = "=0.1.12" winres = "=0.1.12"
# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.release] [profile.release]
codegen-units = 1 codegen-units = 1
incremental = true incremental = true
@ -228,12 +248,11 @@ opt-level = 'z' # Optimize for size
inherits = "release" inherits = "release"
debug = true debug = true
# NB: the `bench` and `release` profiles must remain EXACTLY the same. # Faster to compile than `release` but with similar performance.
[profile.bench] [profile.release-lite]
codegen-units = 1 inherits = "release"
incremental = true codegen-units = 128
lto = true lto = "thin"
opt-level = 'z' # Optimize for size
# Key generation is too slow on `debug` # Key generation is too slow on `debug`
[profile.dev.package.num-bigint-dig] [profile.dev.package.num-bigint-dig]
@ -243,80 +262,6 @@ opt-level = 3
[profile.dev.package.v8] [profile.dev.package.v8]
opt-level = 1 opt-level = 1
# Optimize these packages for performance.
# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.bench.package.async-compression]
opt-level = 3
[profile.bench.package.base64-simd]
opt-level = 3
[profile.bench.package.brotli]
opt-level = 3
[profile.bench.package.brotli-decompressor]
opt-level = 3
[profile.bench.package.bytes]
opt-level = 3
[profile.bench.package.deno_bench_util]
opt-level = 3
[profile.bench.package.deno_broadcast_channel]
opt-level = 3
[profile.bench.package.deno_core]
opt-level = 3
[profile.bench.package.deno_crypto]
opt-level = 3
[profile.bench.package.deno_fetch]
opt-level = 3
[profile.bench.package.deno_ffi]
opt-level = 3
[profile.bench.package.deno_http]
opt-level = 3
[profile.bench.package.deno_napi]
opt-level = 3
[profile.bench.package.deno_net]
opt-level = 3
[profile.bench.package.deno_node]
opt-level = 3
[profile.bench.package.deno_runtime]
opt-level = 3
[profile.bench.package.deno_tls]
opt-level = 3
[profile.bench.package.deno_url]
opt-level = 3
[profile.bench.package.deno_web]
opt-level = 3
[profile.bench.package.deno_websocket]
opt-level = 3
[profile.bench.package.fastwebsockets]
opt-level = 3
[profile.bench.package.flate2]
opt-level = 3
[profile.bench.package.futures-util]
opt-level = 3
[profile.bench.package.hyper]
opt-level = 3
[profile.bench.package.miniz_oxide]
opt-level = 3
[profile.bench.package.num-bigint-dig]
opt-level = 3
[profile.bench.package.rand]
opt-level = 3
[profile.bench.package.serde]
opt-level = 3
[profile.bench.package.serde_v8]
opt-level = 3
[profile.bench.package.test_napi]
opt-level = 3
[profile.bench.package.tokio]
opt-level = 3
[profile.bench.package.url]
opt-level = 3
[profile.bench.package.v8]
opt-level = 3
[profile.bench.package.zstd]
opt-level = 3
[profile.bench.package.zstd-sys]
opt-level = 3
# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.release.package.async-compression] [profile.release.package.async-compression]
opt-level = 3 opt-level = 3
[profile.release.package.base64-simd] [profile.release.package.base64-simd]
@ -375,6 +320,8 @@ opt-level = 3
opt-level = 3 opt-level = 3
[profile.release.package.serde_v8] [profile.release.package.serde_v8]
opt-level = 3 opt-level = 3
[profile.release.package.libsui]
opt-level = 3
[profile.release.package.test_napi] [profile.release.package.test_napi]
opt-level = 3 opt-level = 3
[profile.release.package.tokio] [profile.release.package.tokio]

View file

@ -46,6 +46,12 @@ brew install deno
choco install deno choco install deno
``` ```
[WinGet](https://winstall.app/apps/DenoLand.Deno) (Windows):
```powershell
winget install --id=DenoLand.Deno
```
### Build and install from source ### Build and install from source
Complete instructions for building Deno from source can be found in the manual Complete instructions for building Deno from source can be found in the manual

View file

@ -6,6 +6,960 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at: We also have one-line install commands at:
https://github.com/denoland/deno_install https://github.com/denoland/deno_install
### 2.1.1 / 2024.11.21
- docs(add): clarification to add command (#26968)
- docs(doc): fix typo in doc subcommand help output (#26321)
- fix(node): regression where ts files were sometimes resolved instead of js
(#26971)
- fix(task): ensure root config always looks up dependencies in root (#26959)
- fix(watch): don't panic if there's no path provided (#26972)
- fix: Buffer global in --unstable-node-globals (#26973)
### 2.1.0 / 2024.11.21
- feat(cli): add `--unstable-node-globals` flag (#26617)
- feat(cli): support multiple env file argument (#26527)
- feat(compile): ability to embed directory in executable (#26939)
- feat(compile): ability to embed local data files (#26934)
- feat(ext/fetch): Make fetch client parameters configurable (#26909)
- feat(ext/fetch): allow embedders to use `hickory_dns_resolver` instead of
default `GaiResolver` (#26740)
- feat(ext/fs): add ctime to Deno.stats and use it in node compat layer (#24801)
- feat(ext/http): Make http server parameters configurable (#26785)
- feat(ext/node): perf_hooks.monitorEventLoopDelay() (#26905)
- feat(fetch): accept async iterables for body (#26882)
- feat(fmt): support SQL (#26750)
- feat(info): show location for Web Cache (#26205)
- feat(init): add --npm flag to initialize npm projects (#26896)
- feat(jupyter): Add `Deno.jupyter.image` API (#26284)
- feat(lint): Add checked files list to the JSON output (#26936)
- feat(lsp): auto-imports with @deno-types directives (#26821)
- feat(node): stabilize detecting if CJS via `"type": "commonjs"` in a
package.json (#26439)
- feat(permission): support suffix wildcards in `--allow-env` flag (#25255)
- feat(publish): add `--set-version <version>` flag (#26141)
- feat(runtime): remove public OTEL trace API (#26854)
- feat(task): add --eval flag (#26943)
- feat(task): dependencies (#26467)
- feat(task): support object notation, remove support for JSDocs (#26886)
- feat(task): workspace support with --filter and --recursive (#26949)
- feat(watch): log which file changed on HMR or watch change (#25801)
- feat: OpenTelemetry Tracing API and Exporting (#26710)
- feat: Wasm module support (#26668)
- feat: fmt and lint respect .gitignore file (#26897)
- feat: permission stack traces in ops (#26938)
- feat: subcommand to view and update outdated dependencies (#26942)
- feat: upgrade V8 to 13.0 (#26851)
- fix(cli): preserve comments in doc tests (#26828)
- fix(cli): show prefix hint when installing a package globally (#26629)
- fix(ext/cache): gracefully error when cache creation failed (#26895)
- fix(ext/http): prefer brotli for `accept-encoding: gzip, deflate, br, zstd`
(#26814)
- fix(ext/node): New async setInterval function to improve the nodejs
compatibility (#26703)
- fix(ext/node): add autoSelectFamily option to net.createConnection (#26661)
- fix(ext/node): handle `--allow-sys=inspector` (#26836)
- fix(ext/node): increase tolerance for interval test (#26899)
- fix(ext/node): process.getBuiltinModule (#26833)
- fix(ext/node): use ERR_NOT_IMPLEMENTED for notImplemented (#26853)
- fix(ext/node): zlib.crc32() (#26856)
- fix(ext/webgpu): Create GPUQuerySet converter before usage (#26883)
- fix(ext/websocket): initialize `error` attribute of WebSocket ErrorEvent
(#26796)
- fix(ext/webstorage): use error class for sqlite error case (#26806)
- fix(fmt): error instead of panic on unstable format (#26859)
- fix(fmt): formatting of .svelte files (#26948)
- fix(install): percent encodings in interactive progress bar (#26600)
- fix(install): re-setup bin entries after running lifecycle scripts (#26752)
- fix(lockfile): track dependencies specified in TypeScript compiler options
(#26551)
- fix(lsp): ignore editor indent settings if deno.json is present (#26912)
- fix(lsp): skip code action edits that can't be converted (#26831)
- fix(node): handle resolving ".//<something>" in npm packages (#26920)
- fix(node/crypto): support promisify on generateKeyPair (#26913)
- fix(permissions): say to use --allow-run instead of --allow-all (#26842)
- fix(publish): improve error message when missing exports (#26945)
- fix: otel resiliency (#26857)
- fix: update message for unsupported schemes with npm and jsr (#26884)
- perf(compile): code cache (#26528)
- perf(windows): delay load webgpu and some other dlls (#26917)
- perf: use available system memory for v8 isolate memory limit (#26868)
### 2.0.6 / 2024.11.10
- feat(ext/http): abort event when request is cancelled (#26781)
- feat(ext/http): abort signal when request is cancelled (#26761)
- feat(lsp): auto-import completions from byonm dependencies (#26680)
- fix(ext/cache): don't panic when creating cache (#26780)
- fix(ext/node): better inspector support (#26471)
- fix(fmt): don't use self-closing tags in HTML (#26754)
- fix(install): cache jsr deps from all workspace config files (#26779)
- fix(node:zlib): gzip & gzipSync should accept ArrayBuffer (#26762)
- fix: performance.timeOrigin (#26787)
### 2.0.5 / 2024.11.05
- fix(add): better error message when adding package that only has pre-release
versions (#26724)
- fix(add): only add npm deps to package.json if it's at least as close as
deno.json (#26683)
- fix(cli): set `npm_config_user_agent` when running npm packages or tasks
(#26639)
- fix(coverage): exclude comment lines from coverage reports (#25939)
- fix(ext/node): add `findSourceMap` to the default export of `node:module`
(#26720)
- fix(ext/node): convert errors from `fs.readFile/fs.readFileSync` to node
format (#26632)
- fix(ext/node): resolve exports even if parent module filename isn't present
(#26553)
- fix(ext/node): return `this` from `http.Server.ref/unref()` (#26647)
- fix(fmt): do not panic for jsx ignore container followed by jsx text (#26723)
- fix(fmt): fix several HTML and components issues (#26654)
- fix(fmt): ignore file directive for YAML files (#26717)
- fix(install): handle invalid function error, and fallback to junctions
regardless of the error (#26730)
- fix(lsp): include unstable features from editor settings (#26655)
- fix(lsp): scope attribution for lazily loaded assets (#26699)
- fix(node): Implement `os.userInfo` properly, add missing `toPrimitive`
(#24702)
- fix(serve): support serve hmr (#26078)
- fix(types): missing `import` permission on `PermissionOptionsObject` (#26627)
- fix(workspace): support wildcard packages (#26568)
- fix: clamp smi in fast calls by default (#26506)
- fix: improved support for cjs and cts modules (#26558)
- fix: op_run_microtasks crash (#26718)
- fix: panic_hook hangs without procfs (#26732)
- fix: remove permission check in op_require_node_module_paths (#26645)
- fix: surface package.json location on dep parse failure (#26665)
- perf(lsp): don't walk coverage directory (#26715)
### 2.0.4 / 2024.10.29
- Revert "fix(ext/node): fix dns.lookup result ordering (#26264)" (#26621)
- Revert "fix(ext/node): use primordials in `ext/node/polyfills/https.ts`
(#26323)" (#26613)
- feat(lsp): "typescript.preferences.preferTypeOnlyAutoImports" setting (#26546)
- fix(check): expose more globals from @types/node (#26603)
- fix(check): ignore resolving `jsxImportSource` when jsx is not used in graph
(#26548)
- fix(cli): Make --watcher CLEAR_SCREEN clear scrollback buffer as well as
visible screen (#25997)
- fix(compile): regression handling redirects (#26586)
- fix(ext/napi): export dynamic symbols list for {Free,Open}BSD (#26605)
- fix(ext/node): add path to `fs.stat` and `fs.statSync` error (#26037)
- fix(ext/node): compatibility with {Free,Open}BSD (#26604)
- fix(ext/node): use primordials in
ext\node\polyfills\internal\crypto\_randomInt.ts (#26534)
- fix(install): cache json exports of JSR packages (#26552)
- fix(install): regression - do not panic when config file contains \r\n
newlines (#26547)
- fix(lsp): make missing import action fix infallible (#26539)
- fix(npm): match npm bearer token generation (#26544)
- fix(upgrade): stop running `deno lsp` processes on windows before attempting
to replace executable (#26542)
- fix(watch): don't panic on invalid file specifiers (#26577)
- fix: do not panic when failing to write to http cache (#26591)
- fix: provide hints in terminal errors for Node.js globals (#26610)
- fix: report exceptions from nextTick (#26579)
- fix: support watch flag to enable watching other files than the main module on
serve subcommand (#26622)
- perf: pass transpiled module to deno_core as known string (#26555)
### 2.0.3 / 2024.10.25
- feat(lsp): interactive inlay hints (#26382)
- fix: support node-api in denort (#26389)
- fix(check): support `--frozen` on deno check (#26479)
- fix(cli): increase size of blocking task threadpool on windows (#26465)
- fix(config): schemas for lint rule and tag autocompletion (#26515)
- fix(ext/console): ignore casing for named colors in css parsing (#26466)
- fix(ext/ffi): return u64/i64 as bigints from nonblocking ffi calls (#26486)
- fix(ext/node): cancel pending ipc writes on channel close (#26504)
- fix(ext/node): map `ERROR_INVALID_NAME` to `ENOENT` on windows (#26475)
- fix(ext/node): only set our end of child process pipe to nonblocking mode
(#26495)
- fix(ext/node): properly map reparse point error in readlink (#26375)
- fix(ext/node): refactor http.ServerResponse into function class (#26210)
- fix(ext/node): stub HTTPParser internal binding (#26401)
- fix(ext/node): use primordials in `ext/node/polyfills/https.ts` (#26323)
- fix(fmt): --ext flag requires to pass files (#26525)
- fix(fmt): upgrade formatters (#26469)
- fix(help): missing package specifier (#26380)
- fix(info): resolve workspace member mappings (#26350)
- fix(install): better json editing (#26450)
- fix(install): cache all exports of JSR packages listed in `deno.json` (#26501)
- fix(install): cache type only module deps in `deno install` (#26497)
- fix(install): don't cache json exports of JSR packages (for now) (#26530)
- fix(install): update lockfile when using package.json (#26458)
- fix(lsp): import-map-remap quickfix for type imports (#26454)
- fix(node/util): support array formats in `styleText` (#26507)
- fix(node:tls): set TLSSocket.alpnProtocol for client connections (#26476)
- fix(npm): ensure scoped package name is encoded in URLs (#26390)
- fix(npm): support version ranges with && or comma (#26453)
- fix: `.npmrc` settings not being passed to install/add command (#26473)
- fix: add 'fmt-component' to unstable features in schema file (#26526)
- fix: share inotify fd across watchers (#26200)
- fix: unpin tokio version (#26457)
- perf(compile): pass module source data from binary directly to v8 (#26494)
- perf: avoid multiple calls to runMicrotask (#26378)
### 2.0.2 / 2024.10.17
- fix(cli): set napi object property properly (#26344)
- fix(ext/node): add null check for kStreamBaseField (#26368)
- fix(install): don't attempt to cache specifiers that point to directories
(#26369)
- fix(jupyter): fix panics for overslow subtraction (#26371)
- fix(jupyter): update to the new logo (#26353)
- fix(net): don't try to set nodelay on upgrade streams (#26342)
- fix(node/fs): copyFile with `COPYFILE_EXCL` should not throw if the
destination doesn't exist (#26360)
- fix(node/http): normalize header names in `ServerResponse` (#26339)
- fix(runtime): send ws ping frames from inspector server (#26352)
- fix: don't warn on ignored signals on windows (#26332)
### 2.0.1 / 2024.10.16
- feat(lsp): "deno/didRefreshDenoConfigurationTree" notifications (#26215)
- feat(unstable): `--unstable-detect-cjs` for respecting explicit
`"type": "commonjs"` (#26149)
- fix(add): create deno.json when running `deno add jsr:<pkg>` (#26275)
- fix(add): exact version should not have range `^` specifier (#26302)
- fix(child_process): map node `--no-warnings` flag to `--quiet` (#26288)
- fix(cli): add prefix to install commands in help (#26318)
- fix(cli): consolidate pkg parser for install & remove (#26298)
- fix(cli): named export takes precedence over default export in doc testing
(#26112)
- fix(cli): improve deno info output for npm packages (#25906)
- fix(console/ext/repl): support using parseFloat() (#25900)
- fix(ext/console): apply coloring for console.table (#26280)
- fix(ext/napi): pass user context to napi_threadsafe_fn finalizers (#26229)
- fix(ext/node): allow writing to tty columns (#26201)
- fix(ext/node): compute pem length (upper bound) for key exports (#26231)
- fix(ext/node): fix dns.lookup result ordering (#26264)
- fix(ext/node): handle http2 server ending stream (#26235)
- fix(ext/node): implement TCP.setNoDelay (#26263)
- fix(ext/node): timingSafeEqual account for AB byteOffset (#26292)
- fix(ext/node): use primordials in `ext/node/polyfills/internal/buffer.mjs`
(#24993)
- fix(ext/webgpu): allow GL backend on Windows (#26206)
- fix(install): duplicate dependencies in `package.json` (#26128)
- fix(install): handle pkg with dep on self when pkg part of peer dep resolution
(#26277)
- fix(install): retry downloads of registry info / tarballs (#26278)
- fix(install): support installing npm package with alias (#26246)
- fix(jupyter): copy kernels icons to the kernel directory (#26084)
- fix(jupyter): keep running event loop when waiting for messages (#26049)
- fix(lsp): relative completions for bare import-mapped specifiers (#26137)
- fix(node): make `process.stdout.isTTY` writable (#26130)
- fix(node/util): export `styleText` from `node:util` (#26194)
- fix(npm): support `--allow-scripts` on `deno run` (and `deno add`,
`deno test`, etc) (#26075)
- fix(repl): importing json files (#26053)
- fix(repl): remove check flags (#26140)
- fix(unstable/worker): ensure import permissions are passed (#26101)
- fix: add hint for missing `document` global in terminal error (#26218)
- fix: do not panic on wsl share file paths on windows (#26081)
- fix: do not panic running remote cjs module (#26259)
- fix: do not panic when using methods on classes and interfaces in deno doc
html output (#26100)
- fix: improve suggestions and hints when using CommonJS modules (#26287)
- fix: node-api function call should use preamble (#26297)
- fix: panic in `prepare_stack_trace_callback` when global interceptor throws
(#26241)
- fix: use syntect for deno doc html generation (#26322)
- perf(http): avoid clone getting request method and url (#26250)
- perf(http): cache webidl.converters lookups in ext/fetch/23_response.js
(#26256)
- perf(http): make heap allocation for path conditional (#26289)
- perf: use fast calls for microtask ops (#26236)
### 2.0.0 / 2024.10.09
Read announcement blog post at: https://deno.com/blog/v2
- BREAKING: `DENO_FUTURE=1` by default, or welcome to Deno 2.0 (#25213)
- BREAKING: disallow `new Deno.FsFile()` (#25478)
- BREAKING: drop support for Deno.run.{clearEnv,gid,uid} (#25371)
- BREAKING: improve types for `Deno.serve` (#25369)
- BREAKING: improved error code accuracy (#25383)
- BREAKING: make supported compilerOptions an allow list (#25432)
- BREAKING: move `width` and `height` options to `UnsafeWindowSurface`
constructor (#24200)
- BREAKING: remove --allow-hrtime (#25367)
- BREAKING: remove "emit" and "map" from deno info output (#25468)
- BREAKING: remove `--allow-none` flag (#25337)
- BREAKING: remove `--jobs` flag (#25336)
- BREAKING: remove `--trace-ops` (#25344)
- BREAKING: remove `--ts` flag (#25338)
- BREAKING: remove `--unstable` flag (#25522)
- BREAKING: remove `deno bundle` (#25339)
- BREAKING: remove `deno vendor` (#25343)
- BREAKING: remove `Deno.[Tls]Listener.prototype.rid` (#25556)
- BREAKING: remove `Deno.{Conn,TlsConn,TcpConn,UnixConn}.prototype.rid` (#25446)
- BREAKING: remove `Deno.{Reader,Writer}[Sync]` and `Deno.Closer` (#25524)
- BREAKING: remove `Deno.Buffer` (#25441)
- BREAKING: remove `Deno.close()` (#25347)
- BREAKING: remove `Deno.ConnectTlsOptions.{certChain,certFile,privateKey}` and
`Deno.ListenTlsOptions.certChain,certFile,keyFile}` (#25525)
- BREAKING: remove `Deno.copy()` (#25345)
- BREAKING: remove `Deno.customInspect` (#25348)
- BREAKING: remove `Deno.fdatasync[Sync]()` (#25520)
- BREAKING: remove `Deno.File` (#25447)
- BREAKING: remove `Deno.flock[Sync]()` (#25350)
- BREAKING: remove `Deno.FsFile.prototype.rid` (#25499)
- BREAKING: remove `Deno.fstat[Sync]()` (#25351)
- BREAKING: remove `Deno.FsWatcher.prototype.rid` (#25444)
- BREAKING: remove `Deno.fsync[Sync]()` (#25448)
- BREAKING: remove `Deno.ftruncate[Sync]()` (#25412)
- BREAKING: remove `Deno.funlock[Sync]()` (#25442)
- BREAKING: remove `Deno.futime[Sync]()` (#25252)
- BREAKING: remove `Deno.iter[Sync]()` (#25346)
- BREAKING: remove `Deno.read[Sync]()` (#25409)
- BREAKING: remove `Deno.readAll[Sync]()` (#25386)
- BREAKING: remove `Deno.seek[Sync]()` (#25449)
- BREAKING: remove `Deno.Seeker[Sync]` (#25551)
- BREAKING: remove `Deno.shutdown()` (#25253)
- BREAKING: remove `Deno.write[Sync]()` (#25408)
- BREAKING: remove `Deno.writeAll[Sync]()` (#25407)
- BREAKING: remove deprecated `UnsafeFnPointer` constructor type with untyped
`Deno.PointerObject` parameter (#25577)
- BREAKING: remove deprecated files config (#25535)
- BREAKING: Remove obsoleted Temporal APIs part 2 (#25505)
- BREAKING: remove remaining web types for compatibility (#25334)
- BREAKING: remove support for remote import maps in deno.json (#25836)
- BREAKING: rename "deps" remote cache folder to "remote" (#25969)
- BREAKING: soft-remove `Deno.isatty()` (#25410)
- BREAKING: soft-remove `Deno.run()` (#25403)
- BREAKING: soft-remove `Deno.serveHttp()` (#25451)
- BREAKING: undeprecate `Deno.FsWatcher.prototype.return()` (#25623)
- feat: add `--allow-import` flag (#25469)
- feat: Add a hint on error about 'Relative import path ... not prefixed with
...' (#25430)
- feat: Add better error messages for unstable APIs (#25519)
- feat: Add suggestion for packages using Node-API addons (#25975)
- feat: Allow importing .cjs files (#25426)
- feat: default to TS for file extension and support ext flag in more scenarios
(#25472)
- feat: deprecate import assertions (#25281)
- feat: Don't warn about --allow-script when using esbuild (#25894)
- feat: hide several --unstable-* flags (#25378)
- feat: improve lockfile v4 to store normalized version constraints and be more
terse (#25247)
- feat: improve warnings for deprecations and lifecycle script for npm packages
(#25694)
- feat: include version number in all --json based outputs (#25335)
- feat: lockfile v4 by default (#25165)
- feat: make 'globalThis.location' a configurable property (#25812)
- feat: print `Listening on` messages on stderr instead of stdout (#25491)
- feat: remove `--lock-write` flag (#25214)
- feat: require jsr prefix for `deno install` and `deno add` (#25698)
- feat: require(esm) (#25501)
- feat: Show hints when using `window` global (#25805)
- feat: stabilize `Deno.createHttpClient()` (#25569)
- feat: suggest `deno install --entrypoint` instead of `deno cache` (#25228)
- feat: support DENO_LOG env var instead of RUST_LOG (#25356)
- feat: TypeScript 5.6 and `npm:@types/node@22` (#25614)
- feat: Update no-window lint rule (#25486)
- feat: update warning message for --allow-run with no list (#25693)
- feat: warn when using `--allow-run` with no allow list (#25215)
- feat(add): Add npm packages to package.json if present (#25477)
- feat(add): strip package subpath when adding a package (#25419)
- feat(add/install): Flag to add dev dependency to package.json (#25495)
- feat(byonm): support `deno run npm:<package>` when package is not in
package.json (#25981)
- feat(check): turn on noImplicitOverride (#25695)
- feat(check): turn on useUnknownInCatchVariables (#25465)
- feat(cli): evaluate code snippets in JSDoc and markdown (#25220)
- feat(cli): give access to `process` global everywhere (#25291)
- feat(cli): use NotCapable error for permission errors (#25431)
- feat(config): Node modules option for 2.0 (#25299)
- feat(ext/crypto): import and export p521 keys (#25789)
- feat(ext/crypto): X448 support (#26043)
- feat(ext/kv): configurable limit params (#25174)
- feat(ext/node): add abort helpers, process & streams fix (#25262)
- feat(ext/node): add rootCertificates to node:tls (#25707)
- feat(ext/node): buffer.transcode() (#25972)
- feat(ext/node): export 'promises' symbol from 'node:timers' (#25589)
- feat(ext/node): export missing constants from 'zlib' module (#25584)
- feat(ext/node): export missing symbols from domain, punycode, repl, tls
  (#25585)
- feat(ext/node): export more symbols from streams and timers/promises (#25582)
- feat(ext/node): expose ES modules for _ modules (#25588)
- feat(flags): allow double commas to escape values in path based flags (#25453)
- feat(flags): support user provided args in repl subcommand (#25605)
- feat(fmt): better error on malformed HTML files (#25853)
- feat(fmt): stabilize CSS, HTML and YAML formatters (#25753)
- feat(fmt): support vto and njk extensions (#25831)
- feat(fmt): upgrade markup_fmt (#25768)
- feat(install): deno install with entrypoint (#25411)
- feat(install): warn repeatedly about not-run lifecycle scripts on explicit
installs (#25878)
- feat(lint): add `no-process-global` lint rule (#25709)
- feat(lsp): add a message when someone runs 'deno lsp' manually (#26051)
- feat(lsp): auto-import types with 'import type' (#25662)
- feat(lsp): html/css/yaml file formatting (#25353)
- feat(lsp): quick fix for @deno-types="npm:@types/*" (#25954)
- feat(lsp): turn on useUnknownInCatchVariables (#25474)
- feat(lsp): unstable setting as list (#25552)
- feat(permissions): `Deno.mainModule` doesn't require permissions (#25667)
- feat(permissions): allow importing from cdn.jsdelivr.net by default (#26013)
- feat(serve): Support second parameter in deno serve (#25606)
- feat(tools/doc): display subitems in symbol overviews where applicable
(#25885)
- feat(uninstall): alias to 'deno remove' if -g flag missing (#25461)
- feat(upgrade): better error message on failure (#25503)
- feat(upgrade): print info links for Deno 2 RC releases (#25225)
- feat(upgrade): support LTS release channel (#25123)
- fix: add link to env var docs (#25557)
- fix: add suggestion how to fix importing CJS module (#21764)
- fix: add test ensuring als works across dynamic import (#25593)
- fix: better error for Deno.UnsafeWindowSurface, correct HttpClient name,
cleanup unused code (#25833)
- fix: cjs resolution cases (#25739)
- fix: consistent with deno_config and treat `"experimentalDecorators"` as
deprecated (#25735)
- fix: delete old Deno 1.x headers file when loading cache (#25283)
- fix: do not panic running invalid file specifier (#25530)
- fix: don't include extensionless files in file collection for lint & fmt by
default (#25721)
- fix: don't prompt when using `Deno.permissions.request` with `--no-prompt`
(#25811)
- fix: eagerly error for specifier with empty version constraint (#25944)
- fix: enable `Win32_Security` feature in `windows-sys` (#26007)
- fix: error on unsupported compiler options (#25714)
- fix: error out if a valid flag is passed before a subcommand (#25830)
- fix: fix jupyter display function type (#25326)
- fix: Float16Array type (#25506)
- fix: handle showing warnings while the progress bar is shown (#25187)
- fix: Hide 'deno cache' from help output (#25960)
- fix: invalid ipv6 hostname on `deno serve` (#25482)
- fix: linux canonicalization checks (#24641)
- fix: lock down allow-run permissions more (#25370)
- fix: make some warnings more standard (#25324)
- fix: no cmd prefix in help output go links (#25459)
- fix: only enable byonm if workspace root has pkg json (#25379)
- fix: panic when require(esm) (#25769)
- fix: precompile preserve SVG camelCase attributes (#25945)
- fix: reland async context (#25140)
- fix: remove --allow-run warning when using deno without args or subcommand
(#25684)
- fix: remove entrypoint hack for Deno 2.0 (#25332)
- fix: remove recently added deno.json node_modules aliasing (#25542)
- fix: remove the typo in the help message (#25962)
- fix: removed unstable-http from deno help (#25216)
- fix: replace `npm install` hint with `deno install` hint (#25244)
- fix: trim space around DENO_AUTH_TOKENS (#25147)
- fix: update deno_doc (#25290)
- fix: Update deno_npm to fix `deno install` with crossws (#25837)
- fix: update hint for `deno add <package>` (#25455)
- fix: update malva in deno to support astro css comments (#25553)
- fix: update nodeModulesDir config JSON schema (#25653)
- fix: update patchver to 0.2 (#25952)
- fix: update sui to 0.4 (#25942)
- fix: upgrade deno_ast 0.42 (#25313)
- fix: upgrade deno_core to 0.307.0 (#25287)
- fix(add/install): default to "latest" tag for npm packages in
`deno add npm:pkg` (#25858)
- fix(bench): Fix table column alignments and NO_COLOR=1 (#25190)
- fix(BREAKING): make dns record types have consistent naming (#25357)
- fix(byonm): resolve npm deps of jsr deps (#25399)
- fix(check): ignore noImplicitOverrides in remote modules (#25854)
- fix(check): move is cjs check from resolving to loading (#25597)
- fix(check): properly surface dependency errors in types file of js file
(#25860)
- fix(cli): `deno task` exit with status 0 (#25637)
- fix(cli): Default to auto with --node-modules-dir flag (#25772)
- fix(cli): handle edge cases around `export`s in doc tests and default export
(#25720)
- fix(cli): Map error kind to `PermissionDenied` when symlinking fails due to
permissions (#25398)
- fix(cli): Only set allow net flag for deno serve if not already allowed all
(#25743)
- fix(cli): Warn on not-run lifecycle scripts with global cache (#25786)
- fix(cli/tools): correct `deno init --serve` template behavior (#25318)
- fix(compile): support 'deno compile' in RC and LTS releases (#25875)
- fix(config): validate export names (#25436)
- fix(coverage): ignore urls from doc testing (#25736)
- fix(doc): surface graph errors as warnings (#25888)
- fix(dts): stabilize `fetch` declaration for use with `Deno.HttpClient`
(#25683)
- fix(ext/console): more precision in console.time (#25723)
- fix(ext/console): prevent duplicate error printing when the cause is assigned
(#25327)
- fix(ext/crypto): ensure EC public keys are exported uncompressed (#25766)
- fix(ext/crypto): fix identity test for x25519 derive bits (#26011)
- fix(ext/crypto): reject empty usages in SubtleCrypto#importKey (#25759)
- fix(ext/crypto): support md4 digest algorithm (#25656)
- fix(ext/crypto): throw DataError for invalid EC key import (#25181)
- fix(ext/fetch): fix lowercase http_proxy classified as https (#25686)
- fix(ext/fetch): percent decode userinfo when parsing proxies (#25229)
- fix(ext/http): do not set localhost to hostname unnecessarily (#24777)
- fix(ext/http): gracefully handle Response.error responses (#25712)
- fix(ext/node): add `FileHandle#writeFile` (#25555)
- fix(ext/node): add `vm.constants` (#25630)
- fix(ext/node): Add missing `node:path` exports (#25567)
- fix(ext/node): Add missing node:fs and node:constants exports (#25568)
- fix(ext/node): add stubs for `node:trace_events` (#25628)
- fix(ext/node): attach console stream properties (#25617)
- fix(ext/node): avoid showing `UNKNOWN` error from TCP handle (#25550)
- fix(ext/node): close upgraded socket when the underlying http connection is
closed (#25387)
- fix(ext/node): delay accept() call 2 ticks in net.Server#listen (#25481)
- fix(ext/node): don't throw error for unsupported signal binding on windows
(#25699)
- fix(ext/node): emit `online` event after worker thread is initialized (#25243)
- fix(ext/node): export `process.allowedNodeEnvironmentFlags` (#25629)
- fix(ext/node): export JWK public key (#25239)
- fix(ext/node): export request and response clases from `http2` module (#25592)
- fix(ext/node): fix `Cipheriv#update(string, undefined)` (#25571)
- fix(ext/node): fix Decipheriv when autoPadding disabled (#25598)
- fix(ext/node): fix process.stdin.pause() (#25864)
- fix(ext/node): Fix vm sandbox object panic (#24985)
- fix(ext/node): http2session ready state (#25143)
- fix(ext/node): Implement detached option in `child_process` (#25218)
- fix(ext/node): import EC JWK keys (#25266)
- fix(ext/node): import JWK octet key pairs (#25180)
- fix(ext/node): import RSA JWK keys (#25267)
- fix(ext/node): register `node:wasi` built-in (#25134)
- fix(ext/node): remove unimplemented promiseHook stubs (#25979)
- fix(ext/node): report freemem() on Linux in bytes (#25511)
- fix(ext/node): Rewrite `node:v8` serialize/deserialize (#25439)
- fix(ext/node): session close during stream setup (#25170)
- fix(ext/node): Stream should be instance of EventEmitter (#25527)
- fix(ext/node): stub `inspector/promises` (#25635)
- fix(ext/node): stub `process.cpuUsage()` (#25462)
- fix(ext/node): stub cpu_info() for OpenBSD (#25807)
- fix(ext/node): support x509 certificates in `createPublicKey` (#25731)
- fix(ext/node): throw when loading `cpu-features` module (#25257)
- fix(ext/node): update aead-gcm-stream to 0.3 (#25261)
- fix(ext/node): use primordials in `ext/node/polyfills/console.ts` (#25572)
- fix(ext/node): use primordials in ext/node/polyfills/wasi.ts (#25608)
- fix(ext/node): validate input lengths in `Cipheriv` and `Decipheriv` (#25570)
- fix(ext/web): don't ignore capture in EventTarget.removeEventListener (#25788)
- fix(ext/webgpu): allow to build on unsupported platforms (#25202)
- fix(ext/webgpu): sync category comment (#25580)
- fix(ext/webstorage): make `getOwnPropertyDescriptor` with symbol return
`undefined` (#13348)
- fix(flags): --allow-all should conflict with lower permissions (#25909)
- fix(flags): don't treat empty run command as task subcommand (#25708)
- fix(flags): move some content from docs.deno.com into help output (#25951)
- fix(flags): properly error out for urls (#25770)
- fix(flags): require global flag for permission flags in install subcommand
(#25391)
- fix(fmt): --check was broken for CSS, YAML and HTML (#25848)
- fix(fmt): fix incorrect quotes in components (#25249)
- fix(fmt): fix tabs in YAML (#25536)
- fix(fmt/markdown): fix regression with multi-line footnotes and inline math
(#25222)
- fix(info): error instead of panic for npm specifiers when using byonm (#25947)
- fix(info): move "version" field to top of json output (#25890)
- fix(inspector): Fix panic when re-entering runtime ops (#25537)
- fix(install): compare versions directly to decide whether to create a child
node_modules dir for a workspace member (#26001)
- fix(install): Make sure target node_modules exists when symlinking (#25494)
- fix(install): recommend using `deno install -g` when using a single http url
(#25388)
- fix(install): store tags associated with package in node_modules dir (#26000)
- fix(install): surface package.json dependency errors (#26023)
- fix(install): Use relative symlinks in deno install (#25164)
- fix(install): make bin entries executable even if not put in
  `node_modules/.bin` (#25873)
- fix(jupyter): allow unstable flags (#25483)
- fix(lint): correctly handle old jsx in linter (#25902)
- fix(lint): support linting jsr pkg without version field (#25230)
- fix(lockfile): use loose deserialization for version constraints (#25660)
- fix(lsp): encode url parts before parsing as uri (#25509)
- fix(lsp): exclude missing import quick fixes with bad resolutions (#26025)
- fix(lsp): panic on url_to_uri() (#25238)
- fix(lsp): properly resolve jsxImportSource for caching (#25688)
- fix(lsp): update diagnostics on npm install (#25352)
- fix(napi): Don't run microtasks in napi_resolve_deferred (#25246)
- fix(napi): Fix worker threads importing already-loaded NAPI addon (#25245)
- fix(no-slow-types): better `override` handling (#25989)
- fix(node): Don't error out if we fail to statically analyze CJS re-export
(#25748)
- fix(node): fix worker_threads issues blocking Angular support (#26024)
- fix(node): implement libuv APIs needed to support `npm:sqlite3` (#25893)
- fix(node): Include "node" condition during CJS re-export analysis (#25785)
- fix(node): Pass NPM_PROCESS_STATE to subprocesses via temp file instead of env
var (#25896)
- fix(node/byonm): do not accidentally resolve bare node built-ins (#25543)
- fix(node/cluster): improve stubs to make log4js work (#25146)
- fix(npm): better error handling for remote npm deps (#25670)
- fix(npm): root package has peer dependency on itself (#26022)
- fix(permissions): disallow any `LD_` or `DYLD_` prefixed env var without full
--allow-run permissions (#25271)
- fix(permissions): disallow launching subprocess with LD_PRELOAD env var
without full run permissions (#25221)
- fix(publish): ensure provenance is spec compliant (#25200)
- fix(regression): do not expose resolved path in Deno.Command permission denied
error (#25434)
- fix(runtime): don't error `child.output()` on consumed stream (#25657)
- fix(runtime): use more null proto objects again (#25040)
- fix(runtime/web_worker): populate `SnapshotOptions` for `WebWorker` when
instantiated without snapshot (#25280)
- fix(task): correct name for scoped npm package binaries (#25390)
- fix(task): support tasks with colons in name in `deno run` (#25233)
- fix(task): use current executable for deno even when not named deno (#26019)
- fix(types): simplify mtls related types (#25658)
- fix(upgrade): more informative information on invalid version (#25319)
- fix(windows): Deno.Command - align binary resolution with linux and mac
(#25429)
- fix(workspace): handle when config has members when specified via --config
(#25988)
- perf: fast path for cached dyn imports (#25636)
- perf: Use -O3 for sui in release builds (#26010)
- perf(cache): single cache file for remote modules (#24983)
- perf(cache): single cache file for typescript emit (#24994)
- perf(ext/fetch): improve decompression throughput by upgrading `tower_http`
(#25806)
- perf(ext/node): reduce some allocations in require (#25197)
- perf(ext/web): optimize performance.measure() (#25774)
### 1.46.3 / 2024.09.04
- feat(upgrade): print info links for Deno 2 RC releases (#25225)
- fix(cli): Map error kind to `PermissionDenied` when symlinking fails due to
permissions (#25398)
- fix(cli/tools): correct `deno init --serve` template behavior (#25318)
- fix(ext/node): session close during stream setup (#25170)
- fix(publish): ensure provenance is spec compliant (#25200)
- fix(upgrade): more informative information on invalid version (#25319)
- fix: fix jupyter display function type (#25326)
### 1.46.2 / 2024.08.29
- Revert "feat(fetch): accept async iterables for body" (#25207)
- fix(bench): Fix table column alignments and NO_COLOR=1 (#25190)
- fix(ext/crypto): throw DataError for invalid EC key import (#25181)
- fix(ext/fetch): percent decode userinfo when parsing proxies (#25229)
- fix(ext/node): emit `online` event after worker thread is initialized (#25243)
- fix(ext/node): export JWK public key (#25239)
- fix(ext/node): import EC JWK keys (#25266)
- fix(ext/node): import JWK octet key pairs (#25180)
- fix(ext/node): import RSA JWK keys (#25267)
- fix(ext/node): throw when loading `cpu-features` module (#25257)
- fix(ext/node): update aead-gcm-stream to 0.3 (#25261)
- fix(ext/webgpu): allow to build on unsupported platforms (#25202)
- fix(fmt): fix incorrect quotes in components (#25249)
- fix(fmt/markdown): fix regression with multi-line footnotes and inline math
(#25222)
- fix(install): Use relative symlinks in deno install (#25164)
- fix(lsp): panic on url_to_uri() (#25238)
- fix(napi): Don't run microtasks in napi_resolve_deferred (#25246)
- fix(napi): Fix worker threads importing already-loaded NAPI addon (#25245)
- fix(node/cluster): improve stubs to make log4js work (#25146)
- fix(runtime/web_worker): populate `SnapshotOptions` for `WebWorker` when
instantiated without snapshot (#25280)
- fix(task): support tasks with colons in name in `deno run` (#25233)
- fix: handle showing warnings while the progress bar is shown (#25187)
- fix: reland async context (#25140)
- fix: removed unstable-http from deno help (#25216)
- fix: replace `npm install` hint with `deno install` hint (#25244)
- fix: update deno_doc (#25290)
- fix: upgrade deno_core to 0.307.0 (#25287)
- perf(ext/node): reduce some allocations in require (#25197)
### 1.46.1 / 2024.08.22
- fix(ext/node): http2session ready state (#25143)
- fix(ext/node): register `node:wasi` built-in (#25134)
- fix(urlpattern): fallback to empty string for undefined group values (#25151)
- fix: trim space around DENO_AUTH_TOKENS (#25147)
### 1.46.0 / 2024.08.22
- BREAKING(temporal/unstable): Remove obsoleted Temporal APIs (#24836)
- BREAKING(webgpu/unstable): Replace async .requestAdapterInfo() with sync .info
(#24783)
- feat: `deno compile --icon <ico>` (#25039)
- feat: `deno init --serve` (#24897)
- feat: `deno upgrade --rc` (#24905)
- feat: Add Deno.ServeDefaultExport type (#24879)
- feat: async context (#24402)
- feat: better help output (#24958)
- feat: codesign for deno compile binaries (#24604)
- feat: deno clean (#24950)
- feat: deno remove (#24952)
- feat: deno run <task> (#24891)
- feat: Deprecate "import assertions" with a warning (#24743)
- feat: glob and directory support for `deno check` and `deno cache` cli arg
paths (#25001)
- feat: Print deprecation message for npm packages (#24992)
- feat: refresh "Download" progress bar with a spinner (#24913)
- feat: Rename --unstable-hmr to --watch-hmr (#24975)
- feat: support short flags for permissions (#24883)
- feat: treat bare deno command with run arguments as deno run (#24887)
- feat: upgrade deno_core (#24886)
- feat: upgrade deno_core (#25042)
- feat: upgrade V8 to 12.8 (#24693)
- feat: Upgrade V8 to 12.9 (#25138)
- feat: vm rewrite (#24596)
- feat(clean): add progress bar (#25026)
- feat(cli): Add --env-file as alternative to --env (#24555)
- feat(cli/tools): add a subcommand `--hide-stacktraces` for test (#24095)
- feat(config): Support frozen lockfile config option in deno.json (#25100)
- feat(config/jsr): add license field (#25056)
- feat(coverage): add breadcrumbs to deno coverage `--html` report (#24860)
- feat(ext/node): rewrite crypto keys (#24463)
- feat(ext/node): support http2session.socket (#24786)
- feat(fetch): accept async iterables for body (#24623)
- feat(flags): improve help output and make `deno run` list tasks (#25108)
- feat(fmt): support CSS, SCSS, Sass and Less (#24870)
- feat(fmt): support HTML, Svelte, Vue, Astro and Angular (#25019)
- feat(fmt): support YAML (#24717)
- feat(FUTURE): terse lockfile (v4) (#25059)
- feat(install): change 'Add ...' message (#24949)
- feat(lint): Add lint for usage of node globals (with autofix) (#25048)
- feat(lsp): node specifier completions (#24904)
- feat(lsp): registry completions for import-mapped specifiers (#24792)
- feat(node): support `username` and `_password` in `.npmrc` file (#24793)
- feat(permissions): link to docs in permission prompt (#24948)
- feat(publish): error on missing license file (#25011)
- feat(publish): suggest importing `jsr:@std/` for `deno.land/std` urls (#25046)
- feat(serve): Opt-in parallelism for `deno serve` (#24920)
- feat(test): rename --allow-none to --permit-no-files (#24809)
- feat(unstable): ability to use a local copy of jsr packages (#25068)
- feat(unstable/fmt): move yaml formatting behind unstable flag (#24848)
- feat(upgrade): refresh output (#24911)
- feat(upgrade): support `deno upgrade 1.46.0` (#25096)
- feat(urlpattern): add ignoreCase option & hasRegExpGroups property, and fix
spec discrepancies (#24741)
- feat(watch): add watch paths to test subcommand (#24771)
- fix: `node:inspector` not being registered (#25007)
- fix: `rename` watch event missing (#24893)
- fix: actually add missing `node:readline/promises` module (#24772)
- fix: adapt to new jupyter runtime API and include session IDs (#24762)
- fix: add permission name when accessing a special file errors (#25085)
- fix: adjust suggestion for lockfile regeneration (#25107)
- fix: cache bust jsr meta file when version not found in dynamic branches
(#24928)
- fix: CFunctionInfo and CTypeInfo leaks (#24634)
- fix: clean up flag help output (#24686)
- fix: correct JSON config schema to show vendor option as stable (#25090)
- fix: dd-trace http message compat (#25021)
- fix: deserialize lockfile v3 straight (#25121)
- fix: Don't panic if fail to handle JS stack frame (#25122)
- fix: Don't panic if failed to add system certificate (#24823)
- fix: Don't shell out to `unzip` in deno upgrade/compile (#24926)
- fix: enable the reporting of parsing related problems when running deno lint
(#24332)
- fix: errors with CallSite methods (#24907)
- fix: include already seen deps in lockfile dep tracking (#24556)
- fix: log current version when using deno upgrade (#25079)
- fix: make `deno add` output more deterministic (#25083)
- fix: make vendor cache manifest more deterministic (#24658)
- fix: missing `emitWarning` import (#24587)
- fix: regressions around Error.prepareStackTrace (#24839)
- fix: stub `node:module.register()` (#24965)
- fix: support `npm:bindings` and `npm:callsites` packages (#24727)
- fix: unblock fsevents native module (#24542)
- fix: update deno_doc (#24972)
- fix: update dry run success message (#24885)
- fix: update lsp error message of 'relative import path' to 'use deno add' for
npm/jsr packages (#24524)
- fix: upgrade deno_core to 0.298.0 (#24709)
- fix: warn about import assertions when using typescript (#25135)
- fix(add): better error message providing scoped pkg missing leading `@` symbol
(#24961)
- fix(add): Better error message when missing npm specifier (#24970)
- fix(add): error when config file contains importMap field (#25115)
- fix(add): Handle packages without root exports (#25102)
- fix(add): Support dist tags in deno add (#24960)
- fix(cli): add NAPI support in standalone mode (#24642)
- fix(cli): Create child node_modules for conflicting dependency versions,
respect aliases in package.json (#24609)
- fix(cli): Respect implied BYONM from DENO_FUTURE in `deno task` (#24652)
- fix(cli): shorten examples in help text (#24374)
- fix(cli): support --watch when running cjs npm packages (#25038)
- fix(cli): Unhide publish subcommand help string (#24787)
- fix(cli): update permission prompt message for compiled binaries (#24081)
- fix(cli/init): broken link in deno init sample template (#24545)
- fix(compile): adhoc codesign mach-o by default (#24824)
- fix(compile): make output more deterministic (#25092)
- fix(compile): support workspace members importing other members (#24909)
- fix(compile/windows): handle cjs re-export of relative path with parent
component (#24795)
- fix(config): regression - should not discover npm workspace for nested
deno.json not in workspace (#24559)
- fix(cron): improve error message for invalid cron names (#24644)
- fix(docs): fix some deno.land/manual broken urls (#24557)
- fix(ext/console): Error Cause Not Inspect-Formatted when printed (#24526)
- fix(ext/console): render properties of Intl.Locale (#24827)
- fix(ext/crypto): respect offsets when writing into ab views in randomFillSync
(#24816)
- fix(ext/fetch): include TCP src/dst socket info in error messages (#24939)
- fix(ext/fetch): include URL and error details on fetch failures (#24910)
- fix(ext/fetch): respect authority from URL (#24705)
- fix(ext/fetch): use correct ALPN to proxies (#24696)
- fix(ext/fetch): use correct ALPN to socks5 proxies (#24817)
- fix(ext/http): correctly consume response body in `Deno.serve` (#24811)
- fix(ext/net): validate port in Deno.{connect,serve,listen} (#24399)
- fix(ext/node): add `CipherIv.setAutoPadding()` (#24940)
- fix(ext/node): add crypto.diffieHellman (#24938)
- fix(ext/node): client closing streaming request shouldn't terminate http
server (#24946)
- fix(ext/node): createBrotliCompress params (#24984)
- fix(ext/node): do not expose `self` global in node (#24637)
- fix(ext/node): don't concat set-cookie in ServerResponse.appendHeader (#25000)
- fix(ext/node): don't throw when calling PerformanceObserver.observe (#25036)
- fix(ext/node): ed25519 signing and cipheriv autopadding fixes (#24957)
- fix(ext/node): fix prismjs compatibility in Web Worker (#25062)
- fix(ext/node): handle node child_process with --v8-options flag (#24804)
- fix(ext/node): handle prefix mapping for IPv4-mapped IPv6 addresses (#24546)
- fix(ext/node): http request uploads of subarray of buffer should work (#24603)
- fix(ext/node): improve shelljs compat with managed npm execution (#24912)
- fix(ext/node): node:zlib coerces quality 10 to 9.5 (#24850)
- fix(ext/node): pass content-disposition header as string instead of bytes
(#25128)
- fix(ext/node): prevent panic in http2.connect with uppercase header names
(#24780)
- fix(ext/node): read correct CPU usage stats on Linux (#24732)
- fix(ext/node): rewrite X509Certificate resource and add `publicKey()` (#24988)
- fix(ext/node): stat.mode on windows (#24434)
- fix(ext/node): support ieee-p1363 ECDSA signatures and pss salt len (#24981)
- fix(ext/node): use pem private keys in createPublicKey (#24969)
- fix(ext/node/net): emit `error` before `close` when connection is refused
(#24656)
- fix(ext/web): make CompressionResource garbage collectable (#24884)
- fix(ext/web): make TextDecoderResource use cppgc (#24888)
- fix(ext/webgpu): assign missing `constants` property of shader about
`GPUDevice.createRenderPipeline[Async]` (#24803)
- fix(ext/webgpu): don't crash while constructing GPUOutOfMemoryError (#24807)
- fix(ext/webgpu): GPUDevice.createRenderPipelineAsync should return a Promise
(#24349)
- fix(ext/websocket): unhandled close rejection in WebsocketStream (#25125)
- fix(fmt): handle using stmt in for of stmt (#24834)
- fix(fmt): regression with pipe in code blocks in tables (#25098)
- fix(fmt): upgrade to dprint-plugin-markdown 0.17.4 (#25075)
- fix(fmt): was sometimes putting comments in front of commas in parameter lists
(#24650)
- fix(future): Emit `deno install` warning less often, suggest `deno install` in
error message (#24706)
- fix(http): Adjust hostname display for Windows when using 0.0.0.0 (#24698)
- fix(init): use bare specifier for `jsr:@std/assert` (#24581)
- fix(install): Properly handle dist tags when setting up node_modules (#24968)
- fix(lint): support linting tsx/jsx from stdin (#24955)
- fix(lsp): directly use file referrer when loading document (#24997)
- fix(lsp): don't always use byonm resolver when DENO_FUTURE=1 (#24865)
- fix(lsp): hang when caching failed (#24651)
- fix(lsp): import map lookup for jsr subpath auto import (#25025)
- fix(lsp): include scoped import map keys in completions (#25047)
- fix(lsp): resolve jsx import source with types mode (#25064)
- fix(lsp): rewrite import for 'infer return type' action (#24685)
- fix(lsp): scope attribution for asset documents (#24663)
- fix(lsp): support npm workspaces and fix some resolution issues (#24627)
- fix(node): better detection for when to surface node resolution errors
(#24653)
- fix(node): cjs pkg dynamically importing esm-only pkg fails (#24730)
- fix(node): Create additional pipes for child processes (#25016)
- fix(node): Fix `--allow-scripts` with no `deno.json` (#24533)
- fix(node): Fix node IPC serialization for objects with undefined values
(#24894)
- fix(node): revert invalid package target change (#24539)
- fix(node): Rework node:child_process IPC (#24763)
- fix(node): Run node compat tests listed in the `ignore` field (and fix the
ones that fail) (#24631)
- fix(node): support `tty.hasColors()` and `tty.getColorDepth()` (#24619)
- fix(node): support wildcards in package.json imports (#24794)
- fix(node/crypto): Assign publicKey and privateKey with let instead of const
(#24943)
- fix(node/fs): node:fs.read and write should accept typed arrays other than
Uint8Array (#25030)
- fix(node/fs): Use correct offset and length in node:fs.read and write (#25049)
- fix(node/fs/promises): watch should be async iterable (#24805)
- fix(node/http): wrong `req.url` value (#25081)
- fix(node/inspector): Session constructor should not throw (#25041)
- fix(node/timers/promises): add scheduler APIs (#24802)
- fix(node/tty): fix `tty.WriteStream.hasColor` with different args (#25094)
- fix(node/util): add missing `debug` alias of `debuglog` (#24944)
- fix(node/worker_threads): support `port.once()` (#24725)
- fix(npm): handle packages with only pre-released 0.0.0 versions (#24563)
- fix(npm): use start directory deno.json as "root deno.json config" in npm
workspace (#24538)
- fix(npmrc): skip loading .npmrc in home dir on permission error (#24758)
- fix(publish): show dirty files on dirty check failure (#24541)
- fix(publish): surface syntax errors when using --no-check (#24620)
- fix(publish): warn about missing license file (#24677)
- fix(publish): workspace included license file had incorrect path (#24747)
- fix(repl): Prevent panic on broken pipe (#21945)
- fix(runtime/windows): fix calculation of console size (#23873)
- fix(std/http2): release window capacity back to remote stream (#24576)
- fix(tls): print a warning if a system certificate can't be loaded (#25023)
- fix(types): Conform lib.deno_web.d.ts to lib.dom.d.ts and lib.webworker.d.ts
(#24599)
- fix(types): fix streams types (#24770)
- fix(unstable): move sloppy-import warnings to lint rule (#24710)
- fix(unstable): panic when running deno install with DENO_FUTURE=1 (#24866)
- fix(unstable/compile): handle byonm import in sub dir (#24755)
- fix(upgrade): better error message when check_exe fails (#25133)
- fix(upgrade): correctly compute latest version based on current release
channel (#25087)
- fix(upgrade): do not error if config in cwd invalid (#24689)
- fix(upgrade): fallback to Content-Length header for progress bar (#24923)
- fix(upgrade): return no RC versions if fetching fails (#25013)
- fix(upgrade): support RC release with --version flag (#25091)
- fix(upgrade): use proper version display (#25029)
- fix(urlpattern): correct typings for added APIs (#24881)
- fix(webgpu): Fix `GPUAdapter#isFallbackAdapter` and `GPUAdapter#info`
properties (#24914)
- fix(workspace): do not resolve to self for npm pkg depending on matching req
(#24591)
- fix(workspace): support resolving bare specifiers to npm pkgs within a
workspace (#24611)
- fix(workspaces/publish): include the license file from the workspace root if
not in pkg (#24714)
- perf: skip saving to emit cache after first failure (#24896)
- perf: update deno_ast to 0.41 (#24819)
- perf: update deno_doc (#24700)
- perf(ext/crypto): make randomUUID() 5x faster (#24510)
- perf(ext/fetch): speed up `resp.clone()` (#24812)
- perf(ext/http): Reduce size of `ResponseBytesInner` (#24840)
- perf(ext/node): improve `Buffer` from string performance (#24567)
- perf(ext/node): optimize fs.exists[Sync] (#24613)
- perf(lsp): remove fallback config scopes for workspace folders (#24868)
- refactor: `version` module exports a single const struct (#25014)
- refactor: decouple node resolution from deno_core (#24724)
- refactor: move importMap with imports/scopes diagnostic to deno_config
(#24553)
- refactor: remove version::is_canary(), use ReleaseChannel instead (#25053)
- refactor: show release channel in `deno --version` (#25061)
- refactor: update to deno_config 0.25 (#24645)
- refactor: update to use deno_package_json (#24688)
- refactor(ext/node): create separate ops for node:http module (#24788)
- refactor(fetch): reimplement fetch with hyper instead of reqwest (#24237)
- refactor(lint): move reporters to separate module (#24757)
- refactor(node): internally add `.code()` to node resolution errors (#24610)
- refactor(upgrade): cleanup pass (#24954)
- refactor(upgrade): make fetching latest version async (#24919)
- Reland "fix: CFunctionInfo and CTypeInfo leaks (#24634)" (#24692)
- Reland "refactor(fetch): reimplement fetch with hyper instead of reqwest"
(#24593)
### 1.45.5 / 2024.07.31
- fix(cli): Unhide publish subcommand help string (#24787)
- fix(compile/windows): handle cjs re-export of relative path with parent
component (#24795)
- fix(ext/node): handle node child_process with --v8-options flag (#24804)
- fix(ext/node): prevent panic in http2.connect with uppercase header names
(#24780)
- fix(ext/webgpu): don't crash while constructing GPUOutOfMemoryError (#24807)
- fix(http): Adjust hostname display for Windows when using 0.0.0.0 (#24698)
- fix(node): Rework node:child_process IPC (#24763)
- fix(node): support wildcards in package.json imports (#24794)
- fix(node/fs/promises): watch should be async iterable (#24805)
- fix(node/timers/promises): add scheduler APIs (#24802)
- fix(npmrc): skip loading .npmrc in home dir on permission error (#24758)
- fix(types): fix streams types (#24770)
- fix(unstable/compile): handle byonm import in sub dir (#24755)
- fix: actually add missing `node:readline/promises` module (#24772)
- fix: adapt to new jupyter runtime API and include session IDs (#24762)
- perf(ext/fetch): speed up `resp.clone()` (#24812)
- perf(ext/node): improve `Buffer` from string performance (#24567)
### 1.45.4 / 2024.07.26 ### 1.45.4 / 2024.07.26
- Reland "fix: CFunctionInfo and CTypeInfo leaks (#24634)" (#24692) - Reland "fix: CFunctionInfo and CTypeInfo leaks (#24634)" (#24692)

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_bench_util" name = "deno_bench_util"
version = "0.157.0" version = "0.173.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno" name = "deno"
version = "1.45.4" version = "2.1.1"
authors.workspace = true authors.workspace = true
default-run = "deno" default-run = "deno"
edition.workspace = true edition.workspace = true
@ -38,6 +38,11 @@ path = "./bench/lsp_bench_standalone.rs"
[features] [features]
default = ["upgrade", "__vendored_zlib_ng"] default = ["upgrade", "__vendored_zlib_ng"]
# A feature that enables heap profiling with dhat on Linux.
# 1. Compile with `cargo build --profile=release-with-debug --features=dhat-heap`
# 2. Run the executable. It will output a dhat-heap.json file.
# 3. Open the json file in https://nnethercote.github.io/dh_view/dh_view.html
dhat-heap = ["dhat"]
# A feature that enables the upgrade subcommand and the background check for # A feature that enables the upgrade subcommand and the background check for
# available updates (of deno binary). This is typically disabled for (Linux) # available updates (of deno binary). This is typically disabled for (Linux)
# distribution packages. # distribution packages.
@ -64,44 +69,45 @@ winres.workspace = true
[dependencies] [dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true } deno_cache_dir.workspace = true
deno_config = { version = "=0.26.1", features = ["workspace", "sync"] } deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "0.144.0", features = ["html", "syntect"] } deno_doc = { version = "0.160.0", features = ["rust", "comrak"] }
deno_emit = "=0.43.1" deno_graph = { version = "=0.85.0" }
deno_graph = { version = "=0.80.1", features = ["tokio_executor"] } deno_lint = { version = "=0.68.0", features = ["docs"] }
deno_lint = { version = "=0.61.0", features = ["docs"] }
deno_lockfile.workspace = true deno_lockfile.workspace = true
deno_npm = "=0.21.4" deno_npm.workspace = true
deno_package_json.workspace = true deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver.workspace = true
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver = "=0.5.7" deno_semver.workspace = true
deno_task_shell = "=0.17.0" deno_task_shell = "=0.18.1"
deno_terminal.workspace = true deno_terminal.workspace = true
eszip = "=0.72.2" libsui = "0.5.0"
napi_sym.workspace = true
node_resolver.workspace = true node_resolver.workspace = true
anstream = "0.6.14"
async-trait.workspace = true async-trait.workspace = true
base32.workspace = true
base64.workspace = true base64.workspace = true
bincode = "=1.3.3" bincode = "=1.3.3"
bytes.workspace = true bytes.workspace = true
cache_control.workspace = true cache_control.workspace = true
chrono = { workspace = true, features = ["now"] } chrono = { workspace = true, features = ["now"] }
clap = { version = "=4.4.17", features = ["env", "string"] } clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
clap_complete = "=4.4.7" clap_complete = "=4.5.24"
clap_complete_fig = "=4.4.2" clap_complete_fig = "=4.5.2"
color-print = "0.3.5" color-print.workspace = true
console_static_text.workspace = true console_static_text.workspace = true
dashmap = "5.5.3" dashmap.workspace = true
data-encoding.workspace = true data-encoding.workspace = true
dhat = { version = "0.3.3", optional = true }
dissimilar = "=1.0.4" dissimilar = "=1.0.4"
dotenvy = "0.15.7" dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.3" dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.3" dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.1" dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.91.4" dprint-plugin-typescript = "=0.93.2"
env_logger = "=0.10.0" env_logger = "=0.10.0"
fancy-regex = "=0.10.0" fancy-regex = "=0.10.0"
faster-hex.workspace = true faster-hex.workspace = true
@ -113,15 +119,17 @@ http.workspace = true
http-body.workspace = true http-body.workspace = true
http-body-util.workspace = true http-body-util.workspace = true
hyper-util.workspace = true hyper-util.workspace = true
import_map = { version = "=0.20.0", features = ["ext"] } import_map = { version = "=0.20.1", features = ["ext"] }
indexmap.workspace = true indexmap.workspace = true
jsonc-parser.workspace = true jsonc-parser = { workspace = true, features = ["cst", "serde"] }
jupyter_runtime = { package = "runtimelib", version = "=0.14.0" } jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] }
lazy-regex.workspace = true lazy-regex.workspace = true
libc.workspace = true libc.workspace = true
libz-sys.workspace = true libz-sys.workspace = true
log = { workspace = true, features = ["serde"] } log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true lsp-types.workspace = true
malva = "=0.11.0"
markup_fmt = "=0.16.0"
memmem.workspace = true memmem.workspace = true
monch.workspace = true monch.workspace = true
notify.workspace = true notify.workspace = true
@ -131,6 +139,7 @@ p256.workspace = true
pathdiff = "0.2.1" pathdiff = "0.2.1"
percent-encoding.workspace = true percent-encoding.workspace = true
phf.workspace = true phf.workspace = true
pretty_yaml = "=0.5.0"
quick-junit = "^0.3.5" quick-junit = "^0.3.5"
rand = { workspace = true, features = ["small_rng"] } rand = { workspace = true, features = ["small_rng"] }
regex.workspace = true regex.workspace = true
@ -142,6 +151,9 @@ serde_repr.workspace = true
sha2.workspace = true sha2.workspace = true
shell-escape = "=0.1.5" shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] } spki = { version = "0.7", features = ["pem"] }
# NOTE(bartlomieju): using temporary fork for now, revert back to `sqlformat-rs` later
sqlformat = { package = "deno_sqlformat", version = "0.3.2" }
strsim = "0.11.1"
tar.workspace = true tar.workspace = true
tempfile.workspace = true tempfile.workspace = true
text-size = "=1.1.0" text-size = "=1.1.0"
@ -150,11 +162,14 @@ thiserror.workspace = true
tokio.workspace = true tokio.workspace = true
tokio-util.workspace = true tokio-util.workspace = true
tower-lsp.workspace = true tower-lsp.workspace = true
tracing = { version = "0.1", features = ["log", "default"] }
twox-hash.workspace = true twox-hash.workspace = true
typed-arena = "=2.0.1" typed-arena = "=2.0.2"
uuid = { workspace = true, features = ["serde"] } uuid = { workspace = true, features = ["serde"] }
walkdir = "=2.3.2"
which.workspace = true which.workspace = true
zeromq.workspace = true zeromq.workspace = true
zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] }
zstd.workspace = true zstd.workspace = true
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
@ -168,7 +183,6 @@ nix.workspace = true
deno_bench_util.workspace = true deno_bench_util.workspace = true
pretty_assertions.workspace = true pretty_assertions.workspace = true
test_util.workspace = true test_util.workspace = true
walkdir = "=2.3.2"
[package.metadata.winres] [package.metadata.winres]
# This section defines the metadata that appears in the deno.exe PE header. # This section defines the metadata that appears in the deno.exe PE header.

View file

@ -2,6 +2,7 @@
use std::collections::HashSet; use std::collections::HashSet;
use deno_config::deno_json::TsConfigForEmit;
use deno_core::serde_json; use deno_core::serde_json;
use deno_semver::jsr::JsrDepPackageReq; use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
@ -69,7 +70,41 @@ pub fn deno_json_deps(
let values = imports_values(config.json.imports.as_ref()) let values = imports_values(config.json.imports.as_ref())
.into_iter() .into_iter()
.chain(scope_values(config.json.scopes.as_ref())); .chain(scope_values(config.json.scopes.as_ref()));
values_to_set(values) let mut set = values_to_set(values);
if let Some(serde_json::Value::Object(compiler_options)) =
&config.json.compiler_options
{
// add jsxImportSource
if let Some(serde_json::Value::String(value)) =
compiler_options.get("jsxImportSource")
{
if let Some(dep_req) = value_to_dep_req(value) {
set.insert(dep_req);
}
}
// add jsxImportSourceTypes
if let Some(serde_json::Value::String(value)) =
compiler_options.get("jsxImportSourceTypes")
{
if let Some(dep_req) = value_to_dep_req(value) {
set.insert(dep_req);
}
}
// add the dependencies in the types array
if let Some(serde_json::Value::Array(types)) = compiler_options.get("types")
{
for value in types {
if let serde_json::Value::String(value) = value {
if let Some(dep_req) = value_to_dep_req(value) {
set.insert(dep_req);
}
}
}
}
}
set
} }
fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> { fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> {
@ -97,11 +132,34 @@ fn values_to_set<'a>(
) -> HashSet<JsrDepPackageReq> { ) -> HashSet<JsrDepPackageReq> {
let mut entries = HashSet::new(); let mut entries = HashSet::new();
for value in values { for value in values {
if let Ok(req_ref) = JsrPackageReqReference::from_str(value) { if let Some(dep_req) = value_to_dep_req(value) {
entries.insert(JsrDepPackageReq::jsr(req_ref.into_inner().req)); entries.insert(dep_req);
} else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
entries.insert(JsrDepPackageReq::npm(req_ref.into_inner().req));
} }
} }
entries entries
} }
fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
if let Ok(req_ref) = JsrPackageReqReference::from_str(value) {
Some(JsrDepPackageReq::jsr(req_ref.into_inner().req))
} else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
Some(JsrDepPackageReq::npm(req_ref.into_inner().req))
} else {
None
}
}
pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) {
if let Some(ignored_options) = &ts_config.maybe_ignored_options {
log::warn!("{}", ignored_options);
}
let serde_json::Value::Object(obj) = &ts_config.ts_config.0 else {
return;
};
if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true)) {
log::warn!(
"{} experimentalDecorators compiler option is deprecated and may be removed at any time",
deno_runtime::colors::yellow("Warning"),
);
}
}

File diff suppressed because it is too large Load diff

View file

@ -50,8 +50,8 @@ pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
out.push(format!("{}:{}", host, port.0)); out.push(format!("{}:{}", host, port.0));
} }
} else { } else {
host_and_port.parse::<NetDescriptor>().map_err(|e| { NetDescriptor::parse(&host_and_port).map_err(|e| {
clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}")) clap::Error::raw(clap::error::ErrorKind::InvalidValue, e.to_string())
})?; })?;
out.push(host_and_port) out.push(host_and_port)
} }

View file

@ -3,7 +3,6 @@
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url; use deno_core::url::Url;
use deno_runtime::deno_permissions::PermissionsContainer;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
@ -17,7 +16,7 @@ pub async fn resolve_import_map_value_from_specifier(
Ok(serde_json::from_str(&data_url_text)?) Ok(serde_json::from_str(&data_url_text)?)
} else { } else {
let file = file_fetcher let file = file_fetcher
.fetch(specifier, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(specifier)
.await? .await?
.into_text_decoded()?; .into_text_decoded()?;
Ok(serde_json::from_str(&file.source)?) Ok(serde_json::from_str(&file.source)?)

View file

@ -1,6 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::BTreeSet; use std::collections::HashSet;
use std::path::PathBuf; use std::path::PathBuf;
use deno_config::deno_json::ConfigFile; use deno_config::deno_json::ConfigFile;
@ -12,6 +12,7 @@ use deno_core::parking_lot::MutexGuard;
use deno_lockfile::WorkspaceMemberConfig; use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_semver::jsr::JsrDepPackageReq;
use crate::cache; use crate::cache;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
@ -23,11 +24,20 @@ use crate::args::InstallKind;
use deno_lockfile::Lockfile; use deno_lockfile::Lockfile;
#[derive(Debug)]
pub struct CliLockfileReadFromPathOptions {
pub file_path: PathBuf,
pub frozen: bool,
/// Causes the lockfile to only be read from, but not written to.
pub skip_write: bool,
}
#[derive(Debug)] #[derive(Debug)]
pub struct CliLockfile { pub struct CliLockfile {
lockfile: Mutex<Lockfile>, lockfile: Mutex<Lockfile>,
pub filename: PathBuf, pub filename: PathBuf,
pub frozen: bool, frozen: bool,
skip_write: bool,
} }
pub struct Guard<'a, T> { pub struct Guard<'a, T> {
@ -49,15 +59,6 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
} }
impl CliLockfile { impl CliLockfile {
pub fn new(lockfile: Lockfile, frozen: bool) -> Self {
let filename = lockfile.filename.clone();
Self {
lockfile: Mutex::new(lockfile),
filename,
frozen,
}
}
/// Get the inner deno_lockfile::Lockfile. /// Get the inner deno_lockfile::Lockfile.
pub fn lock(&self) -> Guard<Lockfile> { pub fn lock(&self) -> Guard<Lockfile> {
Guard { Guard {
@ -77,6 +78,10 @@ impl CliLockfile {
} }
pub fn write_if_changed(&self) -> Result<(), AnyError> { pub fn write_if_changed(&self) -> Result<(), AnyError> {
if self.skip_write {
return Ok(());
}
self.error_if_changed()?; self.error_if_changed()?;
let mut lockfile = self.lockfile.lock(); let mut lockfile = self.lockfile.lock();
let Some(bytes) = lockfile.resolve_write_bytes() else { let Some(bytes) = lockfile.resolve_write_bytes() else {
@ -98,7 +103,9 @@ impl CliLockfile {
flags: &Flags, flags: &Flags,
workspace: &Workspace, workspace: &Workspace,
) -> Result<Option<CliLockfile>, AnyError> { ) -> Result<Option<CliLockfile>, AnyError> {
fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> { fn pkg_json_deps(
maybe_pkg_json: Option<&PackageJson>,
) -> HashSet<JsrDepPackageReq> {
let Some(pkg_json) = maybe_pkg_json else { let Some(pkg_json) = maybe_pkg_json else {
return Default::default(); return Default::default();
}; };
@ -107,23 +114,19 @@ impl CliLockfile {
.values() .values()
.filter_map(|dep| dep.as_ref().ok()) .filter_map(|dep| dep.as_ref().ok())
.filter_map(|dep| match dep { .filter_map(|dep| match dep {
PackageJsonDepValue::Req(req) => Some(req), PackageJsonDepValue::Req(req) => {
Some(JsrDepPackageReq::npm(req.clone()))
}
PackageJsonDepValue::Workspace(_) => None, PackageJsonDepValue::Workspace(_) => None,
}) })
.map(|r| format!("npm:{}", r))
.collect() .collect()
} }
fn deno_json_deps( fn deno_json_deps(
maybe_deno_json: Option<&ConfigFile>, maybe_deno_json: Option<&ConfigFile>,
) -> BTreeSet<String> { ) -> HashSet<JsrDepPackageReq> {
maybe_deno_json maybe_deno_json
.map(|c| { .map(crate::args::deno_json::deno_json_deps)
crate::args::deno_json::deno_json_deps(c)
.into_iter()
.map(|req| req.to_string())
.collect()
})
.unwrap_or_default() .unwrap_or_default()
} }
@ -139,7 +142,7 @@ impl CliLockfile {
return Ok(None); return Ok(None);
} }
let filename = match flags.lock { let file_path = match flags.lock {
Some(ref lock) => PathBuf::from(lock), Some(ref lock) => PathBuf::from(lock),
None => match workspace.resolve_lockfile_path()? { None => match workspace.resolve_lockfile_path()? {
Some(path) => path, Some(path) => path,
@ -147,22 +150,24 @@ impl CliLockfile {
}, },
}; };
let lockfile = if flags.lock_write { let root_folder = workspace.root_folder_configs();
log::warn!( // CLI flag takes precedence over the config
"{} \"--lock-write\" flag is deprecated and will be removed in Deno 2.", let frozen = flags.frozen_lockfile.unwrap_or_else(|| {
crate::colors::yellow("Warning") root_folder
); .deno_json
CliLockfile::new( .as_ref()
Lockfile::new_empty(filename, true), .and_then(|c| c.to_lock_config().ok().flatten().map(|c| c.frozen()))
flags.frozen_lockfile, .unwrap_or(false)
) });
} else {
Self::read_from_path(filename, flags.frozen_lockfile)? let lockfile = Self::read_from_path(CliLockfileReadFromPathOptions {
}; file_path,
frozen,
skip_write: flags.internal.lockfile_skip_write,
})?;
// initialize the lockfile with the workspace's configuration // initialize the lockfile with the workspace's configuration
let root_url = workspace.root_dir(); let root_url = workspace.root_dir();
let root_folder = workspace.root_folder_configs();
let config = deno_lockfile::WorkspaceConfig { let config = deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig { root: WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()), package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
@ -209,35 +214,39 @@ impl CliLockfile {
Ok(Some(lockfile)) Ok(Some(lockfile))
} }
pub fn read_from_path( pub fn read_from_path(
filename: PathBuf, opts: CliLockfileReadFromPathOptions,
frozen: bool,
) -> Result<CliLockfile, AnyError> { ) -> Result<CliLockfile, AnyError> {
match std::fs::read_to_string(&filename) { let lockfile = match std::fs::read_to_string(&opts.file_path) {
Ok(text) => Ok(CliLockfile::new( Ok(text) => Lockfile::new(deno_lockfile::NewLockfileOptions {
Lockfile::with_lockfile_content(filename, &text, false)?, file_path: opts.file_path,
frozen, content: &text,
)), overwrite: false,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok( })?,
CliLockfile::new(Lockfile::new_empty(filename, false), frozen), Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
), Lockfile::new_empty(opts.file_path, false)
Err(err) => Err(err).with_context(|| { }
format!("Failed reading lockfile '{}'", filename.display()) Err(err) => {
}), return Err(err).with_context(|| {
} format!("Failed reading lockfile '{}'", opts.file_path.display())
});
}
};
Ok(CliLockfile {
filename: lockfile.filename.clone(),
lockfile: Mutex::new(lockfile),
frozen: opts.frozen,
skip_write: opts.skip_write,
})
} }
pub fn error_if_changed(&self) -> Result<(), AnyError> { pub fn error_if_changed(&self) -> Result<(), AnyError> {
if !self.frozen { if !self.frozen {
return Ok(()); return Ok(());
} }
let lockfile = self.lockfile.lock(); let lockfile = self.lockfile.lock();
if lockfile.has_content_changed { if lockfile.has_content_changed {
let suggested = if *super::DENO_FUTURE {
"`deno cache --frozen=false`, `deno install --frozen=false`,"
} else {
"`deno cache --frozen=false`"
};
let contents = let contents =
std::fs::read_to_string(&lockfile.filename).unwrap_or_default(); std::fs::read_to_string(&lockfile.filename).unwrap_or_default();
let new_contents = lockfile.as_json_string(); let new_contents = lockfile.as_json_string();
@ -245,7 +254,7 @@ impl CliLockfile {
// has an extra newline at the end // has an extra newline at the end
let diff = diff.trim_end(); let diff = diff.trim_end();
Err(deno_core::anyhow::anyhow!( Err(deno_core::anyhow::anyhow!(
"The lockfile is out of date. Run {suggested} or rerun with `--frozen=false` to update it.\nchanges:\n{diff}" "The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
)) ))
} else { } else {
Ok(()) Ok(())

View file

@ -7,8 +7,11 @@ mod import_map;
mod lockfile; mod lockfile;
mod package_json; mod package_json;
use deno_ast::MediaType;
use deno_ast::SourceMapOption; use deno_ast::SourceMapOption;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::CreateResolverOptions; use deno_config::workspace::CreateResolverOptions;
use deno_config::workspace::FolderConfigs;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::VendorEnablement; use deno_config::workspace::VendorEnablement;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
@ -18,21 +21,20 @@ use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_config::workspace::WorkspaceDiscoverStart; use deno_config::workspace::WorkspaceDiscoverStart;
use deno_config::workspace::WorkspaceLintConfig; use deno_config::workspace::WorkspaceLintConfig;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
use deno_core::normalize_path;
use deno_core::resolve_url_or_path; use deno_core::resolve_url_or_path;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_npm::npm_rc::NpmRc; use deno_npm::npm_rc::NpmRc;
use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_path_util::normalize_path;
use deno_runtime::ops::otel::OtelConfig;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use import_map::resolve_import_map_value_from_specifier; use import_map::resolve_import_map_value_from_specifier;
pub use deno_config::deno_json::BenchConfig; pub use deno_config::deno_json::BenchConfig;
pub use deno_config::deno_json::ConfigFile; pub use deno_config::deno_json::ConfigFile;
pub use deno_config::deno_json::FmtOptionsConfig; pub use deno_config::deno_json::FmtOptionsConfig;
pub use deno_config::deno_json::JsxImportSourceConfig;
pub use deno_config::deno_json::LintRulesConfig; pub use deno_config::deno_json::LintRulesConfig;
pub use deno_config::deno_json::ProseWrap; pub use deno_config::deno_json::ProseWrap;
pub use deno_config::deno_json::TsConfig; pub use deno_config::deno_json::TsConfig;
@ -40,9 +42,12 @@ pub use deno_config::deno_json::TsConfigForEmit;
pub use deno_config::deno_json::TsConfigType; pub use deno_config::deno_json::TsConfigType;
pub use deno_config::deno_json::TsTypeLib; pub use deno_config::deno_json::TsTypeLib;
pub use deno_config::glob::FilePatterns; pub use deno_config::glob::FilePatterns;
pub use deno_json::check_warn_tsconfig;
pub use flags::*; pub use flags::*;
pub use lockfile::CliLockfile; pub use lockfile::CliLockfile;
pub use package_json::PackageJsonInstallDepsProvider; pub use lockfile::CliLockfileReadFromPathOptions;
pub use package_json::NpmInstallDepsProvider;
pub use package_json::PackageJsonDepValueParseWithLocationError;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
@ -50,7 +55,6 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url; use deno_core::url::Url;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsOptions; use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_tls::deno_native_certs::load_native_certs; use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
use deno_runtime::deno_tls::rustls; use deno_runtime::deno_tls::rustls;
@ -63,10 +67,13 @@ use dotenvy::from_filename;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use serde::Deserialize; use serde::Deserialize;
use serde::Serialize; use serde::Serialize;
use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::env; use std::env;
use std::io::BufReader; use std::io::BufReader;
use std::io::Cursor; use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::path::Path; use std::path::Path;
@ -75,6 +82,7 @@ use std::sync::Arc;
use thiserror::Error; use thiserror::Error;
use crate::cache; use crate::cache;
use crate::cache::DenoDirProvider;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::version; use crate::version;
@ -115,9 +123,6 @@ pub static DENO_DISABLE_PEDANTIC_NODE_WARNINGS: Lazy<bool> = Lazy::new(|| {
.is_some() .is_some()
}); });
pub static DENO_FUTURE: Lazy<bool> =
Lazy::new(|| std::env::var("DENO_FUTURE").ok().is_some());
pub fn jsr_url() -> &'static Url { pub fn jsr_url() -> &'static Url {
static JSR_URL: Lazy<Url> = Lazy::new(|| { static JSR_URL: Lazy<Url> = Lazy::new(|| {
let env_var_name = "JSR_URL"; let env_var_name = "JSR_URL";
@ -197,11 +202,14 @@ pub fn ts_config_to_transpile_and_emit_options(
precompile_jsx_dynamic_props: None, precompile_jsx_dynamic_props: None,
transform_jsx, transform_jsx,
var_decl_imports: false, var_decl_imports: false,
// todo(dsherret): support verbatim_module_syntax here properly
verbatim_module_syntax: false,
}, },
deno_ast::EmitOptions { deno_ast::EmitOptions {
inline_sources: options.inline_sources, inline_sources: options.inline_sources,
remove_comments: false, remove_comments: false,
source_map, source_map,
source_map_base: None,
source_map_file: None, source_map_file: None,
}, },
)) ))
@ -278,9 +286,16 @@ impl BenchOptions {
} }
} }
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct UnstableFmtOptions {
pub component: bool,
pub sql: bool,
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct FmtOptions { pub struct FmtOptions {
pub options: FmtOptionsConfig, pub options: FmtOptionsConfig,
pub unstable: UnstableFmtOptions,
pub files: FilePatterns, pub files: FilePatterns,
} }
@ -294,13 +309,22 @@ impl FmtOptions {
pub fn new_with_base(base: PathBuf) -> Self { pub fn new_with_base(base: PathBuf) -> Self {
Self { Self {
options: FmtOptionsConfig::default(), options: FmtOptionsConfig::default(),
unstable: Default::default(),
files: FilePatterns::new_with_base(base), files: FilePatterns::new_with_base(base),
} }
} }
pub fn resolve(fmt_config: FmtConfig, fmt_flags: &FmtFlags) -> Self { pub fn resolve(
fmt_config: FmtConfig,
unstable: UnstableFmtOptions,
fmt_flags: &FmtFlags,
) -> Self {
Self { Self {
options: resolve_fmt_options(fmt_flags, fmt_config.options), options: resolve_fmt_options(fmt_flags, fmt_config.options),
unstable: UnstableFmtOptions {
component: unstable.component || fmt_flags.unstable_component,
sql: unstable.sql || fmt_flags.unstable_sql,
},
files: fmt_config.files, files: fmt_config.files,
} }
} }
@ -348,19 +372,20 @@ pub struct WorkspaceTestOptions {
pub doc: bool, pub doc: bool,
pub no_run: bool, pub no_run: bool,
pub fail_fast: Option<NonZeroUsize>, pub fail_fast: Option<NonZeroUsize>,
pub allow_none: bool, pub permit_no_files: bool,
pub filter: Option<String>, pub filter: Option<String>,
pub shuffle: Option<u64>, pub shuffle: Option<u64>,
pub concurrent_jobs: NonZeroUsize, pub concurrent_jobs: NonZeroUsize,
pub trace_leaks: bool, pub trace_leaks: bool,
pub reporter: TestReporterConfig, pub reporter: TestReporterConfig,
pub junit_path: Option<String>, pub junit_path: Option<String>,
pub hide_stacktraces: bool,
} }
impl WorkspaceTestOptions { impl WorkspaceTestOptions {
pub fn resolve(test_flags: &TestFlags) -> Self { pub fn resolve(test_flags: &TestFlags) -> Self {
Self { Self {
allow_none: test_flags.allow_none, permit_no_files: test_flags.permit_no_files,
concurrent_jobs: test_flags concurrent_jobs: test_flags
.concurrent_jobs .concurrent_jobs
.unwrap_or_else(|| NonZeroUsize::new(1).unwrap()), .unwrap_or_else(|| NonZeroUsize::new(1).unwrap()),
@ -372,6 +397,7 @@ impl WorkspaceTestOptions {
trace_leaks: test_flags.trace_leaks, trace_leaks: test_flags.trace_leaks,
reporter: test_flags.reporter, reporter: test_flags.reporter,
junit_path: test_flags.junit_path.clone(), junit_path: test_flags.junit_path.clone(),
hide_stacktraces: test_flags.hide_stacktraces,
} }
} }
} }
@ -558,6 +584,7 @@ fn discover_npmrc(
let resolved = npmrc let resolved = npmrc
.as_resolved(npm_registry_url()) .as_resolved(npm_registry_url())
.context("Failed to resolve .npmrc options")?; .context("Failed to resolve .npmrc options")?;
log::debug!(".npmrc found at: '{}'", path.display());
Ok(Arc::new(resolved)) Ok(Arc::new(resolved))
} }
@ -656,9 +683,19 @@ pub fn get_root_cert_store(
"system" => { "system" => {
let roots = load_native_certs().expect("could not load platform certs"); let roots = load_native_certs().expect("could not load platform certs");
for root in roots { for root in roots {
root_cert_store if let Err(err) = root_cert_store
.add(rustls::pki_types::CertificateDer::from(root.0)) .add(rustls::pki_types::CertificateDer::from(root.0.clone()))
.expect("Failed to add platform cert to root cert store"); {
log::error!(
"{}",
colors::yellow(&format!(
"Unable to add system certificate to certificate store: {:?}",
err
))
);
let hex_encoded_root = faster_hex::hex_string(&root.0);
log::error!("{}", colors::gray(&hex_encoded_root));
}
} }
} }
_ => { _ => {
@ -715,15 +752,33 @@ pub enum NpmProcessStateKind {
Byonm, Byonm,
} }
pub(crate) const NPM_RESOLUTION_STATE_ENV_VAR_NAME: &str =
"DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE";
static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| { static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| {
let state = std::env::var(NPM_RESOLUTION_STATE_ENV_VAR_NAME).ok()?; use deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
let state: NpmProcessState = serde_json::from_str(&state).ok()?; let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
// remove the environment variable so that sub processes std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
// that are spawned do not also use this. let fd = fd.parse::<usize>().ok()?;
std::env::remove_var(NPM_RESOLUTION_STATE_ENV_VAR_NAME); let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// seek to beginning. after the file is written the position will be inherited by this subprocess,
// and also this file might have been read before
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state) Some(state)
}); });
@ -742,13 +797,13 @@ pub struct CliOptions {
// application need not concern itself with, so keep these private // application need not concern itself with, so keep these private
flags: Arc<Flags>, flags: Arc<Flags>,
initial_cwd: PathBuf, initial_cwd: PathBuf,
main_module_cell: std::sync::OnceLock<Result<ModuleSpecifier, AnyError>>,
maybe_node_modules_folder: Option<PathBuf>, maybe_node_modules_folder: Option<PathBuf>,
npmrc: Arc<ResolvedNpmRc>, npmrc: Arc<ResolvedNpmRc>,
maybe_lockfile: Option<Arc<CliLockfile>>, maybe_lockfile: Option<Arc<CliLockfile>>,
overrides: CliOptionOverrides, overrides: CliOptionOverrides,
pub start_dir: Arc<WorkspaceDirectory>, pub start_dir: Arc<WorkspaceDirectory>,
pub disable_deprecated_api_warning: bool, pub deno_dir_provider: Arc<DenoDirProvider>,
pub verbose_deprecated_api_warning: bool,
} }
impl CliOptions { impl CliOptions {
@ -770,32 +825,24 @@ impl CliOptions {
}; };
let msg = let msg =
format!("DANGER: TLS certificate validation is disabled {}", domains); format!("DANGER: TLS certificate validation is disabled {}", domains);
#[allow(clippy::print_stderr)]
{ {
// use eprintln instead of log::warn so this always gets shown log::error!("{}", colors::yellow(msg));
eprintln!("{}", colors::yellow(msg));
} }
} }
let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache); let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache);
let root_folder = start_dir.workspace.root_folder_configs(); let deno_dir_provider =
Arc::new(DenoDirProvider::new(flags.internal.cache_path.clone()));
let maybe_node_modules_folder = resolve_node_modules_folder( let maybe_node_modules_folder = resolve_node_modules_folder(
&initial_cwd, &initial_cwd,
&flags, &flags,
root_folder.deno_json.as_deref(), &start_dir.workspace,
root_folder.pkg_json.as_deref(), &deno_dir_provider,
) )
.with_context(|| "Resolving node_modules folder.")?; .with_context(|| "Resolving node_modules folder.")?;
load_env_variables_from_env_file(flags.env_file.as_ref()); load_env_variables_from_env_file(flags.env_file.as_ref());
let disable_deprecated_api_warning = flags.log_level
== Some(log::Level::Error)
|| std::env::var("DENO_NO_DEPRECATION_WARNINGS").ok().is_some();
let verbose_deprecated_api_warning =
std::env::var("DENO_VERBOSE_WARNINGS").ok().is_some();
Ok(Self { Ok(Self {
flags, flags,
initial_cwd, initial_cwd,
@ -803,9 +850,9 @@ impl CliOptions {
npmrc, npmrc,
maybe_node_modules_folder, maybe_node_modules_folder,
overrides: Default::default(), overrides: Default::default(),
main_module_cell: std::sync::OnceLock::new(),
start_dir, start_dir,
disable_deprecated_api_warning, deno_dir_provider,
verbose_deprecated_api_warning,
}) })
} }
@ -823,12 +870,8 @@ impl CliOptions {
} else { } else {
&[] &[]
}; };
let config_parse_options = deno_config::deno_json::ConfigParseOptions { let config_parse_options =
include_task_comments: matches!( deno_config::deno_json::ConfigParseOptions::default();
flags.subcommand,
DenoSubcommand::Task(..)
),
};
let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled
&& !flags.no_npm && !flags.no_npm
&& !has_flag_env_var("DENO_NO_PACKAGE_JSON"); && !has_flag_env_var("DENO_NO_PACKAGE_JSON");
@ -921,6 +964,9 @@ impl CliOptions {
match self.sub_command() { match self.sub_command() {
DenoSubcommand::Cache(_) => GraphKind::All, DenoSubcommand::Cache(_) => GraphKind::All,
DenoSubcommand::Check(_) => GraphKind::TypesOnly, DenoSubcommand::Check(_) => GraphKind::TypesOnly,
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(_),
}) => GraphKind::All,
_ => self.type_check_mode().as_graph_kind(), _ => self.type_check_mode().as_graph_kind(),
} }
} }
@ -1044,34 +1090,20 @@ impl CliOptions {
None => None, None => None,
} }
}; };
Ok( Ok(self.workspace().create_resolver(
self CreateResolverOptions {
.start_dir pkg_json_dep_resolution,
.create_resolver( specified_import_map: cli_arg_specified_import_map,
CreateResolverOptions { },
pkg_json_dep_resolution, |path| Ok(std::fs::read_to_string(path)?),
specified_import_map: cli_arg_specified_import_map, )?)
},
|specifier| {
let specifier = specifier.clone();
async move {
let file = file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all())
.await?
.into_text_decoded()?;
Ok(file.source.to_string())
}
},
)
.await?,
)
} }
pub fn node_ipc_fd(&self) -> Option<i64> { pub fn node_ipc_fd(&self) -> Option<i64> {
let maybe_node_channel_fd = std::env::var("DENO_CHANNEL_FD").ok(); let maybe_node_channel_fd = std::env::var("NODE_CHANNEL_FD").ok();
if let Some(node_channel_fd) = maybe_node_channel_fd { if let Some(node_channel_fd) = maybe_node_channel_fd {
// Remove so that child processes don't inherit this environment variable. // Remove so that child processes don't inherit this environment variable.
std::env::remove_var("DENO_CHANNEL_FD"); std::env::remove_var("NODE_CHANNEL_FD");
node_channel_fd.parse::<i64>().ok() node_channel_fd.parse::<i64>().ok()
} else { } else {
None None
@ -1094,53 +1126,58 @@ impl CliOptions {
} }
} }
pub fn env_file_name(&self) -> Option<&String> { pub fn otel_config(&self) -> Option<OtelConfig> {
self.flags.otel_config()
}
pub fn env_file_name(&self) -> Option<&Vec<String>> {
self.flags.env_file.as_ref() self.flags.env_file.as_ref()
} }
pub fn enable_future_features(&self) -> bool { pub fn resolve_main_module(&self) -> Result<&ModuleSpecifier, AnyError> {
*DENO_FUTURE self
} .main_module_cell
.get_or_init(|| {
pub fn resolve_main_module(&self) -> Result<ModuleSpecifier, AnyError> { Ok(match &self.flags.subcommand {
let main_module = match &self.flags.subcommand { DenoSubcommand::Compile(compile_flags) => {
DenoSubcommand::Bundle(bundle_flags) => { resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
resolve_url_or_path(&bundle_flags.source_file, self.initial_cwd())? }
} DenoSubcommand::Eval(_) => {
DenoSubcommand::Compile(compile_flags) => { resolve_url_or_path("./$deno$eval.mts", self.initial_cwd())?
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())? }
} DenoSubcommand::Repl(_) => {
DenoSubcommand::Eval(_) => { resolve_url_or_path("./$deno$repl.mts", self.initial_cwd())?
resolve_url_or_path("./$deno$eval", self.initial_cwd())? }
} DenoSubcommand::Run(run_flags) => {
DenoSubcommand::Repl(_) => { if run_flags.is_stdin() {
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())? resolve_url_or_path("./$deno$stdin.mts", self.initial_cwd())?
} } else {
DenoSubcommand::Run(run_flags) => { let url =
if run_flags.is_stdin() { resolve_url_or_path(&run_flags.script, self.initial_cwd())?;
std::env::current_dir() if self.is_node_main()
.context("Unable to get CWD") && url.scheme() == "file"
.and_then(|cwd| { && MediaType::from_specifier(&url) == MediaType::Unknown
resolve_url_or_path("./$deno$stdin.ts", &cwd) {
.map_err(AnyError::from) try_resolve_node_binary_main_entrypoint(
})? &run_flags.script,
} else if run_flags.watch.is_some() { self.initial_cwd(),
resolve_url_or_path(&run_flags.script, self.initial_cwd())? )?
} else if NpmPackageReqReference::from_str(&run_flags.script).is_ok() { .unwrap_or(url)
ModuleSpecifier::parse(&run_flags.script)? } else {
} else { url
resolve_url_or_path(&run_flags.script, self.initial_cwd())? }
} }
} }
DenoSubcommand::Serve(run_flags) => { DenoSubcommand::Serve(run_flags) => {
resolve_url_or_path(&run_flags.script, self.initial_cwd())? resolve_url_or_path(&run_flags.script, self.initial_cwd())?
} }
_ => { _ => {
bail!("No main module.") bail!("No main module.")
} }
}; })
})
Ok(main_module) .as_ref()
.map_err(|err| deno_core::anyhow::anyhow!("{}", err))
} }
pub fn resolve_file_header_overrides( pub fn resolve_file_header_overrides(
@ -1161,7 +1198,7 @@ impl CliOptions {
(maybe_main_specifier, maybe_content_type) (maybe_main_specifier, maybe_content_type)
{ {
HashMap::from([( HashMap::from([(
main_specifier, main_specifier.clone(),
HashMap::from([("content-type".to_string(), content_type.to_string())]), HashMap::from([("content-type".to_string(), content_type.to_string())]),
)]) )])
} else { } else {
@ -1186,15 +1223,10 @@ impl CliOptions {
// This is triggered via a secret environment variable which is used // This is triggered via a secret environment variable which is used
// for functionality like child_process.fork. Users should NOT depend // for functionality like child_process.fork. Users should NOT depend
// on this functionality. // on this functionality.
pub fn is_npm_main(&self) -> bool { pub fn is_node_main(&self) -> bool {
NPM_PROCESS_STATE.is_some() NPM_PROCESS_STATE.is_some()
} }
/// Overrides the import map specifier to use.
pub fn set_import_map_specifier(&mut self, path: Option<ModuleSpecifier>) {
self.overrides.import_map_specifier = Some(path);
}
pub fn has_node_modules_dir(&self) -> bool { pub fn has_node_modules_dir(&self) -> bool {
self.maybe_node_modules_folder.is_some() self.maybe_node_modules_folder.is_some()
} }
@ -1203,25 +1235,13 @@ impl CliOptions {
self.maybe_node_modules_folder.as_ref() self.maybe_node_modules_folder.as_ref()
} }
pub fn with_node_modules_dir_path(&self, path: PathBuf) -> Self { pub fn node_modules_dir(
Self { &self,
flags: self.flags.clone(), ) -> Result<Option<NodeModulesDirMode>, AnyError> {
initial_cwd: self.initial_cwd.clone(), if let Some(flag) = self.flags.node_modules_dir {
maybe_node_modules_folder: Some(path), return Ok(Some(flag));
npmrc: self.npmrc.clone(),
maybe_lockfile: self.maybe_lockfile.clone(),
start_dir: self.start_dir.clone(),
overrides: self.overrides.clone(),
disable_deprecated_api_warning: self.disable_deprecated_api_warning,
verbose_deprecated_api_warning: self.verbose_deprecated_api_warning,
} }
} self.workspace().node_modules_dir().map_err(Into::into)
pub fn node_modules_dir_enablement(&self) -> Option<bool> {
self
.flags
.node_modules_dir
.or_else(|| self.workspace().node_modules_dir())
} }
pub fn vendor_dir_path(&self) -> Option<&PathBuf> { pub fn vendor_dir_path(&self) -> Option<&PathBuf> {
@ -1232,23 +1252,7 @@ impl CliOptions {
&self, &self,
config_type: TsConfigType, config_type: TsConfigType,
) -> Result<TsConfigForEmit, AnyError> { ) -> Result<TsConfigForEmit, AnyError> {
let result = self.workspace().resolve_ts_config_for_emit(config_type); self.workspace().resolve_ts_config_for_emit(config_type)
match result {
Ok(mut ts_config_for_emit) => {
if matches!(self.flags.subcommand, DenoSubcommand::Bundle(..)) {
// For backwards compatibility, force `experimentalDecorators` setting
// to true.
*ts_config_for_emit
.ts_config
.0
.get_mut("experimentalDecorators")
.unwrap() = serde_json::Value::Bool(true);
}
Ok(ts_config_for_emit)
}
Err(err) => Err(err),
}
} }
pub fn resolve_inspector_server( pub fn resolve_inspector_server(
@ -1264,7 +1268,10 @@ impl CliOptions {
return Ok(None); return Ok(None);
}; };
Ok(Some(InspectorServer::new(host, version::get_user_agent())?)) Ok(Some(InspectorServer::new(
host,
version::DENO_VERSION_INFO.user_agent,
)?))
} }
pub fn maybe_lockfile(&self) -> Option<&Arc<CliLockfile>> { pub fn maybe_lockfile(&self) -> Option<&Arc<CliLockfile>> {
@ -1301,14 +1308,23 @@ impl CliOptions {
let member_configs = self let member_configs = self
.workspace() .workspace()
.resolve_fmt_config_for_members(&cli_arg_patterns)?; .resolve_fmt_config_for_members(&cli_arg_patterns)?;
let unstable = self.resolve_config_unstable_fmt_options();
let mut result = Vec::with_capacity(member_configs.len()); let mut result = Vec::with_capacity(member_configs.len());
for (ctx, config) in member_configs { for (ctx, config) in member_configs {
let options = FmtOptions::resolve(config, fmt_flags); let options = FmtOptions::resolve(config, unstable.clone(), fmt_flags);
result.push((ctx, options)); result.push((ctx, options));
} }
Ok(result) Ok(result)
} }
pub fn resolve_config_unstable_fmt_options(&self) -> UnstableFmtOptions {
let workspace = self.workspace();
UnstableFmtOptions {
component: workspace.has_unstable("fmt-component"),
sql: workspace.has_unstable("fmt-sql"),
}
}
pub fn resolve_workspace_lint_options( pub fn resolve_workspace_lint_options(
&self, &self,
lint_flags: &LintFlags, lint_flags: &LintFlags,
@ -1346,11 +1362,9 @@ impl CliOptions {
)?; )?;
Ok(deno_lint::linter::LintConfig { Ok(deno_lint::linter::LintConfig {
default_jsx_factory: transpile_options default_jsx_factory: (!transpile_options.jsx_automatic)
.jsx_automatic
.then(|| transpile_options.jsx_factory.clone()), .then(|| transpile_options.jsx_factory.clone()),
default_jsx_fragment_factory: transpile_options default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
.jsx_automatic
.then(|| transpile_options.jsx_fragment_factory.clone()), .then(|| transpile_options.jsx_fragment_factory.clone()),
}) })
} }
@ -1403,17 +1417,6 @@ impl CliOptions {
Ok(result) Ok(result)
} }
pub fn resolve_deno_graph_workspace_members(
&self,
) -> Result<Vec<deno_graph::WorkspaceMember>, AnyError> {
self
.workspace()
.jsr_packages()
.into_iter()
.map(|pkg| config_to_deno_graph_workspace_member(&pkg.config_file))
.collect::<Result<Vec<_>, _>>()
}
/// Vector of user script CLI arguments. /// Vector of user script CLI arguments.
pub fn argv(&self) -> &Vec<String> { pub fn argv(&self) -> &Vec<String> {
&self.flags.argv &self.flags.argv
@ -1465,6 +1468,12 @@ impl CliOptions {
watch: Some(WatchFlagsWithPaths { hmr, .. }), watch: Some(WatchFlagsWithPaths { hmr, .. }),
.. ..
}) = &self.flags.subcommand }) = &self.flags.subcommand
{
*hmr
} else if let DenoSubcommand::Serve(ServeFlags {
watch: Some(WatchFlagsWithPaths { hmr, .. }),
..
}) = &self.flags.subcommand
{ {
*hmr *hmr
} else { } else {
@ -1502,10 +1511,6 @@ impl CliOptions {
&self.flags.location &self.flags.location
} }
pub fn maybe_custom_root(&self) -> &Option<PathBuf> {
&self.flags.cache_path
}
pub fn no_remote(&self) -> bool { pub fn no_remote(&self) -> bool {
self.flags.no_remote self.flags.no_remote
} }
@ -1518,8 +1523,39 @@ impl CliOptions {
&self.flags.permissions &self.flags.permissions
} }
pub fn permissions_options(&self) -> Result<PermissionsOptions, AnyError> { pub fn permissions_options(&self) -> PermissionsOptions {
self.flags.permissions.to_options(Some(&self.initial_cwd)) fn files_to_urls(files: &[String]) -> Vec<Cow<'_, Url>> {
files
.iter()
.filter_map(|f| Url::parse(f).ok().map(Cow::Owned))
.collect()
}
// get a list of urls to imply for --allow-import
let cli_arg_urls = self
.resolve_main_module()
.ok()
.map(|url| vec![Cow::Borrowed(url)])
.or_else(|| match &self.flags.subcommand {
DenoSubcommand::Cache(cache_flags) => {
Some(files_to_urls(&cache_flags.files))
}
DenoSubcommand::Check(check_flags) => {
Some(files_to_urls(&check_flags.files))
}
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(flags),
}) => Url::parse(&flags.module_url)
.ok()
.map(|url| vec![Cow::Owned(url)]),
DenoSubcommand::Doc(DocFlags {
source_files: DocSourceFileFlag::Paths(paths),
..
}) => Some(files_to_urls(paths)),
_ => None,
})
.unwrap_or_default();
self.flags.permissions.to_options(&cli_arg_urls)
} }
pub fn reload_flag(&self) -> bool { pub fn reload_flag(&self) -> bool {
@ -1565,18 +1601,41 @@ impl CliOptions {
&self.flags.unsafely_ignore_certificate_errors &self.flags.unsafely_ignore_certificate_errors
} }
pub fn legacy_unstable_flag(&self) -> bool {
self.flags.unstable_config.legacy_flag_enabled
}
pub fn unstable_bare_node_builtins(&self) -> bool { pub fn unstable_bare_node_builtins(&self) -> bool {
self.flags.unstable_config.bare_node_builtins self.flags.unstable_config.bare_node_builtins
|| self.workspace().has_unstable("bare-node-builtins") || self.workspace().has_unstable("bare-node-builtins")
} }
pub fn detect_cjs(&self) -> bool {
// only enabled when there's a package.json in order to not have a
// perf penalty for non-npm Deno projects of searching for the closest
// package.json beside each module
self.workspace().package_jsons().next().is_some() || self.is_node_main()
}
fn byonm_enabled(&self) -> bool {
// check if enabled via unstable
self.node_modules_dir().ok().flatten() == Some(NodeModulesDirMode::Manual)
|| NPM_PROCESS_STATE
.as_ref()
.map(|s| matches!(s.kind, NpmProcessStateKind::Byonm))
.unwrap_or(false)
}
pub fn use_byonm(&self) -> bool { pub fn use_byonm(&self) -> bool {
if self.enable_future_features() if matches!(
&& self.node_modules_dir_enablement().is_none() self.sub_command(),
DenoSubcommand::Install(_)
| DenoSubcommand::Add(_)
| DenoSubcommand::Remove(_)
| DenoSubcommand::Init(_)
| DenoSubcommand::Outdated(_)
) {
// For `deno install/add/remove/init` we want to force the managed resolver so it can set up `node_modules/` directory.
return false;
}
if self.node_modules_dir().ok().flatten().is_none()
&& self.maybe_node_modules_folder.is_some()
&& self && self
.workspace() .workspace()
.config_folders() .config_folders()
@ -1586,13 +1645,7 @@ impl CliOptions {
return true; return true;
} }
// check if enabled via unstable self.byonm_enabled()
self.flags.unstable_config.byonm
|| NPM_PROCESS_STATE
.as_ref()
.map(|s| matches!(s.kind, NpmProcessStateKind::Byonm))
.unwrap_or(false)
|| self.workspace().has_unstable("byonm")
} }
pub fn unstable_sloppy_imports(&self) -> bool { pub fn unstable_sloppy_imports(&self) -> bool {
@ -1614,31 +1667,18 @@ impl CliOptions {
} }
}); });
if *DENO_FUTURE {
let future_features = [
deno_runtime::deno_ffi::UNSTABLE_FEATURE_NAME.to_string(),
deno_runtime::deno_fs::UNSTABLE_FEATURE_NAME.to_string(),
deno_runtime::deno_webgpu::UNSTABLE_FEATURE_NAME.to_string(),
];
future_features.iter().for_each(|future_feature| {
if !from_config_file.contains(future_feature) {
from_config_file.push(future_feature.to_string());
}
});
}
if !from_config_file.is_empty() { if !from_config_file.is_empty() {
// collect unstable granular flags let all_valid_unstable_flags: Vec<&str> = crate::UNSTABLE_GRANULAR_FLAGS
let mut all_valid_unstable_flags: Vec<&str> = .iter()
crate::UNSTABLE_GRANULAR_FLAGS .map(|granular_flag| granular_flag.name)
.iter() .chain([
.map(|granular_flag| granular_flag.0) "sloppy-imports",
.collect(); "byonm",
"bare-node-builtins",
let mut another_unstable_flags = "fmt-component",
Vec::from(["sloppy-imports", "byonm", "bare-node-builtins"]); "fmt-sql",
// add more unstable flags to the same vector holding granular flags ])
all_valid_unstable_flags.append(&mut another_unstable_flags); .collect();
// check and warn if the unstable flag of config file isn't supported, by // check and warn if the unstable flag of config file isn't supported, by
// iterating through the vector holding the unstable flags // iterating through the vector holding the unstable flags
@ -1671,6 +1711,10 @@ impl CliOptions {
if let DenoSubcommand::Run(RunFlags { if let DenoSubcommand::Run(RunFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }), watch: Some(WatchFlagsWithPaths { paths, .. }),
.. ..
})
| DenoSubcommand::Serve(ServeFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }),
..
}) = &self.flags.subcommand }) = &self.flags.subcommand
{ {
full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path))); full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path)));
@ -1701,14 +1745,14 @@ impl CliOptions {
pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig { pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig {
LifecycleScriptsConfig { LifecycleScriptsConfig {
allowed: self.flags.allow_scripts.clone(), allowed: self.flags.allow_scripts.clone(),
initial_cwd: if matches!( initial_cwd: self.initial_cwd.clone(),
self.flags.allow_scripts, root_dir: self.workspace().root_dir_path(),
PackagesAllowedScripts::None explicit_install: matches!(
) { self.sub_command(),
None DenoSubcommand::Install(_)
} else { | DenoSubcommand::Cache(_)
Some(self.initial_cwd.clone()) | DenoSubcommand::Add(_)
}, ),
} }
} }
} }
@ -1717,34 +1761,89 @@ impl CliOptions {
fn resolve_node_modules_folder( fn resolve_node_modules_folder(
cwd: &Path, cwd: &Path,
flags: &Flags, flags: &Flags,
maybe_config_file: Option<&ConfigFile>, workspace: &Workspace,
maybe_package_json: Option<&PackageJson>, deno_dir_provider: &Arc<DenoDirProvider>,
) -> Result<Option<PathBuf>, AnyError> { ) -> Result<Option<PathBuf>, AnyError> {
let use_node_modules_dir = flags fn resolve_from_root(root_folder: &FolderConfigs, cwd: &Path) -> PathBuf {
.node_modules_dir root_folder
.or_else(|| maybe_config_file.and_then(|c| c.json.node_modules_dir)) .deno_json
.or(flags.vendor) .as_ref()
.or_else(|| maybe_config_file.and_then(|c| c.json.vendor)); .map(|c| Cow::Owned(c.dir_path()))
.or_else(|| {
root_folder
.pkg_json
.as_ref()
.map(|c| Cow::Borrowed(c.dir_path()))
})
.unwrap_or(Cow::Borrowed(cwd))
.join("node_modules")
}
let root_folder = workspace.root_folder_configs();
let use_node_modules_dir = if let Some(mode) = flags.node_modules_dir {
Some(mode.uses_node_modules_dir())
} else {
workspace
.node_modules_dir()?
.map(|m| m.uses_node_modules_dir())
.or(flags.vendor)
.or_else(|| root_folder.deno_json.as_ref().and_then(|c| c.json.vendor))
};
let path = if use_node_modules_dir == Some(false) { let path = if use_node_modules_dir == Some(false) {
return Ok(None); return Ok(None);
} else if let Some(state) = &*NPM_PROCESS_STATE { } else if let Some(state) = &*NPM_PROCESS_STATE {
return Ok(state.local_node_modules_path.as_ref().map(PathBuf::from)); return Ok(state.local_node_modules_path.as_ref().map(PathBuf::from));
} else if let Some(package_json_path) = maybe_package_json.map(|c| &c.path) { } else if root_folder.pkg_json.is_some() {
// always auto-discover the local_node_modules_folder when a package.json exists let node_modules_dir = resolve_from_root(root_folder, cwd);
package_json_path.parent().unwrap().join("node_modules") if let Ok(deno_dir) = deno_dir_provider.get_or_create() {
// `deno_dir.root` can be symlink in macOS
if let Ok(root) = canonicalize_path_maybe_not_exists(&deno_dir.root) {
if node_modules_dir.starts_with(root) {
// if the package.json is in deno_dir, then do not use node_modules
// next to it as local node_modules dir
return Ok(None);
}
}
}
node_modules_dir
} else if use_node_modules_dir.is_none() { } else if use_node_modules_dir.is_none() {
return Ok(None); return Ok(None);
} else if let Some(config_path) = maybe_config_file
.as_ref()
.and_then(|c| c.specifier.to_file_path().ok())
{
config_path.parent().unwrap().join("node_modules")
} else { } else {
cwd.join("node_modules") resolve_from_root(root_folder, cwd)
}; };
Ok(Some(canonicalize_path_maybe_not_exists(&path)?)) Ok(Some(canonicalize_path_maybe_not_exists(&path)?))
} }
fn try_resolve_node_binary_main_entrypoint(
specifier: &str,
initial_cwd: &Path,
) -> Result<Option<Url>, AnyError> {
// node allows running files at paths without a `.js` extension
// or at directories with an index.js file
let path = deno_core::normalize_path(initial_cwd.join(specifier));
if path.is_dir() {
let index_file = path.join("index.js");
Ok(if index_file.is_file() {
Some(deno_path_util::url_from_file_path(&index_file)?)
} else {
None
})
} else {
let path = path.with_extension(
path
.extension()
.and_then(|s| s.to_str())
.map(|s| format!("{}.js", s))
.unwrap_or("js".to_string()),
);
if path.is_file() {
Ok(Some(deno_path_util::url_from_file_path(&path)?))
} else {
Ok(None)
}
}
}
fn resolve_import_map_specifier( fn resolve_import_map_specifier(
maybe_import_map_path: Option<&str>, maybe_import_map_path: Option<&str>,
maybe_config_file: Option<&ConfigFile>, maybe_config_file: Option<&ConfigFile>,
@ -1815,6 +1914,10 @@ pub fn resolve_no_prompt(flags: &PermissionFlags) -> bool {
flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT") flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT")
} }
pub fn has_trace_permissions_enabled() -> bool {
has_flag_env_var("DENO_TRACE_PERMISSIONS")
}
pub fn has_flag_env_var(name: &str) -> bool { pub fn has_flag_env_var(name: &str) -> bool {
let value = env::var(name); let value = env::var(name);
matches!(value.as_ref().map(|s| s.as_str()), Ok("1")) matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
@ -1830,36 +1933,38 @@ pub fn npm_pkg_req_ref_to_binary_command(
pub fn config_to_deno_graph_workspace_member( pub fn config_to_deno_graph_workspace_member(
config: &ConfigFile, config: &ConfigFile,
) -> Result<deno_graph::WorkspaceMember, AnyError> { ) -> Result<deno_graph::WorkspaceMember, AnyError> {
let nv = deno_semver::package::PackageNv { let name = match &config.json.name {
name: match &config.json.name { Some(name) => name.clone(),
Some(name) => name.clone(), None => bail!("Missing 'name' field in config file."),
None => bail!("Missing 'name' field in config file."), };
}, let version = match &config.json.version {
version: match &config.json.version { Some(name) => Some(deno_semver::Version::parse_standard(name)?),
Some(name) => deno_semver::Version::parse_standard(name)?, None => None,
None => bail!("Missing 'version' field in config file."),
},
}; };
Ok(deno_graph::WorkspaceMember { Ok(deno_graph::WorkspaceMember {
base: config.specifier.join("./").unwrap(), base: config.specifier.join("./").unwrap(),
nv, name,
version,
exports: config.to_exports_config()?.into_map(), exports: config.to_exports_config()?.into_map(),
}) })
} }
fn load_env_variables_from_env_file(filename: Option<&String>) { fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) {
let Some(env_file_name) = filename else { let Some(env_file_names) = filename else {
return; return;
}; };
match from_filename(env_file_name) {
Ok(_) => (), for env_file_name in env_file_names.iter().rev() {
Err(error) => { match from_filename(env_file_name) {
match error { Ok(_) => (),
Err(error) => {
match error {
dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",colors::yellow("Warning"), env_file_name, index, line), dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",colors::yellow("Warning"), env_file_name, index, line),
dotenvy::Error::Io(_)=> log::info!("{} The `--env` flag was used, but the environment file specified '{}' was not found.",colors::yellow("Warning"),env_file_name), dotenvy::Error::Io(_)=> log::info!("{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",colors::yellow("Warning"),env_file_name),
dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",colors::yellow("Warning"),env_file_name), dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",colors::yellow("Warning"),env_file_name),
_ => log::info!("{} Unknown failure occurred with the specified environment file: {}", colors::yellow("Warning"), env_file_name), _ => log::info!("{} Unknown failure occurred with the specified environment file: {}", colors::yellow("Warning"), env_file_name),
} }
}
} }
} }
} }

View file

@ -4,102 +4,172 @@ use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use thiserror::Error;
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmRemotePkg { pub struct InstallNpmRemotePkg {
pub alias: String, pub alias: Option<String>,
// todo(24419): use this when setting up the node_modules dir
#[allow(dead_code)]
pub base_dir: PathBuf, pub base_dir: PathBuf,
pub req: PackageReq, pub req: PackageReq,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmWorkspacePkg { pub struct InstallNpmWorkspacePkg {
pub alias: String, pub alias: Option<String>,
// todo(24419): use this when setting up the node_modules dir
#[allow(dead_code)]
pub base_dir: PathBuf,
pub target_dir: PathBuf, pub target_dir: PathBuf,
} }
#[derive(Debug, Default)] #[derive(Debug, Error, Clone)]
pub struct PackageJsonInstallDepsProvider { #[error("Failed to install '{}'\n at {}", alias, location)]
remote_pkgs: Vec<InstallNpmRemotePkg>, pub struct PackageJsonDepValueParseWithLocationError {
workspace_pkgs: Vec<InstallNpmWorkspacePkg>, pub location: Url,
pub alias: String,
#[source]
pub source: PackageJsonDepValueParseError,
} }
impl PackageJsonInstallDepsProvider { #[derive(Debug, Default)]
pub struct NpmInstallDepsProvider {
remote_pkgs: Vec<InstallNpmRemotePkg>,
workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
pkg_json_dep_errors: Vec<PackageJsonDepValueParseWithLocationError>,
}
impl NpmInstallDepsProvider {
pub fn empty() -> Self { pub fn empty() -> Self {
Self::default() Self::default()
} }
pub fn from_workspace(workspace: &Arc<Workspace>) -> Self { pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
// todo(dsherret): estimate capacity?
let mut workspace_pkgs = Vec::new(); let mut workspace_pkgs = Vec::new();
let mut remote_pkgs = Vec::new(); let mut remote_pkgs = Vec::new();
let mut pkg_json_dep_errors = Vec::new();
let workspace_npm_pkgs = workspace.npm_packages(); let workspace_npm_pkgs = workspace.npm_packages();
for pkg_json in workspace.package_jsons() {
let deps = pkg_json.resolve_local_package_json_deps(); for (_, folder) in workspace.config_folders() {
let mut pkg_pkgs = Vec::with_capacity(deps.len()); // deal with the deno.json first because it takes precedence during resolution
for (alias, dep) in deps { if let Some(deno_json) = &folder.deno_json {
let Ok(dep) = dep else { // don't bother with externally referenced import maps as users
continue; // should inline their import map to get this behaviour
}; if let Some(serde_json::Value::Object(obj)) = &deno_json.json.imports {
match dep { let mut pkg_pkgs = Vec::with_capacity(obj.len());
PackageJsonDepValue::Req(pkg_req) => { for (_alias, value) in obj {
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| { let serde_json::Value::String(specifier) = value else {
pkg.matches_req(&pkg_req) continue;
// do not resolve to the current package };
&& pkg.pkg_json.path != pkg_json.path let Ok(npm_req_ref) = NpmPackageReqReference::from_str(specifier)
}); else {
continue;
};
let pkg_req = npm_req_ref.into_inner().req;
let workspace_pkg = workspace_npm_pkgs
.iter()
.find(|pkg| pkg.matches_req(&pkg_req));
if let Some(pkg) = workspace_pkg { if let Some(pkg) = workspace_pkg {
workspace_pkgs.push(InstallNpmWorkspacePkg { workspace_pkgs.push(InstallNpmWorkspacePkg {
alias, alias: None,
base_dir: pkg_json.dir_path().to_path_buf(),
target_dir: pkg.pkg_json.dir_path().to_path_buf(), target_dir: pkg.pkg_json.dir_path().to_path_buf(),
}); });
} else { } else {
pkg_pkgs.push(InstallNpmRemotePkg { pkg_pkgs.push(InstallNpmRemotePkg {
alias, alias: None,
base_dir: pkg_json.dir_path().to_path_buf(), base_dir: deno_json.dir_path(),
req: pkg_req, req: pkg_req,
}); });
} }
} }
PackageJsonDepValue::Workspace(version_req) => {
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| { // sort within each package (more like npm resolution)
pkg.matches_name_and_version_req(&alias, &version_req) pkg_pkgs.sort_by(|a, b| a.req.cmp(&b.req));
}) { remote_pkgs.extend(pkg_pkgs);
workspace_pkgs.push(InstallNpmWorkspacePkg { }
alias, }
base_dir: pkg_json.dir_path().to_path_buf(),
target_dir: pkg.pkg_json.dir_path().to_path_buf(), if let Some(pkg_json) = &folder.pkg_json {
let deps = pkg_json.resolve_local_package_json_deps();
let mut pkg_pkgs = Vec::with_capacity(deps.len());
for (alias, dep) in deps {
let dep = match dep {
Ok(dep) => dep,
Err(err) => {
pkg_json_dep_errors.push(
PackageJsonDepValueParseWithLocationError {
location: pkg_json.specifier(),
alias,
source: err,
},
);
continue;
}
};
match dep {
PackageJsonDepValue::Req(pkg_req) => {
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_req(&pkg_req)
// do not resolve to the current package
&& pkg.pkg_json.path != pkg_json.path
}); });
if let Some(pkg) = workspace_pkg {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias: Some(alias),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
} else {
pkg_pkgs.push(InstallNpmRemotePkg {
alias: Some(alias),
base_dir: pkg_json.dir_path().to_path_buf(),
req: pkg_req,
});
}
}
PackageJsonDepValue::Workspace(version_req) => {
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_name_and_version_req(&alias, &version_req)
}) {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias: Some(alias),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
}
} }
} }
} }
}
// sort within each package
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
remote_pkgs.extend(pkg_pkgs); // sort within each package as npm does
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
remote_pkgs.extend(pkg_pkgs);
}
} }
remote_pkgs.shrink_to_fit(); remote_pkgs.shrink_to_fit();
workspace_pkgs.shrink_to_fit(); workspace_pkgs.shrink_to_fit();
Self { Self {
remote_pkgs, remote_pkgs,
workspace_pkgs, workspace_pkgs,
pkg_json_dep_errors,
} }
} }
pub fn remote_pkgs(&self) -> &Vec<InstallNpmRemotePkg> { pub fn remote_pkgs(&self) -> &[InstallNpmRemotePkg] {
&self.remote_pkgs &self.remote_pkgs
} }
pub fn workspace_pkgs(&self) -> &Vec<InstallNpmWorkspacePkg> { pub fn workspace_pkgs(&self) -> &[InstallNpmWorkspacePkg] {
&self.workspace_pkgs &self.workspace_pkgs
} }
pub fn pkg_json_dep_errors(
&self,
) -> &[PackageJsonDepValueParseWithLocationError] {
&self.pkg_json_dep_errors
}
} }

View file

@ -123,19 +123,19 @@ impl AuthTokens {
pub fn new(maybe_tokens_str: Option<String>) -> Self { pub fn new(maybe_tokens_str: Option<String>) -> Self {
let mut tokens = Vec::new(); let mut tokens = Vec::new();
if let Some(tokens_str) = maybe_tokens_str { if let Some(tokens_str) = maybe_tokens_str {
for token_str in tokens_str.split(';') { for token_str in tokens_str.trim().split(';') {
if token_str.contains('@') { if token_str.contains('@') {
let pair: Vec<&str> = token_str.rsplitn(2, '@').collect(); let mut iter = token_str.rsplitn(2, '@');
let token = pair[1]; let host = AuthDomain::from(iter.next().unwrap());
let host = AuthDomain::from(pair[0]); let token = iter.next().unwrap();
if token.contains(':') { if token.contains(':') {
let pair: Vec<&str> = token.rsplitn(2, ':').collect(); let mut iter = token.rsplitn(2, ':');
let username = pair[1].to_string(); let password = iter.next().unwrap().to_owned();
let password = pair[0].to_string(); let username = iter.next().unwrap().to_owned();
tokens.push(AuthToken { tokens.push(AuthToken {
host, host,
token: AuthTokenData::Basic { username, password }, token: AuthTokenData::Basic { username, password },
}) });
} else { } else {
tokens.push(AuthToken { tokens.push(AuthToken {
host, host,
@ -211,6 +211,40 @@ mod tests {
); );
} }
#[test]
fn test_auth_tokens_space() {
let auth_tokens = AuthTokens::new(Some(
" abc123@deno.land;def456@example.com\t".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_newline() {
let auth_tokens = AuthTokens::new(Some(
"\nabc123@deno.land;def456@example.com\n".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test] #[test]
fn test_auth_tokens_port() { fn test_auth_tokens_port() {
let auth_tokens = let auth_tokens =

View file

@ -1,3 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
const count = 100000; const count = 100000;
for (let i = 0; i < count; i++) console.log("Hello World"); for (let i = 0; i < count; i++) console.log("Hello World");

View file

@ -46,8 +46,7 @@ Deno.bench("b64_rt_short", { n: 1e6 }, () => {
const buf = new Uint8Array(100); const buf = new Uint8Array(100);
const file = Deno.openSync("/dev/zero"); const file = Deno.openSync("/dev/zero");
Deno.bench("read_zero", { n: 5e5 }, () => { Deno.bench("read_zero", { n: 5e5 }, () => {
// deno-lint-ignore no-deprecated-deno-api file.readSync(buf);
Deno.readSync(file.rid, buf);
}); });
} }

View file

@ -1,4 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args
: [process.argv[2], process.argv[3]]; : [process.argv[2], process.argv[3]];

View file

@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
let total = 5; let total = 5;
let current = ""; let current = "";

View file

@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
/** @jsx h */ /** @jsx h */
import results from "./deno.json" assert { type: "json" }; import results from "./deno.json" assert { type: "json" };

View file

@ -1,4 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args
: [process.argv[2], process.argv[3]]; : [process.argv[2], process.argv[3]];

View file

@ -1,167 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::net::TcpStream;
use std::path::Path;
use std::process::Command;
use std::sync::atomic::AtomicU16;
use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::Instant;
use super::Result;
pub use test_util::parse_wrk_output;
pub use test_util::WrkOutput as HttpBenchmarkResult;
// Some of the benchmarks in this file have been renamed. In case the history
// somehow gets messed up:
// "node_http" was once called "node"
// "deno_tcp" was once called "deno"
// "deno_http" was once called "deno_net_http"
const DURATION: &str = "10s";
pub fn benchmark(
target_path: &Path,
) -> Result<HashMap<String, HttpBenchmarkResult>> {
let deno_exe = test_util::deno_exe_path();
let deno_exe = deno_exe.to_string();
let hyper_hello_exe = target_path.join("test_server");
let hyper_hello_exe = hyper_hello_exe.to_str().unwrap();
let mut res = HashMap::new();
let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
let http_dir = manifest_dir.join("bench").join("http");
for entry in std::fs::read_dir(&http_dir)? {
let entry = entry?;
let pathbuf = entry.path();
let path = pathbuf.to_str().unwrap();
if path.ends_with(".lua") {
continue;
}
let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap();
let lua_script = http_dir.join(format!("{file_stem}.lua"));
let mut maybe_lua = None;
if lua_script.exists() {
maybe_lua = Some(lua_script.to_str().unwrap());
}
let port = get_port();
// deno run -A --unstable <path> <addr>
res.insert(
file_stem.to_string(),
run(
&[
deno_exe.as_str(),
"run",
"--allow-all",
"--unstable",
"--enable-testing-features-do-not-use",
path,
&server_addr(port),
],
port,
None,
None,
maybe_lua,
)?,
);
}
res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?);
Ok(res)
}
fn run(
server_cmd: &[&str],
port: u16,
env: Option<Vec<(String, String)>>,
origin_cmd: Option<&[&str]>,
lua_script: Option<&str>,
) -> Result<HttpBenchmarkResult> {
// Wait for port 4544 to become available.
// TODO Need to use SO_REUSEPORT with tokio::net::TcpListener.
std::thread::sleep(Duration::from_secs(5));
let mut origin = None;
if let Some(cmd) = origin_cmd {
let mut com = Command::new(cmd[0]);
com.args(&cmd[1..]);
if let Some(env) = env.clone() {
com.envs(env);
}
origin = Some(com.spawn()?);
};
println!("{}", server_cmd.join(" "));
let mut server = {
let mut com = Command::new(server_cmd[0]);
com.args(&server_cmd[1..]);
if let Some(env) = env {
com.envs(env);
}
com.spawn()?
};
// Wait for server to wake up.
let now = Instant::now();
let addr = format!("127.0.0.1:{port}");
while now.elapsed().as_secs() < 30 {
if TcpStream::connect(&addr).is_ok() {
break;
}
std::thread::sleep(Duration::from_millis(10));
}
TcpStream::connect(&addr).expect("Failed to connect to server in time");
println!("Server took {} ms to start", now.elapsed().as_millis());
let wrk = test_util::prebuilt_tool_path("wrk");
assert!(wrk.is_file());
let addr = format!("http://{addr}/");
let wrk = wrk.to_string();
let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr];
if let Some(lua_script) = lua_script {
wrk_cmd.push("-s");
wrk_cmd.push(lua_script);
}
println!("{}", wrk_cmd.join(" "));
let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0;
std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy.
println!("{output}");
assert!(
server.try_wait()?.map(|s| s.success()).unwrap_or(true),
"server ended with error"
);
server.kill()?;
if let Some(mut origin) = origin {
origin.kill()?;
}
Ok(parse_wrk_output(&output))
}
static NEXT_PORT: AtomicU16 = AtomicU16::new(4544);
pub(crate) fn get_port() -> u16 {
let p = NEXT_PORT.load(Ordering::SeqCst);
NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst);
p
}
fn server_addr(port: u16) -> String {
format!("0.0.0.0:{port}")
}
fn hyper_http(exe: &str) -> Result<HttpBenchmarkResult> {
let port = get_port();
println!("http_benchmark testing RUST hyper");
run(&[exe, &port.to_string()], port, None, None, None)
}

View file

@ -1,10 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts";
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const app = new Hono();
app.get("/", (c) => c.text("Hello, World!"));
Deno.serve({ port: Number(port), hostname }, app.fetch);

View file

@ -1,14 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
const path = new URL("../testdata/128k.bin", import.meta.url).pathname;
function handler() {
const file = Deno.openSync(path);
return new Response(file.readable);
}
serve({ hostname, port: Number(port) }, handler);

View file

@ -1,5 +0,0 @@
wrk.headers["foo"] = "bar"
wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
wrk.headers["Viewport-Width"] = "1920"
wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
wrk.headers["Accept-Language"] = "en,la;q=0.9"

View file

@ -1,11 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] ?? "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
function handler() {
return new Response("Hello World");
}
serve({ hostname, port: Number(port), reusePort: true }, handler);

View file

@ -1,5 +0,0 @@
-- wrk benchmark configuration: POST the 128 KiB binary fixture as the
-- request body on every request.
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/octet-stream"
-- Path is relative to the repo root, where wrk is expected to be invoked;
-- the file is read once at script load time.
file = io.open("./cli/bench/testdata/128k.bin", "rb")
wrk.body = file:read("*a")

View file

@ -1,3 +0,0 @@
-- wrk benchmark configuration: POST a small JSON payload on every request.
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/json"
wrk.body = '{"hello":"deno"}'

View file

@ -1,23 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Benchmark server: server-side render a tiny static React page per request.
import { renderToReadableStream } from "https://esm.run/react-dom/server";
import * as React from "https://esm.run/react";
const { serve } = Deno;
// The listen address may be overridden by the first CLI argument.
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
// Static component rendered for every request.
const App = () => (
  <html>
    <body>
      <h1>Hello World</h1>
    </body>
  </html>
);
const headers = {
  headers: {
    "Content-Type": "text/html",
  },
};
serve({ hostname, port: Number(port) }, async () => {
  // Stream the rendered HTML; renderToReadableStream resolves once the
  // shell is ready to flush.
  return new Response(await renderToReadableStream(<App />), headers);
});

View file

@ -1,33 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Used for benchmarking Deno's networking.
// TODO(bartlomieju): Replace this with a real HTTP server once
// https://github.com/denoland/deno/issues/726 is completed.
// Note: this is a keep-alive server.
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const listener = Deno.listen({ hostname, port: Number(port) });
// Canned HTTP/1.1 response; the server never parses the incoming request.
const response = new TextEncoder().encode(
  "HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n",
);
/**
 * Serves a single connection: for every chunk read, write back the canned
 * response until the peer disconnects.
 */
async function handle(conn: Deno.Conn): Promise<void> {
  const buffer = new Uint8Array(1024);
  try {
    while (true) {
      const bytesRead = await conn.read(buffer);
      // `null` signals EOF: the client closed the connection cleanly, so
      // stop instead of writing into a dead socket (the original ignored
      // this and relied on a later write error to break the loop).
      if (bytesRead === null) {
        break;
      }
      await conn.write(response);
    }
  } catch (e) {
    // Disconnect races are expected under benchmark load; anything else
    // is a real bug and is rethrown.
    if (
      !(e instanceof Deno.errors.BrokenPipe) &&
      !(e instanceof Deno.errors.ConnectionReset)
    ) {
      throw e;
    }
  } finally {
    // Always release the connection, even on the rethrow path above
    // (the original leaked the socket when an unexpected error escaped).
    try {
      conn.close();
    } catch {
      // Connection was already closed.
    }
  }
}
console.log("Listening on", addr);
for await (const conn of listener) {
  handle(conn);
}

View file

@ -4,9 +4,10 @@ use deno_core::serde::Deserialize;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::serde_json::Value; use deno_core::serde_json::Value;
use deno_core::url::Url; use lsp_types::Uri;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::str::FromStr;
use std::time::Duration; use std::time::Duration;
use test_util::lsp::LspClientBuilder; use test_util::lsp::LspClientBuilder;
use test_util::PathRef; use test_util::PathRef;
@ -91,7 +92,7 @@ fn bench_deco_apps_edits(deno_exe: &Path) -> Duration {
.build(); .build();
client.initialize(|c| { client.initialize(|c| {
c.set_workspace_folders(vec![lsp_types::WorkspaceFolder { c.set_workspace_folders(vec![lsp_types::WorkspaceFolder {
uri: Url::from_file_path(&apps).unwrap(), uri: apps.uri_dir(),
name: "apps".to_string(), name: "apps".to_string(),
}]); }]);
c.set_deno_enable(true); c.set_deno_enable(true);
@ -149,7 +150,11 @@ fn bench_big_file_edits(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",
@ -205,6 +210,8 @@ fn bench_code_lens(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { client.change_configuration(json!({ "deno": {
"enable": true, "enable": true,
"codeLens": { "codeLens": {
@ -213,6 +220,8 @@ fn bench_code_lens(deno_exe: &Path) -> Duration {
"test": true, "test": true,
}, },
} })); } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",
@ -256,7 +265,11 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
for i in 0..10 { for i in 0..10 {
client.write_notification( client.write_notification(
@ -283,7 +296,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
"textDocument/didChange", "textDocument/didChange",
lsp::DidChangeTextDocumentParams { lsp::DidChangeTextDocumentParams {
text_document: lsp::VersionedTextDocumentIdentifier { text_document: lsp::VersionedTextDocumentIdentifier {
uri: Url::parse(&file_name).unwrap(), uri: Uri::from_str(&file_name).unwrap(),
version: 2, version: 2,
}, },
content_changes: vec![lsp::TextDocumentContentChangeEvent { content_changes: vec![lsp::TextDocumentContentChangeEvent {
@ -310,7 +323,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
"textDocument/formatting", "textDocument/formatting",
lsp::DocumentFormattingParams { lsp::DocumentFormattingParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: Url::parse(&file_name).unwrap(), uri: Uri::from_str(&file_name).unwrap(),
}, },
options: lsp::FormattingOptions { options: lsp::FormattingOptions {
tab_size: 2, tab_size: 2,
@ -340,7 +353,11 @@ fn bench_startup_shutdown(deno_exe: &Path) -> Duration {
.deno_exe(deno_exe) .deno_exe(deno_exe)
.build(); .build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",

View file

@ -13,7 +13,11 @@ use test_util::lsp::LspClientBuilder;
fn incremental_change_wait(bench: &mut Bencher) { fn incremental_change_wait(bench: &mut Bencher) {
let mut client = LspClientBuilder::new().use_diagnostic_sync(false).build(); let mut client = LspClientBuilder::new().use_diagnostic_sync(false).build();
client.initialize_default(); client.initialize_default();
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.change_configuration(json!({ "deno": { "enable": true } })); client.change_configuration(json!({ "deno": { "enable": true } }));
let (method, _): (String, Option<Value>) = client.read_notification();
assert_eq!(method, "deno/didRefreshDenoConfigurationTree");
client.write_notification( client.write_notification(
"textDocument/didOpen", "textDocument/didOpen",

View file

@ -17,7 +17,6 @@ use std::process::Stdio;
use std::time::SystemTime; use std::time::SystemTime;
use test_util::PathRef; use test_util::PathRef;
mod http;
mod lsp; mod lsp;
fn read_json(filename: &Path) -> Result<Value> { fn read_json(filename: &Path) -> Result<Value> {
@ -143,29 +142,6 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
], ],
None, None,
), ),
(
"bundle",
&[
"bundle",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
),
(
"bundle_no_check",
&[
"bundle",
"--no-check",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
),
]; ];
const RESULT_KEYS: &[&str] = const RESULT_KEYS: &[&str] =
@ -314,40 +290,6 @@ fn get_binary_sizes(target_dir: &Path) -> Result<HashMap<String, i64>> {
Ok(sizes) Ok(sizes)
} }
const BUNDLES: &[(&str, &str)] = &[
("file_server", "./tests/util/std/http/file_server.ts"),
("welcome", "./tests/testdata/welcome.ts"),
];
fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
let mut sizes = HashMap::<String, i64>::new();
for (name, url) in BUNDLES {
let path = format!("{name}.bundle.js");
test_util::run(
&[
deno_exe.to_str().unwrap(),
"bundle",
"--unstable",
"--config",
"tests/config/deno.json",
url,
&path,
],
None,
None,
None,
true,
);
let file = PathBuf::from(path);
assert!(file.is_file());
sizes.insert(name.to_string(), file.metadata()?.len() as i64);
let _ = fs::remove_file(file);
}
Ok(sizes)
}
fn run_max_mem_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> { fn run_max_mem_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
let mut results = HashMap::<String, i64>::new(); let mut results = HashMap::<String, i64>::new();
@ -402,9 +344,11 @@ struct BenchResult {
binary_size: HashMap<String, i64>, binary_size: HashMap<String, i64>,
bundle_size: HashMap<String, i64>, bundle_size: HashMap<String, i64>,
cargo_deps: usize, cargo_deps: usize,
// TODO(bartlomieju): remove
max_latency: HashMap<String, f64>, max_latency: HashMap<String, f64>,
max_memory: HashMap<String, i64>, max_memory: HashMap<String, i64>,
lsp_exec_time: HashMap<String, i64>, lsp_exec_time: HashMap<String, i64>,
// TODO(bartlomieju): remove
req_per_sec: HashMap<String, i64>, req_per_sec: HashMap<String, i64>,
syscall_count: HashMap<String, i64>, syscall_count: HashMap<String, i64>,
thread_count: HashMap<String, i64>, thread_count: HashMap<String, i64>,
@ -415,12 +359,10 @@ async fn main() -> Result<()> {
let mut args = env::args(); let mut args = env::args();
let mut benchmarks = vec![ let mut benchmarks = vec![
"bundle",
"exec_time", "exec_time",
"binary_size", "binary_size",
"cargo_deps", "cargo_deps",
"lsp", "lsp",
"http",
"strace", "strace",
"mem_usage", "mem_usage",
]; ];
@ -465,11 +407,6 @@ async fn main() -> Result<()> {
..Default::default() ..Default::default()
}; };
if benchmarks.contains(&"bundle") {
let bundle_size = bundle_benchmark(&deno_exe)?;
new_data.bundle_size = bundle_size;
}
if benchmarks.contains(&"exec_time") { if benchmarks.contains(&"exec_time") {
let exec_times = run_exec_time(&deno_exe, &target_dir)?; let exec_times = run_exec_time(&deno_exe, &target_dir)?;
new_data.benchmark = exec_times; new_data.benchmark = exec_times;
@ -490,21 +427,6 @@ async fn main() -> Result<()> {
new_data.lsp_exec_time = lsp_exec_times; new_data.lsp_exec_time = lsp_exec_times;
} }
if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) {
let stats = http::benchmark(target_dir.as_path())?;
let req_per_sec = stats
.iter()
.map(|(name, result)| (name.clone(), result.requests as i64))
.collect();
new_data.req_per_sec = req_per_sec;
let max_latency = stats
.iter()
.map(|(name, result)| (name.clone(), result.latency))
.collect();
new_data.max_latency = max_latency;
}
if cfg!(target_os = "linux") && benchmarks.contains(&"strace") { if cfg!(target_os = "linux") && benchmarks.contains(&"strace") {
use std::io::Read; use std::io::Read;

View file

@ -1,3 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { bench, run } from "mitata"; import { bench, run } from "mitata";
import { createRequire } from "module"; import { createRequire } from "module";

View file

@ -1,4 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,4 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args
: [process.argv[2], process.argv[3]]; : [process.argv[2], process.argv[3]];

View file

@ -1,5 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// From https://github.com/just-js/benchmarks/tree/main/01-stdio // From https://github.com/just-js/benchmarks/tree/main/01-stdio
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
#include <stdlib.h> #include <stdlib.h>
#include <stdio.h> #include <stdio.h>
@ -26,4 +26,4 @@ int main(int argc, char *argv[]) {
exit(1); exit(1);
} }
fprintf(stdout, "size %lu reads %u blocksize %u\n", size, reads, blocksize); fprintf(stdout, "size %lu reads %u blocksize %u\n", size, reads, blocksize);
} }

View file

@ -2,6 +2,8 @@
// //
// From https://github.com/just-js/benchmarks/tree/main/01-stdio // From https://github.com/just-js/benchmarks/tree/main/01-stdio
// deno-lint-ignore-file no-console
const blocksize = parseInt(Deno.args[0] || 65536); const blocksize = parseInt(Deno.args[0] || 65536);
const buf = new Uint8Array(blocksize); const buf = new Uint8Array(blocksize);
let size = 0; let size = 0;

View file

@ -1,4 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,4 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,5 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console
// Note: when benchmarking across different Deno version, make sure to clear // Note: when benchmarking across different Deno version, make sure to clear
// the DENO_DIR cache. // the DENO_DIR cache.
let [total, count] = typeof Deno !== "undefined" ? Deno.args : []; let [total, count] = typeof Deno !== "undefined" ? Deno.args : [];

View file

@ -1,4 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -5,6 +5,7 @@ use std::path::PathBuf;
use deno_core::snapshot::*; use deno_core::snapshot::*;
use deno_runtime::*; use deno_runtime::*;
mod shared;
mod ts { mod ts {
use super::*; use super::*;
@ -12,7 +13,6 @@ mod ts {
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::op2; use deno_core::op2;
use deno_core::OpState; use deno_core::OpState;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use serde::Serialize; use serde::Serialize;
use std::collections::HashMap; use std::collections::HashMap;
use std::io::Write; use std::io::Write;
@ -24,7 +24,6 @@ mod ts {
struct BuildInfoResponse { struct BuildInfoResponse {
build_specifier: String, build_specifier: String,
libs: Vec<String>, libs: Vec<String>,
node_built_in_module_names: Vec<String>,
} }
#[op2] #[op2]
@ -36,14 +35,9 @@ mod ts {
.iter() .iter()
.map(|s| s.to_string()) .map(|s| s.to_string())
.collect(); .collect();
let node_built_in_module_names = SUPPORTED_BUILTIN_NODE_MODULES
.iter()
.map(|s| s.to_string())
.collect();
BuildInfoResponse { BuildInfoResponse {
build_specifier, build_specifier,
libs: build_libs, libs: build_libs,
node_built_in_module_names,
} }
} }
@ -242,6 +236,7 @@ mod ts {
"esnext.decorators", "esnext.decorators",
"esnext.disposable", "esnext.disposable",
"esnext.intl", "esnext.intl",
"esnext.iterator",
"esnext.object", "esnext.object",
"esnext.promise", "esnext.promise",
"esnext.regexp", "esnext.regexp",
@ -329,20 +324,9 @@ mod ts {
fn create_cli_snapshot(snapshot_path: PathBuf) { fn create_cli_snapshot(snapshot_path: PathBuf) {
use deno_runtime::ops::bootstrap::SnapshotOptions; use deno_runtime::ops::bootstrap::SnapshotOptions;
// NOTE(bartlomieju): keep in sync with `cli/version.rs`.
// Ideally we could deduplicate that code.
fn deno_version() -> String {
if env::var("DENO_CANARY").is_ok() {
format!("{}+{}", env!("CARGO_PKG_VERSION"), &git_commit_hash()[..7])
} else {
env!("CARGO_PKG_VERSION").to_string()
}
}
let snapshot_options = SnapshotOptions { let snapshot_options = SnapshotOptions {
deno_version: deno_version(),
ts_version: ts::version(), ts_version: ts::version(),
v8_version: deno_core::v8_version(), v8_version: deno_core::v8::VERSION_STRING,
target: std::env::var("TARGET").unwrap(), target: std::env::var("TARGET").unwrap(),
}; };
@ -381,6 +365,9 @@ fn main() {
return; return;
} }
deno_napi::print_linker_flags("deno");
deno_napi::print_linker_flags("denort");
// Host snapshots won't work when cross compiling. // Host snapshots won't work when cross compiling.
let target = env::var("TARGET").unwrap(); let target = env::var("TARGET").unwrap();
let host = env::var("HOST").unwrap(); let host = env::var("HOST").unwrap();
@ -390,56 +377,6 @@ fn main() {
panic!("Cross compiling with snapshot is not supported."); panic!("Cross compiling with snapshot is not supported.");
} }
let symbols_file_name = match env::consts::OS {
"android" | "freebsd" | "openbsd" => {
"generated_symbol_exports_list_linux.def".to_string()
}
os => format!("generated_symbol_exports_list_{}.def", os),
};
let symbols_path = std::path::Path::new("napi")
.join(symbols_file_name)
.canonicalize()
.expect(
"Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
);
#[cfg(target_os = "windows")]
println!(
"cargo:rustc-link-arg-bin=deno=/DEF:{}",
symbols_path.display()
);
#[cfg(target_os = "macos")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}",
symbols_path.display()
);
#[cfg(target_os = "linux")]
{
// If a custom compiler is set, the glibc version is not reliable.
// Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list.
if env::var("CC").is_err()
&& glibc_version::get_version()
.map(|ver| ver.major <= 2 && ver.minor < 35)
.unwrap_or(false)
{
println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. Please use glibc 2.35 or later for an optimised build.");
println!("cargo:rustc-link-arg-bin=deno=-rdynamic");
} else {
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
}
}
#[cfg(target_os = "android")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
// To debug snapshot issues uncomment: // To debug snapshot issues uncomment:
// op_fetch_asset::trace_serializer(); // op_fetch_asset::trace_serializer();
@ -456,13 +393,31 @@ fn main() {
); );
let ts_version = ts::version(); let ts_version = ts::version();
debug_assert_eq!(ts_version, "5.5.2"); // bump this assertion when it changes debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
println!("cargo:rustc-env=TS_VERSION={}", ts_version); println!("cargo:rustc-env=TS_VERSION={}", ts_version);
println!("cargo:rerun-if-env-changed=TS_VERSION"); println!("cargo:rerun-if-env-changed=TS_VERSION");
println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap()); println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap()); println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
if cfg!(windows) {
// these dls load slowly, so delay loading them
let dlls = [
// webgpu
"d3dcompiler_47",
"OPENGL32",
// network related functions
"iphlpapi",
];
for dll in dlls {
println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
}
// enable delay loading
println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
}
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let o = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());

View file

@ -57,7 +57,7 @@ impl rusqlite::types::FromSql for CacheDBHash {
} }
/// What should the cache should do on failure? /// What should the cache should do on failure?
#[derive(Default)] #[derive(Debug, Default)]
pub enum CacheFailure { pub enum CacheFailure {
/// Return errors if failure mode otherwise unspecified. /// Return errors if failure mode otherwise unspecified.
#[default] #[default]
@ -69,6 +69,7 @@ pub enum CacheFailure {
} }
/// Configuration SQL and other parameters for a [`CacheDB`]. /// Configuration SQL and other parameters for a [`CacheDB`].
#[derive(Debug)]
pub struct CacheDBConfiguration { pub struct CacheDBConfiguration {
/// SQL to run for a new database. /// SQL to run for a new database.
pub table_initializer: &'static str, pub table_initializer: &'static str,
@ -98,6 +99,7 @@ impl CacheDBConfiguration {
} }
} }
#[derive(Debug)]
enum ConnectionState { enum ConnectionState {
Connected(Connection), Connected(Connection),
Blackhole, Blackhole,
@ -106,7 +108,7 @@ enum ConnectionState {
/// A cache database that eagerly initializes itself off-thread, preventing initialization operations /// A cache database that eagerly initializes itself off-thread, preventing initialization operations
/// from blocking the main thread. /// from blocking the main thread.
#[derive(Clone)] #[derive(Debug, Clone)]
pub struct CacheDB { pub struct CacheDB {
// TODO(mmastrac): We can probably simplify our thread-safe implementation here // TODO(mmastrac): We can probably simplify our thread-safe implementation here
conn: Arc<Mutex<OnceCell<ConnectionState>>>, conn: Arc<Mutex<OnceCell<ConnectionState>>>,
@ -470,7 +472,7 @@ mod tests {
}; };
static TEST_DB_BLACKHOLE: CacheDBConfiguration = CacheDBConfiguration { static TEST_DB_BLACKHOLE: CacheDBConfiguration = CacheDBConfiguration {
table_initializer: "syntax error", // intentially cause an error table_initializer: "syntax error", // intentionally cause an error
on_version_change: "", on_version_change: "",
preheat_queries: &[], preheat_queries: &[],
on_failure: CacheFailure::Blackhole, on_failure: CacheFailure::Blackhole,

8
cli/cache/caches.rs vendored
View file

@ -48,9 +48,13 @@ impl Caches {
cell cell
.get_or_init(|| { .get_or_init(|| {
if let Some(path) = path { if let Some(path) = path {
CacheDB::from_path(config, path, crate::version::deno()) CacheDB::from_path(
config,
path,
crate::version::DENO_VERSION_INFO.deno,
)
} else { } else {
CacheDB::in_memory(config, crate::version::deno()) CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno)
} }
}) })
.clone() .clone()

View file

@ -1,10 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_runtime::code_cache; use deno_runtime::code_cache;
use deno_runtime::deno_webstorage::rusqlite::params; use deno_runtime::deno_webstorage::rusqlite::params;
use crate::worker::CliCodeCache;
use super::cache_db::CacheDB; use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash; use super::cache_db::CacheDBHash;
@ -82,6 +86,12 @@ impl CodeCache {
} }
} }
impl CliCodeCache for CodeCache {
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
self
}
}
impl code_cache::CodeCache for CodeCache { impl code_cache::CodeCache for CodeCache {
fn get_sync( fn get_sync(
&self, &self,

2
cli/cache/common.rs vendored
View file

@ -12,7 +12,7 @@ impl FastInsecureHasher {
pub fn new_deno_versioned() -> Self { pub fn new_deno_versioned() -> Self {
let mut hasher = Self::new_without_deno_version(); let mut hasher = Self::new_without_deno_version();
hasher.write_str(crate::version::deno()); hasher.write_str(crate::version::DENO_VERSION_INFO.deno);
hasher hasher
} }

View file

@ -126,9 +126,9 @@ impl DenoDir {
self.root.join("registries") self.root.join("registries")
} }
/// Path to the dependencies cache folder. /// Path to the remote cache folder.
pub fn deps_folder_path(&self) -> PathBuf { pub fn remote_folder_path(&self) -> PathBuf {
self.root.join("deps") self.root.join("remote")
} }
/// Path to the origin data cache folder. /// Path to the origin data cache folder.

201
cli/cache/emit.rs vendored
View file

@ -5,31 +5,26 @@ use std::path::PathBuf;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::unsync::sync::AtomicFlag;
use serde::Deserialize;
use serde::Serialize;
use super::DiskCache; use super::DiskCache;
use super::FastInsecureHasher;
#[derive(Debug, Deserialize, Serialize)]
struct EmitMetadata {
pub source_hash: u64,
pub emit_hash: u64,
}
/// The cache that stores previously emitted files. /// The cache that stores previously emitted files.
#[derive(Clone)] #[derive(Debug)]
pub struct EmitCache { pub struct EmitCache {
disk_cache: DiskCache, disk_cache: DiskCache,
cli_version: &'static str, emit_failed_flag: AtomicFlag,
file_serializer: EmitFileSerializer,
} }
impl EmitCache { impl EmitCache {
pub fn new(disk_cache: DiskCache) -> Self { pub fn new(disk_cache: DiskCache) -> Self {
Self { Self {
disk_cache, disk_cache,
cli_version: crate::version::deno(), emit_failed_flag: Default::default(),
file_serializer: EmitFileSerializer {
cli_version: crate::version::DENO_VERSION_INFO.deno,
},
} }
} }
@ -45,38 +40,12 @@ impl EmitCache {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
expected_source_hash: u64, expected_source_hash: u64,
) -> Option<Vec<u8>> { ) -> Option<String> {
let meta_filename = self.get_meta_filename(specifier)?;
let emit_filename = self.get_emit_filename(specifier)?; let emit_filename = self.get_emit_filename(specifier)?;
let bytes = self.disk_cache.get(&emit_filename).ok()?;
// load and verify the meta data file is for this source and CLI version self
let bytes = self.disk_cache.get(&meta_filename).ok()?; .file_serializer
let meta: EmitMetadata = serde_json::from_slice(&bytes).ok()?; .deserialize(bytes, expected_source_hash)
if meta.source_hash != expected_source_hash {
return None;
}
// load and verify the emit is for the meta data
let emit_bytes = self.disk_cache.get(&emit_filename).ok()?;
if meta.emit_hash != compute_emit_hash(&emit_bytes, self.cli_version) {
return None;
}
// everything looks good, return it
Some(emit_bytes)
}
/// Gets the filepath which stores the emit.
pub fn get_emit_filepath(
&self,
specifier: &ModuleSpecifier,
) -> Option<PathBuf> {
Some(
self
.disk_cache
.location
.join(self.get_emit_filename(specifier)?),
)
} }
/// Sets the emit code in the cache. /// Sets the emit code in the cache.
@ -87,12 +56,10 @@ impl EmitCache {
code: &[u8], code: &[u8],
) { ) {
if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) { if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) {
// should never error here, but if it ever does don't fail // might error in cases such as a readonly file system
if cfg!(debug_assertions) { log::debug!("Error saving emit data ({}): {}", specifier, err);
panic!("Error saving emit data ({specifier}): {err}"); // assume the cache can't be written to and disable caching to it
} else { self.emit_failed_flag.raise();
log::debug!("Error saving emit data({}): {}", specifier, err);
}
} }
} }
@ -102,34 +69,20 @@ impl EmitCache {
source_hash: u64, source_hash: u64,
code: &[u8], code: &[u8],
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let meta_filename = self if self.emit_failed_flag.is_raised() {
.get_meta_filename(specifier) log::debug!("Skipped emit cache save of {}", specifier);
.ok_or_else(|| anyhow!("Could not get meta filename."))?; return Ok(());
}
let emit_filename = self let emit_filename = self
.get_emit_filename(specifier) .get_emit_filename(specifier)
.ok_or_else(|| anyhow!("Could not get emit filename."))?; .ok_or_else(|| anyhow!("Could not get emit filename."))?;
let cache_data = self.file_serializer.serialize(code, source_hash);
// save the metadata self.disk_cache.set(&emit_filename, &cache_data)?;
let metadata = EmitMetadata {
source_hash,
emit_hash: compute_emit_hash(code, self.cli_version),
};
self
.disk_cache
.set(&meta_filename, &serde_json::to_vec(&metadata)?)?;
// save the emit source
self.disk_cache.set(&emit_filename, code)?;
Ok(()) Ok(())
} }
fn get_meta_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
self
.disk_cache
.get_cache_filename_with_extension(specifier, "meta")
}
fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> { fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
self self
.disk_cache .disk_cache
@ -137,15 +90,69 @@ impl EmitCache {
} }
} }
fn compute_emit_hash(bytes: &[u8], cli_version: &str) -> u64 { const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";
// it's ok to use an insecure hash here because
// if someone can change the emit source then they #[derive(Debug)]
// can also change the version hash struct EmitFileSerializer {
FastInsecureHasher::new_without_deno_version() // use cli_version param instead cli_version: &'static str,
.write(bytes) }
// emit should not be re-used between cli versions
.write_str(cli_version) impl EmitFileSerializer {
.finish() pub fn deserialize(
&self,
mut bytes: Vec<u8>,
expected_source_hash: u64,
) -> Option<String> {
let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
let (content, last_line) = bytes.split_at(last_newline_index);
let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
let hashes = String::from_utf8_lossy(hashes);
let (source_hash, emit_hash) = hashes.split_once(',')?;
// verify the meta data file is for this source and CLI version
let source_hash = source_hash.parse::<u64>().ok()?;
if source_hash != expected_source_hash {
return None;
}
let emit_hash = emit_hash.parse::<u64>().ok()?;
// prevent using an emit from a different cli version or emits that were tampered with
if emit_hash != self.compute_emit_hash(content) {
return None;
}
// everything looks good, truncate and return it
bytes.truncate(content.len());
String::from_utf8(bytes).ok()
}
pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
let source_hash = source_hash.to_string();
let emit_hash = self.compute_emit_hash(code).to_string();
let capacity = code.len()
+ LAST_LINE_PREFIX.len()
+ source_hash.len()
+ 1
+ emit_hash.len();
let mut cache_data = Vec::with_capacity(capacity);
cache_data.extend(code);
cache_data.extend(LAST_LINE_PREFIX.as_bytes());
cache_data.extend(source_hash.as_bytes());
cache_data.push(b',');
cache_data.extend(emit_hash.as_bytes());
debug_assert_eq!(cache_data.len(), capacity);
cache_data
}
fn compute_emit_hash(&self, bytes: &[u8]) -> u64 {
// it's ok to use an insecure hash here because
// if someone can change the emit source then they
// can also change the version hash
crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
.write(bytes)
// emit should not be re-used between cli versions
.write_str(self.cli_version)
.finish()
}
} }
#[cfg(test)] #[cfg(test)]
@ -160,10 +167,11 @@ mod test {
let disk_cache = DiskCache::new(temp_dir.path().as_path()); let disk_cache = DiskCache::new(temp_dir.path().as_path());
let cache = EmitCache { let cache = EmitCache {
disk_cache: disk_cache.clone(), disk_cache: disk_cache.clone(),
cli_version: "1.0.0", file_serializer: EmitFileSerializer {
cli_version: "1.0.0",
},
emit_failed_flag: Default::default(),
}; };
let to_string =
|bytes: Vec<u8>| -> String { String::from_utf8(bytes).unwrap() };
let specifier1 = let specifier1 =
ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts")) ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts"))
@ -180,18 +188,18 @@ mod test {
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
// providing the correct source hash // providing the correct source hash
assert_eq!( assert_eq!(
cache.get_emit_code(&specifier1, 10).map(to_string), cache.get_emit_code(&specifier1, 10),
Some(emit_code1.clone()), Some(emit_code1.clone()),
); );
assert_eq!( assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2));
cache.get_emit_code(&specifier2, 2).map(to_string),
Some(emit_code2)
);
// try changing the cli version (should not load previous ones) // try changing the cli version (should not load previous ones)
let cache = EmitCache { let cache = EmitCache {
disk_cache: disk_cache.clone(), disk_cache: disk_cache.clone(),
cli_version: "2.0.0", file_serializer: EmitFileSerializer {
cli_version: "2.0.0",
},
emit_failed_flag: Default::default(),
}; };
assert_eq!(cache.get_emit_code(&specifier1, 10), None); assert_eq!(cache.get_emit_code(&specifier1, 10), None);
cache.set_emit_code(&specifier1, 5, emit_code1.as_bytes()); cache.set_emit_code(&specifier1, 5, emit_code1.as_bytes());
@ -199,20 +207,17 @@ mod test {
// recreating the cache should still load the data because the CLI version is the same // recreating the cache should still load the data because the CLI version is the same
let cache = EmitCache { let cache = EmitCache {
disk_cache, disk_cache,
cli_version: "2.0.0", file_serializer: EmitFileSerializer {
cli_version: "2.0.0",
},
emit_failed_flag: Default::default(),
}; };
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
cache.get_emit_code(&specifier1, 5).map(to_string),
Some(emit_code1)
);
// adding when already exists should not cause issue // adding when already exists should not cause issue
let emit_code3 = "asdf".to_string(); let emit_code3 = "asdf".to_string();
cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes()); cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes());
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
cache.get_emit_code(&specifier1, 20).map(to_string),
Some(emit_code3)
);
} }
} }

165
cli/cache/mod.rs vendored
View file

@ -1,13 +1,16 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::jsr_url;
use crate::args::CacheSetting; use crate::args::CacheSetting;
use crate::errors::get_error_class_name; use crate::errors::get_error_class_name;
use crate::file_fetcher::FetchNoFollowOptions; use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect; use crate::file_fetcher::FileOrRedirect;
use crate::npm::CliNpmResolver;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_core::futures; use deno_core::futures;
@ -17,7 +20,9 @@ use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture; use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse; use deno_graph::source::LoadResponse;
use deno_graph::source::Loader; use deno_graph::source::Loader;
use deno_runtime::deno_fs;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use node_resolver::InNpmPackageChecker;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@ -62,12 +67,8 @@ pub const CACHE_PERM: u32 = 0o644;
pub struct RealDenoCacheEnv; pub struct RealDenoCacheEnv;
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv { impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Option<Vec<u8>>> { fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
match std::fs::read(path) { std::fs::read(path)
Ok(s) => Ok(Some(s)),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(err),
}
} }
fn atomic_write_file( fn atomic_write_file(
@ -78,6 +79,14 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
atomic_write_file_with_retries(path, bytes, CACHE_PERM) atomic_write_file_with_retries(path, bytes, CACHE_PERM)
} }
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
crate::util::fs::canonicalize_path(path)
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
std::fs::create_dir_all(path)
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> { fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
match std::fs::metadata(path) { match std::fs::metadata(path) {
Ok(metadata) => Ok(Some( Ok(metadata) => Ok(Some(
@ -97,43 +106,111 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
} }
} }
#[derive(Debug, Clone)]
pub struct DenoCacheEnvFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
self
.0
.read_file_sync(path, None)
.map_err(|err| err.into_io_error())
}
fn atomic_write_file(
&self,
path: &Path,
bytes: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries_and_fs(
&AtomicWriteFileFsAdapter {
fs: self.0,
write_mode: CACHE_PERM,
},
path,
bytes,
)
}
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
self
.0
.mkdir_sync(path, true, None)
.map_err(|e| e.into_io_error())
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
self
.0
.stat_sync(path)
.map(|stat| {
stat
.mtime
.map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
})
.map_err(|e| e.into_io_error())
}
fn is_file(&self, path: &Path) -> bool {
self.0.is_file_sync(path)
}
fn time_now(&self) -> SystemTime {
SystemTime::now()
}
}
pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>; pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>; pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
pub type LocalLspHttpCache = pub type LocalLspHttpCache =
deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>; deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
pub use deno_cache_dir::HttpCache; pub use deno_cache_dir::HttpCache;
pub struct FetchCacherOptions {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
pub permissions: PermissionsContainer,
/// If we're publishing for `deno publish`.
pub is_deno_publish: bool,
}
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs. /// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher { pub struct FetchCacher {
emit_cache: EmitCache, pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>, fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer, permissions: PermissionsContainer,
is_deno_publish: bool,
cache_info_enabled: bool, cache_info_enabled: bool,
} }
impl FetchCacher { impl FetchCacher {
pub fn new( pub fn new(
emit_cache: EmitCache,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>, fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer, options: FetchCacherOptions,
) -> Self { ) -> Self {
Self { Self {
emit_cache,
file_fetcher, file_fetcher,
file_header_overrides, fs,
global_http_cache, global_http_cache,
npm_resolver, in_npm_pkg_checker,
module_info_cache, module_info_cache,
permissions, file_header_overrides: options.file_header_overrides,
permissions: options.permissions,
is_deno_publish: options.is_deno_publish,
cache_info_enabled: false, cache_info_enabled: false,
} }
} }
@ -144,15 +221,7 @@ impl FetchCacher {
self.cache_info_enabled = true; self.cache_info_enabled = true;
} }
// DEPRECATED: Where the file is stored and how it's stored should be an implementation /// Only use this for `deno info`.
// detail of the cache.
//
// todo(dsheret): remove once implementing
// * https://github.com/denoland/deno/issues/17707
// * https://github.com/denoland/deno/issues/17703
#[deprecated(
note = "There should not be a way to do this because the file may not be cached at a local path in the future."
)]
fn get_local_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> { fn get_local_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
// TODO(@kitsonk) fix when deno_graph does not query cache for synthetic // TODO(@kitsonk) fix when deno_graph does not query cache for synthetic
// modules // modules
@ -179,15 +248,7 @@ impl Loader for FetchCacher {
#[allow(deprecated)] #[allow(deprecated)]
let local = self.get_local_path(specifier)?; let local = self.get_local_path(specifier)?;
if local.is_file() { if local.is_file() {
let emit = self Some(CacheInfo { local: Some(local) })
.emit_cache
.get_emit_filepath(specifier)
.filter(|p| p.is_file());
Some(CacheInfo {
local: Some(local),
emit,
map: None,
})
} else { } else {
None None
} }
@ -208,19 +269,35 @@ impl Loader for FetchCacher {
// symlinked to `/my-project-2/node_modules`), so first we checked if the path // symlinked to `/my-project-2/node_modules`), so first we checked if the path
// is in a node_modules dir to avoid needlessly canonicalizing, then now compare // is in a node_modules dir to avoid needlessly canonicalizing, then now compare
// against the canonicalized specifier. // against the canonicalized specifier.
let specifier = let specifier = crate::node::resolve_specifier_into_node_modules(
crate::node::resolve_specifier_into_node_modules(specifier); specifier,
if self.npm_resolver.in_npm_package(&specifier) { self.fs.as_ref(),
);
if self.in_npm_pkg_checker.in_npm_package(&specifier) {
return Box::pin(futures::future::ready(Ok(Some( return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External { specifier }, LoadResponse::External { specifier },
)))); ))));
} }
} }
if self.is_deno_publish
&& matches!(specifier.scheme(), "http" | "https")
&& !specifier.as_str().starts_with(jsr_url().as_str())
{
// mark non-JSR remote modules as external so we don't need --allow-import
// permissions as these will error out later when publishing
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
}
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
let file_header_overrides = self.file_header_overrides.clone(); let file_header_overrides = self.file_header_overrides.clone();
let permissions = self.permissions.clone(); let permissions = self.permissions.clone();
let specifier = specifier.clone(); let specifier = specifier.clone();
let is_statically_analyzable = !options.was_dynamic_root;
async move { async move {
let maybe_cache_setting = match options.cache_setting { let maybe_cache_setting = match options.cache_setting {
@ -239,7 +316,12 @@ impl Loader for FetchCacher {
.fetch_no_follow_with_options(FetchNoFollowOptions { .fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions { fetch_options: FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: &permissions, permissions: if is_statically_analyzable {
FetchPermissionsOptionRef::StaticContainer(&permissions)
} else {
FetchPermissionsOptionRef::DynamicContainer(&permissions)
},
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(), maybe_cache_setting: maybe_cache_setting.as_ref(),
}, },
@ -293,6 +375,7 @@ impl Loader for FetchCacher {
fn cache_module_info( fn cache_module_info(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType,
source: &Arc<[u8]>, source: &Arc<[u8]>,
module_info: &deno_graph::ModuleInfo, module_info: &deno_graph::ModuleInfo,
) { ) {
@ -300,7 +383,7 @@ impl Loader for FetchCacher {
let source_hash = CacheDBHash::from_source(source); let source_hash = CacheDBHash::from_source(source);
let result = self.module_info_cache.set_module_info( let result = self.module_info_cache.set_module_info(
specifier, specifier,
MediaType::from_specifier(specifier), media_type,
source_hash, source_hash,
module_info, module_info,
); );

View file

@ -44,18 +44,32 @@ pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
/// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable /// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable
/// performance improvement because when it exists we can skip parsing a module for /// performance improvement because when it exists we can skip parsing a module for
/// deno_graph. /// deno_graph.
#[derive(Debug)]
pub struct ModuleInfoCache { pub struct ModuleInfoCache {
conn: CacheDB, conn: CacheDB,
parsed_source_cache: Arc<ParsedSourceCache>,
} }
impl ModuleInfoCache { impl ModuleInfoCache {
#[cfg(test)] #[cfg(test)]
pub fn new_in_memory(version: &'static str) -> Self { pub fn new_in_memory(
Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version)) version: &'static str,
parsed_source_cache: Arc<ParsedSourceCache>,
) -> Self {
Self::new(
CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version),
parsed_source_cache,
)
} }
pub fn new(conn: CacheDB) -> Self { pub fn new(
Self { conn } conn: CacheDB,
parsed_source_cache: Arc<ParsedSourceCache>,
) -> Self {
Self {
conn,
parsed_source_cache,
}
} }
/// Useful for testing: re-create this cache DB with a different current version. /// Useful for testing: re-create this cache DB with a different current version.
@ -63,6 +77,7 @@ impl ModuleInfoCache {
pub(crate) fn recreate_with_version(self, version: &'static str) -> Self { pub(crate) fn recreate_with_version(self, version: &'static str) -> Self {
Self { Self {
conn: self.conn.recreate_with_version(version), conn: self.conn.recreate_with_version(version),
parsed_source_cache: self.parsed_source_cache,
} }
} }
@ -113,13 +128,10 @@ impl ModuleInfoCache {
Ok(()) Ok(())
} }
pub fn as_module_analyzer<'a>( pub fn as_module_analyzer(&self) -> ModuleInfoCacheModuleAnalyzer {
&'a self,
parsed_source_cache: &'a Arc<ParsedSourceCache>,
) -> ModuleInfoCacheModuleAnalyzer<'a> {
ModuleInfoCacheModuleAnalyzer { ModuleInfoCacheModuleAnalyzer {
module_info_cache: self, module_info_cache: self,
parsed_source_cache, parsed_source_cache: &self.parsed_source_cache,
} }
} }
} }
@ -129,6 +141,84 @@ pub struct ModuleInfoCacheModuleAnalyzer<'a> {
parsed_source_cache: &'a Arc<ParsedSourceCache>, parsed_source_cache: &'a Arc<ParsedSourceCache>,
} }
impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
fn load_cached_module_info(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: CacheDBHash,
) -> Option<ModuleInfo> {
match self.module_info_cache.get_module_info(
specifier,
media_type,
source_hash,
) {
Ok(Some(info)) => Some(info),
Ok(None) => None,
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
None
}
}
}
fn save_module_info_to_cache(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: CacheDBHash,
module_info: &ModuleInfo,
) {
if let Err(err) = self.module_info_cache.set_module_info(
specifier,
media_type,
source_hash,
module_info,
) {
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
);
}
}
pub fn analyze_sync(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source: &Arc<str>,
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache
let source_hash = CacheDBHash::from_source(source);
if let Some(info) =
self.load_cached_module_info(specifier, media_type, source_hash)
{
return Ok(info);
}
// otherwise, get the module info from the parsed source cache
let parser = self.parsed_source_cache.as_capturing_parser();
let analyzer = ParserModuleAnalyzer::new(&parser);
let module_info =
analyzer.analyze_sync(specifier, source.clone(), media_type)?;
// then attempt to cache it
self.save_module_info_to_cache(
specifier,
media_type,
source_hash,
&module_info,
);
Ok(module_info)
}
}
#[async_trait::async_trait(?Send)] #[async_trait::async_trait(?Send)]
impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
async fn analyze( async fn analyze(
@ -139,20 +229,10 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache // attempt to load from the cache
let source_hash = CacheDBHash::from_source(&source); let source_hash = CacheDBHash::from_source(&source);
match self.module_info_cache.get_module_info( if let Some(info) =
specifier, self.load_cached_module_info(specifier, media_type, source_hash)
media_type, {
source_hash, return Ok(info);
) {
Ok(Some(info)) => return Ok(info),
Ok(None) => {}
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
}
} }
// otherwise, get the module info from the parsed source cache // otherwise, get the module info from the parsed source cache
@ -169,18 +249,12 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
.unwrap()?; .unwrap()?;
// then attempt to cache it // then attempt to cache it
if let Err(err) = self.module_info_cache.set_module_info( self.save_module_info_to_cache(
specifier, specifier,
media_type, media_type,
source_hash, source_hash,
&module_info, &module_info,
) { );
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
);
}
Ok(module_info) Ok(module_info)
} }
@ -202,7 +276,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 {
Tsx => 11, Tsx => 11,
Json => 12, Json => 12,
Wasm => 13, Wasm => 13,
TsBuildInfo => 14, Css => 14,
SourceMap => 15, SourceMap => 15,
Unknown => 16, Unknown => 16,
} }
@ -217,7 +291,7 @@ mod test {
#[test] #[test]
pub fn module_info_cache_general_use() { pub fn module_info_cache_general_use() {
let cache = ModuleInfoCache::new_in_memory("1.0.0"); let cache = ModuleInfoCache::new_in_memory("1.0.0", Default::default());
let specifier1 = let specifier1 =
ModuleSpecifier::parse("https://localhost/mod.ts").unwrap(); ModuleSpecifier::parse("https://localhost/mod.ts").unwrap();
let specifier2 = let specifier2 =

View file

@ -7,9 +7,9 @@ use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource; use deno_ast::ParsedSource;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_graph::CapturingModuleParser; use deno_graph::CapturingEsParser;
use deno_graph::DefaultModuleParser; use deno_graph::DefaultEsParser;
use deno_graph::ModuleParser; use deno_graph::EsParser;
use deno_graph::ParseOptions; use deno_graph::ParseOptions;
use deno_graph::ParsedSourceStore; use deno_graph::ParsedSourceStore;
@ -46,7 +46,7 @@ impl<'a> LazyGraphSourceParser<'a> {
} }
} }
#[derive(Default)] #[derive(Debug, Default)]
pub struct ParsedSourceCache { pub struct ParsedSourceCache {
sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>, sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>,
} }
@ -57,12 +57,11 @@ impl ParsedSourceCache {
module: &deno_graph::JsModule, module: &deno_graph::JsModule,
) -> Result<ParsedSource, deno_ast::ParseDiagnostic> { ) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
let parser = self.as_capturing_parser(); let parser = self.as_capturing_parser();
// this will conditionally parse because it's using a CapturingModuleParser // this will conditionally parse because it's using a CapturingEsParser
parser.parse_module(ParseOptions { parser.parse_program(ParseOptions {
specifier: &module.specifier, specifier: &module.specifier,
source: module.source.clone(), source: module.source.clone(),
media_type: module.media_type, media_type: module.media_type,
// don't bother enabling because this method is currently only used for vendoring
scope_analysis: false, scope_analysis: false,
}) })
} }
@ -86,10 +85,9 @@ impl ParsedSourceCache {
specifier, specifier,
source, source,
media_type, media_type,
// don't bother enabling because this method is currently only used for emitting
scope_analysis: false, scope_analysis: false,
}; };
DefaultModuleParser.parse_module(options) DefaultEsParser.parse_program(options)
} }
/// Frees the parsed source from memory. /// Frees the parsed source from memory.
@ -99,8 +97,8 @@ impl ParsedSourceCache {
/// Creates a parser that will reuse a ParsedSource from the store /// Creates a parser that will reuse a ParsedSource from the store
/// if it exists, or else parse. /// if it exists, or else parse.
pub fn as_capturing_parser(&self) -> CapturingModuleParser { pub fn as_capturing_parser(&self) -> CapturingEsParser {
CapturingModuleParser::new(None, self) CapturingEsParser::new(None, self)
} }
} }

View file

@ -1,6 +1,10 @@
disallowed-methods = [ disallowed-methods = [
{ path = "reqwest::Client::new", reason = "create an HttpClient via an HttpClientProvider instead" }, { path = "reqwest::Client::new", reason = "create an HttpClient via an HttpClientProvider instead" },
{ path = "std::process::exit", reason = "use deno_runtime::exit instead" },
] ]
disallowed-types = [ disallowed-types = [
{ path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" }, { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },
] ]
ignore-interior-mutability = [
"lsp_types::Uri",
]

View file

@ -3,22 +3,29 @@
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use crate::resolver::CjsTracker;
use deno_ast::ModuleKind;
use deno_ast::SourceMapOption; use deno_ast::SourceMapOption;
use deno_ast::SourceRange;
use deno_ast::SourceRanged;
use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileModuleOptions;
use deno_ast::TranspileResult; use deno_ast::TranspileResult;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::ModuleCodeBytes;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::MediaType; use deno_graph::MediaType;
use deno_graph::Module; use deno_graph::Module;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use std::sync::Arc; use std::sync::Arc;
#[derive(Debug)]
pub struct Emitter { pub struct Emitter {
emit_cache: EmitCache, cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_and_emit_options: transpile_and_emit_options:
Arc<(deno_ast::TranspileOptions, deno_ast::EmitOptions)>, Arc<(deno_ast::TranspileOptions, deno_ast::EmitOptions)>,
@ -28,7 +35,8 @@ pub struct Emitter {
impl Emitter { impl Emitter {
pub fn new( pub fn new(
emit_cache: EmitCache, cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions, transpile_options: deno_ast::TranspileOptions,
emit_options: deno_ast::EmitOptions, emit_options: deno_ast::EmitOptions,
@ -40,6 +48,7 @@ impl Emitter {
hasher.finish() hasher.finish()
}; };
Self { Self {
cjs_tracker,
emit_cache, emit_cache,
parsed_source_cache, parsed_source_cache,
transpile_and_emit_options: Arc::new((transpile_options, emit_options)), transpile_and_emit_options: Arc::new((transpile_options, emit_options)),
@ -57,20 +66,19 @@ impl Emitter {
continue; continue;
}; };
let is_emittable = matches!( if module.media_type.is_emittable() {
module.media_type,
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx
);
if is_emittable {
futures.push( futures.push(
self self
.emit_parsed_source( .emit_parsed_source(
&module.specifier, &module.specifier,
module.media_type, module.media_type,
ModuleKind::from_is_cjs(
self.cjs_tracker.is_cjs_with_known_is_script(
&module.specifier,
module.media_type,
module.is_script,
)?,
),
&module.source, &module.source,
) )
.boxed_local(), .boxed_local(),
@ -89,9 +97,10 @@ impl Emitter {
pub fn maybe_cached_emit( pub fn maybe_cached_emit(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
source: &str, source: &str,
) -> Option<Vec<u8>> { ) -> Option<String> {
let source_hash = self.get_source_hash(source); let source_hash = self.get_source_hash(module_kind, source);
self.emit_cache.get_emit_code(specifier, source_hash) self.emit_cache.get_emit_code(specifier, source_hash)
} }
@ -99,25 +108,27 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the sync version below // Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, module_kind, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let parsed_source_cache = self.parsed_source_cache.clone(); let parsed_source_cache = self.parsed_source_cache.clone();
let transpile_and_emit_options = let transpile_and_emit_options =
self.transpile_and_emit_options.clone(); self.transpile_and_emit_options.clone();
let transpile_result = deno_core::unsync::spawn_blocking({ let transpiled_source = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone(); let specifier = specifier.clone();
let source = source.clone(); let source = source.clone();
move || -> Result<_, AnyError> { move || -> Result<_, AnyError> {
EmitParsedSourceHelper::transpile( EmitParsedSourceHelper::transpile(
&parsed_source_cache, &parsed_source_cache,
&specifier, &specifier,
source.clone(),
media_type, media_type,
module_kind,
source.clone(),
&transpile_and_emit_options.0, &transpile_and_emit_options.0,
&transpile_and_emit_options.1, &transpile_and_emit_options.1,
) )
@ -125,11 +136,12 @@ impl Emitter {
}) })
.await .await
.unwrap()?; .unwrap()?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -138,26 +150,29 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the async version above // Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, module_kind, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let transpile_result = EmitParsedSourceHelper::transpile( let transpiled_source = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache, &self.parsed_source_cache,
specifier, specifier,
source.clone(),
media_type, media_type,
module_kind,
source.clone(),
&self.transpile_and_emit_options.0, &self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1, &self.transpile_and_emit_options.1,
)?; )?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -187,10 +202,20 @@ impl Emitter {
// this statement is probably wrong) // this statement is probably wrong)
let mut options = self.transpile_and_emit_options.1.clone(); let mut options = self.transpile_and_emit_options.1.clone();
options.source_map = SourceMapOption::None; options.source_map = SourceMapOption::None;
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
specifier,
media_type,
parsed_source.compute_is_script(),
)?;
let transpiled_source = parsed_source let transpiled_source = parsed_source
.transpile(&self.transpile_and_emit_options.0, &options)? .transpile(
.into_source() &self.transpile_and_emit_options.0,
.into_string()?; &deno_ast::TranspileModuleOptions {
module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
},
&options,
)?
.into_source();
Ok(transpiled_source.text) Ok(transpiled_source.text)
} }
MediaType::JavaScript MediaType::JavaScript
@ -201,7 +226,7 @@ impl Emitter {
| MediaType::Dcts | MediaType::Dcts
| MediaType::Json | MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => { | MediaType::Unknown => {
// clear this specifier from the parsed source cache as it's now out of date // clear this specifier from the parsed source cache as it's now out of date
@ -214,16 +239,17 @@ impl Emitter {
/// A hashing function that takes the source code and uses the global emit /// A hashing function that takes the source code and uses the global emit
/// options then generates a string hash which can be stored to /// options then generates a string hash which can be stored to
/// determine if the cached emit is valid or not. /// determine if the cached emit is valid or not.
fn get_source_hash(&self, source_text: &str) -> u64 { fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 {
FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash
.write_str(source_text) .write_str(source_text)
.write_u64(self.transpile_and_emit_options_hash) .write_u64(self.transpile_and_emit_options_hash)
.write_hashable(module_kind)
.finish() .finish()
} }
} }
enum PreEmitResult { enum PreEmitResult {
Cached(ModuleCodeBytes), Cached(String),
NotCached { source_hash: u64 }, NotCached { source_hash: u64 },
} }
@ -234,14 +260,15 @@ impl<'a> EmitParsedSourceHelper<'a> {
pub fn pre_emit_parsed_source( pub fn pre_emit_parsed_source(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> PreEmitResult { ) -> PreEmitResult {
let source_hash = self.0.get_source_hash(source); let source_hash = self.0.get_source_hash(module_kind, source);
if let Some(emit_code) = if let Some(emit_code) =
self.0.emit_cache.get_emit_code(specifier, source_hash) self.0.emit_cache.get_emit_code(specifier, source_hash)
{ {
PreEmitResult::Cached(emit_code.into_boxed_slice().into()) PreEmitResult::Cached(emit_code)
} else { } else {
PreEmitResult::NotCached { source_hash } PreEmitResult::NotCached { source_hash }
} }
@ -250,24 +277,24 @@ impl<'a> EmitParsedSourceHelper<'a> {
pub fn transpile( pub fn transpile(
parsed_source_cache: &ParsedSourceCache, parsed_source_cache: &ParsedSourceCache,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: Arc<str>,
transpile_options: &deno_ast::TranspileOptions, transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions, emit_options: &deno_ast::EmitOptions,
) -> Result<TranspileResult, AnyError> { ) -> Result<String, AnyError> {
// nothing else needs the parsed source at this point, so remove from // nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned // the cache in order to not transpile owned
let parsed_source = parsed_source_cache let parsed_source = parsed_source_cache
.remove_or_parse_module(specifier, source, media_type)?; .remove_or_parse_module(specifier, source, media_type)?;
Ok(parsed_source.transpile(transpile_options, emit_options)?) ensure_no_import_assertion(&parsed_source)?;
} let transpile_result = parsed_source.transpile(
transpile_options,
pub fn post_emit_parsed_source( &TranspileModuleOptions {
&self, module_kind: Some(module_kind),
specifier: &ModuleSpecifier, },
transpile_result: TranspileResult, emit_options,
source_hash: u64, )?;
) -> ModuleCodeBytes {
let transpiled_source = match transpile_result { let transpiled_source = match transpile_result {
TranspileResult::Owned(source) => source, TranspileResult::Owned(source) => source,
TranspileResult::Cloned(source) => { TranspileResult::Cloned(source) => {
@ -276,11 +303,89 @@ impl<'a> EmitParsedSourceHelper<'a> {
} }
}; };
debug_assert!(transpiled_source.source_map.is_none()); debug_assert!(transpiled_source.source_map.is_none());
Ok(transpiled_source.text)
}
pub fn post_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
transpiled_source: &str,
source_hash: u64,
) {
self.0.emit_cache.set_emit_code( self.0.emit_cache.set_emit_code(
specifier, specifier,
source_hash, source_hash,
&transpiled_source.source, transpiled_source.as_bytes(),
); );
transpiled_source.source.into_boxed_slice().into()
} }
} }
// todo(dsherret): this is a temporary measure until we have swc erroring for this
fn ensure_no_import_assertion(
parsed_source: &deno_ast::ParsedSource,
) -> Result<(), AnyError> {
fn has_import_assertion(text: &str) -> bool {
// good enough
text.contains(" assert ") && !text.contains(" with ")
}
fn create_err(
parsed_source: &deno_ast::ParsedSource,
range: SourceRange,
) -> AnyError {
let text_info = parsed_source.text_info_lazy();
let loc = text_info.line_and_column_display(range.start);
let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string();
msg.push_str("\n\n");
msg.push_str(range.text_fast(text_info));
msg.push_str("\n\n");
msg.push_str(&format!(
" at {}:{}:{}\n",
parsed_source.specifier(),
loc.line_number,
loc.column_number,
));
deno_core::anyhow::anyhow!("{}", msg)
}
let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else {
return Ok(());
};
for item in &module.body {
match item {
deno_ast::swc::ast::ModuleItem::ModuleDecl(decl) => match decl {
deno_ast::swc::ast::ModuleDecl::Import(n) => {
if n.with.is_some()
&& has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
{
return Err(create_err(parsed_source, n.range()));
}
}
deno_ast::swc::ast::ModuleDecl::ExportAll(n) => {
if n.with.is_some()
&& has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
{
return Err(create_err(parsed_source, n.range()));
}
}
deno_ast::swc::ast::ModuleDecl::ExportNamed(n) => {
if n.with.is_some()
&& has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
{
return Err(create_err(parsed_source, n.range()));
}
}
deno_ast::swc::ast::ModuleDecl::ExportDecl(_)
| deno_ast::swc::ast::ModuleDecl::ExportDefaultDecl(_)
| deno_ast::swc::ast::ModuleDecl::ExportDefaultExpr(_)
| deno_ast::swc::ast::ModuleDecl::TsImportEquals(_)
| deno_ast::swc::ast::ModuleDecl::TsExportAssignment(_)
| deno_ast::swc::ast::ModuleDecl::TsNamespaceExport(_) => {}
},
deno_ast::swc::ast::ModuleItem::Stmt(_) => {}
}
}
Ok(())
}

View file

@ -17,7 +17,6 @@ use deno_graph::ModuleGraphError;
use deno_graph::ModuleLoadError; use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use import_map::ImportMapError; use import_map::ImportMapError;
use std::fmt::Write;
fn get_import_map_error_class(_: &ImportMapError) -> &'static str { fn get_import_map_error_class(_: &ImportMapError) -> &'static str {
"URIError" "URIError"
@ -30,7 +29,6 @@ fn get_diagnostic_class(_: &ParseDiagnostic) -> &'static str {
fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str { fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
use deno_graph::JsrLoadError; use deno_graph::JsrLoadError;
use deno_graph::NpmLoadError; use deno_graph::NpmLoadError;
use deno_graph::WorkspaceLoadError;
match err { match err {
ModuleGraphError::ResolutionError(err) ModuleGraphError::ResolutionError(err)
@ -40,6 +38,7 @@ fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
ModuleGraphError::ModuleError(err) => match err { ModuleGraphError::ModuleError(err) => match err {
ModuleError::InvalidTypeAssertion { .. } => "SyntaxError", ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic), ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
ModuleError::WasmParseErr(..) => "SyntaxError",
ModuleError::UnsupportedMediaType { .. } ModuleError::UnsupportedMediaType { .. }
| ModuleError::UnsupportedImportAttributeType { .. } => "TypeError", | ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => { ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
@ -72,10 +71,6 @@ fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
| JsrLoadError::PackageVersionNotFound(_) | JsrLoadError::PackageVersionNotFound(_)
| JsrLoadError::UnknownExport { .. } => "NotFound", | JsrLoadError::UnknownExport { .. } => "NotFound",
}, },
ModuleLoadError::Workspace(err) => match err {
WorkspaceLoadError::MemberInvalidExportPath { .. } => "TypeError",
WorkspaceLoadError::MissingMemberExports { .. } => "NotFound",
},
}, },
}, },
} }
@ -94,6 +89,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
} }
} }
fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
"TypeError"
}
pub fn get_error_class_name(e: &AnyError) -> &'static str { pub fn get_error_class_name(e: &AnyError) -> &'static str {
deno_runtime::errors::get_error_class_name(e) deno_runtime::errors::get_error_class_name(e)
.or_else(|| { .or_else(|| {
@ -112,17 +111,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
e.downcast_ref::<ResolutionError>() e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class) .map(get_resolution_error_class)
}) })
.unwrap_or_else(|| { .or_else(|| {
if cfg!(debug) { e.downcast_ref::<std::num::TryFromIntError>()
log::warn!( .map(get_try_from_int_error_class)
"Error '{}' contains boxed error of unknown type:{}",
e,
e.chain().fold(String::new(), |mut output, e| {
let _ = write!(output, "\n {e:?}");
output
})
);
}
"Error"
}) })
.unwrap_or("Error")
} }

View file

@ -1,15 +1,17 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::check_warn_tsconfig;
use crate::args::get_root_cert_store; use crate::args::get_root_cert_store;
use crate::args::CaData; use crate::args::CaData;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::Flags; use crate::args::Flags;
use crate::args::PackageJsonInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver; use crate::args::StorageKeyResolver;
use crate::args::TsConfigType; use crate::args::TsConfigType;
use crate::cache::Caches; use crate::cache::Caches;
use crate::cache::CodeCache; use crate::cache::CodeCache;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::DenoDir; use crate::cache::DenoDir;
use crate::cache::DenoDirProvider; use crate::cache::DenoDirProvider;
use crate::cache::EmitCache; use crate::cache::EmitCache;
@ -31,22 +33,30 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::node::CliCjsCodeAnalyzer; use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeCodeTranslator;
use crate::npm::create_cli_npm_resolver; use crate::npm::create_cli_npm_resolver;
use crate::npm::create_in_npm_pkg_checker;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::resolver::CjsResolutionStore; use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CliGraphResolver; use crate::resolver::CjsTracker;
use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliDenoResolver;
use crate::resolver::CliNodeResolver; use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader; use crate::resolver::NpmModuleLoader;
use crate::resolver::SloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs;
use crate::standalone::DenoCompileBinaryWriter; use crate::standalone::DenoCompileBinaryWriter;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector; use crate::tools::coverage::CoverageCollector;
use crate::tools::lint::LintRuleProvider; use crate::tools::lint::LintRuleProvider;
use crate::tools::run::hmr::HmrRunner; use crate::tools::run::hmr::HmrRunner;
use crate::tsc::TypeCheckingCjsTracker;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
@ -55,21 +65,30 @@ use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions; use crate::worker::CliMainWorkerOptions;
use std::path::PathBuf; use std::path::PathBuf;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::FeatureChecker; use deno_core::FeatureChecker;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore; use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer; use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use log::warn; use log::warn;
use node_resolver::analyze::NodeCodeTranslator; use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::InNpmPackageChecker;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use std::future::Future; use std::future::Future;
use std::sync::Arc; use std::sync::Arc;
@ -111,7 +130,7 @@ impl RootCertStoreProvider for CliRootCertStoreProvider {
} }
} }
struct Deferred<T>(once_cell::unsync::OnceCell<T>); pub struct Deferred<T>(once_cell::unsync::OnceCell<T>);
impl<T> Default for Deferred<T> { impl<T> Default for Deferred<T> {
fn default() -> Self { fn default() -> Self {
@ -157,37 +176,42 @@ impl<T> Deferred<T> {
#[derive(Default)] #[derive(Default)]
struct CliFactoryServices { struct CliFactoryServices {
cli_options: Deferred<Arc<CliOptions>>, blob_store: Deferred<Arc<BlobStore>>,
deno_dir_provider: Deferred<Arc<DenoDirProvider>>,
caches: Deferred<Arc<Caches>>, caches: Deferred<Arc<Caches>>,
cjs_tracker: Deferred<Arc<CjsTracker>>,
cli_options: Deferred<Arc<CliOptions>>,
code_cache: Deferred<Arc<CodeCache>>,
deno_resolver: Deferred<Arc<CliDenoResolver>>,
emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
file_fetcher: Deferred<Arc<FileFetcher>>, file_fetcher: Deferred<Arc<FileFetcher>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
global_http_cache: Deferred<Arc<GlobalHttpCache>>, global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>, http_cache: Deferred<Arc<dyn HttpCache>>,
http_client_provider: Deferred<Arc<HttpClientProvider>>, http_client_provider: Deferred<Arc<HttpClientProvider>>,
emit_cache: Deferred<EmitCache>, in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
emitter: Deferred<Arc<Emitter>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>, main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,
module_info_cache: Deferred<Arc<ModuleInfoCache>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
resolver: Deferred<Arc<CliGraphResolver>>,
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>, maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>, module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>,
module_graph_creator: Deferred<Arc<ModuleGraphCreator>>, module_graph_creator: Deferred<Arc<ModuleGraphCreator>>,
module_info_cache: Deferred<Arc<ModuleInfoCache>>,
module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>, module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>, node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>, node_resolver: Deferred<Arc<NodeResolver>>,
npm_cache_dir: Deferred<Arc<NpmCacheDir>>,
npm_req_resolver: Deferred<Arc<CliNpmReqResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>, npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
sloppy_imports_resolver: Deferred<Option<Arc<SloppyImportsResolver>>>, parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
pkg_json_resolver: Deferred<Arc<PackageJsonResolver>>,
resolver: Deferred<Arc<CliResolver>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
root_permissions_container: Deferred<PermissionsContainer>,
sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,
text_only_progress_bar: Deferred<ProgressBar>, text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>, type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
code_cache: Deferred<Arc<CodeCache>>,
workspace_resolver: Deferred<Arc<WorkspaceResolver>>, workspace_resolver: Deferred<Arc<WorkspaceResolver>>,
} }
@ -236,11 +260,7 @@ impl CliFactory {
} }
pub fn deno_dir_provider(&self) -> Result<&Arc<DenoDirProvider>, AnyError> { pub fn deno_dir_provider(&self) -> Result<&Arc<DenoDirProvider>, AnyError> {
self.services.deno_dir_provider.get_or_try_init(|| { Ok(&self.cli_options()?.deno_dir_provider)
Ok(Arc::new(DenoDirProvider::new(
self.cli_options()?.maybe_custom_root().clone(),
)))
})
} }
pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> { pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> {
@ -298,7 +318,7 @@ impl CliFactory {
pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> { pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
self.services.global_http_cache.get_or_try_init(|| { self.services.global_http_cache.get_or_try_init(|| {
Ok(Arc::new(GlobalHttpCache::new( Ok(Arc::new(GlobalHttpCache::new(
self.deno_dir()?.deps_folder_path(), self.deno_dir()?.remote_folder_path(),
crate::cache::RealDenoCacheEnv, crate::cache::RealDenoCacheEnv,
))) )))
}) })
@ -309,8 +329,11 @@ impl CliFactory {
let global_cache = self.global_http_cache()?.clone(); let global_cache = self.global_http_cache()?.clone();
match self.cli_options()?.vendor_dir_path() { match self.cli_options()?.vendor_dir_path() {
Some(local_path) => { Some(local_path) => {
let local_cache = let local_cache = LocalHttpCache::new(
LocalHttpCache::new(local_path.clone(), global_cache); local_path.clone(),
global_cache,
deno_cache_dir::GlobalToLocalCopy::Allow,
);
Ok(Arc::new(local_cache)) Ok(Arc::new(local_cache))
} }
None => Ok(global_cache), None => Ok(global_cache),
@ -345,74 +368,127 @@ impl CliFactory {
self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs))
} }
pub fn in_npm_pkg_checker(
&self,
) -> Result<&Arc<dyn InNpmPackageChecker>, AnyError> {
self.services.in_npm_pkg_checker.get_or_try_init(|| {
let cli_options = self.cli_options()?;
let options = if cli_options.use_byonm() {
CreateInNpmPkgCheckerOptions::Byonm
} else {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(),
maybe_node_modules_path: cli_options
.node_modules_dir_path()
.map(|p| p.as_path()),
},
)
};
Ok(create_in_npm_pkg_checker(options))
})
}
pub fn npm_cache_dir(&self) -> Result<&Arc<NpmCacheDir>, AnyError> {
self.services.npm_cache_dir.get_or_try_init(|| {
let fs = self.fs();
let global_path = self.deno_dir()?.npm_folder_path();
let cli_options = self.cli_options()?;
Ok(Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(fs.as_ref()),
global_path,
cli_options.npmrc().get_all_known_registries_urls(),
)))
})
}
pub async fn npm_resolver( pub async fn npm_resolver(
&self, &self,
) -> Result<&Arc<dyn CliNpmResolver>, AnyError> { ) -> Result<&Arc<dyn CliNpmResolver>, AnyError> {
self self
.services .services
.npm_resolver .npm_resolver
.get_or_try_init_async(async { .get_or_try_init_async(
let fs = self.fs(); async {
let cli_options = self.cli_options()?; let fs = self.fs();
// For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory. let cli_options = self.cli_options()?;
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_)) { create_cli_npm_resolver(if cli_options.use_byonm() {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions { CliNpmResolverCreateOptions::Byonm(
fs: fs.clone(), CliByonmNpmResolverCreateOptions {
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() { fs: CliDenoResolverFs(fs.clone()),
Some(node_modules_path) => node_modules_path.to_path_buf(), pkg_json_resolver: self.pkg_json_resolver().clone(),
// path needs to be canonicalized for node resolution root_node_modules_dir: Some(
// (node_modules_dir_path above is already canonicalized) match cli_options.node_modules_dir_path() {
None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())? Some(node_modules_path) => node_modules_path.to_path_buf(),
.join("node_modules"), // path needs to be canonicalized for node resolution
}), // (node_modules_dir_path above is already canonicalized)
}) None => canonicalize_path_maybe_not_exists(
} else { cli_options.initial_cwd(),
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { )?
snapshot: match cli_options.resolve_npm_resolution_snapshot()? { .join("node_modules"),
Some(snapshot) => { },
CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)) ),
}
None => match cli_options.maybe_lockfile() {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
}, },
}, )
maybe_lockfile: cli_options.maybe_lockfile().cloned(), } else {
fs: fs.clone(), CliNpmResolverCreateOptions::Managed(
http_client_provider: self.http_client_provider().clone(), CliManagedNpmResolverCreateOptions {
npm_global_cache_dir: self.deno_dir()?.npm_folder_path(), snapshot: match cli_options.resolve_npm_resolution_snapshot()? {
cache_setting: cli_options.cache_setting(), Some(snapshot) => {
text_only_progress_bar: self.text_only_progress_bar().clone(), CliNpmResolverManagedSnapshotOption::Specified(Some(
maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(), snapshot,
package_json_deps_provider: Arc::new(PackageJsonInstallDepsProvider::from_workspace( ))
cli_options.workspace(), }
)), None => match cli_options.maybe_lockfile() {
npm_system_info: cli_options.npm_system_info(), Some(lockfile) => {
npmrc: cli_options.npmrc().clone(), CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lifecycle_scripts: cli_options.lifecycle_scripts_config(), lockfile.clone(),
)
}
None => {
CliNpmResolverManagedSnapshotOption::Specified(None)
}
},
},
maybe_lockfile: cli_options.maybe_lockfile().cloned(),
fs: fs.clone(),
http_client_provider: self.http_client_provider().clone(),
npm_cache_dir: self.npm_cache_dir()?.clone(),
cache_setting: cli_options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: cli_options
.node_modules_dir_path()
.cloned(),
npm_install_deps_provider: Arc::new(
NpmInstallDepsProvider::from_workspace(
cli_options.workspace(),
),
),
npm_system_info: cli_options.npm_system_info(),
npmrc: cli_options.npmrc().clone(),
lifecycle_scripts: cli_options.lifecycle_scripts_config(),
},
)
}) })
}).await .await
}.boxed_local()) }
.boxed_local(),
)
.await .await
} }
pub fn sloppy_imports_resolver( pub fn sloppy_imports_resolver(
&self, &self,
) -> Result<Option<&Arc<SloppyImportsResolver>>, AnyError> { ) -> Result<Option<&Arc<CliSloppyImportsResolver>>, AnyError> {
self self
.services .services
.sloppy_imports_resolver .sloppy_imports_resolver
.get_or_try_init(|| { .get_or_try_init(|| {
Ok( Ok(self.cli_options()?.unstable_sloppy_imports().then(|| {
self Arc::new(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
.cli_options()? self.fs().clone(),
.unstable_sloppy_imports() )))
.then(|| Arc::new(SloppyImportsResolver::new(self.fs().clone()))), }))
)
}) })
.map(|maybe| maybe.as_ref()) .map(|maybe| maybe.as_ref())
} }
@ -452,28 +528,47 @@ impl CliFactory {
.await .await
} }
pub async fn resolver(&self) -> Result<&Arc<CliGraphResolver>, AnyError> { pub async fn deno_resolver(&self) -> Result<&Arc<CliDenoResolver>, AnyError> {
self
.services
.deno_resolver
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliDenoResolver::new(DenoResolverOptions {
in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
node_and_req_resolver: if cli_options.no_npm() {
None
} else {
Some(NodeAndNpmReqResolver {
node_resolver: self.node_resolver().await?.clone(),
npm_req_resolver: self.npm_req_resolver().await?.clone(),
})
},
sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
workspace_resolver: self.workspace_resolver().await?.clone(),
is_byonm: cli_options.use_byonm(),
maybe_vendor_dir: cli_options.vendor_dir_path(),
})))
})
.await
}
pub async fn resolver(&self) -> Result<&Arc<CliResolver>, AnyError> {
self self
.services .services
.resolver .resolver
.get_or_try_init_async( .get_or_try_init_async(
async { async {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions { Ok(Arc::new(CliResolver::new(CliResolverOptions {
sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
node_resolver: Some(self.cli_node_resolver().await?.clone()),
npm_resolver: if cli_options.no_npm() { npm_resolver: if cli_options.no_npm() {
None None
} else { } else {
Some(self.npm_resolver().await?.clone()) Some(self.npm_resolver().await?.clone())
}, },
workspace_resolver: self.workspace_resolver().await?.clone(),
bare_node_builtins_enabled: cli_options bare_node_builtins_enabled: cli_options
.unstable_bare_node_builtins(), .unstable_bare_node_builtins(),
maybe_jsx_import_source_config: cli_options deno_resolver: self.deno_resolver().await?.clone(),
.workspace()
.to_maybe_jsx_import_source_config()?,
maybe_vendor_dir: cli_options.vendor_dir_path(),
}))) })))
} }
.boxed_local(), .boxed_local(),
@ -492,9 +587,9 @@ impl CliFactory {
.get_or_init(|| maybe_file_watcher_reporter) .get_or_init(|| maybe_file_watcher_reporter)
} }
pub fn emit_cache(&self) -> Result<&EmitCache, AnyError> { pub fn emit_cache(&self) -> Result<&Arc<EmitCache>, AnyError> {
self.services.emit_cache.get_or_try_init(|| { self.services.emit_cache.get_or_try_init(|| {
Ok(EmitCache::new(self.deno_dir()?.gen_cache.clone())) Ok(Arc::new(EmitCache::new(self.deno_dir()?.gen_cache.clone())))
}) })
} }
@ -502,6 +597,7 @@ impl CliFactory {
self.services.module_info_cache.get_or_try_init(|| { self.services.module_info_cache.get_or_try_init(|| {
Ok(Arc::new(ModuleInfoCache::new( Ok(Arc::new(ModuleInfoCache::new(
self.caches()?.dep_analysis_db(), self.caches()?.dep_analysis_db(),
self.parsed_source_cache().clone(),
))) )))
}) })
} }
@ -524,14 +620,13 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
let ts_config_result = let ts_config_result =
cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?; cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
if let Some(ignored_options) = ts_config_result.maybe_ignored_options { check_warn_tsconfig(&ts_config_result);
warn!("{}", ignored_options);
}
let (transpile_options, emit_options) = let (transpile_options, emit_options) =
crate::args::ts_config_to_transpile_and_emit_options( crate::args::ts_config_to_transpile_and_emit_options(
ts_config_result.ts_config, ts_config_result.ts_config,
)?; )?;
Ok(Arc::new(Emitter::new( Ok(Arc::new(Emitter::new(
self.cjs_tracker()?.clone(),
self.emit_cache()?.clone(), self.emit_cache()?.clone(),
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
transpile_options, transpile_options,
@ -555,7 +650,13 @@ impl CliFactory {
async { async {
Ok(Arc::new(NodeResolver::new( Ok(Arc::new(NodeResolver::new(
DenoFsNodeResolverEnv::new(self.fs().clone()), DenoFsNodeResolverEnv::new(self.fs().clone()),
self.npm_resolver().await?.clone().into_npm_resolver(), self.in_npm_pkg_checker()?.clone(),
self
.npm_resolver()
.await?
.clone()
.into_npm_pkg_folder_resolver(),
self.pkg_json_resolver().clone(),
))) )))
} }
.boxed_local(), .boxed_local(),
@ -573,19 +674,57 @@ impl CliFactory {
let caches = self.caches()?; let caches = self.caches()?;
let node_analysis_cache = let node_analysis_cache =
NodeAnalysisCache::new(caches.node_analysis_db()); NodeAnalysisCache::new(caches.node_analysis_db());
let cjs_esm_analyzer = let node_resolver = self.node_resolver().await?.clone();
CliCjsCodeAnalyzer::new(node_analysis_cache, self.fs().clone()); let cjs_esm_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache,
self.cjs_tracker()?.clone(),
self.fs().clone(),
Some(self.parsed_source_cache().clone()),
);
Ok(Arc::new(NodeCodeTranslator::new( Ok(Arc::new(NodeCodeTranslator::new(
cjs_esm_analyzer, cjs_esm_analyzer,
DenoFsNodeResolverEnv::new(self.fs().clone()), DenoFsNodeResolverEnv::new(self.fs().clone()),
self.node_resolver().await?.clone(), self.in_npm_pkg_checker()?.clone(),
self.npm_resolver().await?.clone().into_npm_resolver(), node_resolver,
self
.npm_resolver()
.await?
.clone()
.into_npm_pkg_folder_resolver(),
self.pkg_json_resolver().clone(),
))) )))
}) })
.await .await
} }
pub async fn npm_req_resolver(
&self,
) -> Result<&Arc<CliNpmReqResolver>, AnyError> {
self
.services
.npm_req_resolver
.get_or_try_init_async(async {
let npm_resolver = self.npm_resolver().await?;
Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
fs: CliDenoResolverFs(self.fs().clone()),
in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
node_resolver: self.node_resolver().await?.clone(),
npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
})))
})
.await
}
pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
self.services.pkg_json_resolver.get_or_init(|| {
Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new(
self.fs().clone(),
)))
})
}
pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> { pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> {
self self
.services .services
@ -594,6 +733,10 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(TypeChecker::new( Ok(Arc::new(TypeChecker::new(
self.caches()?.clone(), self.caches()?.clone(),
Arc::new(TypeCheckingCjsTracker::new(
self.cjs_tracker()?.clone(),
self.module_info_cache()?.clone(),
)),
cli_options.clone(), cli_options.clone(),
self.module_graph_builder().await?.clone(), self.module_graph_builder().await?.clone(),
self.node_resolver().await?.clone(), self.node_resolver().await?.clone(),
@ -612,18 +755,20 @@ impl CliFactory {
.get_or_try_init_async(async { .get_or_try_init_async(async {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphBuilder::new( Ok(Arc::new(ModuleGraphBuilder::new(
cli_options.clone(),
self.caches()?.clone(), self.caches()?.clone(),
self.cjs_tracker()?.clone(),
cli_options.clone(),
self.file_fetcher()?.clone(),
self.fs().clone(), self.fs().clone(),
self.resolver().await?.clone(), self.global_http_cache()?.clone(),
self.npm_resolver().await?.clone(), self.in_npm_pkg_checker()?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
cli_options.maybe_lockfile().cloned(), cli_options.maybe_lockfile().cloned(),
self.maybe_file_watcher_reporter().clone(), self.maybe_file_watcher_reporter().clone(),
self.emit_cache()?.clone(), self.module_info_cache()?.clone(),
self.file_fetcher()?.clone(), self.npm_resolver().await?.clone(),
self.global_http_cache()?.clone(), self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
self.root_permissions_container()?.clone(),
))) )))
}) })
.await .await
@ -657,6 +802,7 @@ impl CliFactory {
Ok(Arc::new(MainModuleGraphContainer::new( Ok(Arc::new(MainModuleGraphContainer::new(
self.cli_options()?.clone(), self.cli_options()?.clone(),
self.module_load_preparer().await?.clone(), self.module_load_preparer().await?.clone(),
self.root_permissions_container()?.clone(),
))) )))
}) })
.await .await
@ -693,25 +839,27 @@ impl CliFactory {
.await .await
} }
pub fn cjs_resolutions(&self) -> &Arc<CjsResolutionStore> { pub fn cjs_tracker(&self) -> Result<&Arc<CjsTracker>, AnyError> {
self.services.cjs_resolutions.get_or_init(Default::default) self.services.cjs_tracker.get_or_try_init(|| {
let options = self.cli_options()?;
Ok(Arc::new(CjsTracker::new(
self.in_npm_pkg_checker()?.clone(),
self.pkg_json_resolver().clone(),
IsCjsResolverOptions {
detect_cjs: options.detect_cjs(),
is_node_main: options.is_node_main(),
},
)))
})
} }
pub async fn cli_node_resolver( pub fn permission_desc_parser(
&self, &self,
) -> Result<&Arc<CliNodeResolver>, AnyError> { ) -> Result<&Arc<RuntimePermissionDescriptorParser>, AnyError> {
self self.services.permission_desc_parser.get_or_try_init(|| {
.services let fs = self.fs().clone();
.cli_node_resolver Ok(Arc::new(RuntimePermissionDescriptorParser::new(fs)))
.get_or_try_init_async(async { })
Ok(Arc::new(CliNodeResolver::new(
self.cjs_resolutions().clone(),
self.fs().clone(),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),
)))
})
.await
} }
pub fn feature_checker(&self) -> Result<&Arc<FeatureChecker>, AnyError> { pub fn feature_checker(&self) -> Result<&Arc<FeatureChecker>, AnyError> {
@ -719,15 +867,10 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
let mut checker = FeatureChecker::default(); let mut checker = FeatureChecker::default();
checker.set_exit_cb(Box::new(crate::unstable_exit_cb)); checker.set_exit_cb(Box::new(crate::unstable_exit_cb));
checker.set_warn_cb(Box::new(crate::unstable_warn_cb));
if cli_options.legacy_unstable_flag() {
checker.enable_legacy_unstable();
checker.warn_on_legacy_unstable();
}
let unstable_features = cli_options.unstable_features(); let unstable_features = cli_options.unstable_features();
for (flag_name, _, _) in crate::UNSTABLE_GRANULAR_FLAGS { for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
if unstable_features.contains(&flag_name.to_string()) { if unstable_features.contains(&granular_flag.name.to_string()) {
checker.enable_feature(flag_name); checker.enable_feature(granular_flag.name);
} }
} }
@ -740,7 +883,10 @@ impl CliFactory {
) -> Result<DenoCompileBinaryWriter, AnyError> { ) -> Result<DenoCompileBinaryWriter, AnyError> {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new( Ok(DenoCompileBinaryWriter::new(
self.cjs_tracker()?,
self.cli_options()?,
self.deno_dir()?, self.deno_dir()?,
self.emitter()?,
self.file_fetcher()?, self.file_fetcher()?,
self.http_client_provider(), self.http_client_provider(),
self.npm_resolver().await?.as_ref(), self.npm_resolver().await?.as_ref(),
@ -749,66 +895,87 @@ impl CliFactory {
)) ))
} }
pub fn root_permissions_container(
&self,
) -> Result<&PermissionsContainer, AnyError> {
self
.services
.root_permissions_container
.get_or_try_init(|| {
let desc_parser = self.permission_desc_parser()?.clone();
let permissions = Permissions::from_options(
desc_parser.as_ref(),
&self.cli_options()?.permissions_options(),
)?;
Ok(PermissionsContainer::new(desc_parser, permissions))
})
}
pub async fn create_cli_main_worker_factory( pub async fn create_cli_main_worker_factory(
&self, &self,
) -> Result<CliMainWorkerFactory, AnyError> { ) -> Result<CliMainWorkerFactory, AnyError> {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
let fs = self.fs();
let node_resolver = self.node_resolver().await?; let node_resolver = self.node_resolver().await?;
let npm_resolver = self.npm_resolver().await?; let npm_resolver = self.npm_resolver().await?;
let fs = self.fs(); let cli_npm_resolver = self.npm_resolver().await?.clone();
let cli_node_resolver = self.cli_node_resolver().await?; let in_npm_pkg_checker = self.in_npm_pkg_checker()?;
let maybe_file_watcher_communicator = if cli_options.has_hmr() { let maybe_file_watcher_communicator = if cli_options.has_hmr() {
Some(self.watcher_communicator.clone().unwrap()) Some(self.watcher_communicator.clone().unwrap())
} else { } else {
None None
}; };
let node_code_translator = self.node_code_translator().await?;
let cjs_tracker = self.cjs_tracker()?.clone();
let pkg_json_resolver = self.pkg_json_resolver().clone();
let npm_req_resolver = self.npm_req_resolver().await?;
Ok(CliMainWorkerFactory::new( Ok(CliMainWorkerFactory::new(
StorageKeyResolver::from_options(cli_options),
cli_options.sub_command().clone(),
npm_resolver.clone(),
node_resolver.clone(),
self.blob_store().clone(), self.blob_store().clone(),
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
},
self.feature_checker()?.clone(),
fs.clone(),
maybe_file_watcher_communicator,
self.maybe_inspector_server()?.clone(),
cli_options.maybe_lockfile().cloned(),
Box::new(CliModuleLoaderFactory::new( Box::new(CliModuleLoaderFactory::new(
cli_options, cli_options,
cjs_tracker,
if cli_options.code_cache_enabled() { if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone()) Some(self.code_cache()?.clone())
} else { } else {
None None
}, },
self.emitter()?.clone(), self.emitter()?.clone(),
fs.clone(),
in_npm_pkg_checker.clone(),
self.main_module_graph_container().await?.clone(), self.main_module_graph_container().await?.clone(),
self.module_load_preparer().await?.clone(), self.module_load_preparer().await?.clone(),
cli_node_resolver.clone(), node_code_translator.clone(),
node_resolver.clone(),
npm_req_resolver.clone(),
cli_npm_resolver.clone(),
NpmModuleLoader::new( NpmModuleLoader::new(
self.cjs_resolutions().clone(), self.cjs_tracker()?.clone(),
self.node_code_translator().await?.clone(),
fs.clone(), fs.clone(),
cli_node_resolver.clone(), node_code_translator.clone(),
), ),
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
self.resolver().await?.clone(), self.resolver().await?.clone(),
)), )),
node_resolver.clone(),
npm_resolver.clone(),
pkg_json_resolver,
self.root_cert_store_provider().clone(), self.root_cert_store_provider().clone(),
self.fs().clone(), self.root_permissions_container()?.clone(),
maybe_file_watcher_communicator, StorageKeyResolver::from_options(cli_options),
self.maybe_inspector_server()?.clone(), cli_options.sub_command().clone(),
cli_options.maybe_lockfile().cloned(),
self.feature_checker()?.clone(),
self.create_cli_main_worker_options()?, self.create_cli_main_worker_options()?,
cli_options.node_ipc_fd(), self.cli_options()?.otel_config(),
cli_options.serve_port(),
cli_options.serve_host(),
cli_options.enable_future_features(),
// TODO(bartlomieju): temporarily disabled
// cli_options.disable_deprecated_api_warning,
true,
cli_options.verbose_deprecated_api_warning,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
},
)) ))
} }
@ -857,7 +1024,6 @@ impl CliFactory {
inspect_wait: cli_options.inspect_wait().is_some(), inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(), strace_ops: cli_options.strace_ops().clone(),
is_inspecting: cli_options.is_inspecting(), is_inspecting: cli_options.is_inspecting(),
is_npm_main: cli_options.is_npm_main(),
location: cli_options.location_flag().clone(), location: cli_options.location_flag().clone(),
// if the user ran a binary command, we'll need to set process.argv[0] // if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno // to be the name of the binary command instead of deno
@ -870,9 +1036,11 @@ impl CliFactory {
unsafely_ignore_certificate_errors: cli_options unsafely_ignore_certificate_errors: cli_options
.unsafely_ignore_certificate_errors() .unsafely_ignore_certificate_errors()
.clone(), .clone(),
unstable: cli_options.legacy_unstable_flag(),
create_hmr_runner, create_hmr_runner,
create_coverage_collector, create_coverage_collector,
node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(),
}) })
} }
} }

View file

@ -11,7 +11,6 @@ use crate::http_util::HttpClientProvider;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::generic_error; use deno_core::error::generic_error;
@ -22,8 +21,10 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::LoaderChecksum; use deno_graph::source::LoaderChecksum;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::BlobStore; use deno_runtime::deno_web::BlobStore;
use http::header;
use log::debug; use log::debug;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
@ -52,6 +53,25 @@ pub enum FileOrRedirect {
Redirect(ModuleSpecifier), Redirect(ModuleSpecifier),
} }
impl FileOrRedirect {
fn from_deno_cache_entry(
specifier: &ModuleSpecifier,
cache_entry: deno_cache_dir::CacheEntry,
) -> Result<Self, AnyError> {
if let Some(redirect_to) = cache_entry.metadata.headers.get("location") {
let redirect =
deno_core::resolve_import(redirect_to, specifier.as_str())?;
Ok(FileOrRedirect::Redirect(redirect))
} else {
Ok(FileOrRedirect::File(File {
specifier: specifier.clone(),
maybe_headers: Some(cache_entry.metadata.headers),
source: Arc::from(cache_entry.content),
}))
}
}
}
/// A structure representing a source file. /// A structure representing a source file.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub struct File { pub struct File {
@ -117,14 +137,23 @@ impl MemoryFiles {
/// Fetch a source file from the local file system. /// Fetch a source file from the local file system.
fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> { fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
let local = specifier.to_file_path().map_err(|_| { let local = url_to_file_path(specifier).map_err(|_| {
uri_error(format!("Invalid file path.\n Specifier: {specifier}")) uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
})?; })?;
// If it doesnt have a extension, we want to treat it as typescript by default
let headers = if local.extension().is_none() {
Some(HashMap::from([(
"content-type".to_string(),
"application/typescript".to_string(),
)]))
} else {
None
};
let bytes = fs::read(local)?; let bytes = fs::read(local)?;
Ok(File { Ok(File {
specifier: specifier.clone(), specifier: specifier.clone(),
maybe_headers: None, maybe_headers: headers,
source: bytes.into(), source: bytes.into(),
}) })
} }
@ -135,17 +164,36 @@ fn get_validated_scheme(
) -> Result<String, AnyError> { ) -> Result<String, AnyError> {
let scheme = specifier.scheme(); let scheme = specifier.scheme();
if !SUPPORTED_SCHEMES.contains(&scheme) { if !SUPPORTED_SCHEMES.contains(&scheme) {
// NOTE(bartlomieju): this message list additional `npm` and `jsr` schemes, but they should actually be handled
// before `file_fetcher.rs` APIs are even hit.
let mut all_supported_schemes = SUPPORTED_SCHEMES.to_vec();
all_supported_schemes.extend_from_slice(&["npm", "jsr"]);
all_supported_schemes.sort();
let scheme_list = all_supported_schemes
.iter()
.map(|scheme| format!(" - \"{}\"", scheme))
.collect::<Vec<_>>()
.join("\n");
Err(generic_error(format!( Err(generic_error(format!(
"Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes: {SUPPORTED_SCHEMES:#?}" "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes:\n{}",
scheme_list
))) )))
} else { } else {
Ok(scheme.to_string()) Ok(scheme.to_string())
} }
} }
#[derive(Debug, Copy, Clone)]
pub enum FetchPermissionsOptionRef<'a> {
AllowAll,
DynamicContainer(&'a PermissionsContainer),
StaticContainer(&'a PermissionsContainer),
}
pub struct FetchOptions<'a> { pub struct FetchOptions<'a> {
pub specifier: &'a ModuleSpecifier, pub specifier: &'a ModuleSpecifier,
pub permissions: &'a PermissionsContainer, pub permissions: FetchPermissionsOptionRef<'a>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_accept: Option<&'a str>, pub maybe_accept: Option<&'a str>,
pub maybe_cache_setting: Option<&'a CacheSetting>, pub maybe_cache_setting: Option<&'a CacheSetting>,
} }
@ -238,45 +286,32 @@ impl FileFetcher {
); );
let cache_key = self.http_cache.cache_item_key(specifier)?; // compute this once let cache_key = self.http_cache.cache_item_key(specifier)?; // compute this once
let Some(headers) = self.http_cache.read_headers(&cache_key)? else { let result = self.http_cache.get(
return Ok(None);
};
if let Some(redirect_to) = headers.get("location") {
let redirect =
deno_core::resolve_import(redirect_to, specifier.as_str())?;
return Ok(Some(FileOrRedirect::Redirect(redirect)));
}
let result = self.http_cache.read_file_bytes(
&cache_key, &cache_key,
maybe_checksum maybe_checksum
.as_ref() .as_ref()
.map(|c| deno_cache_dir::Checksum::new(c.as_str())), .map(|c| deno_cache_dir::Checksum::new(c.as_str())),
deno_cache_dir::GlobalToLocalCopy::Allow,
); );
let bytes = match result { match result {
Ok(Some(bytes)) => bytes, Ok(Some(cache_data)) => Ok(Some(FileOrRedirect::from_deno_cache_entry(
Ok(None) => return Ok(None), specifier, cache_data,
)?)),
Ok(None) => Ok(None),
Err(err) => match err { Err(err) => match err {
deno_cache_dir::CacheReadFileError::Io(err) => return Err(err.into()), deno_cache_dir::CacheReadFileError::Io(err) => Err(err.into()),
deno_cache_dir::CacheReadFileError::ChecksumIntegrity(err) => { deno_cache_dir::CacheReadFileError::ChecksumIntegrity(err) => {
// convert to the equivalent deno_graph error so that it // convert to the equivalent deno_graph error so that it
// enhances it if this is passed to deno_graph // enhances it if this is passed to deno_graph
return Err( Err(
deno_graph::source::ChecksumIntegrityError { deno_graph::source::ChecksumIntegrityError {
actual: err.actual, actual: err.actual,
expected: err.expected, expected: err.expected,
} }
.into(), .into(),
); )
} }
}, },
}; }
Ok(Some(FileOrRedirect::File(File {
specifier: specifier.clone(),
maybe_headers: Some(headers),
source: Arc::from(bytes),
})))
} }
/// Convert a data URL into a file, resulting in an error if the URL is /// Convert a data URL into a file, resulting in an error if the URL is
@ -311,7 +346,7 @@ impl FileFetcher {
) )
})?; })?;
let bytes = blob.read_all().await?; let bytes = blob.read_all().await;
let headers = let headers =
HashMap::from([("content-type".to_string(), blob.media_type.clone())]); HashMap::from([("content-type".to_string(), blob.media_type.clone())]);
@ -328,6 +363,7 @@ impl FileFetcher {
maybe_accept: Option<&str>, maybe_accept: Option<&str>,
cache_setting: &CacheSetting, cache_setting: &CacheSetting,
maybe_checksum: Option<&LoaderChecksum>, maybe_checksum: Option<&LoaderChecksum>,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<FileOrRedirect, AnyError> { ) -> Result<FileOrRedirect, AnyError> {
debug!( debug!(
"FileFetcher::fetch_remote_no_follow - specifier: {}", "FileFetcher::fetch_remote_no_follow - specifier: {}",
@ -363,12 +399,30 @@ impl FileFetcher {
); );
} }
let maybe_etag = self let maybe_etag_cache_entry = self
.http_cache .http_cache
.cache_item_key(specifier) .cache_item_key(specifier)
.ok() .ok()
.and_then(|key| self.http_cache.read_headers(&key).ok().flatten()) .and_then(|key| {
.and_then(|headers| headers.get("etag").cloned()); self
.http_cache
.get(
&key,
maybe_checksum
.as_ref()
.map(|c| deno_cache_dir::Checksum::new(c.as_str())),
)
.ok()
.flatten()
})
.and_then(|cache_entry| {
cache_entry
.metadata
.headers
.get("etag")
.cloned()
.map(|etag| (cache_entry, etag))
});
let maybe_auth_token = self.auth_tokens.get(specifier); let maybe_auth_token = self.auth_tokens.get(specifier);
async fn handle_request_or_server_error( async fn handle_request_or_server_error(
@ -390,7 +444,6 @@ impl FileFetcher {
} }
} }
let mut maybe_etag = maybe_etag;
let mut retried = false; // retry intermittent failures let mut retried = false; // retry intermittent failures
let result = loop { let result = loop {
let result = match self let result = match self
@ -399,31 +452,18 @@ impl FileFetcher {
.fetch_no_follow(FetchOnceArgs { .fetch_no_follow(FetchOnceArgs {
url: specifier.clone(), url: specifier.clone(),
maybe_accept: maybe_accept.map(ToOwned::to_owned), maybe_accept: maybe_accept.map(ToOwned::to_owned),
maybe_etag: maybe_etag.clone(), maybe_etag: maybe_etag_cache_entry
.as_ref()
.map(|(_, etag)| etag.clone()),
maybe_auth_token: maybe_auth_token.clone(), maybe_auth_token: maybe_auth_token.clone(),
maybe_auth: maybe_auth.clone(),
maybe_progress_guard: maybe_progress_guard.as_ref(), maybe_progress_guard: maybe_progress_guard.as_ref(),
}) })
.await? .await?
{ {
FetchOnceResult::NotModified => { FetchOnceResult::NotModified => {
let file_or_redirect = let (cache_entry, _) = maybe_etag_cache_entry.unwrap();
self.fetch_cached_no_follow(specifier, maybe_checksum)?; FileOrRedirect::from_deno_cache_entry(specifier, cache_entry)
match file_or_redirect {
Some(file_or_redirect) => Ok(file_or_redirect),
None => {
// Someone may have deleted the body from the cache since
// it's currently stored in a separate file from the headers,
// so delete the etag and try again
if maybe_etag.is_some() {
debug!("Cache body not found. Trying again without etag.");
maybe_etag = None;
continue;
} else {
// should never happen
bail!("Your deno cache directory is in an unrecoverable state. Please delete it and try again.")
}
}
}
} }
FetchOnceResult::Redirect(redirect_url, headers) => { FetchOnceResult::Redirect(redirect_url, headers) => {
self.http_cache.set(specifier, headers, &[])?; self.http_cache.set(specifier, headers, &[])?;
@ -507,16 +547,54 @@ impl FileFetcher {
} }
} }
#[inline(always)]
pub async fn fetch_bypass_permissions(
&self,
specifier: &ModuleSpecifier,
) -> Result<File, AnyError> {
self
.fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll)
.await
}
#[inline(always)]
pub async fn fetch_bypass_permissions_with_maybe_auth(
&self,
specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<File, AnyError> {
self
.fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll)
.await
}
/// Fetch a source file and asynchronously return it. /// Fetch a source file and asynchronously return it.
#[inline(always)]
pub async fn fetch( pub async fn fetch(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
permissions: &PermissionsContainer, permissions: &PermissionsContainer,
) -> Result<File, AnyError> {
self
.fetch_inner(
specifier,
None,
FetchPermissionsOptionRef::StaticContainer(permissions),
)
.await
}
async fn fetch_inner(
&self,
specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
permissions: FetchPermissionsOptionRef<'_>,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
self self
.fetch_with_options(FetchOptions { .fetch_with_options(FetchOptions {
specifier, specifier,
permissions, permissions,
maybe_auth,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: None, maybe_cache_setting: None,
}) })
@ -536,12 +614,14 @@ impl FileFetcher {
max_redirect: usize, max_redirect: usize,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
let mut specifier = Cow::Borrowed(options.specifier); let mut specifier = Cow::Borrowed(options.specifier);
let mut maybe_auth = options.maybe_auth.clone();
for _ in 0..=max_redirect { for _ in 0..=max_redirect {
match self match self
.fetch_no_follow_with_options(FetchNoFollowOptions { .fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions { fetch_options: FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: options.permissions, permissions: options.permissions,
maybe_auth: maybe_auth.clone(),
maybe_accept: options.maybe_accept, maybe_accept: options.maybe_accept,
maybe_cache_setting: options.maybe_cache_setting, maybe_cache_setting: options.maybe_cache_setting,
}, },
@ -553,6 +633,10 @@ impl FileFetcher {
return Ok(file); return Ok(file);
} }
FileOrRedirect::Redirect(redirect_specifier) => { FileOrRedirect::Redirect(redirect_specifier) => {
// If we were redirected to another origin, don't send the auth header anymore.
if redirect_specifier.origin() != specifier.origin() {
maybe_auth = None;
}
specifier = Cow::Owned(redirect_specifier); specifier = Cow::Owned(redirect_specifier);
} }
} }
@ -575,7 +659,23 @@ impl FileFetcher {
specifier specifier
); );
let scheme = get_validated_scheme(specifier)?; let scheme = get_validated_scheme(specifier)?;
options.permissions.check_specifier(specifier)?; match options.permissions {
FetchPermissionsOptionRef::AllowAll => {
// allow
}
FetchPermissionsOptionRef::StaticContainer(permissions) => {
permissions.check_specifier(
specifier,
deno_runtime::deno_permissions::CheckSpecifierKind::Static,
)?;
}
FetchPermissionsOptionRef::DynamicContainer(permissions) => {
permissions.check_specifier(
specifier,
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic,
)?;
}
}
if let Some(file) = self.memory_files.get(specifier) { if let Some(file) = self.memory_files.get(specifier) {
Ok(FileOrRedirect::File(file)) Ok(FileOrRedirect::File(file))
} else if scheme == "file" { } else if scheme == "file" {
@ -601,6 +701,7 @@ impl FileFetcher {
options.maybe_accept, options.maybe_accept,
options.maybe_cache_setting.unwrap_or(&self.cache_setting), options.maybe_cache_setting.unwrap_or(&self.cache_setting),
maybe_checksum, maybe_checksum,
options.maybe_auth,
) )
.await .await
} }
@ -661,7 +762,7 @@ mod tests {
maybe_temp_dir: Option<TempDir>, maybe_temp_dir: Option<TempDir>,
) -> (FileFetcher, TempDir, Arc<BlobStore>) { ) -> (FileFetcher, TempDir, Arc<BlobStore>) {
let temp_dir = maybe_temp_dir.unwrap_or_default(); let temp_dir = maybe_temp_dir.unwrap_or_default();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let blob_store: Arc<BlobStore> = Default::default(); let blob_store: Arc<BlobStore> = Default::default();
let file_fetcher = FileFetcher::new( let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)), Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)),
@ -676,9 +777,7 @@ mod tests {
async fn test_fetch(specifier: &ModuleSpecifier) -> (File, FileFetcher) { async fn test_fetch(specifier: &ModuleSpecifier) -> (File, FileFetcher) {
let (file_fetcher, _) = setup(CacheSetting::ReloadAll, None); let (file_fetcher, _) = setup(CacheSetting::ReloadAll, None);
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(specifier).await;
.fetch(specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
(result.unwrap(), file_fetcher) (result.unwrap(), file_fetcher)
} }
@ -692,7 +791,8 @@ mod tests {
.fetch_with_options_and_max_redirect( .fetch_with_options_and_max_redirect(
FetchOptions { FetchOptions {
specifier, specifier,
permissions: &PermissionsContainer::allow_all(), permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -788,9 +888,7 @@ mod tests {
}; };
file_fetcher.insert_memory_files(file.clone()); file_fetcher.insert_memory_files(file.clone());
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let result_file = result.unwrap(); let result_file = result.unwrap();
assert_eq!(result_file, file); assert_eq!(result_file, file);
@ -801,9 +899,7 @@ mod tests {
let (file_fetcher, _) = setup(CacheSetting::Use, None); let (file_fetcher, _) = setup(CacheSetting::Use, None);
let specifier = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap(); let specifier = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -832,9 +928,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -854,9 +948,7 @@ mod tests {
let specifier = let specifier =
ModuleSpecifier::parse("http://localhost:4545/subdir/mod2.ts").unwrap(); ModuleSpecifier::parse("http://localhost:4545/subdir/mod2.ts").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -874,9 +966,7 @@ mod tests {
.set(&specifier, headers.clone(), file.source.as_bytes()) .set(&specifier, headers.clone(), file.source.as_bytes())
.unwrap(); .unwrap();
let result = file_fetcher_01 let result = file_fetcher_01.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -900,9 +990,7 @@ mod tests {
.set(&specifier, headers.clone(), file.source.as_bytes()) .set(&specifier, headers.clone(), file.source.as_bytes())
.unwrap(); .unwrap();
let result = file_fetcher_02 let result = file_fetcher_02.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -913,7 +1001,7 @@ mod tests {
// This creates a totally new instance, simulating another Deno process // This creates a totally new instance, simulating another Deno process
// invocation and indicates to "cache bust". // invocation and indicates to "cache bust".
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = FileFetcher::new( let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new( Arc::new(GlobalHttpCache::new(
location, location,
@ -925,9 +1013,7 @@ mod tests {
Default::default(), Default::default(),
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!( assert_eq!(
@ -941,7 +1027,7 @@ mod tests {
async fn test_fetch_uses_cache() { async fn test_fetch_uses_cache() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let specifier = let specifier =
resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap(); resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap();
@ -958,9 +1044,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let cache_key = let cache_key =
file_fetcher.http_cache.cache_item_key(&specifier).unwrap(); file_fetcher.http_cache.cache_item_key(&specifier).unwrap();
@ -994,9 +1078,7 @@ mod tests {
Default::default(), Default::default(),
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let cache_key = let cache_key =
@ -1033,9 +1115,7 @@ mod tests {
resolve_url("http://localhost:4545/subdir/redirects/redirect1.js") resolve_url("http://localhost:4545/subdir/redirects/redirect1.js")
.unwrap(); .unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
assert_eq!(file.specifier, redirected_specifier); assert_eq!(file.specifier, redirected_specifier);
@ -1074,9 +1154,7 @@ mod tests {
resolve_url("http://localhost:4545/subdir/redirects/redirect1.js") resolve_url("http://localhost:4545/subdir/redirects/redirect1.js")
.unwrap(); .unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
assert_eq!(file.specifier, redirected_02_specifier); assert_eq!(file.specifier, redirected_02_specifier);
@ -1115,7 +1193,7 @@ mod tests {
async fn test_fetch_uses_cache_with_redirects() { async fn test_fetch_uses_cache_with_redirects() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let specifier = let specifier =
resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap(); resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap();
let redirected_specifier = let redirected_specifier =
@ -1134,9 +1212,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let cache_key = file_fetcher let cache_key = file_fetcher
@ -1174,7 +1250,7 @@ mod tests {
None, None,
); );
let result = file_fetcher let result = file_fetcher
.fetch(&redirected_specifier, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&redirected_specifier)
.await; .await;
assert!(result.is_ok()); assert!(result.is_ok());
@ -1215,7 +1291,8 @@ mod tests {
.fetch_with_options_and_max_redirect( .fetch_with_options_and_max_redirect(
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: &PermissionsContainer::allow_all(), permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1228,7 +1305,8 @@ mod tests {
.fetch_with_options_and_max_redirect( .fetch_with_options_and_max_redirect(
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: &PermissionsContainer::allow_all(), permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1256,9 +1334,7 @@ mod tests {
resolve_url("http://localhost:4550/subdir/redirects/redirect1.js") resolve_url("http://localhost:4550/subdir/redirects/redirect1.js")
.unwrap(); .unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
assert_eq!(file.specifier, redirected_specifier); assert_eq!(file.specifier, redirected_specifier);
@ -1287,7 +1363,7 @@ mod tests {
async fn test_fetch_no_remote() { async fn test_fetch_no_remote() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = FileFetcher::new( let file_fetcher = FileFetcher::new(
Arc::new(GlobalHttpCache::new( Arc::new(GlobalHttpCache::new(
location, location,
@ -1302,9 +1378,7 @@ mod tests {
let specifier = let specifier =
resolve_url("http://localhost:4545/run/002_hello.ts").unwrap(); resolve_url("http://localhost:4545/run/002_hello.ts").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_err()); assert!(result.is_err());
let err = result.unwrap_err(); let err = result.unwrap_err();
assert_eq!(get_custom_error_class(&err), Some("NoRemote")); assert_eq!(get_custom_error_class(&err), Some("NoRemote"));
@ -1315,7 +1389,7 @@ mod tests {
async fn test_fetch_cache_only() { async fn test_fetch_cache_only() {
let _http_server_guard = test_util::http_server(); let _http_server_guard = test_util::http_server();
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("deps").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher_01 = FileFetcher::new( let file_fetcher_01 = FileFetcher::new(
Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)), Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)),
CacheSetting::Only, CacheSetting::Only,
@ -1335,22 +1409,16 @@ mod tests {
let specifier = let specifier =
resolve_url("http://localhost:4545/run/002_hello.ts").unwrap(); resolve_url("http://localhost:4545/run/002_hello.ts").unwrap();
let result = file_fetcher_01 let result = file_fetcher_01.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_err()); assert!(result.is_err());
let err = result.unwrap_err(); let err = result.unwrap_err();
assert_eq!(err.to_string(), "Specifier not found in cache: \"http://localhost:4545/run/002_hello.ts\", --cached-only is specified."); assert_eq!(err.to_string(), "Specifier not found in cache: \"http://localhost:4545/run/002_hello.ts\", --cached-only is specified.");
assert_eq!(get_custom_error_class(&err), Some("NotCached")); assert_eq!(get_custom_error_class(&err), Some("NotCached"));
let result = file_fetcher_02 let result = file_fetcher_02.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let result = file_fetcher_01 let result = file_fetcher_01.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
} }
@ -1360,17 +1428,13 @@ mod tests {
let fixture_path = temp_dir.path().join("mod.ts"); let fixture_path = temp_dir.path().join("mod.ts");
let specifier = ModuleSpecifier::from_file_path(&fixture_path).unwrap(); let specifier = ModuleSpecifier::from_file_path(&fixture_path).unwrap();
fs::write(fixture_path.clone(), r#"console.log("hello deno");"#).unwrap(); fs::write(fixture_path.clone(), r#"console.log("hello deno");"#).unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!(&*file.source, r#"console.log("hello deno");"#); assert_eq!(&*file.source, r#"console.log("hello deno");"#);
fs::write(fixture_path, r#"console.log("goodbye deno");"#).unwrap(); fs::write(fixture_path, r#"console.log("goodbye deno");"#).unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap().into_text_decoded().unwrap(); let file = result.unwrap().into_text_decoded().unwrap();
assert_eq!(&*file.source, r#"console.log("goodbye deno");"#); assert_eq!(&*file.source, r#"console.log("goodbye deno");"#);
@ -1384,18 +1448,14 @@ mod tests {
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let specifier = let specifier =
ModuleSpecifier::parse("http://localhost:4545/dynamic").unwrap(); ModuleSpecifier::parse("http://localhost:4545/dynamic").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let first = file.source; let first = file.source;
let (file_fetcher, _) = let (file_fetcher, _) =
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let second = file.source; let second = file.source;
@ -1411,18 +1471,14 @@ mod tests {
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let specifier = let specifier =
ModuleSpecifier::parse("http://localhost:4545/dynamic_cache").unwrap(); ModuleSpecifier::parse("http://localhost:4545/dynamic_cache").unwrap();
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let first = file.source; let first = file.source;
let (file_fetcher, _) = let (file_fetcher, _) =
setup(CacheSetting::RespectHeaders, Some(temp_dir.clone())); setup(CacheSetting::RespectHeaders, Some(temp_dir.clone()));
let result = file_fetcher let result = file_fetcher.fetch_bypass_permissions(&specifier).await;
.fetch(&specifier, &PermissionsContainer::allow_all())
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let file = result.unwrap(); let file = result.unwrap();
let second = file.source; let second = file.source;
@ -1480,13 +1536,10 @@ mod tests {
let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap(); let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap();
let bytes = file_fetcher let bytes = file_fetcher
.http_cache .http_cache
.read_file_bytes( .get(&cache_key, None)
&cache_key,
None,
deno_cache_dir::GlobalToLocalCopy::Allow,
)
.unwrap() .unwrap()
.unwrap(); .unwrap()
.content;
String::from_utf8(bytes).unwrap() String::from_utf8(bytes).unwrap()
} }

View file

@ -3,15 +3,18 @@
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPatternSet;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock; use deno_core::parking_lot::RwLock;
use deno_core::resolve_url_or_path;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use deno_runtime::colors; use deno_runtime::colors;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::module_loader::ModuleLoadPreparer; use crate::module_loader::ModuleLoadPreparer;
use crate::util::fs::collect_specifiers;
use crate::util::path::is_script_ext;
pub trait ModuleGraphContainer: Clone + 'static { pub trait ModuleGraphContainer: Clone + 'static {
/// Acquires a permit to modify the module graph without other code /// Acquires a permit to modify the module graph without other code
@ -42,12 +45,14 @@ pub struct MainModuleGraphContainer {
inner: Arc<RwLock<Arc<ModuleGraph>>>, inner: Arc<RwLock<Arc<ModuleGraph>>>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
root_permissions: PermissionsContainer,
} }
impl MainModuleGraphContainer { impl MainModuleGraphContainer {
pub fn new( pub fn new(
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
root_permissions: PermissionsContainer,
) -> Self { ) -> Self {
Self { Self {
update_queue: Default::default(), update_queue: Default::default(),
@ -56,12 +61,14 @@ impl MainModuleGraphContainer {
)))), )))),
cli_options, cli_options,
module_load_preparer, module_load_preparer,
root_permissions,
} }
} }
pub async fn check_specifiers( pub async fn check_specifiers(
&self, &self,
specifiers: &[ModuleSpecifier], specifiers: &[ModuleSpecifier],
ext_overwrite: Option<&String>,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let mut graph_permit = self.acquire_update_permit().await; let mut graph_permit = self.acquire_update_permit().await;
let graph = graph_permit.graph_mut(); let graph = graph_permit.graph_mut();
@ -72,7 +79,8 @@ impl MainModuleGraphContainer {
specifiers, specifiers,
false, false,
self.cli_options.ts_type_lib_window(), self.cli_options.ts_type_lib_window(),
PermissionsContainer::allow_all(), self.root_permissions.clone(),
ext_overwrite,
) )
.await?; .await?;
graph_permit.commit(); graph_permit.commit();
@ -91,7 +99,7 @@ impl MainModuleGraphContainer {
log::warn!("{} No matching files found.", colors::yellow("Warning")); log::warn!("{} No matching files found.", colors::yellow("Warning"));
} }
self.check_specifiers(&specifiers).await self.check_specifiers(&specifiers, None).await
} }
pub fn collect_specifiers( pub fn collect_specifiers(
@ -99,24 +107,20 @@ impl MainModuleGraphContainer {
files: &[String], files: &[String],
) -> Result<Vec<ModuleSpecifier>, AnyError> { ) -> Result<Vec<ModuleSpecifier>, AnyError> {
let excludes = self.cli_options.workspace().resolve_config_excludes()?; let excludes = self.cli_options.workspace().resolve_config_excludes()?;
Ok( let include_patterns =
files PathOrPatternSet::from_include_relative_path_or_patterns(
.iter() self.cli_options.initial_cwd(),
.filter_map(|file| { files,
let file_url = )?;
resolve_url_or_path(file, self.cli_options.initial_cwd()).ok()?; let file_patterns = FilePatterns {
if file_url.scheme() != "file" { base: self.cli_options.initial_cwd().to_path_buf(),
return Some(file_url); include: Some(include_patterns),
} exclude: excludes,
// ignore local files that match any of files listed in `exclude` option };
let file_path = file_url.to_file_path().ok()?; collect_specifiers(
if excludes.matches_path(&file_path) { file_patterns,
None self.cli_options.vendor_dir_path().map(ToOwned::to_owned),
} else { |e| is_script_ext(e.path),
Some(file_url)
}
})
.collect::<Vec<_>>(),
) )
} }
} }

View file

@ -13,52 +13,59 @@ use crate::colors;
use crate::errors::get_error_class_name; use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver; use crate::resolver::CjsTracker;
use crate::resolver::SloppyImportsResolver; use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check; use crate::tools::check;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path; use crate::util::fs::canonicalize_path;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::JsrPackageConfig; use deno_config::workspace::JsrPackageConfig;
use deno_emit::LoaderChecksum; use deno_core::anyhow::bail;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::ResolutionMode;
use deno_graph::FillFromLockfileOptions;
use deno_graph::JsrLoadError; use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError; use deno_graph::ModuleLoadError;
use deno_graph::WorkspaceFastCheckOption; use deno_graph::WorkspaceFastCheckOption;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_core::anyhow::bail;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::Loader; use deno_graph::source::Loader;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError; use deno_graph::source::ResolveError;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_graph::Module;
use deno_graph::ModuleError; use deno_graph::ModuleError;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError; use deno_graph::ModuleGraphError;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use deno_graph::SpecifierError; use deno_graph::SpecifierError;
use deno_path_util::url_to_file_path;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use import_map::ImportMapError; use import_map::ImportMapError;
use node_resolver::InNpmPackageChecker;
use std::collections::HashSet; use std::collections::HashSet;
use std::error::Error;
use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
#[derive(Clone, Copy)] #[derive(Clone)]
pub struct GraphValidOptions { pub struct GraphValidOptions {
pub check_js: bool, pub check_js: bool,
pub follow_type_only: bool, pub kind: GraphKind,
pub is_vendoring: bool, /// Whether to exit the process for integrity check errors such as
/// Whether to exit the process for lockfile errors. /// lockfile checksum mismatches and JSR integrity failures.
/// Otherwise, surfaces lockfile errors as errors. /// Otherwise, surfaces integrity errors as errors.
pub exit_lockfile_errors: bool, pub exit_integrity_errors: bool,
} }
/// Check if `roots` and their deps are available. Returns `Ok(())` if /// Check if `roots` and their deps are available. Returns `Ok(())` if
@ -74,17 +81,54 @@ pub fn graph_valid(
roots: &[ModuleSpecifier], roots: &[ModuleSpecifier],
options: GraphValidOptions, options: GraphValidOptions,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
if options.exit_lockfile_errors { if options.exit_integrity_errors {
graph_exit_lock_errors(graph); graph_exit_integrity_errors(graph);
} }
let mut errors = graph let mut errors = graph_walk_errors(
graph,
fs,
roots,
GraphWalkErrorsOptions {
check_js: options.check_js,
kind: options.kind,
},
);
if let Some(error) = errors.next() {
Err(error)
} else {
// finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error(
get_error_class_name(err),
format_deno_graph_error(err.as_ref().deref()),
));
}
Ok(())
}
}
#[derive(Clone)]
pub struct GraphWalkErrorsOptions {
pub check_js: bool,
pub kind: GraphKind,
}
/// Walks the errors found in the module graph that should be surfaced to users
/// and enhances them with CLI information.
pub fn graph_walk_errors<'a>(
graph: &'a ModuleGraph,
fs: &'a Arc<dyn FileSystem>,
roots: &'a [ModuleSpecifier],
options: GraphWalkErrorsOptions,
) -> impl Iterator<Item = AnyError> + 'a {
graph
.walk( .walk(
roots.iter(), roots.iter(),
deno_graph::WalkOptions { deno_graph::WalkOptions {
check_js: options.check_js, check_js: options.check_js,
follow_type_only: options.follow_type_only, kind: options.kind,
follow_dynamic: options.is_vendoring, follow_dynamic: false,
prefer_fast_check_graph: false, prefer_fast_check_graph: false,
}, },
) )
@ -108,9 +152,9 @@ pub fn graph_valid(
) )
} }
ModuleGraphError::ModuleError(error) => { ModuleGraphError::ModuleError(error) => {
enhanced_lockfile_error_message(error) enhanced_integrity_error_message(error)
.or_else(|| enhanced_sloppy_imports_error_message(fs, error)) .or_else(|| enhanced_sloppy_imports_error_message(fs, error))
.unwrap_or_else(|| format!("{}", error)) .unwrap_or_else(|| format_deno_graph_error(error))
} }
}; };
@ -131,55 +175,20 @@ pub fn graph_valid(
return None; return None;
} }
if options.is_vendoring {
// warn about failing dynamic imports when vendoring, but don't fail completely
if matches!(
error,
ModuleGraphError::ModuleError(ModuleError::MissingDynamic(_, _))
) {
log::warn!("Ignoring: {}", message);
return None;
}
// ignore invalid downgrades and invalid local imports when vendoring
match &error {
ModuleGraphError::ResolutionError(err)
| ModuleGraphError::TypesResolutionError(err) => {
if matches!(
err,
ResolutionError::InvalidDowngrade { .. }
| ResolutionError::InvalidLocalImport { .. }
) {
return None;
}
}
ModuleGraphError::ModuleError(_) => {}
}
}
Some(custom_error(get_error_class_name(&error.into()), message)) Some(custom_error(get_error_class_name(&error.into()), message))
}); })
if let Some(error) = errors.next() {
Err(error)
} else {
// finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error(get_error_class_name(err), format!("{}", err)));
}
Ok(())
}
} }
pub fn graph_exit_lock_errors(graph: &ModuleGraph) { pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
for error in graph.module_errors() { for error in graph.module_errors() {
exit_for_lockfile_error(error); exit_for_integrity_error(error);
} }
} }
fn exit_for_lockfile_error(err: &ModuleError) { fn exit_for_integrity_error(err: &ModuleError) {
if let Some(err_message) = enhanced_lockfile_error_message(err) { if let Some(err_message) = enhanced_integrity_error_message(err) {
log::error!("{} {}", colors::red("error:"), err_message); log::error!("{} {}", colors::red("error:"), err_message);
std::process::exit(10); deno_runtime::exit(10);
} }
} }
@ -245,6 +254,19 @@ impl ModuleGraphCreator {
package_configs: &[JsrPackageConfig], package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool, build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> { ) -> Result<ModuleGraph, AnyError> {
fn graph_has_external_remote(graph: &ModuleGraph) -> bool {
// Earlier on, we marked external non-JSR modules as external.
// If the graph contains any of those, it would cause type checking
// to crash, so since publishing is going to fail anyway, skip type
// checking.
graph.modules().any(|module| match module {
deno_graph::Module::External(external_module) => {
matches!(external_module.specifier.scheme(), "http" | "https")
}
_ => false,
})
}
let mut roots = Vec::new(); let mut roots = Vec::new();
for package_config in package_configs { for package_config in package_configs {
roots.extend(package_config.config_file.resolve_export_value_urls()?); roots.extend(package_config.config_file.resolve_export_value_urls()?);
@ -258,9 +280,12 @@ impl ModuleGraphCreator {
}) })
.await?; .await?;
self.graph_valid(&graph)?; self.graph_valid(&graph)?;
if self.options.type_check_mode().is_true() { if self.options.type_check_mode().is_true()
&& !graph_has_external_remote(&graph)
{
self.type_check_graph(graph.clone()).await?; self.type_check_graph(graph.clone()).await?;
} }
if build_fast_check_graph { if build_fast_check_graph {
let fast_check_workspace_members = package_configs let fast_check_workspace_members = package_configs
.iter() .iter()
@ -275,6 +300,7 @@ impl ModuleGraphCreator {
}, },
)?; )?;
} }
Ok(graph) Ok(graph)
} }
@ -355,49 +381,55 @@ pub struct BuildFastCheckGraphOptions<'a> {
} }
pub struct ModuleGraphBuilder { pub struct ModuleGraphBuilder {
options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache, module_info_cache: Arc<ModuleInfoCache>,
file_fetcher: Arc<FileFetcher>, npm_resolver: Arc<dyn CliNpmResolver>,
global_http_cache: Arc<GlobalHttpCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer,
} }
impl ModuleGraphBuilder { impl ModuleGraphBuilder {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache, module_info_cache: Arc<ModuleInfoCache>,
file_fetcher: Arc<FileFetcher>, npm_resolver: Arc<dyn CliNpmResolver>,
global_http_cache: Arc<GlobalHttpCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer,
) -> Self { ) -> Self {
Self { Self {
options,
caches, caches,
cjs_tracker,
cli_options,
file_fetcher,
fs, fs,
resolver, global_http_cache,
npm_resolver, in_npm_pkg_checker,
module_info_cache,
parsed_source_cache,
lockfile, lockfile,
maybe_file_watcher_reporter, maybe_file_watcher_reporter,
emit_cache, module_info_cache,
file_fetcher, npm_resolver,
global_http_cache, parsed_source_cache,
resolver,
root_permissions_container,
} }
} }
@ -463,7 +495,7 @@ impl ModuleGraphBuilder {
.content .content
.packages .packages
.jsr .jsr
.get(&package_nv.to_string()) .get(package_nv)
.map(|s| LoaderChecksum::new(s.integrity.clone())) .map(|s| LoaderChecksum::new(s.integrity.clone()))
} }
@ -477,31 +509,27 @@ impl ModuleGraphBuilder {
self self
.0 .0
.lock() .lock()
.insert_package(package_nv.to_string(), checksum.into_string()); .insert_package(package_nv.clone(), checksum.into_string());
} }
} }
let maybe_imports = if options.graph_kind.include_types() { let maybe_imports = if options.graph_kind.include_types() {
self.options.to_compiler_option_types()? self.cli_options.to_compiler_option_types()?
} else { } else {
Vec::new() Vec::new()
}; };
let analyzer = self let analyzer = self.module_info_cache.as_module_analyzer();
.module_info_cache
.as_module_analyzer(&self.parsed_source_cache);
let mut loader = match options.loader { let mut loader = match options.loader {
Some(loader) => MutLoaderRef::Borrowed(loader), Some(loader) => MutLoaderRef::Borrowed(loader),
None => MutLoaderRef::Owned(self.create_graph_loader()), None => MutLoaderRef::Owned(self.create_graph_loader()),
}; };
let cli_resolver = &self.resolver; let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver(); let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
let maybe_file_watcher_reporter = self let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter .maybe_file_watcher_reporter
.as_ref() .as_ref()
.map(|r| r.as_reporter()); .map(|r| r.as_reporter());
let workspace_members =
self.options.resolve_deno_graph_workspace_members()?;
let mut locker = self let mut locker = self
.lockfile .lockfile
.as_ref() .as_ref()
@ -515,14 +543,13 @@ impl ModuleGraphBuilder {
imports: maybe_imports, imports: maybe_imports,
is_dynamic: options.is_dynamic, is_dynamic: options.is_dynamic,
passthrough_jsr_specifiers: false, passthrough_jsr_specifiers: false,
workspace_members: &workspace_members,
executor: Default::default(), executor: Default::default(),
file_system: &DenoGraphFsAdapter(self.fs.as_ref()), file_system: &DenoGraphFsAdapter(self.fs.as_ref()),
jsr_url_provider: &CliJsrUrlProvider, jsr_url_provider: &CliJsrUrlProvider,
npm_resolver: Some(&graph_npm_resolver), npm_resolver: Some(&graph_npm_resolver),
module_analyzer: &analyzer, module_analyzer: &analyzer,
reporter: maybe_file_watcher_reporter, reporter: maybe_file_watcher_reporter,
resolver: Some(graph_resolver), resolver: Some(&graph_resolver),
locker: locker.as_mut().map(|l| l as _), locker: locker.as_mut().map(|l| l as _),
}, },
) )
@ -538,7 +565,12 @@ impl ModuleGraphBuilder {
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
// ensure an "npm install" is done if the user has explicitly // ensure an "npm install" is done if the user has explicitly
// opted into using a node_modules directory // opted into using a node_modules directory
if self.options.node_modules_dir_enablement() == Some(true) { if self
.cli_options
.node_modules_dir()?
.map(|m| m.uses_node_modules_dir())
.unwrap_or(false)
{
if let Some(npm_resolver) = self.npm_resolver.as_managed() { if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?; npm_resolver.ensure_top_level_package_json_install().await?;
} }
@ -550,28 +582,19 @@ impl ModuleGraphBuilder {
// populate the information from the lockfile // populate the information from the lockfile
if let Some(lockfile) = &self.lockfile { if let Some(lockfile) = &self.lockfile {
let lockfile = lockfile.lock(); let lockfile = lockfile.lock();
for (from, to) in &lockfile.content.redirects { graph.fill_from_lockfile(FillFromLockfileOptions {
if let Ok(from) = ModuleSpecifier::parse(from) { redirects: lockfile
if let Ok(to) = ModuleSpecifier::parse(to) { .content
if !matches!(from.scheme(), "file" | "npm" | "jsr") { .redirects
graph.redirects.insert(from, to); .iter()
} .map(|(from, to)| (from.as_str(), to.as_str())),
} package_specifiers: lockfile
} .content
} .packages
for (key, value) in &lockfile.content.packages.specifiers { .specifiers
if let Some(key) = key .iter()
.strip_prefix("jsr:") .map(|(dep, id)| (dep, id.as_str())),
.and_then(|key| PackageReq::from_str(key).ok()) });
{
if let Some(value) = value
.strip_prefix("jsr:")
.and_then(|value| PackageNv::from_str(value).ok())
{
graph.packages.add_nv(key, value);
}
}
}
} }
} }
@ -579,6 +602,12 @@ impl ModuleGraphBuilder {
let initial_package_deps_len = graph.packages.package_deps_sum(); let initial_package_deps_len = graph.packages.package_deps_sum();
let initial_package_mappings_len = graph.packages.mappings().len(); let initial_package_mappings_len = graph.packages.mappings().len();
if roots.iter().any(|r| r.scheme() == "npm")
&& self.npm_resolver.as_byonm().is_some()
{
bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead");
}
graph.build(roots, loader, options).await; graph.build(roots, loader, options).await;
let has_redirects_changed = graph.redirects.len() != initial_redirects_len; let has_redirects_changed = graph.redirects.len() != initial_redirects_len;
@ -606,16 +635,15 @@ impl ModuleGraphBuilder {
if has_jsr_package_mappings_changed { if has_jsr_package_mappings_changed {
for (from, to) in graph.packages.mappings() { for (from, to) in graph.packages.mappings() {
lockfile.insert_package_specifier( lockfile.insert_package_specifier(
format!("jsr:{}", from), JsrDepPackageReq::jsr(from.clone()),
format!("jsr:{}", to), to.version.to_string(),
); );
} }
} }
// jsr packages // jsr packages
if has_jsr_package_deps_changed { if has_jsr_package_deps_changed {
for (name, deps) in graph.packages.packages_with_deps() { for (nv, deps) in graph.packages.packages_with_deps() {
lockfile lockfile.add_package_deps(nv, deps.cloned());
.add_package_deps(&name.to_string(), deps.map(|s| s.to_string()));
} }
} }
} }
@ -644,16 +672,16 @@ impl ModuleGraphBuilder {
}; };
let parser = self.parsed_source_cache.as_capturing_parser(); let parser = self.parsed_source_cache.as_capturing_parser();
let cli_resolver = &self.resolver; let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver(); let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
graph.build_fast_check_type_graph( graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions { deno_graph::BuildFastCheckTypeGraphOptions {
jsr_url_provider: &CliJsrUrlProvider, es_parser: Some(&parser),
fast_check_cache: fast_check_cache.as_ref().map(|c| c as _), fast_check_cache: fast_check_cache.as_ref().map(|c| c as _),
fast_check_dts: false, fast_check_dts: false,
module_parser: Some(&parser), jsr_url_provider: &CliJsrUrlProvider,
resolver: Some(graph_resolver), resolver: Some(&graph_resolver),
npm_resolver: Some(&graph_npm_resolver), npm_resolver: Some(&graph_npm_resolver),
workspace_fast_check: options.workspace_fast_check, workspace_fast_check: options.workspace_fast_check,
}, },
@ -663,7 +691,7 @@ impl ModuleGraphBuilder {
/// Creates the default loader used for creating a graph. /// Creates the default loader used for creating a graph.
pub fn create_graph_loader(&self) -> cache::FetchCacher { pub fn create_graph_loader(&self) -> cache::FetchCacher {
self.create_fetch_cacher(PermissionsContainer::allow_all()) self.create_fetch_cacher(self.root_permissions_container.clone())
} }
pub fn create_fetch_cacher( pub fn create_fetch_cacher(
@ -671,13 +699,19 @@ impl ModuleGraphBuilder {
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> cache::FetchCacher { ) -> cache::FetchCacher {
cache::FetchCacher::new( cache::FetchCacher::new(
self.emit_cache.clone(),
self.file_fetcher.clone(), self.file_fetcher.clone(),
self.options.resolve_file_header_overrides(), self.fs.clone(),
self.global_http_cache.clone(), self.global_http_cache.clone(),
self.npm_resolver.clone(), self.in_npm_pkg_checker.clone(),
self.module_info_cache.clone(), self.module_info_cache.clone(),
permissions, cache::FetchCacherOptions {
file_header_overrides: self.cli_options.resolve_file_header_overrides(),
permissions,
is_deno_publish: matches!(
self.cli_options.sub_command(),
crate::args::DenoSubcommand::Publish { .. }
),
},
) )
} }
@ -701,42 +735,53 @@ impl ModuleGraphBuilder {
&self.fs, &self.fs,
roots, roots,
GraphValidOptions { GraphValidOptions {
is_vendoring: false, kind: if self.cli_options.type_check_mode().is_true() {
follow_type_only: self.options.type_check_mode().is_true(), GraphKind::All
check_js: self.options.check_js(), } else {
exit_lockfile_errors: true, GraphKind::CodeOnly
},
check_js: self.cli_options.check_js(),
exit_integrity_errors: true,
}, },
) )
} }
}
pub fn error_for_any_npm_specifier( fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> {
graph: &ModuleGraph, let jsx_import_source_config = self
) -> Result<(), AnyError> { .cli_options
for module in graph.modules() { .workspace()
match module { .to_maybe_jsx_import_source_config()?;
Module::Npm(module) => { Ok(CliGraphResolver {
bail!("npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: {}", module.specifier) cjs_tracker: &self.cjs_tracker,
} resolver: &self.resolver,
Module::Node(module) => { jsx_import_source_config,
bail!("Node specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: node:{}", module.module_name) })
}
Module::Js(_) | Module::Json(_) | Module::External(_) => {}
}
} }
Ok(())
} }
/// Adds more explanatory information to a resolution error. /// Adds more explanatory information to a resolution error.
pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String { pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
let mut message = format!("{error}"); let mut message = format_deno_graph_error(error);
if let Some(specifier) = get_resolution_error_bare_node_specifier(error) { let maybe_hint = if let Some(specifier) =
get_resolution_error_bare_node_specifier(error)
{
if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS { if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
message.push_str(&format!( Some(format!("If you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")."))
"\nIf you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")." } else {
)); None
} }
} else {
get_import_prefix_missing_error(error).map(|specifier| {
format!(
"If you want to use a JSR or npm package, try running `deno add jsr:{}` or `deno add npm:{}`",
specifier, specifier
)
})
};
if let Some(hint) = maybe_hint {
message.push_str(&format!("\n {} {}", colors::cyan("hint:"), hint));
} }
message message
@ -749,8 +794,8 @@ fn enhanced_sloppy_imports_error_message(
match error { match error {
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => { | ModuleError::Missing(specifier, _) => {
let additional_message = SloppyImportsResolver::new(fs.clone()) let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
.resolve(specifier, ResolutionMode::Execution)? .resolve(specifier, SloppyImportsResolutionMode::Execution)?
.as_suggestion_message(); .as_suggestion_message();
Some(format!( Some(format!(
"{} {} or run with --unstable-sloppy-imports", "{} {} or run with --unstable-sloppy-imports",
@ -762,7 +807,7 @@ fn enhanced_sloppy_imports_error_message(
} }
} }
fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> { fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
match err { match err {
ModuleError::LoadingErr( ModuleError::LoadingErr(
specifier, specifier,
@ -806,7 +851,7 @@ fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> {
"This could be caused by:\n", "This could be caused by:\n",
" * the lock file may be corrupt\n", " * the lock file may be corrupt\n",
" * the source itself may be corrupt\n\n", " * the source itself may be corrupt\n\n",
"Use the --lock-write flag to regenerate the lockfile or --reload to reload the source code from the server." "Investigate the lockfile; delete it to regenerate the lockfile or --reload to reload the source code from the server."
), ),
package_nv, package_nv,
checksum_err.actual, checksum_err.actual,
@ -827,7 +872,7 @@ fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> {
"This could be caused by:\n", "This could be caused by:\n",
" * the lock file may be corrupt\n", " * the lock file may be corrupt\n",
" * the source itself may be corrupt\n\n", " * the source itself may be corrupt\n\n",
"Use the --lock-write flag to regenerate the lockfile or --reload to reload the source code from the server." "Investigate the lockfile; delete it to regenerate the lockfile or --reload to reload the source code from the server."
), ),
specifier, specifier,
checksum_err.actual, checksum_err.actual,
@ -871,6 +916,50 @@ fn get_resolution_error_bare_specifier(
} }
} }
fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> {
let mut maybe_specifier = None;
if let ResolutionError::InvalidSpecifier {
error: SpecifierError::ImportPrefixMissing { specifier, .. },
range,
} = error
{
if range.specifier.scheme() == "file" {
maybe_specifier = Some(specifier);
}
} else if let ResolutionError::ResolverError { error, range, .. } = error {
if range.specifier.scheme() == "file" {
match error.as_ref() {
ResolveError::Specifier(specifier_error) => {
if let SpecifierError::ImportPrefixMissing { specifier, .. } =
specifier_error
{
maybe_specifier = Some(specifier);
}
}
ResolveError::Other(other_error) => {
if let Some(SpecifierError::ImportPrefixMissing {
specifier, ..
}) = other_error.downcast_ref::<SpecifierError>()
{
maybe_specifier = Some(specifier);
}
}
}
}
}
// NOTE(bartlomieju): For now, return None if a specifier contains a dot or a space. This is because
// suggesting to `deno add bad-module.ts` makes no sense and is worse than not providing
// a suggestion at all. This should be improved further in the future
if let Some(specifier) = maybe_specifier {
if specifier.contains('.') || specifier.contains(' ') {
return None;
}
}
maybe_specifier.map(|s| s.as_str())
}
/// Gets if any of the specified root's "file:" dependents are in the /// Gets if any of the specified root's "file:" dependents are in the
/// provided changed set. /// provided changed set.
pub fn has_graph_root_local_dependent_changed( pub fn has_graph_root_local_dependent_changed(
@ -882,13 +971,13 @@ pub fn has_graph_root_local_dependent_changed(
std::iter::once(root), std::iter::once(root),
deno_graph::WalkOptions { deno_graph::WalkOptions {
follow_dynamic: true, follow_dynamic: true,
follow_type_only: true, kind: GraphKind::All,
prefer_fast_check_graph: true, prefer_fast_check_graph: true,
check_js: true, check_js: true,
}, },
); );
while let Some((s, _)) = dependent_specifiers.next() { while let Some((s, _)) = dependent_specifiers.next() {
if let Ok(path) = specifier_to_file_path(s) { if let Ok(path) = url_to_file_path(s) {
if let Ok(path) = canonicalize_path(&path) { if let Ok(path) = canonicalize_path(&path) {
if canonicalized_changed_paths.contains(&path) { if canonicalized_changed_paths.contains(&path) {
return true; return true;
@ -930,7 +1019,11 @@ impl deno_graph::source::Reporter for FileWatcherReporter {
) { ) {
let mut file_paths = self.file_paths.lock(); let mut file_paths = self.file_paths.lock();
if specifier.scheme() == "file" { if specifier.scheme() == "file" {
file_paths.push(specifier.to_file_path().unwrap()); // Don't trust that the path is a valid path at this point:
// https://github.com/denoland/deno/issues/26209.
if let Ok(file_path) = specifier.to_file_path() {
file_paths.push(file_path);
}
} }
if modules_done == modules_total { if modules_done == modules_total {
@ -1025,6 +1118,96 @@ impl deno_graph::source::JsrUrlProvider for CliJsrUrlProvider {
} }
} }
// todo(dsherret): We should change ModuleError to use thiserror so that
// we don't need to do this.
fn format_deno_graph_error(err: &dyn Error) -> String {
use std::fmt::Write;
let mut message = format!("{}", err);
let mut maybe_source = err.source();
if maybe_source.is_some() {
let mut past_message = message.clone();
let mut count = 0;
let mut display_count = 0;
while let Some(source) = maybe_source {
let current_message = format!("{}", source);
maybe_source = source.source();
// sometimes an error might be repeated due to
// being boxed multiple times in another AnyError
if current_message != past_message {
write!(message, "\n {}: ", display_count,).unwrap();
for (i, line) in current_message.lines().enumerate() {
if i > 0 {
write!(message, "\n {}", line).unwrap();
} else {
write!(message, "{}", line).unwrap();
}
}
display_count += 1;
}
if count > 8 {
write!(message, "\n {}: ...", count).unwrap();
break;
}
past_message = current_message;
count += 1;
}
}
message
}
#[derive(Debug)]
struct CliGraphResolver<'a> {
cjs_tracker: &'a CjsTracker,
resolver: &'a CliResolver,
jsx_import_source_config: Option<JsxImportSourceConfig>,
}
impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
fn default_jsx_import_source(&self) -> Option<String> {
self
.jsx_import_source_config
.as_ref()
.and_then(|c| c.default_specifier.clone())
}
fn default_jsx_import_source_types(&self) -> Option<String> {
self
.jsx_import_source_config
.as_ref()
.and_then(|c| c.default_types_specifier.clone())
}
fn jsx_import_source_module(&self) -> &str {
self
.jsx_import_source_config
.as_ref()
.map(|c| c.module.as_str())
.unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
}
fn resolve(
&self,
raw_specifier: &str,
referrer_range: &deno_graph::Range,
mode: ResolutionMode,
) -> Result<ModuleSpecifier, ResolveError> {
self.resolver.resolve(
raw_specifier,
referrer_range,
self
.cjs_tracker
.get_referrer_kind(&referrer_range.specifier),
mode,
)
}
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::sync::Arc; use std::sync::Arc;

View file

@ -2,7 +2,7 @@
use crate::auth_tokens::AuthToken; use crate::auth_tokens::AuthToken;
use crate::util::progress_bar::UpdateGuard; use crate::util::progress_bar::UpdateGuard;
use crate::version::get_user_agent; use crate::version;
use cache_control::Cachability; use cache_control::Cachability;
use cache_control::CacheControl; use cache_control::CacheControl;
@ -19,10 +19,12 @@ use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions; use deno_runtime::deno_fetch::CreateHttpClientOptions;
use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_tls::RootCertStoreProvider;
use http::header;
use http::header::HeaderName; use http::header::HeaderName;
use http::header::HeaderValue; use http::header::HeaderValue;
use http::header::ACCEPT; use http::header::ACCEPT;
use http::header::AUTHORIZATION; use http::header::AUTHORIZATION;
use http::header::CONTENT_LENGTH;
use http::header::IF_NONE_MATCH; use http::header::IF_NONE_MATCH;
use http::header::LOCATION; use http::header::LOCATION;
use http::StatusCode; use http::StatusCode;
@ -203,6 +205,7 @@ pub struct FetchOnceArgs<'a> {
pub maybe_accept: Option<String>, pub maybe_accept: Option<String>,
pub maybe_etag: Option<String>, pub maybe_etag: Option<String>,
pub maybe_auth_token: Option<AuthToken>, pub maybe_auth_token: Option<AuthToken>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_progress_guard: Option<&'a UpdateGuard>, pub maybe_progress_guard: Option<&'a UpdateGuard>,
} }
@ -247,7 +250,7 @@ impl HttpClientProvider {
Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())), Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())),
Entry::Vacant(entry) => { Entry::Vacant(entry) => {
let client = create_http_client( let client = create_http_client(
get_user_agent(), version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
root_cert_store: match &self.root_cert_store_provider { root_cert_store: match &self.root_cert_store_provider {
Some(provider) => Some(provider.get_or_try_init()?.clone()), Some(provider) => Some(provider.get_or_try_init()?.clone()),
@ -381,6 +384,8 @@ impl HttpClient {
request request
.headers_mut() .headers_mut()
.insert(AUTHORIZATION, authorization_val); .insert(AUTHORIZATION, authorization_val);
} else if let Some((header, value)) = args.maybe_auth {
request.headers_mut().insert(header, value);
} }
if let Some(accept) = args.maybe_accept { if let Some(accept) = args.maybe_accept {
let accepts_val = HeaderValue::from_str(&accept)?; let accepts_val = HeaderValue::from_str(&accept)?;
@ -389,10 +394,10 @@ impl HttpClient {
let response = match self.client.clone().send(request).await { let response = match self.client.clone().send(request).await {
Ok(resp) => resp, Ok(resp) => resp,
Err(err) => { Err(err) => {
if is_error_connect(&err) { if err.is_connect_error() {
return Ok(FetchOnceResult::RequestError(err.to_string())); return Ok(FetchOnceResult::RequestError(err.to_string()));
} }
return Err(err); return Err(err.into());
} }
}; };
@ -469,15 +474,23 @@ impl HttpClient {
} }
} }
pub async fn download_with_progress( pub async fn download_with_progress_and_retries(
&self, &self,
url: Url, url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>, maybe_header: Option<(HeaderName, HeaderValue)>,
progress_guard: &UpdateGuard, progress_guard: &UpdateGuard,
) -> Result<Option<Vec<u8>>, DownloadError> { ) -> Result<Option<Vec<u8>>, DownloadError> {
self crate::util::retry::retry(
.download_inner(url, maybe_header, Some(progress_guard)) || {
.await self.download_inner(
url.clone(),
maybe_header.clone(),
Some(progress_guard),
)
},
|e| matches!(e, DownloadError::BadResponse(_) | DownloadError::Fetch(_)),
)
.await
} }
pub async fn get_redirected_url( pub async fn get_redirected_url(
@ -530,7 +543,7 @@ impl HttpClient {
.clone() .clone()
.send(req) .send(req)
.await .await
.map_err(DownloadError::Fetch)?; .map_err(|e| DownloadError::Fetch(e.into()))?;
let status = response.status(); let status = response.status();
if status.is_redirection() { if status.is_redirection() {
for _ in 0..5 { for _ in 0..5 {
@ -550,7 +563,7 @@ impl HttpClient {
.clone() .clone()
.send(req) .send(req)
.await .await
.map_err(DownloadError::Fetch)?; .map_err(|e| DownloadError::Fetch(e.into()))?;
let status = new_response.status(); let status = new_response.status();
if status.is_redirection() { if status.is_redirection() {
response = new_response; response = new_response;
@ -566,20 +579,21 @@ impl HttpClient {
} }
} }
fn is_error_connect(err: &AnyError) -> bool {
err
.downcast_ref::<hyper_util::client::legacy::Error>()
.map(|err| err.is_connect())
.unwrap_or(false)
}
async fn get_response_body_with_progress( async fn get_response_body_with_progress(
response: http::Response<deno_fetch::ResBody>, response: http::Response<deno_fetch::ResBody>,
progress_guard: Option<&UpdateGuard>, progress_guard: Option<&UpdateGuard>,
) -> Result<Vec<u8>, AnyError> { ) -> Result<Vec<u8>, AnyError> {
use http_body::Body as _; use http_body::Body as _;
if let Some(progress_guard) = progress_guard { if let Some(progress_guard) = progress_guard {
if let Some(total_size) = response.body().size_hint().exact() { let mut total_size = response.body().size_hint().exact();
if total_size.is_none() {
total_size = response
.headers()
.get(CONTENT_LENGTH)
.and_then(|val| val.to_str().ok())
.and_then(|s| s.parse::<u64>().ok());
}
if let Some(total_size) = total_size {
progress_guard.set_total_size(total_size); progress_guard.set_total_size(total_size);
let mut current_size = 0; let mut current_size = 0;
let mut data = Vec::with_capacity(total_size as usize); let mut data = Vec::with_capacity(total_size as usize);
@ -676,7 +690,7 @@ impl RequestBuilder {
pub async fn send( pub async fn send(
self, self,
) -> Result<http::Response<deno_fetch::ResBody>, AnyError> { ) -> Result<http::Response<deno_fetch::ResBody>, AnyError> {
self.client.send(self.req).await self.client.send(self.req).await.map_err(Into::into)
} }
pub fn build(self) -> http::Request<deno_fetch::ReqBody> { pub fn build(self) -> http::Request<deno_fetch::ReqBody> {
@ -782,6 +796,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -808,6 +823,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -835,6 +851,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -856,6 +873,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -875,6 +893,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -904,6 +923,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, _)) = result { if let Ok(FetchOnceResult::Code(body, _)) = result {
@ -929,6 +949,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Redirect(url, _)) = result { if let Ok(FetchOnceResult::Redirect(url, _)) = result {
@ -946,7 +967,7 @@ mod test {
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::get_user_agent(), version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path().join("tls/RootCA.pem"), test_util::testdata_path().join("tls/RootCA.pem"),
@ -964,6 +985,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -998,7 +1020,7 @@ mod test {
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::get_user_agent(), version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions::default(), CreateHttpClientOptions::default(),
) )
.unwrap(), .unwrap(),
@ -1011,6 +1033,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1057,7 +1080,7 @@ mod test {
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::get_user_agent(), version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
root_cert_store: Some(root_cert_store), root_cert_store: Some(root_cert_store),
..Default::default() ..Default::default()
@ -1073,6 +1096,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1106,7 +1130,7 @@ mod test {
.unwrap(); .unwrap();
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::get_user_agent(), version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path() test_util::testdata_path()
@ -1126,6 +1150,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1147,7 +1172,7 @@ mod test {
let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap(); let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap();
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::get_user_agent(), version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path() test_util::testdata_path()
@ -1167,6 +1192,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1189,6 +1215,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -1203,7 +1230,7 @@ mod test {
.unwrap(); .unwrap();
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::get_user_agent(), version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path() test_util::testdata_path()
@ -1223,6 +1250,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1252,6 +1280,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert!(result.is_err()); assert!(result.is_err());
@ -1273,6 +1302,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1296,6 +1326,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;

View file

@ -104,12 +104,12 @@ function bench(
} }
if (optionsOrFn.fn != undefined) { if (optionsOrFn.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, bench function is already provided as the third argument.", "Unexpected 'fn' field in options, bench function is already provided as the third argument",
); );
} }
if (optionsOrFn.name != undefined) { if (optionsOrFn.name != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'name' field in options, bench name is already provided as the first argument.", "Unexpected 'name' field in options, bench name is already provided as the first argument",
); );
} }
benchDesc = { benchDesc = {
@ -141,7 +141,7 @@ function bench(
fn = optionsOrFn; fn = optionsOrFn;
if (nameOrFnOrOptions.fn != undefined) { if (nameOrFnOrOptions.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, bench function is already provided as the second argument.", "Unexpected 'fn' field in options, bench function is already provided as the second argument",
); );
} }
name = nameOrFnOrOptions.name ?? fn.name; name = nameOrFnOrOptions.name ?? fn.name;
@ -150,7 +150,7 @@ function bench(
!nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function" !nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function"
) { ) {
throw new TypeError( throw new TypeError(
"Expected 'fn' field in the first argument to be a bench function.", "Expected 'fn' field in the first argument to be a bench function",
); );
} }
fn = nameOrFnOrOptions.fn; fn = nameOrFnOrOptions.fn;
@ -385,12 +385,12 @@ function createBenchContext(desc) {
start() { start() {
if (currentBenchId !== desc.id) { if (currentBenchId !== desc.id) {
throw new TypeError( throw new TypeError(
"The benchmark which this context belongs to is not being executed.", "The benchmark which this context belongs to is not being executed",
); );
} }
if (currentBenchUserExplicitStart != null) { if (currentBenchUserExplicitStart != null) {
throw new TypeError( throw new TypeError(
"BenchContext::start() has already been invoked.", "BenchContext::start() has already been invoked",
); );
} }
currentBenchUserExplicitStart = benchNow(); currentBenchUserExplicitStart = benchNow();
@ -399,11 +399,11 @@ function createBenchContext(desc) {
const end = benchNow(); const end = benchNow();
if (currentBenchId !== desc.id) { if (currentBenchId !== desc.id) {
throw new TypeError( throw new TypeError(
"The benchmark which this context belongs to is not being executed.", "The benchmark which this context belongs to is not being executed",
); );
} }
if (currentBenchUserExplicitEnd != null) { if (currentBenchUserExplicitEnd != null) {
throw new TypeError("BenchContext::end() has already been invoked."); throw new TypeError("BenchContext::end() has already been invoked");
} }
currentBenchUserExplicitEnd = end; currentBenchUserExplicitEnd = end;
}, },

View file

@ -177,6 +177,52 @@ function isCanvasLike(obj) {
return obj !== null && typeof obj === "object" && "toDataURL" in obj; return obj !== null && typeof obj === "object" && "toDataURL" in obj;
} }
function isJpg(obj) {
// Check if obj is a Uint8Array
if (!(obj instanceof Uint8Array)) {
return false;
}
// JPG files start with the magic bytes FF D8
if (obj.length < 2 || obj[0] !== 0xFF || obj[1] !== 0xD8) {
return false;
}
// JPG files end with the magic bytes FF D9
if (
obj.length < 2 || obj[obj.length - 2] !== 0xFF ||
obj[obj.length - 1] !== 0xD9
) {
return false;
}
return true;
}
function isPng(obj) {
// Check if obj is a Uint8Array
if (!(obj instanceof Uint8Array)) {
return false;
}
// PNG files start with a specific 8-byte signature
const pngSignature = [137, 80, 78, 71, 13, 10, 26, 10];
// Check if the array is at least as long as the signature
if (obj.length < pngSignature.length) {
return false;
}
// Check each byte of the signature
for (let i = 0; i < pngSignature.length; i++) {
if (obj[i] !== pngSignature[i]) {
return false;
}
}
return true;
}
/** Possible HTML and SVG Elements */ /** Possible HTML and SVG Elements */
function isSVGElementLike(obj) { function isSVGElementLike(obj) {
return obj !== null && typeof obj === "object" && "outerHTML" in obj && return obj !== null && typeof obj === "object" && "outerHTML" in obj &&
@ -233,6 +279,16 @@ async function format(obj) {
if (isDataFrameLike(obj)) { if (isDataFrameLike(obj)) {
return extractDataFrame(obj); return extractDataFrame(obj);
} }
if (isJpg(obj)) {
return {
"image/jpeg": core.ops.op_base64_encode(obj),
};
}
if (isPng(obj)) {
return {
"image/png": core.ops.op_base64_encode(obj),
};
}
if (isSVGElementLike(obj)) { if (isSVGElementLike(obj)) {
return { return {
"image/svg+xml": obj.outerHTML, "image/svg+xml": obj.outerHTML,
@ -314,6 +370,28 @@ const html = createTaggedTemplateDisplayable("text/html");
*/ */
const svg = createTaggedTemplateDisplayable("image/svg+xml"); const svg = createTaggedTemplateDisplayable("image/svg+xml");
function image(obj) {
if (typeof obj === "string") {
try {
obj = Deno.readFileSync(obj);
} catch {
// pass
}
}
if (isJpg(obj)) {
return makeDisplayable({ "image/jpeg": core.ops.op_base64_encode(obj) });
}
if (isPng(obj)) {
return makeDisplayable({ "image/png": core.ops.op_base64_encode(obj) });
}
throw new TypeError(
"Object is not a valid image or a path to an image. `Deno.jupyter.image` supports displaying JPG or PNG images.",
);
}
function isMediaBundle(obj) { function isMediaBundle(obj) {
if (obj == null || typeof obj !== "object" || Array.isArray(obj)) { if (obj == null || typeof obj !== "object" || Array.isArray(obj)) {
return false; return false;
@ -465,6 +543,7 @@ function enableJupyter() {
md, md,
html, html,
svg, svg,
image,
$display, $display,
}; };
} }

View file

@ -113,7 +113,7 @@ function assertExit(fn, isTest) {
throw new Error( throw new Error(
`${ `${
isTest ? "Test case" : "Bench" isTest ? "Test case" : "Bench"
} finished with exit code set to ${exitCode}.`, } finished with exit code set to ${exitCode}`,
); );
} }
if (innerResult) { if (innerResult) {
@ -242,12 +242,12 @@ function testInner(
} }
if (optionsOrFn.fn != undefined) { if (optionsOrFn.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, test function is already provided as the third argument.", "Unexpected 'fn' field in options, test function is already provided as the third argument",
); );
} }
if (optionsOrFn.name != undefined) { if (optionsOrFn.name != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'name' field in options, test name is already provided as the first argument.", "Unexpected 'name' field in options, test name is already provided as the first argument",
); );
} }
testDesc = { testDesc = {
@ -279,7 +279,7 @@ function testInner(
fn = optionsOrFn; fn = optionsOrFn;
if (nameOrFnOrOptions.fn != undefined) { if (nameOrFnOrOptions.fn != undefined) {
throw new TypeError( throw new TypeError(
"Unexpected 'fn' field in options, test function is already provided as the second argument.", "Unexpected 'fn' field in options, test function is already provided as the second argument",
); );
} }
name = nameOrFnOrOptions.name ?? fn.name; name = nameOrFnOrOptions.name ?? fn.name;
@ -288,7 +288,7 @@ function testInner(
!nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function" !nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function"
) { ) {
throw new TypeError( throw new TypeError(
"Expected 'fn' field in the first argument to be a test function.", "Expected 'fn' field in the first argument to be a test function",
); );
} }
fn = nameOrFnOrOptions.fn; fn = nameOrFnOrOptions.fn;
@ -426,7 +426,7 @@ function createTestContext(desc) {
let stepDesc; let stepDesc;
if (typeof nameOrFnOrOptions === "string") { if (typeof nameOrFnOrOptions === "string") {
if (typeof maybeFn !== "function") { if (typeof maybeFn !== "function") {
throw new TypeError("Expected function for second argument."); throw new TypeError("Expected function for second argument");
} }
stepDesc = { stepDesc = {
name: nameOrFnOrOptions, name: nameOrFnOrOptions,
@ -434,7 +434,7 @@ function createTestContext(desc) {
}; };
} else if (typeof nameOrFnOrOptions === "function") { } else if (typeof nameOrFnOrOptions === "function") {
if (!nameOrFnOrOptions.name) { if (!nameOrFnOrOptions.name) {
throw new TypeError("The step function must have a name."); throw new TypeError("The step function must have a name");
} }
if (maybeFn != undefined) { if (maybeFn != undefined) {
throw new TypeError( throw new TypeError(
@ -449,7 +449,7 @@ function createTestContext(desc) {
stepDesc = nameOrFnOrOptions; stepDesc = nameOrFnOrOptions;
} else { } else {
throw new TypeError( throw new TypeError(
"Expected a test definition or name and function.", "Expected a test definition or name and function",
); );
} }
stepDesc.ignore ??= false; stepDesc.ignore ??= false;

View file

@ -6,7 +6,6 @@ use dashmap::DashMap;
use deno_core::serde_json; use deno_core::serde_json;
use deno_graph::packages::JsrPackageInfo; use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageVersionInfo; use deno_graph::packages::JsrPackageVersionInfo;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use std::sync::Arc; use std::sync::Arc;
@ -68,10 +67,7 @@ impl JsrFetchResolver {
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher.fetch_bypass_permissions(&meta_url).await.ok()
.fetch(&meta_url, &PermissionsContainer::allow_all())
.await
.ok()
}) })
.await .await
.ok()??; .ok()??;
@ -96,10 +92,7 @@ impl JsrFetchResolver {
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher.fetch_bypass_permissions(&meta_url).await.ok()
.fetch(&meta_url, &PermissionsContainer::allow_all())
.await
.ok()
}) })
.await .await
.ok()??; .ok()??;

View file

@ -2,20 +2,25 @@
use super::diagnostics::DenoDiagnostic; use super::diagnostics::DenoDiagnostic;
use super::diagnostics::DiagnosticSource; use super::diagnostics::DiagnosticSource;
use super::documents::Document;
use super::documents::Documents; use super::documents::Documents;
use super::language_server; use super::language_server;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::tsc; use super::tsc;
use super::urls::url_to_uri;
use crate::args::jsr_url; use crate::args::jsr_url;
use crate::lsp::logging::lsp_warn;
use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use crate::util::path::relative_specifier;
use deno_config::workspace::MappedResolution;
use deno_graph::source::ResolutionMode;
use deno_lint::diagnostic::LintDiagnosticRange; use deno_lint::diagnostic::LintDiagnosticRange;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_ast::SourceRange; use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned; use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo; use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde::Deserialize; use deno_core::serde::Deserialize;
@ -23,6 +28,7 @@ use deno_core::serde::Serialize;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PathClean; use deno_runtime::deno_node::PathClean;
use deno_semver::jsr::JsrPackageNvReference; use deno_semver::jsr::JsrPackageNvReference;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
@ -33,13 +39,15 @@ use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference; use deno_semver::package::PackageReqReference;
use deno_semver::Version; use deno_semver::Version;
use import_map::ImportMap; use import_map::ImportMap;
use node_resolver::NpmResolver; use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use std::borrow::Cow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::path::Path; use std::path::Path;
use text_lines::LineAndColumnIndex;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::Position; use tower_lsp::lsp_types::Position;
use tower_lsp::lsp_types::Range; use tower_lsp::lsp_types::Range;
@ -224,6 +232,8 @@ pub struct TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: ModuleSpecifier,
} }
impl<'a> TsResponseImportMapper<'a> { impl<'a> TsResponseImportMapper<'a> {
@ -231,11 +241,15 @@ impl<'a> TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: &ModuleSpecifier,
) -> Self { ) -> Self {
Self { Self {
documents, documents,
maybe_import_map, maybe_import_map,
resolver, resolver,
tsc_specifier_map,
file_referrer: file_referrer.clone(),
} }
} }
@ -256,8 +270,6 @@ impl<'a> TsResponseImportMapper<'a> {
} }
} }
let file_referrer = self.documents.get_file_referrer(referrer);
if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str()) if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str())
{ {
let mut segments = jsr_path.split('/'); let mut segments = jsr_path.split('/');
@ -272,7 +284,7 @@ impl<'a> TsResponseImportMapper<'a> {
let export = self.resolver.jsr_lookup_export_for_path( let export = self.resolver.jsr_lookup_export_for_path(
&nv, &nv,
&path, &path,
file_referrer.as_deref(), Some(&self.file_referrer),
)?; )?;
let sub_path = (export != ".").then_some(export); let sub_path = (export != ".").then_some(export);
let mut req = None; let mut req = None;
@ -298,7 +310,7 @@ impl<'a> TsResponseImportMapper<'a> {
req = req.or_else(|| { req = req.or_else(|| {
self self
.resolver .resolver
.jsr_lookup_req_for_nv(&nv, file_referrer.as_deref()) .jsr_lookup_req_for_nv(&nv, Some(&self.file_referrer))
}); });
let spec_str = if let Some(req) = req { let spec_str = if let Some(req) = req {
let req_ref = PackageReqReference { req, sub_path }; let req_ref = PackageReqReference { req, sub_path };
@ -312,15 +324,29 @@ impl<'a> TsResponseImportMapper<'a> {
if let Some(result) = import_map.lookup(&specifier, referrer) { if let Some(result) = import_map.lookup(&specifier, referrer) {
return Some(result); return Some(result);
} }
if let Some(req_ref_str) = specifier.as_str().strip_prefix("jsr:") {
if !req_ref_str.starts_with('/') {
let specifier_str = format!("jsr:/{req_ref_str}");
if let Ok(specifier) = ModuleSpecifier::parse(&specifier_str) {
if let Some(result) = import_map.lookup(&specifier, referrer) {
return Some(result);
}
}
}
}
} }
return Some(spec_str); return Some(spec_str);
} }
if let Some(npm_resolver) = self if let Some(npm_resolver) = self
.resolver .resolver
.maybe_managed_npm_resolver(file_referrer.as_deref()) .maybe_managed_npm_resolver(Some(&self.file_referrer))
{ {
if npm_resolver.in_npm_package(specifier) { let in_npm_pkg = self
.resolver
.in_npm_pkg_checker(Some(&self.file_referrer))
.in_npm_package(specifier);
if in_npm_pkg {
if let Ok(Some(pkg_id)) = if let Ok(Some(pkg_id)) =
npm_resolver.resolve_pkg_id_from_specifier(specifier) npm_resolver.resolve_pkg_id_from_specifier(specifier)
{ {
@ -367,6 +393,11 @@ impl<'a> TsResponseImportMapper<'a> {
} }
} }
} }
} else if let Some(dep_name) = self
.resolver
.file_url_to_package_json_dep(specifier, Some(&self.file_referrer))
{
return Some(dep_name);
} }
// check if the import map has this specifier // check if the import map has this specifier
@ -390,7 +421,7 @@ impl<'a> TsResponseImportMapper<'a> {
.flatten()?; .flatten()?;
let root_folder = package_json.path.parent()?; let root_folder = package_json.path.parent()?;
let specifier_path = specifier_to_file_path(specifier).ok()?; let specifier_path = url_to_file_path(specifier).ok()?;
let mut search_paths = vec![specifier_path.clone()]; let mut search_paths = vec![specifier_path.clone()];
// TypeScript will provide a .js extension for quick fixes, so do // TypeScript will provide a .js extension for quick fixes, so do
// a search for the .d.ts file instead // a search for the .d.ts file instead
@ -436,24 +467,65 @@ impl<'a> TsResponseImportMapper<'a> {
&self, &self,
specifier: &str, specifier: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
) -> Option<String> { ) -> Option<String> {
if let Ok(specifier) = referrer.join(specifier) { let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
if let Some(specifier) = self.check_specifier(&specifier, referrer) { let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
return Some(specifier); SUPPORTED_EXTENSIONS
} .iter()
} .map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))),
let specifier = specifier.strip_suffix(".js").unwrap_or(specifier); );
for ext in SUPPORTED_EXTENSIONS { for specifier in specifiers {
let specifier_with_ext = format!("{specifier}{ext}"); if let Some(specifier) = self
if self .resolver
.documents .as_cli_resolver(Some(&self.file_referrer))
.contains_import(&specifier_with_ext, referrer) .resolve(
&specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
ResolutionMode::Types,
)
.ok()
.and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
.filter(|s| self.documents.exists(s, Some(&self.file_referrer)))
{ {
return Some(specifier_with_ext); if let Some(specifier) = self
.check_specifier(&specifier, referrer)
.or_else(|| relative_specifier(referrer, &specifier))
.filter(|s| !s.contains("/node_modules/"))
{
return Some(specifier);
}
} }
} }
None None
} }
pub fn is_valid_import(
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
) -> bool {
self
.resolver
.as_cli_resolver(Some(&self.file_referrer))
.resolve(
specifier_text,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
deno_graph::source::ResolutionMode::Types,
)
.is_ok()
}
} }
fn try_reverse_map_package_json_exports( fn try_reverse_map_package_json_exports(
@ -518,9 +590,11 @@ fn try_reverse_map_package_json_exports(
/// like an import and rewrite the import specifier to include the extension /// like an import and rewrite the import specifier to include the extension
pub fn fix_ts_import_changes( pub fn fix_ts_import_changes(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
changes: &[tsc::FileTextChanges], changes: &[tsc::FileTextChanges],
import_mapper: &TsResponseImportMapper, language_server: &language_server::Inner,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> { ) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
let mut r = Vec::new(); let mut r = Vec::new();
for change in changes { for change in changes {
let mut text_changes = Vec::new(); let mut text_changes = Vec::new();
@ -533,8 +607,8 @@ pub fn fix_ts_import_changes(
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) { if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) {
let specifier = let specifier =
captures.iter().skip(1).find_map(|s| s).unwrap().as_str(); captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
if let Some(new_specifier) = if let Some(new_specifier) = import_mapper
import_mapper.check_unresolved_specifier(specifier, referrer) .check_unresolved_specifier(specifier, referrer, referrer_kind)
{ {
line.replace(specifier, &new_specifier) line.replace(specifier, &new_specifier)
} else { } else {
@ -562,66 +636,64 @@ pub fn fix_ts_import_changes(
/// Fix tsc import code actions so that the module specifier is correct for /// Fix tsc import code actions so that the module specifier is correct for
/// resolution by Deno (includes the extension). /// resolution by Deno (includes the extension).
fn fix_ts_import_action( fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
action: &tsc::CodeFixAction, referrer_kind: NodeModuleKind,
import_mapper: &TsResponseImportMapper, action: &'a tsc::CodeFixAction,
) -> Result<tsc::CodeFixAction, AnyError> { language_server: &language_server::Inner,
if matches!( ) -> Option<Cow<'a, tsc::CodeFixAction>> {
if !matches!(
action.fix_name.as_str(), action.fix_name.as_str(),
"import" | "fixMissingFunctionDeclaration" "import" | "fixMissingFunctionDeclaration"
) { ) {
let change = action return Some(Cow::Borrowed(action));
}
let specifier = (|| {
let text_change = action.changes.first()?.text_changes.first()?;
let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?;
Some(captures.get(1)?.as_str())
})();
let Some(specifier) = specifier else {
return Some(Cow::Borrowed(action));
};
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer, referrer_kind)
{
let description = action.description.replace(specifier, &new_specifier);
let changes = action
.changes .changes
.first() .iter()
.ok_or_else(|| anyhow!("Unexpected action changes."))?; .map(|c| {
let text_change = change let text_changes = c
.text_changes .text_changes
.first()
.ok_or_else(|| anyhow!("Missing text change."))?;
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)
{
let specifier = captures
.get(1)
.ok_or_else(|| anyhow!("Missing capture."))?
.as_str();
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer)
{
let description = action.description.replace(specifier, &new_specifier);
let changes = action
.changes
.iter() .iter()
.map(|c| { .map(|tc| tsc::TextChange {
let text_changes = c span: tc.span.clone(),
.text_changes new_text: tc.new_text.replace(specifier, &new_specifier),
.iter()
.map(|tc| tsc::TextChange {
span: tc.span.clone(),
new_text: tc.new_text.replace(specifier, &new_specifier),
})
.collect();
tsc::FileTextChanges {
file_name: c.file_name.clone(),
text_changes,
is_new_file: c.is_new_file,
}
}) })
.collect(); .collect();
tsc::FileTextChanges {
file_name: c.file_name.clone(),
text_changes,
is_new_file: c.is_new_file,
}
})
.collect();
return Ok(tsc::CodeFixAction { Some(Cow::Owned(tsc::CodeFixAction {
description, description,
changes, changes,
commands: None, commands: None,
fix_name: action.fix_name.clone(), fix_name: action.fix_name.clone(),
fix_id: None, fix_id: None,
fix_all_description: None, fix_all_description: None,
}); }))
} } else if !import_mapper.is_valid_import(specifier, referrer, referrer_kind) {
} None
} else {
Some(Cow::Borrowed(action))
} }
Ok(action.clone())
} }
/// Determines if two TypeScript diagnostic codes are effectively equivalent. /// Determines if two TypeScript diagnostic codes are effectively equivalent.
@ -682,8 +754,14 @@ pub fn ts_changes_to_edit(
) -> Result<Option<lsp::WorkspaceEdit>, AnyError> { ) -> Result<Option<lsp::WorkspaceEdit>, AnyError> {
let mut text_document_edits = Vec::new(); let mut text_document_edits = Vec::new();
for change in changes { for change in changes {
let text_document_edit = change.to_text_document_edit(language_server)?; let edit = match change.to_text_document_edit(language_server) {
text_document_edits.push(text_document_edit); Ok(e) => e,
Err(err) => {
lsp_warn!("Couldn't covert text document edit: {:#}", err);
continue;
}
};
text_document_edits.push(edit);
} }
Ok(Some(lsp::WorkspaceEdit { Ok(Some(lsp::WorkspaceEdit {
changes: None, changes: None,
@ -692,7 +770,7 @@ pub fn ts_changes_to_edit(
})) }))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CodeActionData { pub struct CodeActionData {
pub specifier: ModuleSpecifier, pub specifier: ModuleSpecifier,
@ -740,10 +818,11 @@ impl CodeActionCollection {
.as_ref() .as_ref()
.and_then(|d| serde_json::from_value::<Vec<DataQuickFix>>(d.clone()).ok()) .and_then(|d| serde_json::from_value::<Vec<DataQuickFix>>(d.clone()).ok())
{ {
let uri = url_to_uri(specifier)?;
for quick_fix in data_quick_fixes { for quick_fix in data_quick_fixes {
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert( changes.insert(
specifier.clone(), uri.clone(),
quick_fix quick_fix
.changes .changes
.into_iter() .into_iter()
@ -785,6 +864,7 @@ impl CodeActionCollection {
maybe_text_info: Option<&SourceTextInfo>, maybe_text_info: Option<&SourceTextInfo>,
maybe_parsed_source: Option<&deno_ast::ParsedSource>, maybe_parsed_source: Option<&deno_ast::ParsedSource>,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let uri = url_to_uri(specifier)?;
let code = diagnostic let code = diagnostic
.code .code
.as_ref() .as_ref()
@ -801,7 +881,7 @@ impl CodeActionCollection {
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert( changes.insert(
specifier.clone(), uri.clone(),
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: prepend_whitespace( new_text: prepend_whitespace(
format!("// deno-lint-ignore {code}\n"), format!("// deno-lint-ignore {code}\n"),
@ -882,7 +962,7 @@ impl CodeActionCollection {
} }
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert(specifier.clone(), vec![lsp::TextEdit { new_text, range }]); changes.insert(uri.clone(), vec![lsp::TextEdit { new_text, range }]);
let ignore_file_action = lsp::CodeAction { let ignore_file_action = lsp::CodeAction {
title: format!("Disable {code} for the entire file"), title: format!("Disable {code} for the entire file"),
kind: Some(lsp::CodeActionKind::QUICKFIX), kind: Some(lsp::CodeActionKind::QUICKFIX),
@ -903,7 +983,7 @@ impl CodeActionCollection {
let mut changes = HashMap::new(); let mut changes = HashMap::new();
changes.insert( changes.insert(
specifier.clone(), uri,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: "// deno-lint-ignore-file\n".to_string(), new_text: "// deno-lint-ignore-file\n".to_string(),
range: lsp::Range { range: lsp::Range {
@ -943,6 +1023,7 @@ impl CodeActionCollection {
pub fn add_ts_fix_action( pub fn add_ts_fix_action(
&mut self, &mut self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
specifier_kind: NodeModuleKind,
action: &tsc::CodeFixAction, action: &tsc::CodeFixAction,
diagnostic: &lsp::Diagnostic, diagnostic: &lsp::Diagnostic,
language_server: &language_server::Inner, language_server: &language_server::Inner,
@ -960,11 +1041,11 @@ impl CodeActionCollection {
"The action returned from TypeScript is unsupported.", "The action returned from TypeScript is unsupported.",
)); ));
} }
let action = fix_ts_import_action( let Some(action) =
specifier, fix_ts_import_action(specifier, specifier_kind, action, language_server)
action, else {
&language_server.get_ts_response_import_mapper(specifier), return Ok(());
)?; };
let edit = ts_changes_to_edit(&action.changes, language_server)?; let edit = ts_changes_to_edit(&action.changes, language_server)?;
let code_action = lsp::CodeAction { let code_action = lsp::CodeAction {
title: action.description.clone(), title: action.description.clone(),
@ -984,7 +1065,7 @@ impl CodeActionCollection {
}); });
self self
.actions .actions
.push(CodeActionKind::Tsc(code_action, action.clone())); .push(CodeActionKind::Tsc(code_action, action.as_ref().clone()));
if let Some(fix_id) = &action.fix_id { if let Some(fix_id) = &action.fix_id {
if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) = if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) =
@ -1011,10 +1092,12 @@ impl CodeActionCollection {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic, diagnostic: &lsp::Diagnostic,
) { ) {
let data = Some(json!({ let data = action.fix_id.as_ref().map(|fix_id| {
"specifier": specifier, json!(CodeActionData {
"fixId": action.fix_id, specifier: specifier.clone(),
})); fix_id: fix_id.clone(),
})
});
let title = if let Some(description) = &action.fix_all_description { let title = if let Some(description) = &action.fix_all_description {
description.clone() description.clone()
} else { } else {
@ -1138,6 +1221,192 @@ impl CodeActionCollection {
..Default::default() ..Default::default()
})); }));
} }
pub async fn add_source_actions(
&mut self,
document: &Document,
range: &lsp::Range,
language_server: &language_server::Inner,
) {
fn import_start_from_specifier(
document: &Document,
import: &deno_graph::Import,
) -> Option<LineAndColumnIndex> {
// find the top level statement that contains the specifier
let parsed_source = document.maybe_parsed_source()?.as_ref().ok()?;
let text_info = parsed_source.text_info_lazy();
let specifier_range = SourceRange::new(
text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.start.line,
column_index: import.specifier_range.start.character,
}),
text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.end.line,
column_index: import.specifier_range.end.character,
}),
);
parsed_source
.program_ref()
.body()
.find(|i| i.range().contains(&specifier_range))
.map(|i| text_info.line_and_column_index(i.range().start))
}
async fn deno_types_for_npm_action(
document: &Document,
range: &lsp::Range,
language_server: &language_server::Inner,
) -> Option<lsp::CodeAction> {
let (dep_key, dependency, _) =
document.get_maybe_dependency(&range.end)?;
if dependency.maybe_deno_types_specifier.is_some() {
return None;
}
if dependency.maybe_code.maybe_specifier().is_none()
&& dependency.maybe_type.maybe_specifier().is_none()
{
// We're using byonm and the package is not cached.
return None;
}
let position = deno_graph::Position::new(
range.end.line as usize,
range.end.character as usize,
);
let import_start = dependency.imports.iter().find_map(|i| {
if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") {
return None;
}
if !i.specifier_range.includes(&position) {
return None;
}
import_start_from_specifier(document, i)
})?;
let referrer = document.specifier();
let referrer_kind = language_server
.is_cjs_resolver
.get_doc_module_kind(document);
let file_referrer = document.file_referrer();
let config_data = language_server
.config
.tree
.data_for_specifier(file_referrer?)?;
let workspace_resolver = config_data.resolver.clone();
let npm_ref = if let Ok(resolution) =
workspace_resolver.resolve(&dep_key, document.specifier())
{
let specifier = match resolution {
MappedResolution::Normal { specifier, .. }
| MappedResolution::ImportMap { specifier, .. } => specifier,
_ => {
return None;
}
};
NpmPackageReqReference::from_specifier(&specifier).ok()?
} else {
// Only resolve bare package.json deps for byonm.
if !config_data.byonm {
return None;
}
if !language_server.resolver.is_bare_package_json_dep(
&dep_key,
referrer,
referrer_kind,
) {
return None;
}
NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()?
};
let package_name = &npm_ref.req().name;
if package_name.starts_with("@types/") {
return None;
}
let managed_npm_resolver = language_server
.resolver
.maybe_managed_npm_resolver(file_referrer);
if let Some(npm_resolver) = managed_npm_resolver {
if !npm_resolver.is_pkg_req_folder_cached(npm_ref.req()) {
return None;
}
}
if language_server
.resolver
.npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer)
.is_some()
{
// The package import has types.
return None;
}
let types_package_name = format!("@types/{package_name}");
let types_package_version = language_server
.npm_search_api
.versions(&types_package_name)
.await
.ok()
.and_then(|versions| versions.first().cloned())?;
let types_specifier_text =
if let Some(npm_resolver) = managed_npm_resolver {
let mut specifier_text = if let Some(req) =
npm_resolver.top_package_req_for_name(&types_package_name)
{
format!("npm:{req}")
} else {
format!("npm:{}@^{}", &types_package_name, types_package_version)
};
let specifier = ModuleSpecifier::parse(&specifier_text).ok()?;
if let Some(file_referrer) = file_referrer {
if let Some(text) = language_server
.get_ts_response_import_mapper(file_referrer)
.check_specifier(&specifier, referrer)
{
specifier_text = text;
}
}
specifier_text
} else {
types_package_name.clone()
};
let uri = language_server
.url_map
.specifier_to_uri(referrer, file_referrer)
.ok()?;
let position = lsp::Position {
line: import_start.line_index as u32,
character: import_start.column_index as u32,
};
let new_text = format!(
"{}// @deno-types=\"{}\"\n",
if position.character == 0 { "" } else { "\n" },
&types_specifier_text
);
let text_edit = lsp::TextEdit {
range: lsp::Range {
start: position,
end: position,
},
new_text,
};
Some(lsp::CodeAction {
title: format!(
"Add @deno-types directive for \"{}\"",
&types_specifier_text
),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: None,
edit: Some(lsp::WorkspaceEdit {
changes: Some([(uri, vec![text_edit])].into_iter().collect()),
..Default::default()
}),
..Default::default()
})
}
if let Some(action) =
deno_types_for_npm_action(document, range, language_server).await
{
self.actions.push(CodeActionKind::Deno(action));
}
}
} }
/// Prepend the whitespace characters found at the start of line_content to content. /// Prepend the whitespace characters found at the start of line_content to content.

View file

@ -7,26 +7,16 @@ use crate::cache::LocalLspHttpCache;
use crate::lsp::config::Config; use crate::lsp::config::Config;
use crate::lsp::logging::lsp_log; use crate::lsp::logging::lsp_log;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use std::time::SystemTime; use std::time::SystemTime;
/// In the LSP, we disallow the cache from automatically copying from
/// the global cache to the local cache for technical reasons.
///
/// 1. We need to verify the checksums from the lockfile are correct when
/// moving from the global to the local cache.
/// 2. We need to verify the checksums for JSR https specifiers match what
/// is found in the package's manifest.
pub const LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY: deno_cache_dir::GlobalToLocalCopy =
deno_cache_dir::GlobalToLocalCopy::Disallow;
pub fn calculate_fs_version( pub fn calculate_fs_version(
cache: &LspCache, cache: &LspCache,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -34,7 +24,7 @@ pub fn calculate_fs_version(
) -> Option<String> { ) -> Option<String> {
match specifier.scheme() { match specifier.scheme() {
"npm" | "node" | "data" | "blob" => None, "npm" | "node" | "data" | "blob" => None,
"file" => specifier_to_file_path(specifier) "file" => url_to_file_path(specifier)
.ok() .ok()
.and_then(|path| calculate_fs_version_at_path(&path)), .and_then(|path| calculate_fs_version_at_path(&path)),
_ => calculate_fs_version_in_cache(cache, specifier, file_referrer), _ => calculate_fs_version_in_cache(cache, specifier, file_referrer),
@ -92,7 +82,7 @@ impl Default for LspCache {
impl LspCache { impl LspCache {
pub fn new(global_cache_url: Option<Url>) -> Self { pub fn new(global_cache_url: Option<Url>) -> Self {
let global_cache_path = global_cache_url.and_then(|s| { let global_cache_path = global_cache_url.and_then(|s| {
specifier_to_file_path(&s) url_to_file_path(&s)
.inspect(|p| { .inspect(|p| {
lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy()); lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy());
}) })
@ -104,7 +94,7 @@ impl LspCache {
let deno_dir = DenoDir::new(global_cache_path) let deno_dir = DenoDir::new(global_cache_path)
.expect("should be infallible with absolute custom root"); .expect("should be infallible with absolute custom root");
let global = Arc::new(GlobalHttpCache::new( let global = Arc::new(GlobalHttpCache::new(
deno_dir.deps_folder_path(), deno_dir.remote_folder_path(),
crate::cache::RealDenoCacheEnv, crate::cache::RealDenoCacheEnv,
)); ));
Self { Self {
@ -175,7 +165,7 @@ impl LspCache {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Option<ModuleSpecifier> { ) -> Option<ModuleSpecifier> {
let path = specifier_to_file_path(specifier).ok()?; let path = url_to_file_path(specifier).ok()?;
let vendor = self let vendor = self
.vendors_by_scope .vendors_by_scope
.iter() .iter()
@ -186,7 +176,7 @@ impl LspCache {
} }
pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool { pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool {
if let Ok(path) = specifier_to_file_path(specifier) { if let Ok(path) = url_to_file_path(specifier) {
if !path.starts_with(&self.deno_dir().root) { if !path.starts_with(&self.deno_dir().root) {
return true; return true;
} }

View file

@ -147,12 +147,14 @@ pub fn server_capabilities(
moniker_provider: None, moniker_provider: None,
experimental: Some(json!({ experimental: Some(json!({
"denoConfigTasks": true, "denoConfigTasks": true,
"testingApi":true, "testingApi": true,
"didRefreshDenoConfigurationTreeNotifications": true,
})), })),
inlay_hint_provider: Some(OneOf::Left(true)), inlay_hint_provider: Some(OneOf::Left(true)),
position_encoding: None, position_encoding: None,
// TODO(nayeemrmn): Support pull-based diagnostics.
diagnostic_provider: None, diagnostic_provider: None,
inline_value_provider: None, inline_value_provider: None,
inline_completion_provider: None,
notebook_document_sync: None,
} }
} }

View file

@ -8,6 +8,7 @@ use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
use lsp_types::Uri;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::ConfigurationItem; use tower_lsp::lsp_types::ConfigurationItem;
@ -17,7 +18,6 @@ use super::config::WorkspaceSettings;
use super::config::SETTINGS_SECTION; use super::config::SETTINGS_SECTION;
use super::lsp_custom; use super::lsp_custom;
use super::testing::lsp_custom as testing_lsp_custom; use super::testing::lsp_custom as testing_lsp_custom;
use super::urls::LspClientUrl;
#[derive(Debug)] #[derive(Debug)]
pub enum TestingNotification { pub enum TestingNotification {
@ -52,14 +52,11 @@ impl Client {
pub async fn publish_diagnostics( pub async fn publish_diagnostics(
&self, &self,
uri: LspClientUrl, uri: Uri,
diags: Vec<lsp::Diagnostic>, diags: Vec<lsp::Diagnostic>,
version: Option<i32>, version: Option<i32>,
) { ) {
self self.0.publish_diagnostics(uri, diags, version).await;
.0
.publish_diagnostics(uri.into_url(), diags, version)
.await;
} }
pub fn send_registry_state_notification( pub fn send_registry_state_notification(
@ -95,6 +92,19 @@ impl Client {
}); });
} }
pub fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
// do on a task in case the caller currently is in the lsp lock
let client = self.0.clone();
spawn(async move {
client
.send_did_refresh_deno_configuration_tree_notification(params)
.await;
});
}
pub fn send_did_change_deno_configuration_notification( pub fn send_did_change_deno_configuration_notification(
&self, &self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams, params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -149,7 +159,7 @@ impl OutsideLockClient {
pub async fn workspace_configuration( pub async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> { ) -> Result<Vec<WorkspaceSettings>, AnyError> {
self.0.workspace_configuration(scopes).await self.0.workspace_configuration(scopes).await
} }
@ -159,7 +169,7 @@ impl OutsideLockClient {
trait ClientTrait: Send + Sync { trait ClientTrait: Send + Sync {
async fn publish_diagnostics( async fn publish_diagnostics(
&self, &self,
uri: lsp::Url, uri: lsp::Uri,
diagnostics: Vec<lsp::Diagnostic>, diagnostics: Vec<lsp::Diagnostic>,
version: Option<i32>, version: Option<i32>,
); );
@ -172,6 +182,10 @@ trait ClientTrait: Send + Sync {
params: lsp_custom::DiagnosticBatchNotificationParams, params: lsp_custom::DiagnosticBatchNotificationParams,
); );
async fn send_test_notification(&self, params: TestingNotification); async fn send_test_notification(&self, params: TestingNotification);
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
);
async fn send_did_change_deno_configuration_notification( async fn send_did_change_deno_configuration_notification(
&self, &self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams, params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -182,7 +196,7 @@ trait ClientTrait: Send + Sync {
); );
async fn workspace_configuration( async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError>; ) -> Result<Vec<WorkspaceSettings>, AnyError>;
async fn show_message(&self, message_type: lsp::MessageType, text: String); async fn show_message(&self, message_type: lsp::MessageType, text: String);
async fn register_capability( async fn register_capability(
@ -198,7 +212,7 @@ struct TowerClient(tower_lsp::Client);
impl ClientTrait for TowerClient { impl ClientTrait for TowerClient {
async fn publish_diagnostics( async fn publish_diagnostics(
&self, &self,
uri: lsp::Url, uri: lsp::Uri,
diagnostics: Vec<lsp::Diagnostic>, diagnostics: Vec<lsp::Diagnostic>,
version: Option<i32>, version: Option<i32>,
) { ) {
@ -252,6 +266,18 @@ impl ClientTrait for TowerClient {
} }
} }
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
self
.0
.send_notification::<lsp_custom::DidRefreshDenoConfigurationTreeNotification>(
params,
)
.await
}
async fn send_did_change_deno_configuration_notification( async fn send_did_change_deno_configuration_notification(
&self, &self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams, params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -276,7 +302,7 @@ impl ClientTrait for TowerClient {
async fn workspace_configuration( async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> { ) -> Result<Vec<WorkspaceSettings>, AnyError> {
let config_response = self let config_response = self
.0 .0
@ -349,7 +375,7 @@ struct ReplClient;
impl ClientTrait for ReplClient { impl ClientTrait for ReplClient {
async fn publish_diagnostics( async fn publish_diagnostics(
&self, &self,
_uri: lsp::Url, _uri: lsp::Uri,
_diagnostics: Vec<lsp::Diagnostic>, _diagnostics: Vec<lsp::Diagnostic>,
_version: Option<i32>, _version: Option<i32>,
) { ) {
@ -369,6 +395,12 @@ impl ClientTrait for ReplClient {
async fn send_test_notification(&self, _params: TestingNotification) {} async fn send_test_notification(&self, _params: TestingNotification) {}
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
_params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
}
async fn send_did_change_deno_configuration_notification( async fn send_did_change_deno_configuration_notification(
&self, &self,
_params: lsp_custom::DidChangeDenoConfigurationNotificationParams, _params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
@ -383,7 +415,7 @@ impl ClientTrait for ReplClient {
async fn workspace_configuration( async fn workspace_configuration(
&self, &self,
scopes: Vec<Option<lsp::Url>>, scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> { ) -> Result<Vec<WorkspaceSettings>, AnyError> {
Ok(vec![get_repl_workspace_settings(); scopes.len()]) Ok(vec![get_repl_workspace_settings(); scopes.len()])
} }

View file

@ -107,7 +107,7 @@ impl DenoTestCollector {
for prop in &obj_lit.props { for prop in &obj_lit.props {
if let ast::PropOrSpread::Prop(prop) = prop { if let ast::PropOrSpread::Prop(prop) = prop {
if let ast::Prop::KeyValue(key_value_prop) = prop.as_ref() { if let ast::Prop::KeyValue(key_value_prop) = prop.as_ref() {
if let ast::PropName::Ident(ast::Ident { sym, .. }) = if let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&key_value_prop.key &key_value_prop.key
{ {
if sym == "name" { if sym == "name" {
@ -421,7 +421,7 @@ pub fn collect_test(
) -> Result<Vec<lsp::CodeLens>, AnyError> { ) -> Result<Vec<lsp::CodeLens>, AnyError> {
let mut collector = let mut collector =
DenoTestCollector::new(specifier.clone(), parsed_source.clone()); DenoTestCollector::new(specifier.clone(), parsed_source.clone());
parsed_source.module().visit_with(&mut collector); parsed_source.program().visit_with(&mut collector);
Ok(collector.take()) Ok(collector.take())
} }
@ -581,7 +581,7 @@ mod tests {
.unwrap(); .unwrap();
let mut collector = let mut collector =
DenoTestCollector::new(specifier, parsed_module.clone()); DenoTestCollector::new(specifier, parsed_module.clone());
parsed_module.module().visit_with(&mut collector); parsed_module.program().visit_with(&mut collector);
assert_eq!( assert_eq!(
collector.take(), collector.take(),
vec![ vec![

View file

@ -9,6 +9,7 @@ use super::jsr::CliJsrSearchApi;
use super::lsp_custom; use super::lsp_custom;
use super::npm::CliNpmSearchApi; use super::npm::CliNpmSearchApi;
use super::registries::ModuleRegistry; use super::registries::ModuleRegistry;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::search::PackageSearchApi; use super::search::PackageSearchApi;
use super::tsc; use super::tsc;
@ -18,7 +19,7 @@ use crate::util::path::is_importable_ext;
use crate::util::path::relative_specifier; use crate::util::path::relative_specifier;
use deno_graph::source::ResolutionMode; use deno_graph::source::ResolutionMode;
use deno_graph::Range; use deno_graph::Range;
use deno_runtime::fs_util::specifier_to_file_path; use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_ast::LineAndColumnIndex; use deno_ast::LineAndColumnIndex;
use deno_ast::SourceTextInfo; use deno_ast::SourceTextInfo;
@ -29,11 +30,13 @@ use deno_core::serde::Serialize;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::url::Position; use deno_core::url::Position;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_path_util::url_to_file_path;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use import_map::ImportMap; use import_map::ImportMap;
use indexmap::IndexSet; use indexmap::IndexSet;
use lsp_types::CompletionList; use lsp_types::CompletionList;
use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
@ -158,15 +161,17 @@ pub async fn get_import_completions(
jsr_search_api: &CliJsrSearchApi, jsr_search_api: &CliJsrSearchApi,
npm_search_api: &CliNpmSearchApi, npm_search_api: &CliNpmSearchApi,
documents: &Documents, documents: &Documents,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver, resolver: &LspResolver,
maybe_import_map: Option<&ImportMap>, maybe_import_map: Option<&ImportMap>,
) -> Option<lsp::CompletionResponse> { ) -> Option<lsp::CompletionResponse> {
let document = documents.get(specifier)?; let document = documents.get(specifier)?;
let specifier_kind = is_cjs_resolver.get_doc_module_kind(&document);
let file_referrer = document.file_referrer(); let file_referrer = document.file_referrer();
let (text, _, range) = document.get_maybe_dependency(position)?; let (text, _, range) = document.get_maybe_dependency(position)?;
let range = to_narrow_lsp_range(document.text_info(), &range); let range = to_narrow_lsp_range(document.text_info(), &range);
let resolved = resolver let resolved = resolver
.as_graph_resolver(file_referrer) .as_cli_resolver(file_referrer)
.resolve( .resolve(
&text, &text,
&Range { &Range {
@ -174,6 +179,7 @@ pub async fn get_import_completions(
start: deno_graph::Position::zeroed(), start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(),
}, },
specifier_kind,
ResolutionMode::Execution, ResolutionMode::Execution,
) )
.ok(); .ok();
@ -192,20 +198,18 @@ pub async fn get_import_completions(
get_npm_completions(specifier, &text, &range, npm_search_api).await get_npm_completions(specifier, &text, &range, npm_search_api).await
{ {
Some(lsp::CompletionResponse::List(completion_list)) Some(lsp::CompletionResponse::List(completion_list))
} else if let Some(completion_list) = get_node_completions(&text, &range) {
Some(lsp::CompletionResponse::List(completion_list))
} else if let Some(completion_list) = } else if let Some(completion_list) =
get_import_map_completions(specifier, &text, &range, maybe_import_map) get_import_map_completions(specifier, &text, &range, maybe_import_map)
{ {
// completions for import map specifiers // completions for import map specifiers
Some(lsp::CompletionResponse::List(completion_list)) Some(lsp::CompletionResponse::List(completion_list))
} else if text.starts_with("./") } else if let Some(completion_list) =
|| text.starts_with("../") get_local_completions(specifier, specifier_kind, &text, &range, resolver)
|| text.starts_with('/')
{ {
// completions for local relative modules // completions for local relative modules
Some(lsp::CompletionResponse::List(CompletionList { Some(lsp::CompletionResponse::List(completion_list))
is_incomplete: false,
items: get_local_completions(specifier, &text, &range, resolver)?,
}))
} else if !text.is_empty() { } else if !text.is_empty() {
// completion of modules from a module registry or cache // completion of modules from a module registry or cache
check_auto_config_registry( check_auto_config_registry(
@ -215,16 +219,13 @@ pub async fn get_import_completions(
module_registries, module_registries,
) )
.await; .await;
let offset = if position.character > range.start.character {
(position.character - range.start.character) as usize
} else {
0
};
let maybe_list = module_registries let maybe_list = module_registries
.get_completions(&text, offset, &range, |s| { .get_completions(&text, &range, resolved.as_ref(), |s| {
documents.exists(s, file_referrer) documents.exists(s, file_referrer)
}) })
.await; .await;
let maybe_list = maybe_list
.or_else(|| module_registries.get_origin_completions(&text, &range));
let list = maybe_list.unwrap_or_else(|| CompletionList { let list = maybe_list.unwrap_or_else(|| CompletionList {
items: get_workspace_completions(specifier, &text, &range, documents), items: get_workspace_completions(specifier, &text, &range, documents),
is_incomplete: false, is_incomplete: false,
@ -249,7 +250,7 @@ pub async fn get_import_completions(
.collect(); .collect();
let mut is_incomplete = false; let mut is_incomplete = false;
if let Some(import_map) = maybe_import_map { if let Some(import_map) = maybe_import_map {
items.extend(get_base_import_map_completions(import_map)); items.extend(get_base_import_map_completions(import_map, specifier));
} }
if let Some(origin_items) = if let Some(origin_items) =
module_registries.get_origin_completions(&text, &range) module_registries.get_origin_completions(&text, &range)
@ -268,20 +269,20 @@ pub async fn get_import_completions(
/// map as completion items. /// map as completion items.
fn get_base_import_map_completions( fn get_base_import_map_completions(
import_map: &ImportMap, import_map: &ImportMap,
referrer: &ModuleSpecifier,
) -> Vec<lsp::CompletionItem> { ) -> Vec<lsp::CompletionItem> {
import_map import_map
.imports() .entries_for_referrer(referrer)
.keys() .map(|entry| {
.map(|key| {
// for some strange reason, keys that start with `/` get stored in the // for some strange reason, keys that start with `/` get stored in the
// import map as `file:///`, and so when we pull the keys out, we need to // import map as `file:///`, and so when we pull the keys out, we need to
// change the behavior // change the behavior
let mut label = if key.starts_with("file://") { let mut label = if entry.key.starts_with("file://") {
FILE_PROTO_RE.replace(key, "").to_string() FILE_PROTO_RE.replace(entry.key, "").to_string()
} else { } else {
key.to_string() entry.key.to_string()
}; };
let kind = if key.ends_with('/') { let kind = if entry.key.ends_with('/') {
label.pop(); label.pop();
Some(lsp::CompletionItemKind::FOLDER) Some(lsp::CompletionItemKind::FOLDER)
} else { } else {
@ -359,84 +360,86 @@ fn get_import_map_completions(
/// Return local completions that are relative to the base specifier. /// Return local completions that are relative to the base specifier.
fn get_local_completions( fn get_local_completions(
base: &ModuleSpecifier, referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
text: &str, text: &str,
range: &lsp::Range, range: &lsp::Range,
resolver: &LspResolver, resolver: &LspResolver,
) -> Option<Vec<lsp::CompletionItem>> { ) -> Option<CompletionList> {
if base.scheme() != "file" { if referrer.scheme() != "file" {
return None; return None;
} }
let parent = base.join(text).ok()?.join(".").ok()?; let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
let resolved_parent = resolver let resolved_parent = resolver
.as_graph_resolver(Some(base)) .as_cli_resolver(Some(referrer))
.resolve( .resolve(
parent.as_str(), parent,
&Range { &Range {
specifier: base.clone(), specifier: referrer.clone(),
start: deno_graph::Position::zeroed(), start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(),
}, },
referrer_kind,
ResolutionMode::Execution, ResolutionMode::Execution,
) )
.ok()?; .ok()?;
let resolved_parent_path = specifier_to_file_path(&resolved_parent).ok()?; let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
let raw_parent =
&text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
if resolved_parent_path.is_dir() { if resolved_parent_path.is_dir() {
let cwd = std::env::current_dir().ok()?; let cwd = std::env::current_dir().ok()?;
let items = std::fs::read_dir(resolved_parent_path).ok()?; let entries = std::fs::read_dir(resolved_parent_path).ok()?;
Some( let items = entries
items .filter_map(|de| {
.filter_map(|de| { let de = de.ok()?;
let de = de.ok()?; let label = de.path().file_name()?.to_string_lossy().to_string();
let label = de.path().file_name()?.to_string_lossy().to_string(); let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?;
let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?; if entry_specifier == *referrer {
if entry_specifier == *base { return None;
return None; }
} let full_text = format!("{parent}{label}");
let full_text = format!("{raw_parent}{label}"); let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { range: *range,
range: *range, new_text: full_text.clone(),
new_text: full_text.clone(), }));
})); let filter_text = Some(full_text);
let filter_text = Some(full_text); match de.file_type() {
match de.file_type() { Ok(file_type) if file_type.is_dir() => Some(lsp::CompletionItem {
Ok(file_type) if file_type.is_dir() => Some(lsp::CompletionItem { label,
label, kind: Some(lsp::CompletionItemKind::FOLDER),
kind: Some(lsp::CompletionItemKind::FOLDER), detail: Some("(local)".to_string()),
detail: Some("(local)".to_string()), filter_text,
filter_text, sort_text: Some("1".to_string()),
sort_text: Some("1".to_string()), text_edit,
text_edit, commit_characters: Some(
commit_characters: Some( IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(), ),
), ..Default::default()
..Default::default() }),
}), Ok(file_type) if file_type.is_file() => {
Ok(file_type) if file_type.is_file() => { if is_importable_ext(&de.path()) {
if is_importable_ext(&de.path()) { Some(lsp::CompletionItem {
Some(lsp::CompletionItem { label,
label, kind: Some(lsp::CompletionItemKind::FILE),
kind: Some(lsp::CompletionItemKind::FILE), detail: Some("(local)".to_string()),
detail: Some("(local)".to_string()), filter_text,
filter_text, sort_text: Some("1".to_string()),
sort_text: Some("1".to_string()), text_edit,
text_edit, commit_characters: Some(
commit_characters: Some( IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(), ),
), ..Default::default()
..Default::default() })
}) } else {
} else { None
None
}
} }
_ => None,
} }
}) _ => None,
.collect(), }
) })
.collect();
Some(CompletionList {
is_incomplete: false,
items,
})
} else { } else {
None None
} }
@ -735,6 +738,40 @@ async fn get_npm_completions(
}) })
} }
/// Get completions for `node:` specifiers.
fn get_node_completions(
specifier: &str,
range: &lsp::Range,
) -> Option<CompletionList> {
if !specifier.starts_with("node:") {
return None;
}
let items = SUPPORTED_BUILTIN_NODE_MODULES
.iter()
.map(|name| {
let specifier = format!("node:{}", name);
let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: specifier.clone(),
}));
lsp::CompletionItem {
label: specifier,
kind: Some(lsp::CompletionItemKind::FILE),
detail: Some("(node)".to_string()),
text_edit,
commit_characters: Some(
IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
),
..Default::default()
}
})
.collect();
Some(CompletionList {
is_incomplete: false,
items,
})
}
/// Get workspace completions that include modules in the Deno cache which match /// Get workspace completions that include modules in the Deno cache which match
/// the current specifier string. /// the current specifier string.
fn get_workspace_completions( fn get_workspace_completions(
@ -804,7 +841,7 @@ mod tests {
fs_sources: &[(&str, &str)], fs_sources: &[(&str, &str)],
) -> Documents { ) -> Documents {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
let mut documents = Documents::default(); let mut documents = Documents::default();
documents.update_config( documents.update_config(
&Default::default(), &Default::default(),
@ -824,8 +861,8 @@ mod tests {
.global() .global()
.set(&specifier, HashMap::default(), source.as_bytes()) .set(&specifier, HashMap::default(), source.as_bytes())
.expect("could not cache file"); .expect("could not cache file");
let document = let document = documents
documents.get_or_load(&specifier, &temp_dir.uri().join("$").unwrap()); .get_or_load(&specifier, Some(&temp_dir.url().join("$").unwrap()));
assert!(document.is_some(), "source could not be setup"); assert!(document.is_some(), "source could not be setup");
} }
documents documents
@ -875,6 +912,7 @@ mod tests {
ModuleSpecifier::from_file_path(file_c).expect("could not create"); ModuleSpecifier::from_file_path(file_c).expect("could not create");
let actual = get_local_completions( let actual = get_local_completions(
&specifier, &specifier,
NodeModuleKind::Esm,
"./", "./",
&lsp::Range { &lsp::Range {
start: lsp::Position { start: lsp::Position {
@ -887,11 +925,11 @@ mod tests {
}, },
}, },
&Default::default(), &Default::default(),
); )
assert!(actual.is_some()); .unwrap();
let actual = actual.unwrap(); assert!(!actual.is_incomplete);
assert_eq!(actual.len(), 3); assert_eq!(actual.items.len(), 3);
for item in actual { for item in actual.items {
match item.text_edit { match item.text_edit {
Some(lsp::CompletionTextEdit::Edit(text_edit)) => { Some(lsp::CompletionTextEdit::Edit(text_edit)) => {
assert!(["./b", "./f.mjs", "./g.json"] assert!(["./b", "./f.mjs", "./g.json"]

View file

@ -4,7 +4,9 @@ use deno_ast::MediaType;
use deno_config::deno_json::DenoJsonCache; use deno_config::deno_json::DenoJsonCache;
use deno_config::deno_json::FmtConfig; use deno_config::deno_json::FmtConfig;
use deno_config::deno_json::FmtOptionsConfig; use deno_config::deno_json::FmtOptionsConfig;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::deno_json::LintConfig; use deno_config::deno_json::LintConfig;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::deno_json::TestConfig; use deno_config::deno_json::TestConfig;
use deno_config::deno_json::TsConfig; use deno_config::deno_json::TsConfig;
use deno_config::fs::DenoConfigFs; use deno_config::fs::DenoConfigFs;
@ -30,35 +32,40 @@ use deno_core::serde::Serialize;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::serde_json::Value; use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_lint::linter::LintConfig as DenoLintConfig; use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache; use deno_package_json::PackageJsonCache;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::fs_util::specifier_to_file_path;
use indexmap::IndexSet; use indexmap::IndexSet;
use lsp::Url;
use lsp_types::ClientCapabilities; use lsp_types::ClientCapabilities;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Deref;
use std::ops::DerefMut;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
use super::logging::lsp_log; use super::logging::lsp_log;
use super::lsp_custom;
use super::urls::url_to_uri;
use crate::args::discover_npmrc_from_workspace; use crate::args::discover_npmrc_from_workspace;
use crate::args::has_flag_env_var; use crate::args::has_flag_env_var;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::CliLockfileReadFromPathOptions;
use crate::args::ConfigFile; use crate::args::ConfigFile;
use crate::args::LintFlags; use crate::args::LintFlags;
use crate::args::LintOptions; use crate::args::LintOptions;
use crate::args::DENO_FUTURE;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use crate::resolver::SloppyImportsResolver; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions; use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider; use crate::tools::lint::LintRuleProvider;
@ -70,6 +77,54 @@ fn is_true() -> bool {
true true
} }
/// Wrapper that defaults if it fails to deserialize. Good for individual
/// settings.
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct SafeValue<T> {
inner: T,
}
impl<'de, T: Default + for<'de2> Deserialize<'de2>> Deserialize<'de>
for SafeValue<T>
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
Ok(Self {
inner: Deserialize::deserialize(deserializer).unwrap_or_default(),
})
}
}
impl<T: Serialize> Serialize for SafeValue<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
self.inner.serialize(serializer)
}
}
impl<T> Deref for SafeValue<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl<T> DerefMut for SafeValue<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.inner
}
}
impl<T> SafeValue<T> {
pub fn as_deref(&self) -> &T {
&self.inner
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CodeLensSettings { pub struct CodeLensSettings {
@ -386,6 +441,8 @@ pub struct LanguagePreferences {
pub use_aliases_for_renames: bool, pub use_aliases_for_renames: bool,
#[serde(default)] #[serde(default)]
pub quote_style: QuoteStyle, pub quote_style: QuoteStyle,
#[serde(default)]
pub prefer_type_only_auto_imports: bool,
} }
impl Default for LanguagePreferences { impl Default for LanguagePreferences {
@ -396,6 +453,7 @@ impl Default for LanguagePreferences {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: Default::default(), quote_style: Default::default(),
prefer_type_only_auto_imports: false,
} }
} }
} }
@ -538,7 +596,7 @@ pub struct WorkspaceSettings {
pub unsafely_ignore_certificate_errors: Option<Vec<String>>, pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
#[serde(default)] #[serde(default)]
pub unstable: bool, pub unstable: SafeValue<Vec<String>>,
#[serde(default)] #[serde(default)]
pub javascript: LanguageWorkspaceSettings, pub javascript: LanguageWorkspaceSettings,
@ -568,7 +626,7 @@ impl Default for WorkspaceSettings {
testing: Default::default(), testing: Default::default(),
tls_certificate: None, tls_certificate: None,
unsafely_ignore_certificate_errors: None, unsafely_ignore_certificate_errors: None,
unstable: false, unstable: Default::default(),
javascript: Default::default(), javascript: Default::default(),
typescript: Default::default(), typescript: Default::default(),
} }
@ -752,7 +810,7 @@ impl Settings {
/// Returns `None` if the value should be deferred to the presence of a /// Returns `None` if the value should be deferred to the presence of a
/// `deno.json` file. /// `deno.json` file.
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option<bool> { pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option<bool> {
let Ok(path) = specifier_to_file_path(specifier) else { let Ok(path) = url_to_file_path(specifier) else {
// Non-file URLs are not disabled by these settings. // Non-file URLs are not disabled by these settings.
return Some(true); return Some(true);
}; };
@ -761,7 +819,7 @@ impl Settings {
let mut disable_paths = vec![]; let mut disable_paths = vec![];
let mut enable_paths = None; let mut enable_paths = None;
if let Some(folder_uri) = folder_uri { if let Some(folder_uri) = folder_uri {
if let Ok(folder_path) = specifier_to_file_path(folder_uri) { if let Ok(folder_path) = url_to_file_path(folder_uri) {
disable_paths = settings disable_paths = settings
.disable_paths .disable_paths
.iter() .iter()
@ -798,12 +856,12 @@ impl Settings {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> (&WorkspaceSettings, Option<&ModuleSpecifier>) { ) -> (&WorkspaceSettings, Option<&ModuleSpecifier>) {
let Ok(path) = specifier_to_file_path(specifier) else { let Ok(path) = url_to_file_path(specifier) else {
return (&self.unscoped, self.first_folder.as_ref()); return (&self.unscoped, self.first_folder.as_ref());
}; };
for (folder_uri, settings) in self.by_workspace_folder.iter().rev() { for (folder_uri, settings) in self.by_workspace_folder.iter().rev() {
if let Some(settings) = settings { if let Some(settings) = settings {
let Ok(folder_path) = specifier_to_file_path(folder_uri) else { let Ok(folder_path) = url_to_file_path(folder_uri) else {
continue; continue;
}; };
if path.starts_with(folder_path) { if path.starts_with(folder_path) {
@ -844,14 +902,17 @@ pub struct Config {
impl Config { impl Config {
#[cfg(test)] #[cfg(test)]
pub fn new_with_roots(root_uris: impl IntoIterator<Item = Url>) -> Self { pub fn new_with_roots(root_urls: impl IntoIterator<Item = Url>) -> Self {
use super::urls::url_to_uri;
let mut config = Self::default(); let mut config = Self::default();
let mut folders = vec![]; let mut folders = vec![];
for root_uri in root_uris { for root_url in root_urls {
let name = root_uri.path_segments().and_then(|s| s.last()); let root_uri = url_to_uri(&root_url).unwrap();
let name = root_url.path_segments().and_then(|s| s.last());
let name = name.unwrap_or_default().to_string(); let name = name.unwrap_or_default().to_string();
folders.push(( folders.push((
root_uri.clone(), root_url,
lsp::WorkspaceFolder { lsp::WorkspaceFolder {
uri: root_uri, uri: root_uri,
name, name,
@ -925,7 +986,7 @@ impl Config {
| MediaType::Tsx => Some(&workspace_settings.typescript), | MediaType::Tsx => Some(&workspace_settings.typescript),
MediaType::Json MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => None, | MediaType::Unknown => None,
} }
@ -1077,11 +1138,11 @@ impl Default for LspTsConfig {
"module": "esnext", "module": "esnext",
"moduleDetection": "force", "moduleDetection": "force",
"noEmit": true, "noEmit": true,
"noImplicitOverride": true,
"resolveJsonModule": true, "resolveJsonModule": true,
"strict": true, "strict": true,
"target": "esnext", "target": "esnext",
"useDefineForClassFields": true, "useDefineForClassFields": true,
"useUnknownInCatchVariables": false,
"jsx": "react", "jsx": "react",
"jsxFactory": "React.createElement", "jsxFactory": "React.createElement",
"jsxFragmentFactory": "React.Fragment", "jsxFragmentFactory": "React.Fragment",
@ -1115,6 +1176,7 @@ pub enum ConfigWatchedFileType {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ConfigData { pub struct ConfigData {
pub scope: Arc<ModuleSpecifier>, pub scope: Arc<ModuleSpecifier>,
pub canonicalized_scope: Option<Arc<ModuleSpecifier>>,
pub member_dir: Arc<WorkspaceDirectory>, pub member_dir: Arc<WorkspaceDirectory>,
pub fmt_config: Arc<FmtConfig>, pub fmt_config: Arc<FmtConfig>,
pub lint_config: Arc<LintConfig>, pub lint_config: Arc<LintConfig>,
@ -1128,8 +1190,9 @@ pub struct ConfigData {
pub lockfile: Option<Arc<CliLockfile>>, pub lockfile: Option<Arc<CliLockfile>>,
pub npmrc: Option<Arc<ResolvedNpmRc>>, pub npmrc: Option<Arc<ResolvedNpmRc>>,
pub resolver: Arc<WorkspaceResolver>, pub resolver: Arc<WorkspaceResolver>,
pub sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>, pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub import_map_from_settings: Option<ModuleSpecifier>, pub import_map_from_settings: Option<ModuleSpecifier>,
pub unstable: BTreeSet<String>,
watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>, watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>,
} }
@ -1253,6 +1316,16 @@ impl ConfigData {
watched_files.entry(specifier).or_insert(file_type); watched_files.entry(specifier).or_insert(file_type);
}; };
let canonicalized_scope = (|| {
let path = scope.to_file_path().ok()?;
let path = canonicalize_path_maybe_not_exists(&path).ok()?;
let specifier = ModuleSpecifier::from_directory_path(path).ok()?;
if specifier == *scope {
return None;
}
Some(Arc::new(specifier))
})();
if let Some(deno_json) = member_dir.maybe_deno_json() { if let Some(deno_json) = member_dir.maybe_deno_json() {
lsp_log!( lsp_log!(
" Resolved Deno configuration file: \"{}\"", " Resolved Deno configuration file: \"{}\"",
@ -1373,11 +1446,12 @@ impl ConfigData {
} }
} }
let byonm = std::env::var("DENO_UNSTABLE_BYONM").is_ok() let node_modules_dir =
|| member_dir.workspace.has_unstable("byonm") member_dir.workspace.node_modules_dir().unwrap_or_default();
|| (*DENO_FUTURE let byonm = match node_modules_dir {
&& member_dir.workspace.package_jsons().next().is_some() Some(mode) => mode == NodeModulesDirMode::Manual,
&& member_dir.workspace.node_modules_dir().is_none()); None => member_dir.workspace.root_pkg_json().is_some(),
};
if byonm { if byonm {
lsp_log!(" Enabled 'bring your own node_modules'."); lsp_log!(" Enabled 'bring your own node_modules'.");
} }
@ -1387,9 +1461,10 @@ impl ConfigData {
// Mark the import map as a watched file // Mark the import map as a watched file
if let Some(import_map_specifier) = member_dir if let Some(import_map_specifier) = member_dir
.workspace .workspace
.to_import_map_specifier() .to_import_map_path()
.ok() .ok()
.flatten() .flatten()
.and_then(|path| Url::from_file_path(path).ok())
{ {
add_watched_file( add_watched_file(
import_map_specifier.clone(), import_map_specifier.clone(),
@ -1445,17 +1520,16 @@ impl ConfigData {
ConfigWatchedFileType::ImportMap, ConfigWatchedFileType::ImportMap,
); );
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let fetch_result = deno_core::unsync::spawn({ let fetch_result =
let file_fetcher = file_fetcher.cloned().unwrap(); deno_core::unsync::spawn({
let import_map_url = import_map_url.clone(); let file_fetcher = file_fetcher.cloned().unwrap();
async move { let import_map_url = import_map_url.clone();
file_fetcher async move {
.fetch(&import_map_url, &PermissionsContainer::allow_all()) file_fetcher.fetch_bypass_permissions(&import_map_url).await
.await }
} })
}) .await
.await .unwrap();
.unwrap();
let value_result = fetch_result.and_then(|f| { let value_result = fetch_result.and_then(|f| {
serde_json::from_slice::<Value>(&f.source).map_err(|e| e.into()) serde_json::from_slice::<Value>(&f.source).map_err(|e| e.into())
@ -1479,49 +1553,32 @@ impl ConfigData {
None None
} }
}; };
let resolver = deno_core::unsync::spawn({ let resolver = member_dir
let workspace = member_dir.clone(); .workspace
let file_fetcher = file_fetcher.cloned(); .create_resolver(
async move { CreateResolverOptions {
workspace pkg_json_dep_resolution,
.create_resolver( specified_import_map,
CreateResolverOptions { },
pkg_json_dep_resolution, |path| Ok(std::fs::read_to_string(path)?),
specified_import_map,
},
move |specifier| {
let specifier = specifier.clone();
let file_fetcher = file_fetcher.clone().unwrap();
async move {
let file = file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all())
.await?
.into_text_decoded()?;
Ok(file.source.to_string())
}
},
)
.await
.inspect_err(|err| {
lsp_warn!(
" Failed to load resolver: {}",
err // will contain the specifier
);
})
.ok()
}
})
.await
.unwrap()
.unwrap_or_else(|| {
// create a dummy resolver
WorkspaceResolver::new_raw(
scope.clone(),
None,
member_dir.workspace.package_jsons().cloned().collect(),
pkg_json_dep_resolution,
) )
}); .inspect_err(|err| {
lsp_warn!(
" Failed to load resolver: {}",
err // will contain the specifier
);
})
.ok()
.unwrap_or_else(|| {
// create a dummy resolver
WorkspaceResolver::new_raw(
scope.clone(),
None,
member_dir.workspace.resolver_jsr_pkgs().collect(),
member_dir.workspace.package_jsons().cloned().collect(),
pkg_json_dep_resolution,
)
});
if !resolver.diagnostics().is_empty() { if !resolver.diagnostics().is_empty() {
lsp_warn!( lsp_warn!(
" Import map diagnostics:\n{}", " Import map diagnostics:\n{}",
@ -1533,13 +1590,22 @@ impl ConfigData {
.join("\n") .join("\n")
); );
} }
let unstable = member_dir
.workspace
.unstable_features()
.iter()
.chain(settings.unstable.as_deref())
.cloned()
.collect::<BTreeSet<_>>();
let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS") let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS")
.is_ok() .is_ok()
|| member_dir.workspace.has_unstable("sloppy-imports"); || unstable.contains("sloppy-imports");
let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { let sloppy_imports_resolver = unstable_sloppy_imports.then(|| {
Arc::new(SloppyImportsResolver::new_without_stat_cache(Arc::new( Arc::new(CliSloppyImportsResolver::new(
deno_runtime::deno_fs::RealFs, SloppyImportsCachedFs::new_without_stat_cache(Arc::new(
))) deno_runtime::deno_fs::RealFs,
)),
))
}); });
let resolver = Arc::new(resolver); let resolver = Arc::new(resolver);
let lint_rule_provider = LintRuleProvider::new( let lint_rule_provider = LintRuleProvider::new(
@ -1558,6 +1624,7 @@ impl ConfigData {
ConfigData { ConfigData {
scope, scope,
canonicalized_scope,
member_dir, member_dir,
resolver, resolver,
sloppy_imports_resolver, sloppy_imports_resolver,
@ -1573,6 +1640,7 @@ impl ConfigData {
lockfile, lockfile,
npmrc, npmrc,
import_map_from_settings, import_map_from_settings,
unstable,
watched_files, watched_files,
} }
} }
@ -1586,6 +1654,26 @@ impl ConfigData {
pub fn maybe_pkg_json(&self) -> Option<&Arc<deno_package_json::PackageJson>> { pub fn maybe_pkg_json(&self) -> Option<&Arc<deno_package_json::PackageJson>> {
self.member_dir.maybe_pkg_json() self.member_dir.maybe_pkg_json()
} }
pub fn maybe_jsx_import_source_config(
&self,
) -> Option<JsxImportSourceConfig> {
self
.member_dir
.workspace
.to_maybe_jsx_import_source_config()
.ok()
.flatten()
}
pub fn scope_contains_specifier(&self, specifier: &ModuleSpecifier) -> bool {
specifier.as_str().starts_with(self.scope.as_str())
|| self
.canonicalized_scope
.as_ref()
.map(|s| specifier.as_str().starts_with(s.as_str()))
.unwrap_or(false)
}
} }
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug, Default)]
@ -1600,8 +1688,9 @@ impl ConfigTree {
) -> Option<&ModuleSpecifier> { ) -> Option<&ModuleSpecifier> {
self self
.scopes .scopes
.keys() .iter()
.rfind(|s| specifier.as_str().starts_with(s.as_str())) .rfind(|(_, d)| d.scope_contains_specifier(specifier))
.map(|(s, _)| s)
} }
pub fn data_for_specifier( pub fn data_for_specifier(
@ -1654,23 +1743,28 @@ impl ConfigTree {
.unwrap_or_else(|| Arc::new(FmtConfig::new_with_base(PathBuf::from("/")))) .unwrap_or_else(|| Arc::new(FmtConfig::new_with_base(PathBuf::from("/"))))
} }
/// Returns (scope_uri, type). /// Returns (scope_url, type).
pub fn watched_file_type( pub fn watched_file_type(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Option<(&ModuleSpecifier, ConfigWatchedFileType)> { ) -> Option<(&ModuleSpecifier, ConfigWatchedFileType)> {
for (scope_uri, data) in self.scopes.iter() { for (scope_url, data) in self.scopes.iter() {
if let Some(typ) = data.watched_files.get(specifier) { if let Some(typ) = data.watched_files.get(specifier) {
return Some((scope_uri, *typ)); return Some((scope_url, *typ));
} }
} }
None None
} }
pub fn is_watched_file(&self, specifier: &ModuleSpecifier) -> bool { pub fn is_watched_file(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.path().ends_with("/deno.json") let path = specifier.path();
|| specifier.path().ends_with("/deno.jsonc") if path.ends_with("/deno.json")
|| specifier.path().ends_with("/package.json") || path.ends_with("/deno.jsonc")
|| path.ends_with("/package.json")
|| path.ends_with("/node_modules/.package-lock.json")
|| path.ends_with("/node_modules/.yarn-integrity.json")
|| path.ends_with("/node_modules/.modules.yaml")
|| path.ends_with("/node_modules/.deno/.setup-cache.bin")
{ {
return true; return true;
} }
@ -1680,6 +1774,46 @@ impl ConfigTree {
.any(|data| data.watched_files.contains_key(specifier)) .any(|data| data.watched_files.contains_key(specifier))
} }
pub fn to_did_refresh_params(
&self,
) -> lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams {
let data = self
.scopes
.values()
.filter_map(|data| {
let workspace_root_scope_uri =
Some(data.member_dir.workspace.root_dir())
.filter(|s| *s != data.member_dir.dir_url())
.and_then(|s| url_to_uri(s).ok());
Some(lsp_custom::DenoConfigurationData {
scope_uri: url_to_uri(&data.scope).ok()?,
deno_json: data.maybe_deno_json().and_then(|c| {
if workspace_root_scope_uri.is_some()
&& Some(&c.specifier)
== data
.member_dir
.workspace
.root_deno_json()
.map(|c| &c.specifier)
{
return None;
}
Some(lsp::TextDocumentIdentifier {
uri: url_to_uri(&c.specifier).ok()?,
})
}),
package_json: data.maybe_pkg_json().and_then(|p| {
Some(lsp::TextDocumentIdentifier {
uri: url_to_uri(&p.specifier()).ok()?,
})
}),
workspace_root_scope_uri,
})
})
.collect();
lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams { data }
}
pub async fn refresh( pub async fn refresh(
&mut self, &mut self,
settings: &Settings, settings: &Settings,
@ -1701,27 +1835,28 @@ impl ConfigTree {
ws_settings = ws_settings.or(Some(&settings.unscoped)); ws_settings = ws_settings.or(Some(&settings.unscoped));
} }
if let Some(ws_settings) = ws_settings { if let Some(ws_settings) = ws_settings {
if let Some(config_path) = &ws_settings.config { let config_file_path = (|| {
if let Ok(config_uri) = folder_uri.join(config_path) { let config_setting = ws_settings.config.as_ref()?;
if let Ok(config_file_path) = config_uri.to_file_path() { let config_uri = folder_uri.join(config_setting).ok()?;
scopes.insert( url_to_file_path(&config_uri).ok()
folder_uri.clone(), })();
Arc::new( if config_file_path.is_some() || ws_settings.import_map.is_some() {
ConfigData::load( scopes.insert(
Some(&config_file_path), folder_uri.clone(),
folder_uri, Arc::new(
settings, ConfigData::load(
file_fetcher, config_file_path.as_deref(),
&cached_fs, folder_uri,
&deno_json_cache, settings,
&pkg_json_cache, file_fetcher,
&workspace_cache, &cached_fs,
) &deno_json_cache,
.await, &pkg_json_cache,
), &workspace_cache,
); )
} .await,
} ),
);
} }
} }
} }
@ -1772,29 +1907,6 @@ impl ConfigTree {
} }
} }
for folder_uri in settings.by_workspace_folder.keys() {
if !scopes
.keys()
.any(|s| folder_uri.as_str().starts_with(s.as_str()))
{
scopes.insert(
folder_uri.clone(),
Arc::new(
ConfigData::load(
None,
folder_uri,
settings,
file_fetcher,
&cached_fs,
&deno_json_cache,
&pkg_json_cache,
&workspace_cache,
)
.await,
),
);
}
}
self.scopes = Arc::new(scopes); self.scopes = Arc::new(scopes);
} }
@ -1803,7 +1915,7 @@ impl ConfigTree {
let scope = config_file.specifier.join(".").unwrap(); let scope = config_file.specifier.join(".").unwrap();
let json_text = serde_json::to_string(&config_file.json).unwrap(); let json_text = serde_json::to_string(&config_file.json).unwrap();
let test_fs = deno_runtime::deno_fs::InMemoryFs::default(); let test_fs = deno_runtime::deno_fs::InMemoryFs::default();
let config_path = specifier_to_file_path(&config_file.specifier).unwrap(); let config_path = url_to_file_path(&config_file.specifier).unwrap();
test_fs.setup_text_files(vec![( test_fs.setup_text_files(vec![(
config_path.to_string_lossy().to_string(), config_path.to_string_lossy().to_string(),
json_text, json_text,
@ -1845,7 +1957,12 @@ fn resolve_lockfile_from_workspace(
return None; return None;
} }
}; };
resolve_lockfile_from_path(lockfile_path) let frozen = workspace
.workspace
.root_deno_json()
.and_then(|c| c.to_lock_config().ok().flatten().map(|c| c.frozen()))
.unwrap_or(false);
resolve_lockfile_from_path(lockfile_path, frozen)
} }
fn resolve_node_modules_dir( fn resolve_node_modules_dir(
@ -1856,13 +1973,17 @@ fn resolve_node_modules_dir(
// `nodeModulesDir: true` setting in the deno.json file. This is to // `nodeModulesDir: true` setting in the deno.json file. This is to
// reduce the chance of modifying someone's node_modules directory // reduce the chance of modifying someone's node_modules directory
// without them having asked us to do so. // without them having asked us to do so.
let explicitly_disabled = workspace.node_modules_dir() == Some(false); let node_modules_mode = workspace.node_modules_dir().ok().flatten();
let explicitly_disabled = node_modules_mode == Some(NodeModulesDirMode::None);
if explicitly_disabled { if explicitly_disabled {
return None; return None;
} }
let enabled = byonm let enabled = byonm
|| workspace.node_modules_dir() == Some(true) || node_modules_mode
.map(|m| m.uses_node_modules_dir())
.unwrap_or(false)
|| workspace.vendor_dir_path().is_some(); || workspace.vendor_dir_path().is_some();
if !enabled { if !enabled {
return None; return None;
} }
@ -1874,8 +1995,15 @@ fn resolve_node_modules_dir(
canonicalize_path_maybe_not_exists(&node_modules_dir).ok() canonicalize_path_maybe_not_exists(&node_modules_dir).ok()
} }
fn resolve_lockfile_from_path(lockfile_path: PathBuf) -> Option<CliLockfile> { fn resolve_lockfile_from_path(
match CliLockfile::read_from_path(lockfile_path, false) { lockfile_path: PathBuf,
frozen: bool,
) -> Option<CliLockfile> {
match CliLockfile::read_from_path(CliLockfileReadFromPathOptions {
file_path: lockfile_path,
frozen,
skip_write: false,
}) {
Ok(value) => { Ok(value) => {
if value.filename.exists() { if value.filename.exists() {
if let Ok(specifier) = ModuleSpecifier::from_file_path(&value.filename) if let Ok(specifier) = ModuleSpecifier::from_file_path(&value.filename)
@ -2120,7 +2248,7 @@ mod tests {
}, },
tls_certificate: None, tls_certificate: None,
unsafely_ignore_certificate_errors: None, unsafely_ignore_certificate_errors: None,
unstable: false, unstable: Default::default(),
javascript: LanguageWorkspaceSettings { javascript: LanguageWorkspaceSettings {
inlay_hints: InlayHintsSettings { inlay_hints: InlayHintsSettings {
parameter_names: InlayHintsParamNamesOptions { parameter_names: InlayHintsParamNamesOptions {
@ -2148,6 +2276,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,
@ -2193,6 +2322,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,

View file

@ -12,14 +12,15 @@ use super::language_server::StateSnapshot;
use super::performance::Performance; use super::performance::Performance;
use super::tsc; use super::tsc;
use super::tsc::TsServer; use super::tsc::TsServer;
use super::urls::LspClientUrl; use super::urls::uri_parse_unencoded;
use super::urls::url_to_uri;
use super::urls::LspUrlMap; use super::urls::LspUrlMap;
use crate::graph_util; use crate::graph_util;
use crate::graph_util::enhanced_resolution_error_message; use crate::graph_util::enhanced_resolution_error_message;
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams; use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
use crate::resolver::SloppyImportsResolution; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions; use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider; use crate::tools::lint::LintRuleProvider;
@ -37,12 +38,14 @@ use deno_core::serde_json::json;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking; use deno_core::unsync::spawn_blocking;
use deno_core::unsync::JoinHandle; use deno_core::unsync::JoinHandle;
use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError; use deno_graph::source::ResolveError;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use deno_graph::SpecifierError; use deno_graph::SpecifierError;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime; use deno_runtime::tokio_util::create_basic_runtime;
@ -160,15 +163,14 @@ impl DiagnosticsPublisher {
.state .state
.update(&record.specifier, version, &all_specifier_diagnostics); .update(&record.specifier, version, &all_specifier_diagnostics);
let file_referrer = documents.get_file_referrer(&record.specifier); let file_referrer = documents.get_file_referrer(&record.specifier);
let Ok(uri) =
url_map.specifier_to_uri(&record.specifier, file_referrer.as_deref())
else {
continue;
};
self self
.client .client
.publish_diagnostics( .publish_diagnostics(uri, all_specifier_diagnostics, version)
url_map
.normalize_specifier(&record.specifier, file_referrer.as_deref())
.unwrap_or(LspClientUrl::new(record.specifier)),
all_specifier_diagnostics,
version,
)
.await; .await;
messages_sent += 1; messages_sent += 1;
} }
@ -191,15 +193,14 @@ impl DiagnosticsPublisher {
// clear out any diagnostics for this specifier // clear out any diagnostics for this specifier
self.state.update(specifier, removed_value.version, &[]); self.state.update(specifier, removed_value.version, &[]);
let file_referrer = documents.get_file_referrer(specifier); let file_referrer = documents.get_file_referrer(specifier);
let Ok(uri) =
url_map.specifier_to_uri(specifier, file_referrer.as_deref())
else {
continue;
};
self self
.client .client
.publish_diagnostics( .publish_diagnostics(uri, Vec::new(), removed_value.version)
url_map
.normalize_specifier(specifier, file_referrer.as_deref())
.unwrap_or_else(|_| LspClientUrl::new(specifier.clone())),
Vec::new(),
removed_value.version,
)
.await; .await;
messages_sent += 1; messages_sent += 1;
} }
@ -337,9 +338,9 @@ impl DiagnosticsState {
if diagnostic.code if diagnostic.code
== Some(lsp::NumberOrString::String("no-cache".to_string())) == Some(lsp::NumberOrString::String("no-cache".to_string()))
|| diagnostic.code || diagnostic.code
== Some(lsp::NumberOrString::String("no-cache-jsr".to_string())) == Some(lsp::NumberOrString::String("not-installed-jsr".to_string()))
|| diagnostic.code || diagnostic.code
== Some(lsp::NumberOrString::String("no-cache-npm".to_string())) == Some(lsp::NumberOrString::String("not-installed-npm".to_string()))
{ {
no_cache_diagnostics.push(diagnostic.clone()); no_cache_diagnostics.push(diagnostic.clone());
} }
@ -737,7 +738,7 @@ fn to_lsp_related_information(
if let (Some(file_name), Some(start), Some(end)) = if let (Some(file_name), Some(start), Some(end)) =
(&ri.file_name, &ri.start, &ri.end) (&ri.file_name, &ri.start, &ri.end)
{ {
let uri = lsp::Url::parse(file_name).unwrap(); let uri = uri_parse_unencoded(file_name).unwrap();
Some(lsp::DiagnosticRelatedInformation { Some(lsp::DiagnosticRelatedInformation {
location: lsp::Location { location: lsp::Location {
uri, uri,
@ -991,9 +992,9 @@ pub enum DenoDiagnostic {
/// A remote module was not found in the cache. /// A remote module was not found in the cache.
NoCache(ModuleSpecifier), NoCache(ModuleSpecifier),
/// A remote jsr package reference was not found in the cache. /// A remote jsr package reference was not found in the cache.
NoCacheJsr(PackageReq, ModuleSpecifier), NotInstalledJsr(PackageReq, ModuleSpecifier),
/// A remote npm package reference was not found in the cache. /// A remote npm package reference was not found in the cache.
NoCacheNpm(PackageReq, ModuleSpecifier), NotInstalledNpm(PackageReq, ModuleSpecifier),
/// A local module was not found on the local file system. /// A local module was not found on the local file system.
NoLocal(ModuleSpecifier), NoLocal(ModuleSpecifier),
/// The specifier resolved to a remote specifier that was redirected to /// The specifier resolved to a remote specifier that was redirected to
@ -1018,8 +1019,8 @@ impl DenoDiagnostic {
Self::InvalidAttributeType(_) => "invalid-attribute-type", Self::InvalidAttributeType(_) => "invalid-attribute-type",
Self::NoAttributeType => "no-attribute-type", Self::NoAttributeType => "no-attribute-type",
Self::NoCache(_) => "no-cache", Self::NoCache(_) => "no-cache",
Self::NoCacheJsr(_, _) => "no-cache-jsr", Self::NotInstalledJsr(_, _) => "not-installed-jsr",
Self::NoCacheNpm(_, _) => "no-cache-npm", Self::NotInstalledNpm(_, _) => "not-installed-npm",
Self::NoLocal(_) => "no-local", Self::NoLocal(_) => "no-local",
Self::Redirect { .. } => "redirect", Self::Redirect { .. } => "redirect",
Self::ResolutionError(err) => { Self::ResolutionError(err) => {
@ -1070,7 +1071,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!("\"{to}\""), new_text: format!("\"{to}\""),
range: diagnostic.range, range: diagnostic.range,
@ -1087,7 +1088,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: " with { type: \"json\" }".to_string(), new_text: " with { type: \"json\" }".to_string(),
range: lsp::Range { range: lsp::Range {
@ -1100,17 +1101,22 @@ impl DenoDiagnostic {
}), }),
..Default::default() ..Default::default()
}, },
"no-cache" | "no-cache-jsr" | "no-cache-npm" => { "no-cache" | "not-installed-jsr" | "not-installed-npm" => {
let data = diagnostic let data = diagnostic
.data .data
.clone() .clone()
.ok_or_else(|| anyhow!("Diagnostic is missing data"))?; .ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
let data: DiagnosticDataSpecifier = serde_json::from_value(data)?; let data: DiagnosticDataSpecifier = serde_json::from_value(data)?;
let title = if matches!(
code.as_str(),
"not-installed-jsr" | "not-installed-npm"
) {
format!("Install \"{}\" and its dependencies.", data.specifier)
} else {
format!("Cache \"{}\" and its dependencies.", data.specifier)
};
lsp::CodeAction { lsp::CodeAction {
title: format!( title,
"Cache \"{}\" and its dependencies.",
data.specifier
),
kind: Some(lsp::CodeActionKind::QUICKFIX), kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
command: Some(lsp::Command { command: Some(lsp::Command {
@ -1133,7 +1139,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!( new_text: format!(
"\"{}\"", "\"{}\"",
@ -1159,7 +1165,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!( new_text: format!(
"\"{}\"", "\"{}\"",
@ -1185,7 +1191,7 @@ impl DenoDiagnostic {
diagnostics: Some(vec![diagnostic.clone()]), diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit { edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([( changes: Some(HashMap::from([(
specifier.clone(), url_to_uri(specifier)?,
vec![lsp::TextEdit { vec![lsp::TextEdit {
new_text: format!("\"node:{}\"", data.specifier), new_text: format!("\"node:{}\"", data.specifier),
range: diagnostic.range, range: diagnostic.range,
@ -1216,8 +1222,8 @@ impl DenoDiagnostic {
match code.as_str() { match code.as_str() {
"import-map-remap" "import-map-remap"
| "no-cache" | "no-cache"
| "no-cache-jsr" | "not-installed-jsr"
| "no-cache-npm" | "not-installed-npm"
| "no-attribute-type" | "no-attribute-type"
| "redirect" | "redirect"
| "import-node-prefix-missing" => true, | "import-node-prefix-missing" => true,
@ -1255,10 +1261,12 @@ impl DenoDiagnostic {
Self::InvalidAttributeType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an attribute type of \"json\". Instead got \"{assert_type}\"."), None), Self::InvalidAttributeType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an attribute type of \"json\". Instead got \"{assert_type}\"."), None),
Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None), Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None),
Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))), Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))),
Self::NoCacheJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing jsr package: {}", pkg_req), Some(json!({ "specifier": specifier }))), Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NoCacheNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: {}", pkg_req), Some(json!({ "specifier": specifier }))), Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => { Self::NoLocal(specifier) => {
let maybe_sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution); let maybe_sloppy_resolution = CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs))
).resolve(specifier, SloppyImportsResolutionMode::Execution);
let data = maybe_sloppy_resolution.as_ref().map(|res| { let data = maybe_sloppy_resolution.as_ref().map(|res| {
json!({ json!({
"specifier": specifier, "specifier": specifier,
@ -1303,10 +1311,7 @@ impl DenoDiagnostic {
} }
} }
fn specifier_text_for_redirected( fn specifier_text_for_redirected(redirect: &Url, referrer: &Url) -> String {
redirect: &lsp::Url,
referrer: &lsp::Url,
) -> String {
if redirect.scheme() == "file" && referrer.scheme() == "file" { if redirect.scheme() == "file" && referrer.scheme() == "file" {
// use a relative specifier when it's going to a file url // use a relative specifier when it's going to a file url
relative_specifier(redirect, referrer) relative_specifier(redirect, referrer)
@ -1315,7 +1320,7 @@ fn specifier_text_for_redirected(
} }
} }
fn relative_specifier(specifier: &lsp::Url, referrer: &lsp::Url) -> String { fn relative_specifier(specifier: &Url, referrer: &Url) -> String {
match referrer.make_relative(specifier) { match referrer.make_relative(specifier) {
Some(relative) => { Some(relative) => {
if relative.starts_with('.') { if relative.starts_with('.') {
@ -1367,21 +1372,20 @@ fn diagnose_resolution(
let mut diagnostics = vec![]; let mut diagnostics = vec![];
match resolution { match resolution {
Resolution::Ok(resolved) => { Resolution::Ok(resolved) => {
let file_referrer = referrer_doc.file_referrer();
let specifier = &resolved.specifier; let specifier = &resolved.specifier;
let managed_npm_resolver = snapshot let managed_npm_resolver =
.resolver snapshot.resolver.maybe_managed_npm_resolver(file_referrer);
.maybe_managed_npm_resolver(referrer_doc.file_referrer());
for (_, headers) in snapshot for (_, headers) in snapshot
.resolver .resolver
.redirect_chain_headers(specifier, referrer_doc.file_referrer()) .redirect_chain_headers(specifier, file_referrer)
{ {
if let Some(message) = headers.get("x-deno-warning") { if let Some(message) = headers.get("x-deno-warning") {
diagnostics.push(DenoDiagnostic::DenoWarn(message.clone())); diagnostics.push(DenoDiagnostic::DenoWarn(message.clone()));
} }
} }
if let Some(doc) = snapshot if let Some(doc) =
.documents snapshot.documents.get_or_load(specifier, file_referrer)
.get_or_load(specifier, referrer_doc.specifier())
{ {
if let Some(headers) = doc.maybe_headers() { if let Some(headers) = doc.maybe_headers() {
if let Some(message) = headers.get("x-deno-warning") { if let Some(message) = headers.get("x-deno-warning") {
@ -1411,7 +1415,8 @@ fn diagnose_resolution(
JsrPackageReqReference::from_specifier(specifier) JsrPackageReqReference::from_specifier(specifier)
{ {
let req = pkg_ref.into_inner().req; let req = pkg_ref.into_inner().req;
diagnostics.push(DenoDiagnostic::NoCacheJsr(req, specifier.clone())); diagnostics
.push(DenoDiagnostic::NotInstalledJsr(req, specifier.clone()));
} else if let Ok(pkg_ref) = } else if let Ok(pkg_ref) =
NpmPackageReqReference::from_specifier(specifier) NpmPackageReqReference::from_specifier(specifier)
{ {
@ -1420,7 +1425,7 @@ fn diagnose_resolution(
let req = pkg_ref.into_inner().req; let req = pkg_ref.into_inner().req;
if !npm_resolver.is_pkg_req_folder_cached(&req) { if !npm_resolver.is_pkg_req_folder_cached(&req) {
diagnostics diagnostics
.push(DenoDiagnostic::NoCacheNpm(req, specifier.clone())); .push(DenoDiagnostic::NotInstalledNpm(req, specifier.clone()));
} }
} }
} else if let Some(module_name) = specifier.as_str().strip_prefix("node:") } else if let Some(module_name) = specifier.as_str().strip_prefix("node:")
@ -1446,7 +1451,7 @@ fn diagnose_resolution(
// check that a @types/node package exists in the resolver // check that a @types/node package exists in the resolver
let types_node_req = PackageReq::from_str("@types/node").unwrap(); let types_node_req = PackageReq::from_str("@types/node").unwrap();
if !npm_resolver.is_pkg_req_folder_cached(&types_node_req) { if !npm_resolver.is_pkg_req_folder_cached(&types_node_req) {
diagnostics.push(DenoDiagnostic::NoCacheNpm( diagnostics.push(DenoDiagnostic::NotInstalledNpm(
types_node_req, types_node_req,
ModuleSpecifier::parse("npm:@types/node").unwrap(), ModuleSpecifier::parse("npm:@types/node").unwrap(),
)); ));
@ -1494,7 +1499,11 @@ fn diagnose_dependency(
.data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer))
.and_then(|d| d.resolver.maybe_import_map()); .and_then(|d| d.resolver.maybe_import_map());
if let Some(import_map) = import_map { if let Some(import_map) = import_map {
if let Resolution::Ok(resolved) = &dependency.maybe_code { let resolved = dependency
.maybe_code
.ok()
.or_else(|| dependency.maybe_type.ok());
if let Some(resolved) = resolved {
if let Some(to) = import_map.lookup(&resolved.specifier, referrer) { if let Some(to) = import_map.lookup(&resolved.specifier, referrer) {
if dependency_key != to { if dependency_key != to {
diagnostics.push( diagnostics.push(
@ -1512,17 +1521,19 @@ fn diagnose_dependency(
let import_ranges: Vec<_> = dependency let import_ranges: Vec<_> = dependency
.imports .imports
.iter() .iter()
.map(|i| documents::to_lsp_range(&i.range)) .map(|i| documents::to_lsp_range(&i.specifier_range))
.collect(); .collect();
// TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has // TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has
// a different specifier and therefore needs a separate call to // a different specifier and therefore needs a separate call to
// `diagnose_resolution()`. It would be much cleaner if that were modelled as // `diagnose_resolution()`. It would be much cleaner if that were modelled as
// a separate dependency: https://github.com/denoland/deno_graph/issues/247. // a separate dependency: https://github.com/denoland/deno_graph/issues/247.
let is_types_deno_types = !dependency.maybe_type.is_none() let is_types_deno_types = !dependency.maybe_type.is_none()
&& !dependency && !dependency.imports.iter().any(|i| {
.imports dependency
.iter() .maybe_type
.any(|i| dependency.maybe_type.includes(&i.range.start).is_some()); .includes(&i.specifier_range.start)
.is_some()
});
diagnostics.extend( diagnostics.extend(
diagnose_resolution( diagnose_resolution(
@ -1532,7 +1543,7 @@ fn diagnose_dependency(
// If not @deno-types, diagnose the types if the code errored because // If not @deno-types, diagnose the types if the code errored because
// it's likely resolving into the node_modules folder, which might be // it's likely resolving into the node_modules folder, which might be
// erroring correctly due to resolution only being for bundlers. Let this // erroring correctly due to resolution only being for bundlers. Let this
// fail at runtime if necesarry, but don't bother erroring in the editor // fail at runtime if necessary, but don't bother erroring in the editor
|| !is_types_deno_types && matches!(dependency.maybe_type, Resolution::Ok(_)) || !is_types_deno_types && matches!(dependency.maybe_type, Resolution::Ok(_))
&& matches!(dependency.maybe_code, Resolution::Err(_)) && matches!(dependency.maybe_code, Resolution::Err(_))
{ {
@ -1635,7 +1646,8 @@ mod tests {
use test_util::TempDir; use test_util::TempDir;
fn mock_config() -> Config { fn mock_config() -> Config {
let root_uri = resolve_url("file:///").unwrap(); let root_url = resolve_url("file:///").unwrap();
let root_uri = url_to_uri(&root_url).unwrap();
Config { Config {
settings: Arc::new(Settings { settings: Arc::new(Settings {
unscoped: Arc::new(WorkspaceSettings { unscoped: Arc::new(WorkspaceSettings {
@ -1646,7 +1658,7 @@ mod tests {
..Default::default() ..Default::default()
}), }),
workspace_folders: Arc::new(vec![( workspace_folders: Arc::new(vec![(
root_uri.clone(), root_url,
lsp::WorkspaceFolder { lsp::WorkspaceFolder {
uri: root_uri, uri: root_uri,
name: "".to_string(), name: "".to_string(),
@ -1661,7 +1673,7 @@ mod tests {
maybe_import_map: Option<(&str, &str)>, maybe_import_map: Option<(&str, &str)>,
) -> (TempDir, StateSnapshot) { ) -> (TempDir, StateSnapshot) {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let root_uri = temp_dir.uri(); let root_uri = temp_dir.url();
let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap())); let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap()));
let mut config = Config::new_with_roots([root_uri.clone()]); let mut config = Config::new_with_roots([root_uri.clone()]);
if let Some((relative_path, json_string)) = maybe_import_map { if let Some((relative_path, json_string)) = maybe_import_map {
@ -1695,6 +1707,7 @@ mod tests {
documents: Arc::new(documents), documents: Arc::new(documents),
assets: Default::default(), assets: Default::default(),
config: Arc::new(config), config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver, resolver,
}, },
) )
@ -1828,7 +1841,7 @@ let c: number = "a";
assert_eq!(actual.len(), 2); assert_eq!(actual.len(), 2);
for record in actual { for record in actual {
let relative_specifier = let relative_specifier =
temp_dir.uri().make_relative(&record.specifier).unwrap(); temp_dir.url().make_relative(&record.specifier).unwrap();
match relative_specifier.as_str() { match relative_specifier.as_str() {
"std/assert/mod.ts" => { "std/assert/mod.ts" => {
assert_eq!(json!(record.versioned.diagnostics), json!([])) assert_eq!(json!(record.versioned.diagnostics), json!([]))
@ -2047,7 +2060,7 @@ let c: number = "a";
"source": "deno", "source": "deno",
"message": format!( "message": format!(
"Unable to load a local module: {}🦕.ts\nPlease check the file path.", "Unable to load a local module: {}🦕.ts\nPlease check the file path.",
temp_dir.uri(), temp_dir.url(),
), ),
} }
]) ])

View file

@ -2,9 +2,11 @@
use super::cache::calculate_fs_version; use super::cache::calculate_fs_version;
use super::cache::LspCache; use super::cache::LspCache;
use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY;
use super::config::Config; use super::config::Config;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::resolver::ScopeDepInfo;
use super::resolver::SingleReferrerGraphResolver;
use super::testing::TestCollector; use super::testing::TestCollector;
use super::testing::TestModule; use super::testing::TestModule;
use super::text::LineIndex; use super::text::LineIndex;
@ -12,7 +14,6 @@ use super::tsc;
use super::tsc::AssetDocument; use super::tsc::AssetDocument;
use crate::graph_util::CliJsrUrlProvider; use crate::graph_util::CliJsrUrlProvider;
use deno_runtime::fs_util::specifier_to_file_path;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::swc::visit::VisitWith; use deno_ast::swc::visit::VisitWith;
@ -28,15 +29,16 @@ use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode; use deno_graph::source::ResolutionMode;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use indexmap::IndexMap; use indexmap::IndexMap;
use indexmap::IndexSet; use indexmap::IndexSet;
use node_resolver::NodeModuleKind;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::fs; use std::fs;
@ -61,6 +63,9 @@ pub enum LanguageId {
Json, Json,
JsonC, JsonC,
Markdown, Markdown,
Html,
Css,
Yaml,
Unknown, Unknown,
} }
@ -74,6 +79,9 @@ impl LanguageId {
LanguageId::Json => Some("json"), LanguageId::Json => Some("json"),
LanguageId::JsonC => Some("jsonc"), LanguageId::JsonC => Some("jsonc"),
LanguageId::Markdown => Some("md"), LanguageId::Markdown => Some("md"),
LanguageId::Html => Some("html"),
LanguageId::Css => Some("css"),
LanguageId::Yaml => Some("yaml"),
LanguageId::Unknown => None, LanguageId::Unknown => None,
} }
} }
@ -86,6 +94,9 @@ impl LanguageId {
LanguageId::Tsx => Some("text/tsx"), LanguageId::Tsx => Some("text/tsx"),
LanguageId::Json | LanguageId::JsonC => Some("application/json"), LanguageId::Json | LanguageId::JsonC => Some("application/json"),
LanguageId::Markdown => Some("text/markdown"), LanguageId::Markdown => Some("text/markdown"),
LanguageId::Html => Some("text/html"),
LanguageId::Css => Some("text/css"),
LanguageId::Yaml => Some("application/yaml"),
LanguageId::Unknown => None, LanguageId::Unknown => None,
} }
} }
@ -110,6 +121,9 @@ impl FromStr for LanguageId {
"json" => Ok(Self::Json), "json" => Ok(Self::Json),
"jsonc" => Ok(Self::JsonC), "jsonc" => Ok(Self::JsonC),
"markdown" => Ok(Self::Markdown), "markdown" => Ok(Self::Markdown),
"html" => Ok(Self::Html),
"css" => Ok(Self::Css),
"yaml" => Ok(Self::Yaml),
_ => Ok(Self::Unknown), _ => Ok(Self::Unknown),
} }
} }
@ -261,7 +275,7 @@ fn get_maybe_test_module_fut(
parsed_source.specifier().clone(), parsed_source.specifier().clone(),
parsed_source.text_info_lazy().clone(), parsed_source.text_info_lazy().clone(),
); );
parsed_source.module().visit_with(&mut collector); parsed_source.program().visit_with(&mut collector);
Arc::new(collector.take()) Arc::new(collector.take())
}) })
.map(Result::ok) .map(Result::ok)
@ -282,6 +296,8 @@ pub struct Document {
/// Contains the last-known-good set of dependencies from parsing the module. /// Contains the last-known-good set of dependencies from parsing the module.
config: Arc<Config>, config: Arc<Config>,
dependencies: Arc<IndexMap<String, deno_graph::Dependency>>, dependencies: Arc<IndexMap<String, deno_graph::Dependency>>,
/// If this is maybe a CJS script and maybe not an ES module.
is_script: Option<bool>,
// TODO(nayeemrmn): This is unused, use it for scope attribution for remote // TODO(nayeemrmn): This is unused, use it for scope attribution for remote
// modules. // modules.
file_referrer: Option<ModuleSpecifier>, file_referrer: Option<ModuleSpecifier>,
@ -312,6 +328,7 @@ impl Document {
maybe_lsp_version: Option<i32>, maybe_lsp_version: Option<i32>,
maybe_language_id: Option<LanguageId>, maybe_language_id: Option<LanguageId>,
maybe_headers: Option<HashMap<String, String>>, maybe_headers: Option<HashMap<String, String>>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>, resolver: Arc<LspResolver>,
config: Arc<Config>, config: Arc<Config>,
cache: &Arc<LspCache>, cache: &Arc<LspCache>,
@ -321,12 +338,8 @@ impl Document {
.filter(|s| cache.is_valid_file_referrer(s)) .filter(|s| cache.is_valid_file_referrer(s))
.cloned() .cloned()
.or(file_referrer); .or(file_referrer);
let media_type = resolve_media_type( let media_type =
&specifier, resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id);
maybe_headers.as_ref(),
maybe_language_id,
&resolver,
);
let (maybe_parsed_source, maybe_module) = let (maybe_parsed_source, maybe_module) =
if media_type_is_diagnosable(media_type) { if media_type_is_diagnosable(media_type) {
parse_and_analyze_module( parse_and_analyze_module(
@ -335,6 +348,7 @@ impl Document {
maybe_headers.as_ref(), maybe_headers.as_ref(),
media_type, media_type,
file_referrer.as_ref(), file_referrer.as_ref(),
is_cjs_resolver,
&resolver, &resolver,
) )
} else { } else {
@ -360,6 +374,7 @@ impl Document {
file_referrer.as_ref(), file_referrer.as_ref(),
), ),
file_referrer, file_referrer,
is_script: maybe_module.as_ref().map(|m| m.is_script),
maybe_types_dependency, maybe_types_dependency,
line_index, line_index,
maybe_language_id, maybe_language_id,
@ -381,6 +396,7 @@ impl Document {
fn with_new_config( fn with_new_config(
&self, &self,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>, resolver: Arc<LspResolver>,
config: Arc<Config>, config: Arc<Config>,
) -> Arc<Self> { ) -> Arc<Self> {
@ -388,11 +404,11 @@ impl Document {
&self.specifier, &self.specifier,
self.maybe_headers.as_ref(), self.maybe_headers.as_ref(),
self.maybe_language_id, self.maybe_language_id,
&resolver,
); );
let dependencies; let dependencies;
let maybe_types_dependency; let maybe_types_dependency;
let maybe_parsed_source; let maybe_parsed_source;
let is_script;
let maybe_test_module_fut; let maybe_test_module_fut;
if media_type != self.media_type { if media_type != self.media_type {
let parsed_source_result = let parsed_source_result =
@ -402,6 +418,7 @@ impl Document {
&parsed_source_result, &parsed_source_result,
self.maybe_headers.as_ref(), self.maybe_headers.as_ref(),
self.file_referrer.as_ref(), self.file_referrer.as_ref(),
is_cjs_resolver,
&resolver, &resolver,
) )
.ok(); .ok();
@ -409,6 +426,7 @@ impl Document {
.as_ref() .as_ref()
.map(|m| Arc::new(m.dependencies.clone())) .map(|m| Arc::new(m.dependencies.clone()))
.unwrap_or_default(); .unwrap_or_default();
is_script = maybe_module.as_ref().map(|m| m.is_script);
maybe_types_dependency = maybe_module maybe_types_dependency = maybe_module
.as_ref() .as_ref()
.and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))); .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?)));
@ -416,10 +434,19 @@ impl Document {
maybe_test_module_fut = maybe_test_module_fut =
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config); get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config);
} else { } else {
let graph_resolver = let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref());
resolver.as_graph_resolver(self.file_referrer.as_ref());
let npm_resolver = let npm_resolver =
resolver.create_graph_npm_resolver(self.file_referrer.as_ref()); resolver.create_graph_npm_resolver(self.file_referrer.as_ref());
let config_data = resolver.as_config_data(self.file_referrer.as_ref());
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let resolver = SingleReferrerGraphResolver {
valid_referrer: &self.specifier,
referrer_kind: is_cjs_resolver
.get_lsp_referrer_kind(&self.specifier, self.is_script),
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
dependencies = Arc::new( dependencies = Arc::new(
self self
.dependencies .dependencies
@ -430,7 +457,7 @@ impl Document {
d.with_new_resolver( d.with_new_resolver(
s, s,
&CliJsrUrlProvider, &CliJsrUrlProvider,
Some(graph_resolver), Some(&resolver),
Some(&npm_resolver), Some(&npm_resolver),
), ),
) )
@ -440,10 +467,11 @@ impl Document {
maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| { maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| {
Arc::new(d.with_new_resolver( Arc::new(d.with_new_resolver(
&CliJsrUrlProvider, &CliJsrUrlProvider,
Some(graph_resolver), Some(&resolver),
Some(&npm_resolver), Some(&npm_resolver),
)) ))
}); });
is_script = self.is_script;
maybe_parsed_source = self.maybe_parsed_source().cloned(); maybe_parsed_source = self.maybe_parsed_source().cloned();
maybe_test_module_fut = self maybe_test_module_fut = self
.maybe_test_module_fut .maybe_test_module_fut
@ -455,6 +483,7 @@ impl Document {
// updated properties // updated properties
dependencies, dependencies,
file_referrer: self.file_referrer.clone(), file_referrer: self.file_referrer.clone(),
is_script,
maybe_types_dependency, maybe_types_dependency,
maybe_navigation_tree: Mutex::new(None), maybe_navigation_tree: Mutex::new(None),
// maintain - this should all be copies/clones // maintain - this should all be copies/clones
@ -479,6 +508,7 @@ impl Document {
fn with_change( fn with_change(
&self, &self,
is_cjs_resolver: &LspIsCjsResolver,
version: i32, version: i32,
changes: Vec<lsp::TextDocumentContentChangeEvent>, changes: Vec<lsp::TextDocumentContentChangeEvent>,
) -> Result<Arc<Self>, AnyError> { ) -> Result<Arc<Self>, AnyError> {
@ -512,6 +542,7 @@ impl Document {
self.maybe_headers.as_ref(), self.maybe_headers.as_ref(),
media_type, media_type,
self.file_referrer.as_ref(), self.file_referrer.as_ref(),
is_cjs_resolver,
self.resolver.as_ref(), self.resolver.as_ref(),
) )
} else { } else {
@ -535,6 +566,7 @@ impl Document {
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config); get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config);
Ok(Arc::new(Self { Ok(Arc::new(Self {
config: self.config.clone(), config: self.config.clone(),
is_script: maybe_module.as_ref().map(|m| m.is_script),
specifier: self.specifier.clone(), specifier: self.specifier.clone(),
file_referrer: self.file_referrer.clone(), file_referrer: self.file_referrer.clone(),
maybe_fs_version: self.maybe_fs_version.clone(), maybe_fs_version: self.maybe_fs_version.clone(),
@ -569,6 +601,7 @@ impl Document {
), ),
maybe_language_id: self.maybe_language_id, maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(), dependencies: self.dependencies.clone(),
is_script: self.is_script,
maybe_types_dependency: self.maybe_types_dependency.clone(), maybe_types_dependency: self.maybe_types_dependency.clone(),
text: self.text.clone(), text: self.text.clone(),
text_info_cell: once_cell::sync::OnceCell::new(), text_info_cell: once_cell::sync::OnceCell::new(),
@ -596,6 +629,7 @@ impl Document {
), ),
maybe_language_id: self.maybe_language_id, maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(), dependencies: self.dependencies.clone(),
is_script: self.is_script,
maybe_types_dependency: self.maybe_types_dependency.clone(), maybe_types_dependency: self.maybe_types_dependency.clone(),
text: self.text.clone(), text: self.text.clone(),
text_info_cell: once_cell::sync::OnceCell::new(), text_info_cell: once_cell::sync::OnceCell::new(),
@ -644,6 +678,13 @@ impl Document {
}) })
} }
/// If this is maybe a CJS script and maybe not an ES module.
///
/// Use `LspIsCjsResolver` to determine for sure.
pub fn is_script(&self) -> Option<bool> {
self.is_script
}
pub fn line_index(&self) -> Arc<LineIndex> { pub fn line_index(&self) -> Arc<LineIndex> {
self.line_index.clone() self.line_index.clone()
} }
@ -753,14 +794,7 @@ fn resolve_media_type(
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_headers: Option<&HashMap<String, String>>, maybe_headers: Option<&HashMap<String, String>>,
maybe_language_id: Option<LanguageId>, maybe_language_id: Option<LanguageId>,
resolver: &LspResolver,
) -> MediaType { ) -> MediaType {
if resolver.in_node_modules(specifier) {
if let Some(media_type) = resolver.node_media_type(specifier) {
return media_type;
}
}
if let Some(language_id) = maybe_language_id { if let Some(language_id) = maybe_language_id {
return MediaType::from_specifier_and_content_type( return MediaType::from_specifier_and_content_type(
specifier, specifier,
@ -798,6 +832,7 @@ impl FileSystemDocuments {
pub fn get( pub fn get(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>, resolver: &Arc<LspResolver>,
config: &Arc<Config>, config: &Arc<Config>,
cache: &Arc<LspCache>, cache: &Arc<LspCache>,
@ -821,7 +856,14 @@ impl FileSystemDocuments {
}; };
if dirty { if dirty {
// attempt to update the file on the file system // attempt to update the file on the file system
self.refresh_document(specifier, resolver, config, cache, file_referrer) self.refresh_document(
specifier,
is_cjs_resolver,
resolver,
config,
cache,
file_referrer,
)
} else { } else {
old_doc old_doc
} }
@ -832,22 +874,29 @@ impl FileSystemDocuments {
fn refresh_document( fn refresh_document(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>, resolver: &Arc<LspResolver>,
config: &Arc<Config>, config: &Arc<Config>,
cache: &Arc<LspCache>, cache: &Arc<LspCache>,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Option<Arc<Document>> { ) -> Option<Arc<Document>> {
let doc = if specifier.scheme() == "file" { let doc = if specifier.scheme() == "file" {
let path = specifier_to_file_path(specifier).ok()?; let path = url_to_file_path(specifier).ok()?;
let bytes = fs::read(path).ok()?; let bytes = fs::read(path).ok()?;
let content = let content = bytes_to_content(
deno_graph::source::decode_owned_source(specifier, bytes, None).ok()?; specifier,
MediaType::from_specifier(specifier),
bytes,
None,
)
.ok()?;
Document::new( Document::new(
specifier.clone(), specifier.clone(),
content.into(), content.into(),
None, None,
None, None,
None, None,
is_cjs_resolver,
resolver.clone(), resolver.clone(),
config.clone(), config.clone(),
cache, cache,
@ -864,6 +913,7 @@ impl FileSystemDocuments {
None, None,
None, None,
None, None,
is_cjs_resolver,
resolver.clone(), resolver.clone(),
config.clone(), config.clone(),
cache, cache,
@ -872,28 +922,31 @@ impl FileSystemDocuments {
} else { } else {
let http_cache = cache.for_specifier(file_referrer); let http_cache = cache.for_specifier(file_referrer);
let cache_key = http_cache.cache_item_key(specifier).ok()?; let cache_key = http_cache.cache_item_key(specifier).ok()?;
let bytes = http_cache let cached_file = http_cache.get(&cache_key, None).ok()??;
.read_file_bytes(&cache_key, None, LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY)
.ok()??;
let specifier_headers = http_cache.read_headers(&cache_key).ok()??;
let (_, maybe_charset) = let (_, maybe_charset) =
deno_graph::source::resolve_media_type_and_charset_from_headers( deno_graph::source::resolve_media_type_and_charset_from_headers(
specifier, specifier,
Some(&specifier_headers), Some(&cached_file.metadata.headers),
); );
let content = deno_graph::source::decode_owned_source( let media_type = resolve_media_type(
specifier, specifier,
bytes, Some(&cached_file.metadata.headers),
None,
);
let content = bytes_to_content(
specifier,
media_type,
cached_file.content,
maybe_charset, maybe_charset,
) )
.ok()?; .ok()?;
let maybe_headers = Some(specifier_headers);
Document::new( Document::new(
specifier.clone(), specifier.clone(),
content.into(), content.into(),
None, None,
None, None,
maybe_headers, Some(cached_file.metadata.headers),
is_cjs_resolver,
resolver.clone(), resolver.clone(),
config.clone(), config.clone(),
cache, cache,
@ -934,6 +987,11 @@ pub struct Documents {
/// The DENO_DIR that the documents looks for non-file based modules. /// The DENO_DIR that the documents looks for non-file based modules.
cache: Arc<LspCache>, cache: Arc<LspCache>,
config: Arc<Config>, config: Arc<Config>,
/// Resolver for detecting if a document is CJS or ESM.
is_cjs_resolver: Arc<LspIsCjsResolver>,
/// A resolver that takes into account currently loaded import map and JSX
/// settings.
resolver: Arc<LspResolver>,
/// A flag that indicates that stated data is potentially invalid and needs to /// A flag that indicates that stated data is potentially invalid and needs to
/// be recalculated before being considered valid. /// be recalculated before being considered valid.
dirty: bool, dirty: bool,
@ -941,15 +999,7 @@ pub struct Documents {
open_docs: HashMap<ModuleSpecifier, Arc<Document>>, open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
/// Documents stored on the file system. /// Documents stored on the file system.
file_system_docs: Arc<FileSystemDocuments>, file_system_docs: Arc<FileSystemDocuments>,
/// A resolver that takes into account currently loaded import map and JSX dep_info_by_scope: Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>>,
/// settings.
resolver: Arc<LspResolver>,
/// The npm package requirements found in npm specifiers.
npm_reqs_by_scope:
Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
/// Config scopes that contain a node: specifier such that a @types/node
/// package should be injected.
scopes_with_node_specifier: Arc<HashSet<Option<ModuleSpecifier>>>,
} }
impl Documents { impl Documents {
@ -974,6 +1024,7 @@ impl Documents {
// the cache for remote modules here in order to get the // the cache for remote modules here in order to get the
// x-typescript-types? // x-typescript-types?
None, None,
&self.is_cjs_resolver,
self.resolver.clone(), self.resolver.clone(),
self.config.clone(), self.config.clone(),
&self.cache, &self.cache,
@ -1008,7 +1059,7 @@ impl Documents {
)) ))
})?; })?;
self.dirty = true; self.dirty = true;
let doc = doc.with_change(version, changes)?; let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?;
self.open_docs.insert(doc.specifier().clone(), doc.clone()); self.open_docs.insert(doc.specifier().clone(), doc.clone());
Ok(doc) Ok(doc)
} }
@ -1063,34 +1114,6 @@ impl Documents {
self.cache.is_valid_file_referrer(specifier) self.cache.is_valid_file_referrer(specifier)
} }
/// Return `true` if the provided specifier can be resolved to a document,
/// otherwise `false`.
pub fn contains_import(
&self,
specifier: &str,
referrer: &ModuleSpecifier,
) -> bool {
let file_referrer = self.get_file_referrer(referrer);
let maybe_specifier = self
.resolver
.as_graph_resolver(file_referrer.as_deref())
.resolve(
specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
ResolutionMode::Types,
)
.ok();
if let Some(import_specifier) = maybe_specifier {
self.exists(&import_specifier, file_referrer.as_deref())
} else {
false
}
}
pub fn resolve_document_specifier( pub fn resolve_document_specifier(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -1128,7 +1151,7 @@ impl Documents {
return true; return true;
} }
if specifier.scheme() == "file" { if specifier.scheme() == "file" {
return specifier_to_file_path(&specifier) return url_to_file_path(&specifier)
.map(|p| p.is_file()) .map(|p| p.is_file())
.unwrap_or(false); .unwrap_or(false);
} }
@ -1139,17 +1162,20 @@ impl Documents {
false false
} }
pub fn npm_reqs_by_scope( pub fn dep_info_by_scope(
&mut self, &mut self,
) -> Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>> { ) -> Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>> {
self.calculate_npm_reqs_if_dirty(); self.calculate_dep_info_if_dirty();
self.npm_reqs_by_scope.clone() self.dep_info_by_scope.clone()
} }
pub fn scopes_with_node_specifier( pub fn scopes_with_node_specifier(&self) -> HashSet<Option<ModuleSpecifier>> {
&self, self
) -> &Arc<HashSet<Option<ModuleSpecifier>>> { .dep_info_by_scope
&self.scopes_with_node_specifier .iter()
.filter(|(_, i)| i.has_node_specifier)
.map(|(s, _)| s.clone())
.collect::<HashSet<_>>()
} }
/// Return a document for the specifier. /// Return a document for the specifier.
@ -1165,6 +1191,7 @@ impl Documents {
if let Some(old_doc) = old_doc { if let Some(old_doc) = old_doc {
self.file_system_docs.get( self.file_system_docs.get(
specifier, specifier,
&self.is_cjs_resolver,
&self.resolver, &self.resolver,
&self.config, &self.config,
&self.cache, &self.cache,
@ -1180,20 +1207,20 @@ impl Documents {
pub fn get_or_load( pub fn get_or_load(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier, file_referrer: Option<&ModuleSpecifier>,
) -> Option<Arc<Document>> { ) -> Option<Arc<Document>> {
let file_referrer = self.get_file_referrer(referrer);
let specifier = let specifier =
self.resolve_document_specifier(specifier, file_referrer.as_deref())?; self.resolve_document_specifier(specifier, file_referrer)?;
if let Some(document) = self.open_docs.get(&specifier) { if let Some(document) = self.open_docs.get(&specifier) {
Some(document.clone()) Some(document.clone())
} else { } else {
self.file_system_docs.get( self.file_system_docs.get(
&specifier, &specifier,
&self.is_cjs_resolver,
&self.resolver, &self.resolver,
&self.config, &self.config,
&self.cache, &self.cache,
file_referrer.as_deref(), file_referrer,
) )
} }
} }
@ -1244,57 +1271,64 @@ impl Documents {
/// tsc when type checking. /// tsc when type checking.
pub fn resolve( pub fn resolve(
&self, &self,
specifiers: &[String], raw_specifiers: &[String],
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Vec<Option<(ModuleSpecifier, MediaType)>> { ) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
let document = self.get(referrer); let referrer_doc = self.get(referrer);
let file_referrer = document let file_referrer = referrer_doc
.as_ref() .as_ref()
.and_then(|d| d.file_referrer()) .and_then(|d| d.file_referrer())
.or(file_referrer); .or(file_referrer);
let dependencies = document.as_ref().map(|d| d.dependencies()); let dependencies = referrer_doc.as_ref().map(|d| d.dependencies());
let referrer_kind = self
.is_cjs_resolver
.get_maybe_doc_module_kind(referrer, referrer_doc.as_deref());
let mut results = Vec::new(); let mut results = Vec::new();
for specifier in specifiers { for raw_specifier in raw_specifiers {
if specifier.starts_with("asset:") { if raw_specifier.starts_with("asset:") {
if let Ok(specifier) = ModuleSpecifier::parse(specifier) { if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) {
let media_type = MediaType::from_specifier(&specifier); let media_type = MediaType::from_specifier(&specifier);
results.push(Some((specifier, media_type))); results.push(Some((specifier, media_type)));
} else { } else {
results.push(None); results.push(None);
} }
} else if let Some(dep) = } else if let Some(dep) =
dependencies.as_ref().and_then(|d| d.get(specifier)) dependencies.as_ref().and_then(|d| d.get(raw_specifier))
{ {
if let Some(specifier) = dep.maybe_type.maybe_specifier() { if let Some(specifier) = dep.maybe_type.maybe_specifier() {
results.push(self.resolve_dependency( results.push(self.resolve_dependency(
specifier, specifier,
referrer, referrer,
referrer_kind,
file_referrer, file_referrer,
)); ));
} else if let Some(specifier) = dep.maybe_code.maybe_specifier() { } else if let Some(specifier) = dep.maybe_code.maybe_specifier() {
results.push(self.resolve_dependency( results.push(self.resolve_dependency(
specifier, specifier,
referrer, referrer,
referrer_kind,
file_referrer, file_referrer,
)); ));
} else { } else {
results.push(None); results.push(None);
} }
} else if let Ok(specifier) = } else if let Ok(specifier) =
self.resolver.as_graph_resolver(file_referrer).resolve( self.resolver.as_cli_resolver(file_referrer).resolve(
specifier, raw_specifier,
&deno_graph::Range { &deno_graph::Range {
specifier: referrer.clone(), specifier: referrer.clone(),
start: deno_graph::Position::zeroed(), start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(),
}, },
referrer_kind,
ResolutionMode::Types, ResolutionMode::Types,
) )
{ {
results.push(self.resolve_dependency( results.push(self.resolve_dependency(
&specifier, &specifier,
referrer, referrer,
referrer_kind,
file_referrer, file_referrer,
)); ));
} else { } else {
@ -1313,12 +1347,16 @@ impl Documents {
) { ) {
self.config = Arc::new(config.clone()); self.config = Arc::new(config.clone());
self.cache = Arc::new(cache.clone()); self.cache = Arc::new(cache.clone());
self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(cache));
self.resolver = resolver.clone(); self.resolver = resolver.clone();
node_resolver::PackageJsonThreadLocalCache::clear();
{ {
let fs_docs = &self.file_system_docs; let fs_docs = &self.file_system_docs;
// Clean up non-existent documents. // Clean up non-existent documents.
fs_docs.docs.retain(|specifier, _| { fs_docs.docs.retain(|specifier, _| {
let Ok(path) = specifier_to_file_path(specifier) else { let Ok(path) = url_to_file_path(specifier) else {
// Remove non-file schemed docs (deps). They may not be dependencies // Remove non-file schemed docs (deps). They may not be dependencies
// anymore after updating resolvers. // anymore after updating resolvers.
return false; return false;
@ -1333,14 +1371,21 @@ impl Documents {
if !config.specifier_enabled(doc.specifier()) { if !config.specifier_enabled(doc.specifier()) {
continue; continue;
} }
*doc = doc.with_new_config(self.resolver.clone(), self.config.clone()); *doc = doc.with_new_config(
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
} }
for mut doc in self.file_system_docs.docs.iter_mut() { for mut doc in self.file_system_docs.docs.iter_mut() {
if !config.specifier_enabled(doc.specifier()) { if !config.specifier_enabled(doc.specifier()) {
continue; continue;
} }
*doc.value_mut() = *doc.value_mut() = doc.with_new_config(
doc.with_new_config(self.resolver.clone(), self.config.clone()); &self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
} }
self.open_docs = open_docs; self.open_docs = open_docs;
let mut preload_count = 0; let mut preload_count = 0;
@ -1357,6 +1402,7 @@ impl Documents {
{ {
fs_docs.refresh_document( fs_docs.refresh_document(
specifier, specifier,
&self.is_cjs_resolver,
&self.resolver, &self.resolver,
&self.config, &self.config,
&self.cache, &self.cache,
@ -1372,34 +1418,46 @@ impl Documents {
/// Iterate through the documents, building a map where the key is a unique /// Iterate through the documents, building a map where the key is a unique
/// document and the value is a set of specifiers that depend on that /// document and the value is a set of specifiers that depend on that
/// document. /// document.
fn calculate_npm_reqs_if_dirty(&mut self) { fn calculate_dep_info_if_dirty(&mut self) {
let mut npm_reqs_by_scope: BTreeMap<_, BTreeSet<_>> = Default::default(); let mut dep_info_by_scope: BTreeMap<_, ScopeDepInfo> = Default::default();
let mut scopes_with_specifier = HashSet::new();
let is_fs_docs_dirty = self.file_system_docs.set_dirty(false); let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
if !is_fs_docs_dirty && !self.dirty { if !is_fs_docs_dirty && !self.dirty {
return; return;
} }
let mut visit_doc = |doc: &Arc<Document>| { let mut visit_doc = |doc: &Arc<Document>| {
let scope = doc.scope(); let scope = doc.scope();
let reqs = npm_reqs_by_scope.entry(scope.cloned()).or_default(); let dep_info = dep_info_by_scope.entry(scope.cloned()).or_default();
for dependency in doc.dependencies().values() { for dependency in doc.dependencies().values() {
if let Some(dep) = dependency.get_code() { let code_specifier = dependency.get_code();
let type_specifier = dependency.get_type();
if let Some(dep) = code_specifier {
if dep.scheme() == "node" { if dep.scheme() == "node" {
scopes_with_specifier.insert(scope.cloned()); dep_info.has_node_specifier = true;
} }
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
reqs.insert(reference.into_inner().req); dep_info.npm_reqs.insert(reference.into_inner().req);
} }
} }
if let Some(dep) = dependency.get_type() { if let Some(dep) = type_specifier {
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
reqs.insert(reference.into_inner().req); dep_info.npm_reqs.insert(reference.into_inner().req);
}
}
if dependency.maybe_deno_types_specifier.is_some() {
if let (Some(code_specifier), Some(type_specifier)) =
(code_specifier, type_specifier)
{
if MediaType::from_specifier(type_specifier).is_declaration() {
dep_info
.deno_types_to_code_resolutions
.insert(type_specifier.clone(), code_specifier.clone());
}
} }
} }
} }
if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() { if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
reqs.insert(reference.into_inner().req); dep_info.npm_reqs.insert(reference.into_inner().req);
} }
} }
}; };
@ -1410,16 +1468,49 @@ impl Documents {
visit_doc(doc); visit_doc(doc);
} }
// fill the reqs from the lockfile
for (scope, config_data) in self.config.tree.data_by_scope().as_ref() { for (scope, config_data) in self.config.tree.data_by_scope().as_ref() {
let dep_info = dep_info_by_scope.entry(Some(scope.clone())).or_default();
(|| {
let config_file = config_data.maybe_deno_json()?;
let jsx_config =
config_file.to_maybe_jsx_import_source_config().ok()??;
let type_specifier = jsx_config.default_types_specifier.as_ref()?;
let code_specifier = jsx_config.default_specifier.as_ref()?;
let cli_resolver = self.resolver.as_cli_resolver(Some(scope));
let range = deno_graph::Range {
specifier: jsx_config.base_url.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
};
let type_specifier = cli_resolver
.resolve(
type_specifier,
&range,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
deno_package_json::NodeModuleKind::Esm,
ResolutionMode::Types,
)
.ok()?;
let code_specifier = cli_resolver
.resolve(
code_specifier,
&range,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
deno_package_json::NodeModuleKind::Esm,
ResolutionMode::Execution,
)
.ok()?;
dep_info
.deno_types_to_code_resolutions
.insert(type_specifier, code_specifier);
Some(())
})();
// fill the reqs from the lockfile
if let Some(lockfile) = config_data.lockfile.as_ref() { if let Some(lockfile) = config_data.lockfile.as_ref() {
let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
let lockfile = lockfile.lock(); let lockfile = lockfile.lock();
for key in lockfile.content.packages.specifiers.keys() { for dep_req in lockfile.content.packages.specifiers.keys() {
if let Some(key) = key.strip_prefix("npm:") { if dep_req.kind == deno_semver::package::PackageKind::Npm {
if let Ok(req) = PackageReq::from_str(key) { dep_info.npm_reqs.insert(dep_req.req.clone());
reqs.insert(req);
}
} }
} }
} }
@ -1428,15 +1519,22 @@ impl Documents {
// Ensure a @types/node package exists when any module uses a node: specifier. // Ensure a @types/node package exists when any module uses a node: specifier.
// Unlike on the command line, here we just add @types/node to the npm package // Unlike on the command line, here we just add @types/node to the npm package
// requirements since this won't end up in the lockfile. // requirements since this won't end up in the lockfile.
for scope in &scopes_with_specifier { for dep_info in dep_info_by_scope.values_mut() {
let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default(); if dep_info.has_node_specifier
if !reqs.iter().any(|r| r.name == "@types/node") { && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node")
reqs.insert(PackageReq::from_str("@types/node").unwrap()); {
dep_info
.npm_reqs
.insert(PackageReq::from_str("@types/node").unwrap());
} }
} }
self.npm_reqs_by_scope = Arc::new(npm_reqs_by_scope); self.dep_info_by_scope = Arc::new(
self.scopes_with_node_specifier = Arc::new(scopes_with_specifier); dep_info_by_scope
.into_iter()
.map(|(s, i)| (s, Arc::new(i)))
.collect(),
);
self.dirty = false; self.dirty = false;
} }
@ -1444,6 +1542,7 @@ impl Documents {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> { ) -> Option<(ModuleSpecifier, MediaType)> {
if let Some(module_name) = specifier.as_str().strip_prefix("node:") { if let Some(module_name) = specifier.as_str().strip_prefix("node:") {
@ -1457,20 +1556,23 @@ impl Documents {
let mut specifier = specifier.clone(); let mut specifier = specifier.clone();
let mut media_type = None; let mut media_type = None;
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) { if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) {
let (s, mt) = let (s, mt) = self.resolver.npm_to_file_url(
self &npm_ref,
.resolver referrer,
.npm_to_file_url(&npm_ref, referrer, file_referrer)?; referrer_kind,
file_referrer,
)?;
specifier = s; specifier = s;
media_type = Some(mt); media_type = Some(mt);
} }
let Some(doc) = self.get_or_load(&specifier, referrer) else { let Some(doc) = self.get_or_load(&specifier, file_referrer) else {
let media_type = let media_type =
media_type.unwrap_or_else(|| MediaType::from_specifier(&specifier)); media_type.unwrap_or_else(|| MediaType::from_specifier(&specifier));
return Some((specifier, media_type)); return Some((specifier, media_type));
}; };
if let Some(types) = doc.maybe_types_dependency().maybe_specifier() { if let Some(types) = doc.maybe_types_dependency().maybe_specifier() {
self.resolve_dependency(types, &specifier, file_referrer) let specifier_kind = self.is_cjs_resolver.get_doc_module_kind(&doc);
self.resolve_dependency(types, &specifier, specifier_kind, file_referrer)
} else { } else {
Some((doc.specifier().clone(), doc.media_type())) Some((doc.specifier().clone(), doc.media_type()))
} }
@ -1519,12 +1621,16 @@ impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> {
fn cache_module_info( fn cache_module_info(
&self, &self,
specifier: &deno_ast::ModuleSpecifier, specifier: &deno_ast::ModuleSpecifier,
media_type: MediaType,
source: &Arc<[u8]>, source: &Arc<[u8]>,
module_info: &deno_graph::ModuleInfo, module_info: &deno_graph::ModuleInfo,
) { ) {
self self.inner_loader.cache_module_info(
.inner_loader specifier,
.cache_module_info(specifier, source, module_info) media_type,
source,
module_info,
)
} }
} }
@ -1534,6 +1640,7 @@ fn parse_and_analyze_module(
maybe_headers: Option<&HashMap<String, String>>, maybe_headers: Option<&HashMap<String, String>>,
media_type: MediaType, media_type: MediaType,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver, resolver: &LspResolver,
) -> (Option<ParsedSourceResult>, Option<ModuleResult>) { ) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
let parsed_source_result = parse_source(specifier.clone(), text, media_type); let parsed_source_result = parse_source(specifier.clone(), text, media_type);
@ -1542,6 +1649,7 @@ fn parse_and_analyze_module(
&parsed_source_result, &parsed_source_result,
maybe_headers, maybe_headers,
file_referrer, file_referrer,
is_cjs_resolver,
resolver, resolver,
); );
(Some(parsed_source_result), Some(module_result)) (Some(parsed_source_result), Some(module_result))
@ -1552,7 +1660,7 @@ fn parse_source(
text: Arc<str>, text: Arc<str>,
media_type: MediaType, media_type: MediaType,
) -> ParsedSourceResult { ) -> ParsedSourceResult {
deno_ast::parse_module(deno_ast::ParseParams { deno_ast::parse_program(deno_ast::ParseParams {
specifier, specifier,
text, text,
media_type, media_type,
@ -1567,11 +1675,26 @@ fn analyze_module(
parsed_source_result: &ParsedSourceResult, parsed_source_result: &ParsedSourceResult,
maybe_headers: Option<&HashMap<String, String>>, maybe_headers: Option<&HashMap<String, String>>,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver, resolver: &LspResolver,
) -> ModuleResult { ) -> ModuleResult {
match parsed_source_result { match parsed_source_result {
Ok(parsed_source) => { Ok(parsed_source) => {
let npm_resolver = resolver.create_graph_npm_resolver(file_referrer); let npm_resolver = resolver.create_graph_npm_resolver(file_referrer);
let cli_resolver = resolver.as_cli_resolver(file_referrer);
let config_data = resolver.as_config_data(file_referrer);
let valid_referrer = specifier.clone();
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let resolver = SingleReferrerGraphResolver {
valid_referrer: &valid_referrer,
referrer_kind: is_cjs_resolver.get_lsp_referrer_kind(
&specifier,
Some(parsed_source.compute_is_script()),
),
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
Ok(deno_graph::parse_module_from_ast( Ok(deno_graph::parse_module_from_ast(
deno_graph::ParseModuleFromAstOptions { deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::TypesOnly, graph_kind: deno_graph::GraphKind::TypesOnly,
@ -1582,7 +1705,7 @@ fn analyze_module(
// dynamic imports like import(`./dir/${something}`) in the LSP // dynamic imports like import(`./dir/${something}`) in the LSP
file_system: &deno_graph::source::NullFileSystem, file_system: &deno_graph::source::NullFileSystem,
jsr_url_provider: &CliJsrUrlProvider, jsr_url_provider: &CliJsrUrlProvider,
maybe_resolver: Some(resolver.as_graph_resolver(file_referrer)), maybe_resolver: Some(&resolver),
maybe_npm_resolver: Some(&npm_resolver), maybe_npm_resolver: Some(&npm_resolver),
}, },
)) ))
@ -1593,6 +1716,24 @@ fn analyze_module(
} }
} }
fn bytes_to_content(
specifier: &ModuleSpecifier,
media_type: MediaType,
bytes: Vec<u8>,
maybe_charset: Option<&str>,
) -> Result<String, AnyError> {
if media_type == MediaType::Wasm {
// we use the dts representation for Wasm modules
Ok(deno_graph::source::wasm::wasm_module_to_dts(&bytes)?)
} else {
Ok(deno_graph::source::decode_owned_source(
specifier,
bytes,
maybe_charset,
)?)
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -1608,7 +1749,7 @@ mod tests {
async fn setup() -> (Documents, LspCache, TempDir) { async fn setup() -> (Documents, LspCache, TempDir) {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
temp_dir.create_dir_all(".deno_dir"); temp_dir.create_dir_all(".deno_dir");
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
let config = Config::default(); let config = Config::default();
let resolver = let resolver =
Arc::new(LspResolver::from_config(&config, &cache, None).await); Arc::new(LspResolver::from_config(&config, &cache, None).await);
@ -1691,7 +1832,7 @@ console.log(b, "hello deno");
// but we'll guard against it anyway // but we'll guard against it anyway
let (mut documents, _, temp_dir) = setup().await; let (mut documents, _, temp_dir) = setup().await;
let file_path = temp_dir.path().join("file.ts"); let file_path = temp_dir.path().join("file.ts");
let file_specifier = temp_dir.uri().join("file.ts").unwrap(); let file_specifier = temp_dir.url().join("file.ts").unwrap();
file_path.write(""); file_path.write("");
// open the document // open the document
@ -1719,18 +1860,18 @@ console.log(b, "hello deno");
let (mut documents, cache, temp_dir) = setup().await; let (mut documents, cache, temp_dir) = setup().await;
let file1_path = temp_dir.path().join("file1.ts"); let file1_path = temp_dir.path().join("file1.ts");
let file1_specifier = temp_dir.uri().join("file1.ts").unwrap(); let file1_specifier = temp_dir.url().join("file1.ts").unwrap();
fs::write(&file1_path, "").unwrap(); fs::write(&file1_path, "").unwrap();
let file2_path = temp_dir.path().join("file2.ts"); let file2_path = temp_dir.path().join("file2.ts");
let file2_specifier = temp_dir.uri().join("file2.ts").unwrap(); let file2_specifier = temp_dir.url().join("file2.ts").unwrap();
fs::write(&file2_path, "").unwrap(); fs::write(&file2_path, "").unwrap();
let file3_path = temp_dir.path().join("file3.ts"); let file3_path = temp_dir.path().join("file3.ts");
let file3_specifier = temp_dir.uri().join("file3.ts").unwrap(); let file3_specifier = temp_dir.url().join("file3.ts").unwrap();
fs::write(&file3_path, "").unwrap(); fs::write(&file3_path, "").unwrap();
let mut config = Config::new_with_roots([temp_dir.uri()]); let mut config = Config::new_with_roots([temp_dir.url()]);
let workspace_settings = let workspace_settings =
serde_json::from_str(r#"{ "enable": true }"#).unwrap(); serde_json::from_str(r#"{ "enable": true }"#).unwrap();
config.set_workspace_settings(workspace_settings, vec![]); config.set_workspace_settings(workspace_settings, vec![]);

View file

@ -14,13 +14,11 @@ use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageInfoVersion; use deno_graph::packages::JsrPackageInfoVersion;
use deno_graph::packages::JsrPackageVersionInfo; use deno_graph::packages::JsrPackageVersionInfo;
use deno_graph::ModuleSpecifier; use deno_graph::ModuleSpecifier;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use deno_semver::Version; use deno_semver::Version;
use serde::Deserialize; use serde::Deserialize;
use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
@ -93,20 +91,23 @@ impl JsrCacheResolver {
} }
} }
if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) { if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) {
for (req_url, nv_url) in &lockfile.lock().content.packages.specifiers { for (dep_req, version) in &lockfile.lock().content.packages.specifiers {
let Some(req) = req_url.strip_prefix("jsr:") else { let req = match dep_req.kind {
deno_semver::package::PackageKind::Jsr => &dep_req.req,
deno_semver::package::PackageKind::Npm => {
continue;
}
};
let Ok(version) = Version::parse_standard(version) else {
continue; continue;
}; };
let Some(nv) = nv_url.strip_prefix("jsr:") else { nv_by_req.insert(
continue; req.clone(),
}; Some(PackageNv {
let Ok(req) = PackageReq::from_str(req) else { name: req.name.clone(),
continue; version,
}; }),
let Ok(nv) = PackageNv::from_str(nv) else { );
continue;
};
nv_by_req.insert(req, Some(nv));
} }
} }
Self { Self {
@ -157,7 +158,7 @@ impl JsrCacheResolver {
let maybe_nv = self.req_to_nv(&req); let maybe_nv = self.req_to_nv(&req);
let nv = maybe_nv.as_ref()?; let nv = maybe_nv.as_ref()?;
let info = self.package_version_info(nv)?; let info = self.package_version_info(nv)?;
let path = info.export(&normalize_export_name(req_ref.sub_path()))?; let path = info.export(&req_ref.export_name())?;
if let Some(workspace_scope) = self.workspace_scope_by_name.get(&nv.name) { if let Some(workspace_scope) = self.workspace_scope_by_name.get(&nv.name) {
workspace_scope.join(path).ok() workspace_scope.join(path).ok()
} else { } else {
@ -259,36 +260,9 @@ fn read_cached_url(
cache: &Arc<dyn HttpCache>, cache: &Arc<dyn HttpCache>,
) -> Option<Vec<u8>> { ) -> Option<Vec<u8>> {
cache cache
.read_file_bytes( .get(&cache.cache_item_key(url).ok()?, None)
&cache.cache_item_key(url).ok()?,
None,
deno_cache_dir::GlobalToLocalCopy::Disallow,
)
.ok()? .ok()?
} .map(|f| f.content)
// TODO(nayeemrmn): This is duplicated from a private function in deno_graph
// 0.65.1. Make it public or cleanup otherwise.
fn normalize_export_name(sub_path: Option<&str>) -> Cow<str> {
let Some(sub_path) = sub_path else {
return Cow::Borrowed(".");
};
if sub_path.is_empty() || matches!(sub_path, "/" | ".") {
Cow::Borrowed(".")
} else {
let sub_path = if sub_path.starts_with('/') {
Cow::Owned(format!(".{}", sub_path))
} else if !sub_path.starts_with("./") {
Cow::Owned(format!("./{}", sub_path))
} else {
Cow::Borrowed(sub_path)
};
if let Some(prefix) = sub_path.strip_suffix('/') {
Cow::Owned(prefix.to_string())
} else {
sub_path
}
}
} }
#[derive(Debug)] #[derive(Debug)]
@ -336,7 +310,7 @@ impl PackageSearchApi for CliJsrSearchApi {
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher
.fetch(&search_url, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&search_url)
.await? .await?
.into_text_decoded() .into_text_decoded()
}) })

File diff suppressed because it is too large Load diff

View file

@ -17,7 +17,7 @@ pub struct TaskDefinition {
// TODO(nayeemrmn): Rename this to `command` in vscode_deno. // TODO(nayeemrmn): Rename this to `command` in vscode_deno.
#[serde(rename = "detail")] #[serde(rename = "detail")]
pub command: String, pub command: String,
pub source_uri: lsp::Url, pub source_uri: lsp::Uri,
} }
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
@ -46,6 +46,30 @@ pub struct DiagnosticBatchNotificationParams {
pub messages_len: usize, pub messages_len: usize,
} }
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DenoConfigurationData {
pub scope_uri: lsp::Uri,
pub workspace_root_scope_uri: Option<lsp::Uri>,
pub deno_json: Option<lsp::TextDocumentIdentifier>,
pub package_json: Option<lsp::TextDocumentIdentifier>,
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DidRefreshDenoConfigurationTreeNotificationParams {
pub data: Vec<DenoConfigurationData>,
}
pub enum DidRefreshDenoConfigurationTreeNotification {}
impl lsp::notification::Notification
for DidRefreshDenoConfigurationTreeNotification
{
type Params = DidRefreshDenoConfigurationTreeNotificationParams;
const METHOD: &'static str = "deno/didRefreshDenoConfigurationTree";
}
#[derive(Debug, Eq, Hash, PartialEq, Copy, Clone, Deserialize, Serialize)] #[derive(Debug, Eq, Hash, PartialEq, Copy, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub enum DenoConfigurationChangeType { pub enum DenoConfigurationChangeType {
@ -75,8 +99,8 @@ pub enum DenoConfigurationType {
#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct DenoConfigurationChangeEvent { pub struct DenoConfigurationChangeEvent {
pub scope_uri: lsp::Url, pub scope_uri: lsp::Uri,
pub file_uri: lsp::Url, pub file_uri: lsp::Uri,
#[serde(rename = "type")] #[serde(rename = "type")]
pub typ: DenoConfigurationChangeType, pub typ: DenoConfigurationChangeType,
pub configuration_type: DenoConfigurationType, pub configuration_type: DenoConfigurationType,
@ -88,13 +112,15 @@ pub struct DidChangeDenoConfigurationNotificationParams {
pub changes: Vec<DenoConfigurationChangeEvent>, pub changes: Vec<DenoConfigurationChangeEvent>,
} }
// TODO(nayeemrmn): This is being replaced by
// `DidRefreshDenoConfigurationTreeNotification` for Deno > v2.0.0. Remove it
// soon.
pub enum DidChangeDenoConfigurationNotification {} pub enum DidChangeDenoConfigurationNotification {}
impl lsp::notification::Notification impl lsp::notification::Notification
for DidChangeDenoConfigurationNotification for DidChangeDenoConfigurationNotification
{ {
type Params = DidChangeDenoConfigurationNotificationParams; type Params = DidChangeDenoConfigurationNotificationParams;
const METHOD: &'static str = "deno/didChangeDenoConfiguration"; const METHOD: &'static str = "deno/didChangeDenoConfiguration";
} }
@ -102,7 +128,6 @@ pub enum DidUpgradeCheckNotification {}
impl lsp::notification::Notification for DidUpgradeCheckNotification { impl lsp::notification::Notification for DidUpgradeCheckNotification {
type Params = DidUpgradeCheckNotificationParams; type Params = DidUpgradeCheckNotificationParams;
const METHOD: &'static str = "deno/didUpgradeCheck"; const METHOD: &'static str = "deno/didUpgradeCheck";
} }
@ -125,6 +150,5 @@ pub enum DiagnosticBatchNotification {}
impl lsp::notification::Notification for DiagnosticBatchNotification { impl lsp::notification::Notification for DiagnosticBatchNotification {
type Params = DiagnosticBatchNotificationParams; type Params = DiagnosticBatchNotificationParams;
const METHOD: &'static str = "deno/internalTestDiagnosticBatch"; const METHOD: &'static str = "deno/internalTestDiagnosticBatch";
} }

View file

@ -4,7 +4,7 @@ use dashmap::DashMap;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_npm::npm_rc::NpmRc;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::Version; use deno_semver::Version;
use serde::Deserialize; use serde::Deserialize;
@ -26,7 +26,10 @@ pub struct CliNpmSearchApi {
impl CliNpmSearchApi { impl CliNpmSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
let resolver = NpmFetchResolver::new(file_fetcher.clone()); let resolver = NpmFetchResolver::new(
file_fetcher.clone(),
Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
);
Self { Self {
file_fetcher, file_fetcher,
resolver, resolver,
@ -55,7 +58,7 @@ impl PackageSearchApi for CliNpmSearchApi {
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher file_fetcher
.fetch(&search_url, &PermissionsContainer::allow_all()) .fetch_bypass_permissions(&search_url)
.await? .await?
.into_text_decoded() .into_text_decoded()
}) })

View file

@ -11,7 +11,7 @@ pub fn start(parent_process_id: u32) {
std::thread::sleep(Duration::from_secs(10)); std::thread::sleep(Duration::from_secs(10));
if !is_process_active(parent_process_id) { if !is_process_active(parent_process_id) {
std::process::exit(1); deno_runtime::exit(1);
} }
}); });
} }

File diff suppressed because it is too large Load diff

View file

@ -8,6 +8,7 @@ use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use lsp_types::Uri;
use tower_lsp::lsp_types::ClientCapabilities; use tower_lsp::lsp_types::ClientCapabilities;
use tower_lsp::lsp_types::ClientInfo; use tower_lsp::lsp_types::ClientInfo;
use tower_lsp::lsp_types::CompletionContext; use tower_lsp::lsp_types::CompletionContext;
@ -40,6 +41,8 @@ use super::config::LanguageWorkspaceSettings;
use super::config::ObjectLiteralMethodSnippets; use super::config::ObjectLiteralMethodSnippets;
use super::config::TestingSettings; use super::config::TestingSettings;
use super::config::WorkspaceSettings; use super::config::WorkspaceSettings;
use super::urls::uri_parse_unencoded;
use super::urls::url_to_uri;
#[derive(Debug)] #[derive(Debug)]
pub struct ReplCompletionItem { pub struct ReplCompletionItem {
@ -73,7 +76,7 @@ impl ReplLanguageServer {
.initialize(InitializeParams { .initialize(InitializeParams {
process_id: None, process_id: None,
root_path: None, root_path: None,
root_uri: Some(cwd_uri.clone()), root_uri: Some(url_to_uri(&cwd_uri).unwrap()),
initialization_options: Some( initialization_options: Some(
serde_json::to_value(get_repl_workspace_settings()).unwrap(), serde_json::to_value(get_repl_workspace_settings()).unwrap(),
), ),
@ -84,6 +87,7 @@ impl ReplLanguageServer {
general: None, general: None,
experimental: None, experimental: None,
offset_encoding: None, offset_encoding: None,
notebook_document: None,
}, },
trace: None, trace: None,
workspace_folders: None, workspace_folders: None,
@ -92,6 +96,7 @@ impl ReplLanguageServer {
version: None, version: None,
}), }),
locale: None, locale: None,
work_done_progress_params: Default::default(),
}) })
.await?; .await?;
@ -133,7 +138,7 @@ impl ReplLanguageServer {
.completion(CompletionParams { .completion(CompletionParams {
text_document_position: TextDocumentPositionParams { text_document_position: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { text_document: TextDocumentIdentifier {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
}, },
position: Position { position: Position {
line: line_and_column.line_index as u32, line: line_and_column.line_index as u32,
@ -208,7 +213,7 @@ impl ReplLanguageServer {
.language_server .language_server
.did_change(DidChangeTextDocumentParams { .did_change(DidChangeTextDocumentParams {
text_document: VersionedTextDocumentIdentifier { text_document: VersionedTextDocumentIdentifier {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
version: self.document_version, version: self.document_version,
}, },
content_changes: vec![TextDocumentContentChangeEvent { content_changes: vec![TextDocumentContentChangeEvent {
@ -233,7 +238,7 @@ impl ReplLanguageServer {
.language_server .language_server
.did_close(DidCloseTextDocumentParams { .did_close(DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier { text_document: TextDocumentIdentifier {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
}, },
}) })
.await; .await;
@ -248,7 +253,7 @@ impl ReplLanguageServer {
.language_server .language_server
.did_open(DidOpenTextDocumentParams { .did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem { text_document: TextDocumentItem {
uri: self.get_document_specifier(), uri: self.get_document_uri(),
language_id: "typescript".to_string(), language_id: "typescript".to_string(),
version: self.document_version, version: self.document_version,
text: format!("{}{}", self.document_text, self.pending_text), text: format!("{}{}", self.document_text, self.pending_text),
@ -257,8 +262,9 @@ impl ReplLanguageServer {
.await; .await;
} }
fn get_document_specifier(&self) -> ModuleSpecifier { fn get_document_uri(&self) -> Uri {
self.cwd_uri.join("$deno$repl.ts").unwrap() uri_parse_unencoded(self.cwd_uri.join("$deno$repl.mts").unwrap().as_str())
.unwrap()
} }
} }
@ -306,7 +312,7 @@ pub fn get_repl_workspace_settings() -> WorkspaceSettings {
document_preload_limit: 0, // don't pre-load any modules as it's expensive and not useful for the repl document_preload_limit: 0, // don't pre-load any modules as it's expensive and not useful for the repl
tls_certificate: None, tls_certificate: None,
unsafely_ignore_certificate_errors: None, unsafely_ignore_certificate_errors: None,
unstable: false, unstable: Default::default(),
suggest: DenoCompletionSettings { suggest: DenoCompletionSettings {
imports: ImportCompletionSettings { imports: ImportCompletionSettings {
auto_discover: false, auto_discover: false,

View file

@ -1,52 +1,37 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::PackageJsonInstallDepsProvider;
use crate::args::DENO_FUTURE;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_cache_dir::npm::NpmCacheDir;
use deno_cache_dir::HttpCache; use deno_cache_dir::HttpCache;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url; use deno_core::url::Url;
use deno_graph::source::Resolver; use deno_graph::source::ResolutionMode;
use deno_graph::GraphImport; use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier; use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_to_file_path;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path; use deno_runtime::deno_node::PackageJsonResolver;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use indexmap::IndexMap; use indexmap::IndexMap;
use node_resolver::errors::ClosestPkgJsonError; use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolution; use node_resolver::InNpmPackageChecker;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
use node_resolver::NpmResolver;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet; use std::collections::BTreeSet;
@ -55,28 +40,71 @@ use std::collections::HashSet;
use std::sync::Arc; use std::sync::Arc;
use super::cache::LspCache; use super::cache::LspCache;
use super::documents::Document;
use super::jsr::JsrCacheResolver; use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::factory::Deferred;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::IsCjsResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::tsc::into_specifier_and_media_type;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct LspScopeResolver { struct LspScopeResolver {
graph_resolver: Arc<CliGraphResolver>, resolver: Arc<CliResolver>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
jsr_resolver: Option<Arc<JsrCacheResolver>>, jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>, npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>, node_resolver: Option<Arc<NodeResolver>>,
npm_pkg_req_resolver: Option<Arc<CliNpmReqResolver>>,
pkg_json_resolver: Arc<PackageJsonResolver>,
redirect_resolver: Option<Arc<RedirectResolver>>, redirect_resolver: Option<Arc<RedirectResolver>>,
graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>, graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
dep_info: Arc<Mutex<Arc<ScopeDepInfo>>>,
package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
config_data: Option<Arc<ConfigData>>, config_data: Option<Arc<ConfigData>>,
} }
impl Default for LspScopeResolver { impl Default for LspScopeResolver {
fn default() -> Self { fn default() -> Self {
let factory = ResolverFactory::new(None);
Self { Self {
graph_resolver: create_graph_resolver(None, None, None), resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
jsr_resolver: None, jsr_resolver: None,
npm_resolver: None, npm_resolver: None,
node_resolver: None, node_resolver: None,
npm_pkg_req_resolver: None,
pkg_json_resolver: factory.pkg_json_resolver().clone(),
redirect_resolver: None, redirect_resolver: None,
graph_imports: Default::default(), graph_imports: Default::default(),
dep_info: Default::default(),
package_json_deps_by_resolution: Default::default(),
config_data: None, config_data: None,
} }
} }
@ -88,22 +116,16 @@ impl LspScopeResolver {
cache: &LspCache, cache: &LspCache,
http_client_provider: Option<&Arc<HttpClientProvider>>, http_client_provider: Option<&Arc<HttpClientProvider>>,
) -> Self { ) -> Self {
let mut npm_resolver = None; let mut factory = ResolverFactory::new(config_data);
let mut node_resolver = None; if let Some(http_client_provider) = http_client_provider {
if let Some(http_client) = http_client_provider { factory.init_npm_resolver(http_client_provider, cache).await;
npm_resolver = create_npm_resolver(
config_data.map(|d| d.as_ref()),
cache,
http_client,
)
.await;
node_resolver = create_node_resolver(npm_resolver.as_ref());
} }
let graph_resolver = create_graph_resolver( let in_npm_pkg_checker = factory.in_npm_pkg_checker().clone();
config_data.map(|d| d.as_ref()), let npm_resolver = factory.npm_resolver().cloned();
npm_resolver.as_ref(), let node_resolver = factory.node_resolver().cloned();
node_resolver.as_ref(), let npm_pkg_req_resolver = factory.npm_pkg_req_resolver().cloned();
); let cli_resolver = factory.cli_resolver().clone();
let pkg_json_resolver = factory.pkg_json_resolver().clone();
let jsr_resolver = Some(Arc::new(JsrCacheResolver::new( let jsr_resolver = Some(Arc::new(JsrCacheResolver::new(
cache.for_specifier(config_data.map(|d| d.scope.as_ref())), cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.map(|d| d.as_ref()), config_data.map(|d| d.as_ref()),
@ -112,7 +134,9 @@ impl LspScopeResolver {
cache.for_specifier(config_data.map(|d| d.scope.as_ref())), cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()), config_data.and_then(|d| d.lockfile.clone()),
))); )));
let npm_graph_resolver = graph_resolver.create_graph_npm_resolver(); let npm_graph_resolver = cli_resolver.create_graph_npm_resolver();
let maybe_jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let graph_imports = config_data let graph_imports = config_data
.and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok()) .and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok())
.map(|imports| { .map(|imports| {
@ -120,11 +144,18 @@ impl LspScopeResolver {
imports imports
.into_iter() .into_iter()
.map(|(referrer, imports)| { .map(|(referrer, imports)| {
let resolver = SingleReferrerGraphResolver {
valid_referrer: &referrer,
referrer_kind: NodeModuleKind::Esm,
cli_resolver: &cli_resolver,
jsx_import_source_config: maybe_jsx_import_source_config
.as_ref(),
};
let graph_import = GraphImport::new( let graph_import = GraphImport::new(
&referrer, &referrer,
imports, imports,
&CliJsrUrlProvider, &CliJsrUrlProvider,
Some(graph_resolver.as_ref()), Some(&resolver),
Some(&npm_graph_resolver), Some(&npm_graph_resolver),
); );
(referrer, graph_import) (referrer, graph_import)
@ -133,33 +164,81 @@ impl LspScopeResolver {
) )
}) })
.unwrap_or_default(); .unwrap_or_default();
let package_json_deps_by_resolution = (|| {
let npm_pkg_req_resolver = npm_pkg_req_resolver.as_ref()?;
let package_json = config_data?.maybe_pkg_json()?;
let referrer = package_json.specifier();
let dependencies = package_json.dependencies.as_ref()?;
let result = dependencies
.iter()
.flat_map(|(name, _)| {
let req_ref =
NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?;
let specifier = into_specifier_and_media_type(Some(
npm_pkg_req_resolver
.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
NodeModuleKind::Esm,
NodeResolutionMode::Types,
)
.or_else(|_| {
npm_pkg_req_resolver.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
NodeModuleKind::Esm,
NodeResolutionMode::Execution,
)
})
.ok()?,
))
.0;
Some((specifier, name.clone()))
})
.collect();
Some(result)
})();
let package_json_deps_by_resolution =
Arc::new(package_json_deps_by_resolution.unwrap_or_default());
Self { Self {
graph_resolver, resolver: cli_resolver,
in_npm_pkg_checker,
jsr_resolver, jsr_resolver,
npm_pkg_req_resolver,
npm_resolver, npm_resolver,
node_resolver, node_resolver,
pkg_json_resolver,
redirect_resolver, redirect_resolver,
graph_imports, graph_imports,
dep_info: Default::default(),
package_json_deps_by_resolution,
config_data: config_data.cloned(), config_data: config_data.cloned(),
} }
} }
fn snapshot(&self) -> Arc<Self> { fn snapshot(&self) -> Arc<Self> {
let mut factory = ResolverFactory::new(self.config_data.as_ref());
let npm_resolver = let npm_resolver =
self.npm_resolver.as_ref().map(|r| r.clone_snapshotted()); self.npm_resolver.as_ref().map(|r| r.clone_snapshotted());
let node_resolver = create_node_resolver(npm_resolver.as_ref()); if let Some(npm_resolver) = &npm_resolver {
let graph_resolver = create_graph_resolver( factory.set_npm_resolver(npm_resolver.clone());
self.config_data.as_deref(), }
npm_resolver.as_ref(),
node_resolver.as_ref(),
);
Arc::new(Self { Arc::new(Self {
graph_resolver, resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
jsr_resolver: self.jsr_resolver.clone(), jsr_resolver: self.jsr_resolver.clone(),
npm_resolver, npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
node_resolver, npm_resolver: factory.npm_resolver().cloned(),
node_resolver: factory.node_resolver().cloned(),
redirect_resolver: self.redirect_resolver.clone(), redirect_resolver: self.redirect_resolver.clone(),
pkg_json_resolver: factory.pkg_json_resolver().clone(),
graph_imports: self.graph_imports.clone(), graph_imports: self.graph_imports.clone(),
dep_info: self.dep_info.clone(),
package_json_deps_by_resolution: self
.package_json_deps_by_resolution
.clone(),
config_data: self.config_data.clone(), config_data: self.config_data.clone(),
}) })
} }
@ -223,19 +302,24 @@ impl LspResolver {
} }
} }
pub async fn set_npm_reqs( pub async fn set_dep_info_by_scope(
&self, &self,
reqs: &BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>, dep_info_by_scope: &Arc<
BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>,
>,
) { ) {
for (scope, resolver) in [(None, &self.unscoped)] for (scope, resolver) in [(None, &self.unscoped)]
.into_iter() .into_iter()
.chain(self.by_scope.iter().map(|(s, r)| (Some(s), r))) .chain(self.by_scope.iter().map(|(s, r)| (Some(s), r)))
{ {
let dep_info = dep_info_by_scope.get(&scope.cloned());
if let Some(dep_info) = dep_info {
*resolver.dep_info.lock() = dep_info.clone();
}
if let Some(npm_resolver) = resolver.npm_resolver.as_ref() { if let Some(npm_resolver) = resolver.npm_resolver.as_ref() {
if let Some(npm_resolver) = npm_resolver.as_managed() { if let Some(npm_resolver) = npm_resolver.as_managed() {
let reqs = reqs let reqs = dep_info
.get(&scope.cloned()) .map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>())
.map(|reqs| reqs.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default(); .unwrap_or_default();
if let Err(err) = npm_resolver.set_package_reqs(&reqs).await { if let Err(err) = npm_resolver.set_package_reqs(&reqs).await {
lsp_warn!("Could not set npm package requirements: {:#}", err); lsp_warn!("Could not set npm package requirements: {:#}", err);
@ -245,12 +329,12 @@ impl LspResolver {
} }
} }
pub fn as_graph_resolver( pub fn as_cli_resolver(
&self, &self,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> &dyn Resolver { ) -> &CliResolver {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.as_ref() resolver.resolver.as_ref()
} }
pub fn create_graph_npm_resolver( pub fn create_graph_npm_resolver(
@ -258,7 +342,23 @@ impl LspResolver {
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> WorkerCliNpmGraphResolver { ) -> WorkerCliNpmGraphResolver {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.create_graph_npm_resolver() resolver.resolver.create_graph_npm_resolver()
}
pub fn as_config_data(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&Arc<ConfigData>> {
let resolver = self.get_scope_resolver(file_referrer);
resolver.config_data.as_ref()
}
pub fn in_npm_pkg_checker(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> &Arc<dyn InNpmPackageChecker> {
let resolver = self.get_scope_resolver(file_referrer);
&resolver.in_npm_pkg_checker
} }
pub fn maybe_managed_npm_resolver( pub fn maybe_managed_npm_resolver(
@ -324,15 +424,46 @@ impl LspResolver {
&self, &self,
req_ref: &NpmPackageReqReference, req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> { ) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?; let npm_pkg_req_resolver = resolver.npm_pkg_req_resolver.as_ref()?;
Some(NodeResolution::into_specifier_and_media_type( Some(into_specifier_and_media_type(Some(
node_resolver npm_pkg_req_resolver
.resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) .resolve_req_reference(
.ok(), req_ref,
)) referrer,
referrer_kind,
NodeResolutionMode::Types,
)
.ok()?,
)))
}
pub fn file_url_to_package_json_dep(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<String> {
let resolver = self.get_scope_resolver(file_referrer);
resolver
.package_json_deps_by_resolution
.get(specifier)
.cloned()
}
pub fn deno_types_to_code_resolution(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<ModuleSpecifier> {
let resolver = self.get_scope_resolver(file_referrer);
let dep_info = resolver.dep_info.lock().clone();
dep_info
.deno_types_to_code_resolutions
.get(specifier)
.cloned()
} }
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool { pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
@ -346,14 +477,10 @@ impl LspResolver {
.contains("/node_modules/") .contains("/node_modules/")
} }
let global_npm_resolver = self if let Some(node_resolver) =
.get_scope_resolver(Some(specifier)) &self.get_scope_resolver(Some(specifier)).node_resolver
.npm_resolver {
.as_ref() if node_resolver.in_npm_package(specifier) {
.and_then(|npm_resolver| npm_resolver.as_managed())
.filter(|r| r.root_node_modules_path().is_none());
if let Some(npm_resolver) = &global_npm_resolver {
if npm_resolver.in_npm_package(specifier) {
return true; return true;
} }
} }
@ -361,16 +488,27 @@ impl LspResolver {
has_node_modules_dir(specifier) has_node_modules_dir(specifier)
} }
pub fn node_media_type( pub fn is_bare_package_json_dep(
&self, &self,
specifier: &ModuleSpecifier, specifier_text: &str,
) -> Option<MediaType> { referrer: &ModuleSpecifier,
let resolver = self.get_scope_resolver(Some(specifier)); referrer_kind: NodeModuleKind,
let node_resolver = resolver.node_resolver.as_ref()?; ) -> bool {
let resolution = node_resolver let resolver = self.get_scope_resolver(Some(referrer));
.url_to_node_resolution(specifier.clone()) let Some(npm_pkg_req_resolver) = resolver.npm_pkg_req_resolver.as_ref()
.ok()?; else {
Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1) return false;
};
npm_pkg_req_resolver
.resolve_if_for_npm_pkg(
specifier_text,
referrer,
referrer_kind,
NodeResolutionMode::Types,
)
.ok()
.flatten()
.is_some()
} }
pub fn get_closest_package_json( pub fn get_closest_package_json(
@ -378,10 +516,9 @@ impl LspResolver {
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> { ) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
let resolver = self.get_scope_resolver(Some(referrer)); let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else { resolver
return Ok(None); .pkg_json_resolver
}; .get_closest_package_json(referrer)
node_resolver.get_closest_package_json(referrer)
} }
pub fn resolve_redirects( pub fn resolve_redirects(
@ -421,121 +558,225 @@ impl LspResolver {
}; };
self self
.by_scope .by_scope
.iter() .values()
.rfind(|(s, _)| file_referrer.as_str().starts_with(s.as_str())) .rfind(|r| {
.map(|(_, r)| r.as_ref()) r.config_data
.as_ref()
.map(|d| d.scope_contains_specifier(file_referrer))
.unwrap_or(false)
})
.map(|r| r.as_ref())
.unwrap_or(self.unscoped.as_ref()) .unwrap_or(self.unscoped.as_ref())
} }
} }
async fn create_npm_resolver( #[derive(Debug, Default, Clone)]
config_data: Option<&ConfigData>, pub struct ScopeDepInfo {
cache: &LspCache, pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>,
http_client_provider: &Arc<HttpClientProvider>, pub npm_reqs: BTreeSet<PackageReq>,
) -> Option<Arc<dyn CliNpmResolver>> { pub has_node_specifier: bool,
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(*DENO_FUTURE); }
let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions { #[derive(Default)]
fs: Arc::new(deno_fs::RealFs), struct ResolverFactoryServices {
root_node_modules_dir: config_data.and_then(|config_data| { cli_resolver: Deferred<Arc<CliResolver>>,
config_data.node_modules_dir.clone().or_else(|| { in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
specifier_to_file_path(&config_data.scope) node_resolver: Deferred<Option<Arc<NodeResolver>>>,
.ok() npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
.map(|p| p.join("node_modules/")) npm_resolver: Option<Arc<dyn CliNpmResolver>>,
}) }
}),
}) struct ResolverFactory<'a> {
} else { config_data: Option<&'a Arc<ConfigData>>,
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { fs: Arc<dyn deno_fs::FileSystem>,
http_client_provider: http_client_provider.clone(), pkg_json_resolver: Arc<PackageJsonResolver>,
snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) { services: ResolverFactoryServices,
Some(lockfile) => { }
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(), impl<'a> ResolverFactory<'a> {
) pub fn new(config_data: Option<&'a Arc<ConfigData>>) -> Self {
} let fs = Arc::new(deno_fs::RealFs);
None => CliNpmResolverManagedSnapshotOption::Specified(None), let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
}, deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
// Don't provide the lockfile. We don't want these resolvers ));
// updating it. Only the cache request should update the lockfile. Self {
maybe_lockfile: None, config_data,
fs: Arc::new(deno_fs::RealFs), fs,
npm_global_cache_dir: cache.deno_dir().npm_folder_path(), pkg_json_resolver,
// Use an "only" cache setting in order to make the services: Default::default(),
// user do an explicit "cache" command and prevent }
// the cache from being filled with lots of packages while }
// the user is typing.
cache_setting: CacheSetting::Only, async fn init_npm_resolver(
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly), &mut self,
maybe_node_modules_path: config_data http_client_provider: &Arc<HttpClientProvider>,
.and_then(|d| d.node_modules_dir.clone()), cache: &LspCache,
// only used for top level install, so we can ignore this ) {
package_json_deps_provider: Arc::new( let enable_byonm = self.config_data.map(|d| d.byonm).unwrap_or(false);
PackageJsonInstallDepsProvider::empty(), let options = if enable_byonm {
), CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
npmrc: config_data fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)),
pkg_json_resolver: self.pkg_json_resolver.clone(),
root_node_modules_dir: self.config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| {
url_to_file_path(&config_data.scope)
.ok()
.map(|p| p.join("node_modules/"))
})
}),
})
} else {
let npmrc = self
.config_data
.and_then(|d| d.npmrc.clone()) .and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc), .unwrap_or_else(create_default_npmrc);
npm_system_info: NpmSystemInfo::default(), let npm_cache_dir = Arc::new(NpmCacheDir::new(
lifecycle_scripts: Default::default(), &DenoCacheEnvFsAdapter(self.fs.as_ref()),
cache.deno_dir().npm_folder_path(),
npmrc.get_all_known_registries_urls(),
));
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions {
http_client_provider: http_client_provider.clone(),
snapshot: match self.config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
},
// Don't provide the lockfile. We don't want these resolvers
// updating it. Only the cache request should update the lockfile.
maybe_lockfile: None,
fs: Arc::new(deno_fs::RealFs),
npm_cache_dir,
// Use an "only" cache setting in order to make the
// user do an explicit "cache" command and prevent
// the cache from being filled with lots of packages while
// the user is typing.
cache_setting: CacheSetting::Only,
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly),
maybe_node_modules_path: self
.config_data
.and_then(|d| d.node_modules_dir.clone()),
// only used for top level install, so we can ignore this
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()),
npmrc,
npm_system_info: NpmSystemInfo::default(),
lifecycle_scripts: Default::default(),
})
};
self.set_npm_resolver(create_cli_npm_resolver_for_lsp(options).await);
}
pub fn set_npm_resolver(&mut self, npm_resolver: Arc<dyn CliNpmResolver>) {
self.services.npm_resolver = Some(npm_resolver);
}
pub fn npm_resolver(&self) -> Option<&Arc<dyn CliNpmResolver>> {
self.services.npm_resolver.as_ref()
}
pub fn cli_resolver(&self) -> &Arc<CliResolver> {
self.services.cli_resolver.get_or_init(|| {
let npm_req_resolver = self.npm_pkg_req_resolver().cloned();
let deno_resolver = Arc::new(CliDenoResolver::new(DenoResolverOptions {
in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
node_and_req_resolver: match (self.node_resolver(), npm_req_resolver) {
(Some(node_resolver), Some(npm_req_resolver)) => {
Some(NodeAndNpmReqResolver {
node_resolver: node_resolver.clone(),
npm_req_resolver,
})
}
_ => None,
},
sloppy_imports_resolver: self
.config_data
.and_then(|d| d.sloppy_imports_resolver.clone()),
workspace_resolver: self
.config_data
.map(|d| d.resolver.clone())
.unwrap_or_else(|| {
Arc::new(WorkspaceResolver::new_raw(
// this is fine because this is only used before initialization
Arc::new(ModuleSpecifier::parse("file:///").unwrap()),
None,
Vec::new(),
Vec::new(),
PackageJsonDepResolution::Disabled,
))
}),
is_byonm: self.config_data.map(|d| d.byonm).unwrap_or(false),
maybe_vendor_dir: self.config_data.and_then(|d| d.vendor_dir.as_ref()),
}));
Arc::new(CliResolver::new(CliResolverOptions {
deno_resolver,
npm_resolver: self.npm_resolver().cloned(),
bare_node_builtins_enabled: self
.config_data
.is_some_and(|d| d.unstable.contains("bare-node-builtins")),
}))
}) })
}; }
Some(create_cli_npm_resolver_for_lsp(options).await)
}
fn create_node_resolver( pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
npm_resolver: Option<&Arc<dyn CliNpmResolver>>, &self.pkg_json_resolver
) -> Option<Arc<CliNodeResolver>> { }
use once_cell::sync::Lazy;
// it's not ideal to share this across all scopes and to pub fn in_npm_pkg_checker(&self) -> &Arc<dyn InNpmPackageChecker> {
// never clear it, but it's fine for the time being self.services.in_npm_pkg_checker.get_or_init(|| {
static CJS_RESOLUTIONS: Lazy<Arc<CjsResolutionStore>> = crate::npm::create_in_npm_pkg_checker(
Lazy::new(Default::default); match self.services.npm_resolver.as_ref().map(|r| r.as_inner()) {
Some(crate::npm::InnerCliNpmResolverRef::Byonm(_)) | None => {
CreateInNpmPkgCheckerOptions::Byonm
}
Some(crate::npm::InnerCliNpmResolverRef::Managed(m)) => {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.maybe_node_modules_path(),
},
)
}
},
)
})
}
let npm_resolver = npm_resolver?; pub fn node_resolver(&self) -> Option<&Arc<NodeResolver>> {
let fs = Arc::new(deno_fs::RealFs); self
let node_resolver_inner = Arc::new(NodeResolver::new( .services
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), .node_resolver
npm_resolver.clone().into_npm_resolver(), .get_or_init(|| {
)); let npm_resolver = self.services.npm_resolver.as_ref()?;
Some(Arc::new(CliNodeResolver::new( Some(Arc::new(NodeResolver::new(
CJS_RESOLUTIONS.clone(), deno_runtime::deno_node::DenoFsNodeResolverEnv::new(self.fs.clone()),
fs, self.in_npm_pkg_checker().clone(),
node_resolver_inner, npm_resolver.clone().into_npm_pkg_folder_resolver(),
npm_resolver.clone(), self.pkg_json_resolver.clone(),
))) )))
} })
.as_ref()
}
fn create_graph_resolver( pub fn npm_pkg_req_resolver(&self) -> Option<&Arc<CliNpmReqResolver>> {
config_data: Option<&ConfigData>, self
npm_resolver: Option<&Arc<dyn CliNpmResolver>>, .services
node_resolver: Option<&Arc<CliNodeResolver>>, .npm_pkg_req_resolver
) -> Arc<CliGraphResolver> { .get_or_init(|| {
let workspace = config_data.map(|d| &d.member_dir.workspace); let node_resolver = self.node_resolver()?;
Arc::new(CliGraphResolver::new(CliGraphResolverOptions { let npm_resolver = self.npm_resolver()?;
node_resolver: node_resolver.cloned(), Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
npm_resolver: npm_resolver.cloned(), byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else( fs: CliDenoResolverFs(self.fs.clone()),
|| { in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
Arc::new(WorkspaceResolver::new_raw( node_resolver: node_resolver.clone(),
// this is fine because this is only used before initialization npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
Arc::new(ModuleSpecifier::parse("file:///").unwrap()), })))
None, })
Vec::new(), .as_ref()
PackageJsonDepResolution::Disabled, }
))
},
),
maybe_jsx_import_source_config: workspace.and_then(|workspace| {
workspace.to_maybe_jsx_import_source_config().ok().flatten()
}),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: workspace
.is_some_and(|workspace| workspace.has_unstable("bare-node-builtins")),
sloppy_imports_resolver: config_data
.and_then(|d| d.sloppy_imports_resolver.clone()),
}))
} }
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
@ -562,6 +803,141 @@ impl std::fmt::Debug for RedirectResolver {
} }
} }
#[derive(Debug)]
pub struct LspIsCjsResolver {
inner: IsCjsResolver,
}
impl Default for LspIsCjsResolver {
fn default() -> Self {
LspIsCjsResolver::new(&Default::default())
}
}
impl LspIsCjsResolver {
pub fn new(cache: &LspCache) -> Self {
#[derive(Debug)]
struct LspInNpmPackageChecker {
global_cache_dir: ModuleSpecifier,
}
impl LspInNpmPackageChecker {
pub fn new(cache: &LspCache) -> Self {
let npm_folder_path = cache.deno_dir().npm_folder_path();
Self {
global_cache_dir: url_from_directory_path(
&canonicalize_path_maybe_not_exists(&npm_folder_path)
.unwrap_or(npm_folder_path),
)
.unwrap_or_else(|_| {
ModuleSpecifier::parse("file:///invalid/").unwrap()
}),
}
}
}
impl InNpmPackageChecker for LspInNpmPackageChecker {
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.scheme() != "file" {
return false;
}
if specifier
.as_str()
.starts_with(self.global_cache_dir.as_str())
{
return true;
}
specifier.as_str().contains("/node_modules/")
}
}
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
LspIsCjsResolver {
inner: IsCjsResolver::new(
Arc::new(LspInNpmPackageChecker::new(cache)),
pkg_json_resolver,
crate::resolver::IsCjsResolverOptions {
detect_cjs: true,
is_node_main: false,
},
),
}
}
pub fn get_maybe_doc_module_kind(
&self,
specifier: &ModuleSpecifier,
maybe_document: Option<&Document>,
) -> NodeModuleKind {
self.get_lsp_referrer_kind(
specifier,
maybe_document.and_then(|d| d.is_script()),
)
}
pub fn get_doc_module_kind(&self, document: &Document) -> NodeModuleKind {
self.get_lsp_referrer_kind(document.specifier(), document.is_script())
}
pub fn get_lsp_referrer_kind(
&self,
specifier: &ModuleSpecifier,
is_script: Option<bool>,
) -> NodeModuleKind {
self.inner.get_lsp_referrer_kind(specifier, is_script)
}
}
#[derive(Debug)]
pub struct SingleReferrerGraphResolver<'a> {
pub valid_referrer: &'a ModuleSpecifier,
pub referrer_kind: NodeModuleKind,
pub cli_resolver: &'a CliResolver,
pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>,
}
impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> {
  /// The default JSX runtime import source from config, if any.
  fn default_jsx_import_source(&self) -> Option<String> {
    match self.jsx_import_source_config {
      Some(config) => config.default_specifier.clone(),
      None => None,
    }
  }

  /// The default JSX types import source from config, if any.
  fn default_jsx_import_source_types(&self) -> Option<String> {
    match self.jsx_import_source_config {
      Some(config) => config.default_types_specifier.clone(),
      None => None,
    }
  }

  /// The JSX import source module, falling back to deno_graph's default
  /// when no config is present.
  fn jsx_import_source_module(&self) -> &str {
    if let Some(config) = self.jsx_import_source_config {
      config.module.as_str()
    } else {
      deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE
    }
  }

  fn resolve(
    &self,
    specifier_text: &str,
    referrer_range: &Range,
    mode: ResolutionMode,
  ) -> Result<ModuleSpecifier, deno_graph::source::ResolveError> {
    // This resolver assumes it is only ever used with the single
    // referrer (and referrer kind) it was constructed for.
    debug_assert_eq!(referrer_range.specifier, *self.valid_referrer);
    self.cli_resolver.resolve(
      specifier_text,
      referrer_range,
      self.referrer_kind,
      mode,
    )
  }
}
impl RedirectResolver { impl RedirectResolver {
fn new( fn new(
cache: Arc<dyn HttpCache>, cache: Arc<dyn HttpCache>,

View file

@ -147,7 +147,7 @@ fn visit_call_expr(
let ast::Prop::KeyValue(key_value_prop) = prop.as_ref() else { let ast::Prop::KeyValue(key_value_prop) = prop.as_ref() else {
continue; continue;
}; };
let ast::PropName::Ident(ast::Ident { sym, .. }) = let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&key_value_prop.key &key_value_prop.key
else { else {
continue; continue;
@ -187,7 +187,7 @@ fn visit_call_expr(
}; };
match prop.as_ref() { match prop.as_ref() {
ast::Prop::KeyValue(key_value_prop) => { ast::Prop::KeyValue(key_value_prop) => {
let ast::PropName::Ident(ast::Ident { sym, .. }) = let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&key_value_prop.key &key_value_prop.key
else { else {
continue; continue;
@ -206,7 +206,7 @@ fn visit_call_expr(
} }
} }
ast::Prop::Method(method_prop) => { ast::Prop::Method(method_prop) => {
let ast::PropName::Ident(ast::Ident { sym, .. }) = let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&method_prop.key &method_prop.key
else { else {
continue; continue;
@ -472,7 +472,7 @@ impl Visit for TestCollector {
collector: &mut TestCollector, collector: &mut TestCollector,
node: &ast::CallExpr, node: &ast::CallExpr,
range: &deno_ast::SourceRange, range: &deno_ast::SourceRange,
ns_prop_ident: &ast::Ident, ns_prop_ident: &ast::IdentName,
member_expr: &ast::MemberExpr, member_expr: &ast::MemberExpr,
) { ) {
if ns_prop_ident.sym == "test" { if ns_prop_ident.sym == "test" {
@ -650,7 +650,7 @@ pub mod tests {
.unwrap(); .unwrap();
let text_info = parsed_module.text_info_lazy().clone(); let text_info = parsed_module.text_info_lazy().clone();
let mut collector = TestCollector::new(specifier, text_info); let mut collector = TestCollector::new(specifier, text_info);
parsed_module.module().visit_with(&mut collector); parsed_module.program().visit_with(&mut collector);
collector.take() collector.take()
} }

View file

@ -5,10 +5,12 @@ use super::lsp_custom::TestData;
use crate::lsp::client::TestingNotification; use crate::lsp::client::TestingNotification;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use crate::lsp::urls::url_to_uri;
use crate::tools::test::TestDescription; use crate::tools::test::TestDescription;
use crate::tools::test::TestStepDescription; use crate::tools::test::TestStepDescription;
use crate::util::checksum; use crate::util::checksum;
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use lsp::Range; use lsp::Range;
use std::collections::HashMap; use std::collections::HashMap;
@ -143,21 +145,23 @@ impl TestModule {
pub fn as_replace_notification( pub fn as_replace_notification(
&self, &self,
maybe_root_uri: Option<&ModuleSpecifier>, maybe_root_uri: Option<&ModuleSpecifier>,
) -> TestingNotification { ) -> Result<TestingNotification, AnyError> {
let label = self.label(maybe_root_uri); let label = self.label(maybe_root_uri);
TestingNotification::Module(lsp_custom::TestModuleNotificationParams { Ok(TestingNotification::Module(
text_document: lsp::TextDocumentIdentifier { lsp_custom::TestModuleNotificationParams {
uri: self.specifier.clone(), text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(&self.specifier)?,
},
kind: lsp_custom::TestModuleNotificationKind::Replace,
label,
tests: self
.defs
.iter()
.filter(|(_, def)| def.parent_id.is_none())
.map(|(id, _)| self.get_test_data(id))
.collect(),
}, },
kind: lsp_custom::TestModuleNotificationKind::Replace, ))
label,
tests: self
.defs
.iter()
.filter(|(_, def)| def.parent_id.is_none())
.map(|(id, _)| self.get_test_data(id))
.collect(),
})
} }
pub fn label(&self, maybe_root_uri: Option<&ModuleSpecifier>) -> String { pub fn label(&self, maybe_root_uri: Option<&ModuleSpecifier>) -> String {

View file

@ -12,9 +12,13 @@ use crate::lsp::client::Client;
use crate::lsp::client::TestingNotification; use crate::lsp::client::TestingNotification;
use crate::lsp::config; use crate::lsp::config;
use crate::lsp::logging::lsp_log; use crate::lsp::logging::lsp_log;
use crate::lsp::urls::uri_parse_unencoded;
use crate::lsp::urls::uri_to_url;
use crate::lsp::urls::url_to_uri;
use crate::tools::test; use crate::tools::test;
use crate::tools::test::create_test_event_channel; use crate::tools::test::create_test_event_channel;
use crate::tools::test::FailFastTracker; use crate::tools::test::FailFastTracker;
use crate::tools::test::TestFailureFormatOptions;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -27,8 +31,10 @@ use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking; use deno_core::unsync::spawn_blocking;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::tokio_util::create_and_run_current_thread; use deno_runtime::tokio_util::create_and_run_current_thread;
use indexmap::IndexMap; use indexmap::IndexMap;
use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
@ -52,12 +58,12 @@ fn as_queue_and_filters(
if let Some(include) = &params.include { if let Some(include) = &params.include {
for item in include { for item in include {
if let Some((test_definitions, _)) = tests.get(&item.text_document.uri) { let url = uri_to_url(&item.text_document.uri);
queue.insert(item.text_document.uri.clone()); if let Some((test_definitions, _)) = tests.get(&url) {
queue.insert(url.clone());
if let Some(id) = &item.id { if let Some(id) = &item.id {
if let Some(test) = test_definitions.get(id) { if let Some(test) = test_definitions.get(id) {
let filter = let filter = filters.entry(url).or_default();
filters.entry(item.text_document.uri.clone()).or_default();
if let Some(include) = filter.include.as_mut() { if let Some(include) = filter.include.as_mut() {
include.insert(test.id.clone(), test.clone()); include.insert(test.id.clone(), test.clone());
} else { } else {
@ -74,19 +80,19 @@ fn as_queue_and_filters(
} }
for item in &params.exclude { for item in &params.exclude {
if let Some((test_definitions, _)) = tests.get(&item.text_document.uri) { let url = uri_to_url(&item.text_document.uri);
if let Some((test_definitions, _)) = tests.get(&url) {
if let Some(id) = &item.id { if let Some(id) = &item.id {
// there is no way to exclude a test step // there is no way to exclude a test step
if item.step_id.is_none() { if item.step_id.is_none() {
if let Some(test) = test_definitions.get(id) { if let Some(test) = test_definitions.get(id) {
let filter = let filter = filters.entry(url.clone()).or_default();
filters.entry(item.text_document.uri.clone()).or_default();
filter.exclude.insert(test.id.clone(), test.clone()); filter.exclude.insert(test.id.clone(), test.clone());
} }
} }
} else { } else {
// the entire test module is excluded // the entire test module is excluded
queue.remove(&item.text_document.uri); queue.remove(&url);
} }
} }
} }
@ -181,7 +187,7 @@ impl TestRun {
self self
.queue .queue
.iter() .iter()
.map(|s| { .filter_map(|s| {
let ids = if let Some((test_module, _)) = tests.get(s) { let ids = if let Some((test_module, _)) = tests.get(s) {
if let Some(filter) = self.filters.get(s) { if let Some(filter) = self.filters.get(s) {
filter.as_ids(test_module) filter.as_ids(test_module)
@ -191,10 +197,12 @@ impl TestRun {
} else { } else {
Vec::new() Vec::new()
}; };
lsp_custom::EnqueuedTestModule { Some(lsp_custom::EnqueuedTestModule {
text_document: lsp::TextDocumentIdentifier { uri: s.clone() }, text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(s).ok()?,
},
ids, ids,
} })
}) })
.collect() .collect()
} }
@ -212,26 +220,23 @@ impl TestRun {
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let args = self.get_args(); let args = self.get_args();
lsp_log!("Executing test run with arguments: {}", args.join(" ")); lsp_log!("Executing test run with arguments: {}", args.join(" "));
let flags = let flags = Arc::new(flags_from_vec(
Arc::new(flags_from_vec(args.into_iter().map(From::from).collect())?); args.into_iter().map(|s| From::from(s.as_ref())).collect(),
)?);
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
// Various test files should not share the same permissions in terms of // Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one // `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable. // file would have impact on other files, which is undesirable.
let permissions = let permission_desc_parser = factory.permission_desc_parser()?.clone();
Permissions::from_options(&cli_options.permissions_options()?)?; let permissions = Permissions::from_options(
permission_desc_parser.as_ref(),
&cli_options.permissions_options(),
)?;
let main_graph_container = factory.main_module_graph_container().await?; let main_graph_container = factory.main_module_graph_container().await?;
test::check_specifiers( main_graph_container
factory.file_fetcher()?, .check_specifiers(&self.queue.iter().cloned().collect::<Vec<_>>(), None)
main_graph_container, .await?;
self
.queue
.iter()
.map(|s| (s.clone(), test::TestMode::Executable))
.collect(),
)
.await?;
let (concurrent_jobs, fail_fast) = let (concurrent_jobs, fail_fast) =
if let DenoSubcommand::Test(test_flags) = cli_options.sub_command() { if let DenoSubcommand::Test(test_flags) = cli_options.sub_command() {
@ -268,7 +273,10 @@ impl TestRun {
let join_handles = queue.into_iter().map(move |specifier| { let join_handles = queue.into_iter().map(move |specifier| {
let specifier = specifier.clone(); let specifier = specifier.clone();
let worker_factory = worker_factory.clone(); let worker_factory = worker_factory.clone();
let permissions = permissions.clone(); let permissions_container = PermissionsContainer::new(
permission_desc_parser.clone(),
permissions.clone(),
);
let worker_sender = test_event_sender_factory.worker(); let worker_sender = test_event_sender_factory.worker();
let fail_fast_tracker = fail_fast_tracker.clone(); let fail_fast_tracker = fail_fast_tracker.clone();
let lsp_filter = self.filters.get(&specifier); let lsp_filter = self.filters.get(&specifier);
@ -297,7 +305,7 @@ impl TestRun {
// channel. // channel.
create_and_run_current_thread(test::test_specifier( create_and_run_current_thread(test::test_specifier(
worker_factory, worker_factory,
permissions, permissions_container,
specifier, specifier,
worker_sender, worker_sender,
fail_fast_tracker, fail_fast_tracker,
@ -445,37 +453,42 @@ impl TestRun {
Ok(()) Ok(())
} }
fn get_args(&self) -> Vec<&str> { fn get_args(&self) -> Vec<Cow<str>> {
let mut args = vec!["deno", "test"]; let mut args = vec![Cow::Borrowed("deno"), Cow::Borrowed("test")];
args.extend( args.extend(
self self
.workspace_settings .workspace_settings
.testing .testing
.args .args
.iter() .iter()
.map(|s| s.as_str()), .map(|s| Cow::Borrowed(s.as_str())),
); );
args.push("--trace-leaks"); args.push(Cow::Borrowed("--trace-leaks"));
if self.workspace_settings.unstable && !args.contains(&"--unstable") { for unstable_feature in self.workspace_settings.unstable.as_deref() {
args.push("--unstable"); let flag = format!("--unstable-{unstable_feature}");
if !args.contains(&Cow::Borrowed(&flag)) {
args.push(Cow::Owned(flag));
}
} }
if let Some(config) = &self.workspace_settings.config { if let Some(config) = &self.workspace_settings.config {
if !args.contains(&"--config") && !args.contains(&"-c") { if !args.contains(&Cow::Borrowed("--config"))
args.push("--config"); && !args.contains(&Cow::Borrowed("-c"))
args.push(config.as_str()); {
args.push(Cow::Borrowed("--config"));
args.push(Cow::Borrowed(config.as_str()));
} }
} }
if let Some(import_map) = &self.workspace_settings.import_map { if let Some(import_map) = &self.workspace_settings.import_map {
if !args.contains(&"--import-map") { if !args.contains(&Cow::Borrowed("--import-map")) {
args.push("--import-map"); args.push(Cow::Borrowed("--import-map"));
args.push(import_map.as_str()); args.push(Cow::Borrowed(import_map.as_str()));
} }
} }
if self.kind == lsp_custom::TestRunKind::Debug if self.kind == lsp_custom::TestRunKind::Debug
&& !args.contains(&"--inspect") && !args.contains(&Cow::Borrowed("--inspect"))
&& !args.contains(&"--inspect-brk") && !args.contains(&Cow::Borrowed("--inspect-brk"))
{ {
args.push("--inspect"); args.push(Cow::Borrowed("--inspect"));
} }
args args
} }
@ -522,7 +535,7 @@ impl LspTestDescription {
&self, &self,
tests: &IndexMap<usize, LspTestDescription>, tests: &IndexMap<usize, LspTestDescription>,
) -> lsp_custom::TestIdentifier { ) -> lsp_custom::TestIdentifier {
let uri = ModuleSpecifier::parse(&self.location().file_name).unwrap(); let uri = uri_parse_unencoded(&self.location().file_name).unwrap();
let static_id = self.static_id(); let static_id = self.static_id();
let mut root_desc = self; let mut root_desc = self;
while let Some(parent_id) = root_desc.parent_id() { while let Some(parent_id) = root_desc.parent_id() {
@ -586,6 +599,9 @@ impl LspTestReporter {
let (test_module, _) = files let (test_module, _) = files
.entry(specifier.clone()) .entry(specifier.clone())
.or_insert_with(|| (TestModule::new(specifier), "1".to_string())); .or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
let Ok(uri) = url_to_uri(&test_module.specifier) else {
return;
};
let (static_id, is_new) = test_module.register_dynamic(desc); let (static_id, is_new) = test_module.register_dynamic(desc);
self.tests.insert( self.tests.insert(
desc.id, desc.id,
@ -596,9 +612,7 @@ impl LspTestReporter {
.client .client
.send_test_notification(TestingNotification::Module( .send_test_notification(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams { lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier { uri },
uri: test_module.specifier.clone(),
},
kind: lsp_custom::TestModuleNotificationKind::Insert, kind: lsp_custom::TestModuleNotificationKind::Insert,
label: test_module.label(self.maybe_root_uri.as_ref()), label: test_module.label(self.maybe_root_uri.as_ref()),
tests: vec![test_module.get_test_data(&static_id)], tests: vec![test_module.get_test_data(&static_id)],
@ -655,7 +669,10 @@ impl LspTestReporter {
let desc = self.tests.get(&desc.id).unwrap(); let desc = self.tests.get(&desc.id).unwrap();
self.progress(lsp_custom::TestRunProgressMessage::Failed { self.progress(lsp_custom::TestRunProgressMessage::Failed {
test: desc.as_test_identifier(&self.tests), test: desc.as_test_identifier(&self.tests),
messages: as_test_messages(failure.to_string(), false), messages: as_test_messages(
failure.format(&TestFailureFormatOptions::default()),
false,
),
duration: Some(elapsed as u32), duration: Some(elapsed as u32),
}) })
} }
@ -675,7 +692,7 @@ impl LspTestReporter {
let err_string = format!( let err_string = format!(
"Uncaught error from {}: {}\nThis error was not caught from a test and caused the test runner to fail on the referenced module.\nIt most likely originated from a dangling promise, event/timeout handler or top-level code.", "Uncaught error from {}: {}\nThis error was not caught from a test and caused the test runner to fail on the referenced module.\nIt most likely originated from a dangling promise, event/timeout handler or top-level code.",
origin, origin,
test::fmt::format_test_error(js_error) test::fmt::format_test_error(js_error, &TestFailureFormatOptions::default())
); );
let messages = as_test_messages(err_string, false); let messages = as_test_messages(err_string, false);
for desc in self.tests.values().filter(|d| d.origin() == origin) { for desc in self.tests.values().filter(|d| d.origin() == origin) {
@ -693,6 +710,9 @@ impl LspTestReporter {
let (test_module, _) = files let (test_module, _) = files
.entry(specifier.clone()) .entry(specifier.clone())
.or_insert_with(|| (TestModule::new(specifier), "1".to_string())); .or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
let Ok(uri) = url_to_uri(&test_module.specifier) else {
return;
};
let (static_id, is_new) = test_module.register_step_dynamic( let (static_id, is_new) = test_module.register_step_dynamic(
desc, desc,
self.tests.get(&desc.parent_id).unwrap().static_id(), self.tests.get(&desc.parent_id).unwrap().static_id(),
@ -706,9 +726,7 @@ impl LspTestReporter {
.client .client
.send_test_notification(TestingNotification::Module( .send_test_notification(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams { lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier { uri },
uri: test_module.specifier.clone(),
},
kind: lsp_custom::TestModuleNotificationKind::Insert, kind: lsp_custom::TestModuleNotificationKind::Insert,
label: test_module.label(self.maybe_root_uri.as_ref()), label: test_module.label(self.maybe_root_uri.as_ref()),
tests: vec![test_module.get_test_data(&static_id)], tests: vec![test_module.get_test_data(&static_id)],
@ -751,7 +769,10 @@ impl LspTestReporter {
test::TestStepResult::Failed(failure) => { test::TestStepResult::Failed(failure) => {
self.progress(lsp_custom::TestRunProgressMessage::Failed { self.progress(lsp_custom::TestRunProgressMessage::Failed {
test: desc.as_test_identifier(&self.tests), test: desc.as_test_identifier(&self.tests),
messages: as_test_messages(failure.to_string(), false), messages: as_test_messages(
failure.format(&TestFailureFormatOptions::default()),
false,
),
duration: Some(elapsed as u32), duration: Some(elapsed as u32),
}) })
} }
@ -789,14 +810,14 @@ mod tests {
include: Some(vec![ include: Some(vec![
lsp_custom::TestIdentifier { lsp_custom::TestIdentifier {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: specifier.clone(), uri: url_to_uri(&specifier).unwrap(),
}, },
id: None, id: None,
step_id: None, step_id: None,
}, },
lsp_custom::TestIdentifier { lsp_custom::TestIdentifier {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: non_test_specifier.clone(), uri: url_to_uri(&non_test_specifier).unwrap(),
}, },
id: None, id: None,
step_id: None, step_id: None,
@ -804,7 +825,7 @@ mod tests {
]), ]),
exclude: vec![lsp_custom::TestIdentifier { exclude: vec![lsp_custom::TestIdentifier {
text_document: lsp::TextDocumentIdentifier { text_document: lsp::TextDocumentIdentifier {
uri: specifier.clone(), uri: url_to_uri(&specifier).unwrap(),
}, },
id: Some( id: Some(
"69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f" "69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f"

View file

@ -10,6 +10,7 @@ use crate::lsp::config;
use crate::lsp::documents::DocumentsFilter; use crate::lsp::documents::DocumentsFilter;
use crate::lsp::language_server::StateSnapshot; use crate::lsp::language_server::StateSnapshot;
use crate::lsp::performance::Performance; use crate::lsp::performance::Performance;
use crate::lsp::urls::url_to_uri;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
@ -26,12 +27,16 @@ use tower_lsp::jsonrpc::Error as LspError;
use tower_lsp::jsonrpc::Result as LspResult; use tower_lsp::jsonrpc::Result as LspResult;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
fn as_delete_notification(uri: ModuleSpecifier) -> TestingNotification { fn as_delete_notification(
TestingNotification::DeleteModule( url: &ModuleSpecifier,
) -> Result<TestingNotification, AnyError> {
Ok(TestingNotification::DeleteModule(
lsp_custom::TestModuleDeleteNotificationParams { lsp_custom::TestModuleDeleteNotificationParams {
text_document: lsp::TextDocumentIdentifier { uri }, text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(url)?,
},
}, },
) ))
} }
pub type TestServerTests = pub type TestServerTests =
@ -123,20 +128,24 @@ impl TestServer {
.map(|tm| tm.as_ref().clone()) .map(|tm| tm.as_ref().clone())
.unwrap_or_else(|| TestModule::new(specifier.clone())); .unwrap_or_else(|| TestModule::new(specifier.clone()));
if !test_module.is_empty() { if !test_module.is_empty() {
client.send_test_notification( if let Ok(params) =
test_module.as_replace_notification(mru.as_ref()), test_module.as_replace_notification(mru.as_ref())
); {
client.send_test_notification(params);
}
} else if !was_empty { } else if !was_empty {
client.send_test_notification(as_delete_notification( if let Ok(params) = as_delete_notification(specifier) {
specifier.clone(), client.send_test_notification(params);
)); }
} }
tests tests
.insert(specifier.clone(), (test_module, script_version)); .insert(specifier.clone(), (test_module, script_version));
} }
} }
for key in keys { for key in &keys {
client.send_test_notification(as_delete_notification(key)); if let Ok(params) = as_delete_notification(key) {
client.send_test_notification(params);
}
} }
performance.measure(mark); performance.measure(mark);
} }

View file

@ -19,8 +19,10 @@ use super::refactor::EXTRACT_TYPE;
use super::semantic_tokens; use super::semantic_tokens;
use super::semantic_tokens::SemanticTokensBuilder; use super::semantic_tokens::SemanticTokensBuilder;
use super::text::LineIndex; use super::text::LineIndex;
use super::urls::LspClientUrl; use super::urls::uri_to_url;
use super::urls::url_to_uri;
use super::urls::INVALID_SPECIFIER; use super::urls::INVALID_SPECIFIER;
use super::urls::INVALID_URI;
use crate::args::jsr_url; use crate::args::jsr_url;
use crate::args::FmtOptionsConfig; use crate::args::FmtOptionsConfig;
@ -32,12 +34,12 @@ use crate::util::path::relative_specifier;
use crate::util::path::to_percent_decoded_str; use crate::util::path::to_percent_decoded_str;
use crate::util::result::InfallibleResultExt; use crate::util::result::InfallibleResultExt;
use crate::util::v8::convert; use crate::util::v8::convert;
use crate::worker::create_isolate_create_params;
use deno_core::convert::Smi; use deno_core::convert::Smi;
use deno_core::convert::ToV8; use deno_core::convert::ToV8;
use deno_core::error::StdAnyError; use deno_core::error::StdAnyError;
use deno_core::futures::stream::FuturesOrdered; use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_runtime::fs_util::specifier_to_file_path;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::MediaType; use deno_ast::MediaType;
@ -61,12 +63,14 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState; use deno_core::OpState;
use deno_core::PollEventLoopOptions; use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions; use deno_core::RuntimeOptions;
use deno_path_util::url_to_file_path;
use deno_runtime::inspector_server::InspectorServer; use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::tokio_util::create_basic_runtime; use deno_runtime::tokio_util::create_basic_runtime;
use indexmap::IndexMap; use indexmap::IndexMap;
use indexmap::IndexSet; use indexmap::IndexSet;
use lazy_regex::lazy_regex; use lazy_regex::lazy_regex;
use log::error; use log::error;
use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Captures; use regex::Captures;
use regex::Regex; use regex::Regex;
@ -215,6 +219,8 @@ pub enum SemicolonPreference {
Remove, Remove,
} }
// Allow due to false positive https://github.com/rust-lang/rust-clippy/issues/13170
#[allow(clippy::needless_borrows_for_generic_args)]
fn normalize_diagnostic( fn normalize_diagnostic(
diagnostic: &mut crate::tsc::Diagnostic, diagnostic: &mut crate::tsc::Diagnostic,
specifier_map: &TscSpecifierMap, specifier_map: &TscSpecifierMap,
@ -232,7 +238,7 @@ pub struct TsServer {
performance: Arc<Performance>, performance: Arc<Performance>,
sender: mpsc::UnboundedSender<Request>, sender: mpsc::UnboundedSender<Request>,
receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>, receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>,
specifier_map: Arc<TscSpecifierMap>, pub specifier_map: Arc<TscSpecifierMap>,
inspector_server: Mutex<Option<Arc<InspectorServer>>>, inspector_server: Mutex<Option<Arc<InspectorServer>>>,
pending_change: Mutex<Option<PendingChange>>, pending_change: Mutex<Option<PendingChange>>,
} }
@ -878,20 +884,22 @@ impl TsServer {
options: GetCompletionsAtPositionOptions, options: GetCompletionsAtPositionOptions,
format_code_settings: FormatCodeSettings, format_code_settings: FormatCodeSettings,
scope: Option<ModuleSpecifier>, scope: Option<ModuleSpecifier>,
) -> Option<CompletionInfo> { ) -> Result<Option<CompletionInfo>, AnyError> {
let req = TscRequest::GetCompletionsAtPosition(Box::new(( let req = TscRequest::GetCompletionsAtPosition(Box::new((
self.specifier_map.denormalize(&specifier), self.specifier_map.denormalize(&specifier),
position, position,
options, options,
format_code_settings, format_code_settings,
))); )));
match self.request(snapshot, req, scope).await { self
Ok(maybe_info) => maybe_info, .request::<Option<CompletionInfo>>(snapshot, req, scope)
Err(err) => { .await
log::error!("Unable to get completion info from TypeScript: {:#}", err); .map(|mut info| {
None if let Some(info) = &mut info {
} info.normalize(&self.specifier_map);
} }
info
})
} }
pub async fn get_completion_details( pub async fn get_completion_details(
@ -2041,12 +2049,10 @@ impl DocumentSpan {
let target_asset_or_doc = let target_asset_or_doc =
language_server.get_maybe_asset_or_document(&target_specifier)?; language_server.get_maybe_asset_or_document(&target_specifier)?;
let target_line_index = target_asset_or_doc.line_index(); let target_line_index = target_asset_or_doc.line_index();
let file_referrer = language_server let file_referrer = target_asset_or_doc.file_referrer();
.documents
.get_file_referrer(&target_specifier);
let target_uri = language_server let target_uri = language_server
.url_map .url_map
.normalize_specifier(&target_specifier, file_referrer.as_deref()) .specifier_to_uri(&target_specifier, file_referrer)
.ok()?; .ok()?;
let (target_range, target_selection_range) = let (target_range, target_selection_range) =
if let Some(context_span) = &self.context_span { if let Some(context_span) = &self.context_span {
@ -2071,7 +2077,7 @@ impl DocumentSpan {
}; };
let link = lsp::LocationLink { let link = lsp::LocationLink {
origin_selection_range, origin_selection_range,
target_uri: target_uri.into_url(), target_uri,
target_range, target_range,
target_selection_range, target_selection_range,
}; };
@ -2090,12 +2096,12 @@ impl DocumentSpan {
language_server.get_maybe_asset_or_document(&specifier)?; language_server.get_maybe_asset_or_document(&specifier)?;
let line_index = asset_or_doc.line_index(); let line_index = asset_or_doc.line_index();
let range = self.text_span.to_range(line_index); let range = self.text_span.to_range(line_index);
let file_referrer = language_server.documents.get_file_referrer(&specifier); let file_referrer = asset_or_doc.file_referrer();
let mut target = language_server let target_uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref()) .specifier_to_uri(&specifier, file_referrer)
.ok()? .ok()?;
.into_url(); let mut target = uri_to_url(&target_uri);
target.set_fragment(Some(&format!( target.set_fragment(Some(&format!(
"L{},{}", "L{},{}",
range.start.line + 1, range.start.line + 1,
@ -2151,16 +2157,13 @@ impl NavigateToItem {
let asset_or_doc = let asset_or_doc =
language_server.get_asset_or_document(&specifier).ok()?; language_server.get_asset_or_document(&specifier).ok()?;
let line_index = asset_or_doc.line_index(); let line_index = asset_or_doc.line_index();
let file_referrer = language_server.documents.get_file_referrer(&specifier); let file_referrer = asset_or_doc.file_referrer();
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref()) .specifier_to_uri(&specifier, file_referrer)
.ok()?; .ok()?;
let range = self.text_span.to_range(line_index); let range = self.text_span.to_range(line_index);
let location = lsp::Location { let location = lsp::Location { uri, range };
uri: uri.into_url(),
range,
};
let mut tags: Option<Vec<lsp::SymbolTag>> = None; let mut tags: Option<Vec<lsp::SymbolTag>> = None;
let kind_modifiers = parse_kind_modifier(&self.kind_modifiers); let kind_modifiers = parse_kind_modifier(&self.kind_modifiers);
@ -2183,6 +2186,50 @@ impl NavigateToItem {
} }
} }
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintDisplayPart {
pub text: String,
pub span: Option<TextSpan>,
pub file: Option<String>,
}
impl InlayHintDisplayPart {
pub fn to_lsp(
&self,
language_server: &language_server::Inner,
) -> lsp::InlayHintLabelPart {
let location = self.file.as_ref().map(|f| {
let specifier =
resolve_url(f).unwrap_or_else(|_| INVALID_SPECIFIER.clone());
let file_referrer =
language_server.documents.get_file_referrer(&specifier);
let uri = language_server
.url_map
.specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| INVALID_URI.clone());
let range = self
.span
.as_ref()
.and_then(|s| {
let asset_or_doc =
language_server.get_asset_or_document(&specifier).ok()?;
Some(s.to_range(asset_or_doc.line_index()))
})
.unwrap_or_else(|| {
lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0))
});
lsp::Location { uri, range }
});
lsp::InlayHintLabelPart {
value: self.text.clone(),
tooltip: None,
location,
command: None,
}
}
}
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
pub enum InlayHintKind { pub enum InlayHintKind {
Type, Type,
@ -2204,6 +2251,7 @@ impl InlayHintKind {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct InlayHint { pub struct InlayHint {
pub text: String, pub text: String,
pub display_parts: Option<Vec<InlayHintDisplayPart>>,
pub position: u32, pub position: u32,
pub kind: InlayHintKind, pub kind: InlayHintKind,
pub whitespace_before: Option<bool>, pub whitespace_before: Option<bool>,
@ -2211,10 +2259,23 @@ pub struct InlayHint {
} }
impl InlayHint { impl InlayHint {
pub fn to_lsp(&self, line_index: Arc<LineIndex>) -> lsp::InlayHint { pub fn to_lsp(
&self,
line_index: Arc<LineIndex>,
language_server: &language_server::Inner,
) -> lsp::InlayHint {
lsp::InlayHint { lsp::InlayHint {
position: line_index.position_tsc(self.position.into()), position: line_index.position_tsc(self.position.into()),
label: lsp::InlayHintLabel::String(self.text.clone()), label: if let Some(display_parts) = &self.display_parts {
lsp::InlayHintLabel::LabelParts(
display_parts
.iter()
.map(|p| p.to_lsp(language_server))
.collect(),
)
} else {
lsp::InlayHintLabel::String(self.text.clone())
},
kind: self.kind.to_lsp(), kind: self.kind.to_lsp(),
padding_left: self.whitespace_before, padding_left: self.whitespace_before,
padding_right: self.whitespace_after, padding_right: self.whitespace_after,
@ -2413,12 +2474,10 @@ impl ImplementationLocation {
let file_referrer = language_server.documents.get_file_referrer(&specifier); let file_referrer = language_server.documents.get_file_referrer(&specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref()) .specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| { .unwrap_or_else(|_| INVALID_URI.clone());
LspClientUrl::new(ModuleSpecifier::parse("deno://invalid").unwrap())
});
lsp::Location { lsp::Location {
uri: uri.into_url(), uri,
range: self.document_span.text_span.to_range(line_index), range: self.document_span.text_span.to_range(line_index),
} }
} }
@ -2474,7 +2533,7 @@ impl RenameLocations {
language_server.documents.get_file_referrer(&specifier); language_server.documents.get_file_referrer(&specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref())?; .specifier_to_uri(&specifier, file_referrer.as_deref())?;
let asset_or_doc = language_server.get_asset_or_document(&specifier)?; let asset_or_doc = language_server.get_asset_or_document(&specifier)?;
// ensure TextDocumentEdit for `location.file_name`. // ensure TextDocumentEdit for `location.file_name`.
@ -2483,7 +2542,7 @@ impl RenameLocations {
uri.clone(), uri.clone(),
lsp::TextDocumentEdit { lsp::TextDocumentEdit {
text_document: lsp::OptionalVersionedTextDocumentIdentifier { text_document: lsp::OptionalVersionedTextDocumentIdentifier {
uri: uri.as_url().clone(), uri: uri.clone(),
version: asset_or_doc.document_lsp_version(), version: asset_or_doc.document_lsp_version(),
}, },
edits: edits:
@ -2685,7 +2744,7 @@ impl FileTextChanges {
.collect(); .collect();
Ok(lsp::TextDocumentEdit { Ok(lsp::TextDocumentEdit {
text_document: lsp::OptionalVersionedTextDocumentIdentifier { text_document: lsp::OptionalVersionedTextDocumentIdentifier {
uri: specifier, uri: url_to_uri(&specifier)?,
version: asset_or_doc.document_lsp_version(), version: asset_or_doc.document_lsp_version(),
}, },
edits, edits,
@ -2712,7 +2771,7 @@ impl FileTextChanges {
if self.is_new_file.unwrap_or(false) { if self.is_new_file.unwrap_or(false) {
ops.push(lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create( ops.push(lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(
lsp::CreateFile { lsp::CreateFile {
uri: specifier.clone(), uri: url_to_uri(&specifier)?,
options: Some(lsp::CreateFileOptions { options: Some(lsp::CreateFileOptions {
ignore_if_exists: Some(true), ignore_if_exists: Some(true),
overwrite: None, overwrite: None,
@ -2729,7 +2788,7 @@ impl FileTextChanges {
.collect(); .collect();
ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
text_document: lsp::OptionalVersionedTextDocumentIdentifier { text_document: lsp::OptionalVersionedTextDocumentIdentifier {
uri: specifier, uri: url_to_uri(&specifier)?,
version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()), version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()),
}, },
edits, edits,
@ -3127,10 +3186,10 @@ impl ReferenceEntry {
let file_referrer = language_server.documents.get_file_referrer(&specifier); let file_referrer = language_server.documents.get_file_referrer(&specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&specifier, file_referrer.as_deref()) .specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| LspClientUrl::new(INVALID_SPECIFIER.clone())); .unwrap_or_else(|_| INVALID_URI.clone());
lsp::Location { lsp::Location {
uri: uri.into_url(), uri,
range: self.document_span.text_span.to_range(line_index), range: self.document_span.text_span.to_range(line_index),
} }
} }
@ -3188,12 +3247,13 @@ impl CallHierarchyItem {
.get_file_referrer(&target_specifier); .get_file_referrer(&target_specifier);
let uri = language_server let uri = language_server
.url_map .url_map
.normalize_specifier(&target_specifier, file_referrer.as_deref()) .specifier_to_uri(&target_specifier, file_referrer.as_deref())
.unwrap_or_else(|_| LspClientUrl::new(INVALID_SPECIFIER.clone())); .unwrap_or_else(|_| INVALID_URI.clone());
let use_file_name = self.is_source_file_item(); let use_file_name = self.is_source_file_item();
let maybe_file_path = if uri.as_url().scheme() == "file" { let maybe_file_path = if uri.scheme().is_some_and(|s| s.as_str() == "file")
specifier_to_file_path(uri.as_url()).ok() {
url_to_file_path(&uri_to_url(&uri)).ok()
} else { } else {
None None
}; };
@ -3237,7 +3297,7 @@ impl CallHierarchyItem {
lsp::CallHierarchyItem { lsp::CallHierarchyItem {
name, name,
tags, tags,
uri: uri.into_url(), uri,
detail: Some(detail), detail: Some(detail),
kind: self.kind.clone().into(), kind: self.kind.clone().into(),
range: self.span.to_range(line_index.clone()), range: self.span.to_range(line_index.clone()),
@ -3357,9 +3417,18 @@ fn parse_code_actions(
additional_text_edits.extend(change.text_changes.iter().map(|tc| { additional_text_edits.extend(change.text_changes.iter().map(|tc| {
let mut text_edit = tc.as_text_edit(asset_or_doc.line_index()); let mut text_edit = tc.as_text_edit(asset_or_doc.line_index());
if let Some(specifier_rewrite) = &data.specifier_rewrite { if let Some(specifier_rewrite) = &data.specifier_rewrite {
text_edit.new_text = text_edit text_edit.new_text = text_edit.new_text.replace(
.new_text &specifier_rewrite.old_specifier,
.replace(&specifier_rewrite.0, &specifier_rewrite.1); &specifier_rewrite.new_specifier,
);
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
text_edit.new_text = format!(
"// @deno-types=\"{}\"\n{}",
deno_types_specifier, &text_edit.new_text
);
}
} }
text_edit text_edit
})); }));
@ -3518,17 +3587,23 @@ impl CompletionEntryDetails {
let mut text_edit = original_item.text_edit.clone(); let mut text_edit = original_item.text_edit.clone();
if let Some(specifier_rewrite) = &data.specifier_rewrite { if let Some(specifier_rewrite) = &data.specifier_rewrite {
if let Some(text_edit) = &mut text_edit { if let Some(text_edit) = &mut text_edit {
match text_edit { let new_text = match text_edit {
lsp::CompletionTextEdit::Edit(text_edit) => { lsp::CompletionTextEdit::Edit(text_edit) => &mut text_edit.new_text,
text_edit.new_text = text_edit
.new_text
.replace(&specifier_rewrite.0, &specifier_rewrite.1);
}
lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => { lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => {
insert_replace_edit.new_text = insert_replace_edit &mut insert_replace_edit.new_text
.new_text
.replace(&specifier_rewrite.0, &specifier_rewrite.1);
} }
};
*new_text = new_text.replace(
&specifier_rewrite.old_specifier,
&specifier_rewrite.new_specifier,
);
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
*new_text = format!(
"// @deno-types=\"{}\"\n{}",
deno_types_specifier, new_text
);
} }
} }
} }
@ -3586,6 +3661,12 @@ pub struct CompletionInfo {
} }
impl CompletionInfo { impl CompletionInfo {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
for entry in &mut self.entries {
entry.normalize(specifier_map);
}
}
pub fn as_completion_response( pub fn as_completion_response(
&self, &self,
line_index: Arc<LineIndex>, line_index: Arc<LineIndex>,
@ -3627,6 +3708,13 @@ impl CompletionInfo {
} }
} }
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CompletionSpecifierRewrite {
old_specifier: String,
new_specifier: String,
new_deno_types_specifier: Option<String>,
}
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CompletionItemData { pub struct CompletionItemData {
@ -3639,7 +3727,7 @@ pub struct CompletionItemData {
/// be rewritten by replacing the first string with the second. Intended for /// be rewritten by replacing the first string with the second. Intended for
/// auto-import specifiers to be reverse-import-mapped. /// auto-import specifiers to be reverse-import-mapped.
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub specifier_rewrite: Option<(String, String)>, pub specifier_rewrite: Option<CompletionSpecifierRewrite>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Value>, pub data: Option<Value>,
pub use_code_snippet: bool, pub use_code_snippet: bool,
@ -3647,11 +3735,17 @@ pub struct CompletionItemData {
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct CompletionEntryDataImport { struct CompletionEntryDataAutoImport {
module_specifier: String, module_specifier: String,
file_name: String, file_name: String,
} }
#[derive(Debug)]
pub struct CompletionNormalizedAutoImportData {
raw: CompletionEntryDataAutoImport,
normalized: ModuleSpecifier,
}
#[derive(Debug, Default, Deserialize, Serialize)] #[derive(Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CompletionEntry { pub struct CompletionEntry {
@ -3684,9 +3778,28 @@ pub struct CompletionEntry {
is_import_statement_completion: Option<bool>, is_import_statement_completion: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
data: Option<Value>, data: Option<Value>,
/// This is not from tsc, we add it for convenience during normalization.
/// Represents `self.data.file_name`, but normalized.
#[serde(skip)]
auto_import_data: Option<CompletionNormalizedAutoImportData>,
} }
impl CompletionEntry { impl CompletionEntry {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
let Some(data) = &self.data else {
return;
};
let Ok(raw) =
serde_json::from_value::<CompletionEntryDataAutoImport>(data.clone())
else {
return;
};
if let Ok(normalized) = specifier_map.normalize(&raw.file_name) {
self.auto_import_data =
Some(CompletionNormalizedAutoImportData { raw, normalized });
}
}
fn get_commit_characters( fn get_commit_characters(
&self, &self,
info: &CompletionInfo, info: &CompletionInfo,
@ -3835,25 +3948,44 @@ impl CompletionEntry {
if let Some(source) = &self.source { if let Some(source) = &self.source {
let mut display_source = source.clone(); let mut display_source = source.clone();
if let Some(data) = &self.data { if let Some(import_data) = &self.auto_import_data {
if let Ok(import_data) = let import_mapper =
serde_json::from_value::<CompletionEntryDataImport>(data.clone()) language_server.get_ts_response_import_mapper(specifier);
if let Some(mut new_specifier) = import_mapper
.check_specifier(&import_data.normalized, specifier)
.or_else(|| relative_specifier(specifier, &import_data.normalized))
{ {
if let Ok(import_specifier) = resolve_url(&import_data.file_name) { if new_specifier.contains("/node_modules/") {
if let Some(new_module_specifier) = language_server return None;
.get_ts_response_import_mapper(specifier)
.check_specifier(&import_specifier, specifier)
.or_else(|| relative_specifier(specifier, &import_specifier))
{
display_source.clone_from(&new_module_specifier);
if new_module_specifier != import_data.module_specifier {
specifier_rewrite =
Some((import_data.module_specifier, new_module_specifier));
}
} else if source.starts_with(jsr_url().as_str()) {
return None;
}
} }
let mut new_deno_types_specifier = None;
if let Some(code_specifier) = language_server
.resolver
.deno_types_to_code_resolution(
&import_data.normalized,
Some(specifier),
)
.and_then(|s| {
import_mapper
.check_specifier(&s, specifier)
.or_else(|| relative_specifier(specifier, &s))
})
{
new_deno_types_specifier =
Some(std::mem::replace(&mut new_specifier, code_specifier));
}
display_source.clone_from(&new_specifier);
if new_specifier != import_data.raw.module_specifier
|| new_deno_types_specifier.is_some()
{
specifier_rewrite = Some(CompletionSpecifierRewrite {
old_specifier: import_data.raw.module_specifier.clone(),
new_specifier,
new_deno_types_specifier,
});
}
} else if source.starts_with(jsr_url().as_str()) {
return None;
} }
} }
// We want relative or bare (import-mapped or otherwise) specifiers to // We want relative or bare (import-mapped or otherwise) specifiers to
@ -3941,7 +4073,7 @@ pub struct OutliningSpan {
kind: OutliningSpanKind, kind: OutliningSpanKind,
} }
const FOLD_END_PAIR_CHARACTERS: &[u8] = &[b'}', b']', b')', b'`']; const FOLD_END_PAIR_CHARACTERS: &[u8] = b"}])`";
impl OutliningSpan { impl OutliningSpan {
pub fn to_folding_range( pub fn to_folding_range(
@ -4156,6 +4288,11 @@ impl TscSpecifierMap {
return specifier.to_string(); return specifier.to_string();
} }
let mut specifier = original.to_string(); let mut specifier = original.to_string();
if !specifier.contains("/node_modules/@types/node/") {
// The ts server doesn't give completions from files in
// `node_modules/.deno/`. We work around it like this.
specifier = specifier.replace("/node_modules/", "/$node_modules/");
}
let media_type = MediaType::from_specifier(original); let media_type = MediaType::from_specifier(original);
// If the URL-inferred media type doesn't correspond to tsc's path-inferred // If the URL-inferred media type doesn't correspond to tsc's path-inferred
// media type, force it to be the same by appending an extension. // media type, force it to be the same by appending an extension.
@ -4230,14 +4367,10 @@ impl State {
} }
fn get_document(&self, specifier: &ModuleSpecifier) -> Option<Arc<Document>> { fn get_document(&self, specifier: &ModuleSpecifier) -> Option<Arc<Document>> {
if let Some(scope) = &self.last_scope { self
self.state_snapshot.documents.get_or_load(specifier, scope) .state_snapshot
} else { .documents
self .get_or_load(specifier, self.last_scope.as_ref())
.state_snapshot
.documents
.get_or_load(specifier, &ModuleSpecifier::parse("file:///").unwrap())
}
} }
fn get_asset_or_document( fn get_asset_or_document(
@ -4277,7 +4410,7 @@ fn op_is_cancelled(state: &mut OpState) -> bool {
fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool { fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool {
let state = state.borrow::<State>(); let state = state.borrow::<State>();
let mark = state.performance.mark("tsc.op.op_is_node_file"); let mark = state.performance.mark("tsc.op.op_is_node_file");
let r = match ModuleSpecifier::parse(&path) { let r = match state.specifier_map.normalize(path) {
Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier), Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier),
Err(_) => false, Err(_) => false,
}; };
@ -4314,15 +4447,14 @@ fn op_load<'s>(
data: doc.text(), data: doc.text(),
script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), script_kind: crate::tsc::as_ts_script_kind(doc.media_type()),
version: state.script_version(&specifier), version: state.script_version(&specifier),
is_cjs: matches!( is_cjs: doc
doc.media_type(), .document()
MediaType::Cjs | MediaType::Cts | MediaType::Dcts .map(|d| state.state_snapshot.is_cjs_resolver.get_doc_module_kind(d))
), .unwrap_or(NodeModuleKind::Esm)
== NodeModuleKind::Cjs,
}) })
}; };
let serialized = serde_v8::to_v8(scope, maybe_load_response)?; let serialized = serde_v8::to_v8(scope, maybe_load_response)?;
state.performance.measure(mark); state.performance.measure(mark);
Ok(serialized) Ok(serialized)
} }
@ -4546,7 +4678,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
for doc in &docs { for doc in &docs {
let specifier = doc.specifier(); let specifier = doc.specifier();
let is_open = doc.is_open(); let is_open = doc.is_open();
if is_open || specifier.scheme() == "file" { if is_open
|| (specifier.scheme() == "file"
&& !state.state_snapshot.resolver.in_node_modules(specifier))
{
let script_names = doc let script_names = doc
.scope() .scope()
.and_then(|s| result.by_scope.get_mut(s)) .and_then(|s| result.by_scope.get_mut(s))
@ -4557,9 +4692,13 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
let (types, _) = documents.resolve_dependency( let (types, _) = documents.resolve_dependency(
types, types,
specifier, specifier,
state
.state_snapshot
.is_cjs_resolver
.get_doc_module_kind(doc),
doc.file_referrer(), doc.file_referrer(),
)?; )?;
let types_doc = documents.get_or_load(&types, specifier)?; let types_doc = documents.get_or_load(&types, doc.file_referrer())?;
Some(types_doc.specifier().clone()) Some(types_doc.specifier().clone())
})(); })();
// If there is a types dep, use that as the root instead. But if the doc // If there is a types dep, use that as the root instead. But if the doc
@ -4660,6 +4799,7 @@ fn run_tsc_thread(
specifier_map, specifier_map,
request_rx, request_rx,
)], )],
create_params: create_isolate_create_params(),
startup_snapshot: Some(tsc::compiler_snapshot()), startup_snapshot: Some(tsc::compiler_snapshot()),
inspector: has_inspector_server, inspector: has_inspector_server,
..Default::default() ..Default::default()
@ -4898,6 +5038,10 @@ pub struct UserPreferences {
pub allow_rename_of_import_path: Option<bool>, pub allow_rename_of_import_path: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub auto_import_file_exclude_patterns: Option<Vec<String>>, pub auto_import_file_exclude_patterns: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub interactive_inlay_hints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub prefer_type_only_auto_imports: Option<bool>,
} }
impl UserPreferences { impl UserPreferences {
@ -4915,6 +5059,7 @@ impl UserPreferences {
include_completions_with_snippet_text: Some( include_completions_with_snippet_text: Some(
config.snippet_support_capable(), config.snippet_support_capable(),
), ),
interactive_inlay_hints: Some(true),
provide_refactor_not_applicable_reason: Some(true), provide_refactor_not_applicable_reason: Some(true),
quote_preference: Some(fmt_config.into()), quote_preference: Some(fmt_config.into()),
use_label_details_in_completion_entries: Some(true), use_label_details_in_completion_entries: Some(true),
@ -5019,6 +5164,9 @@ impl UserPreferences {
} else { } else {
Some(language_settings.preferences.quote_style) Some(language_settings.preferences.quote_style)
}, },
prefer_type_only_auto_imports: Some(
language_settings.preferences.prefer_type_only_auto_imports,
),
..base_preferences ..base_preferences
} }
} }
@ -5402,7 +5550,7 @@ mod tests {
sources: &[(&str, &str, i32, LanguageId)], sources: &[(&str, &str, i32, LanguageId)],
) -> (TempDir, TsServer, Arc<StateSnapshot>, LspCache) { ) -> (TempDir, TsServer, Arc<StateSnapshot>, LspCache) {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
let mut config = Config::default(); let mut config = Config::default();
config config
.tree .tree
@ -5412,7 +5560,7 @@ mod tests {
"compilerOptions": ts_config, "compilerOptions": ts_config,
}) })
.to_string(), .to_string(),
temp_dir.uri().join("deno.json").unwrap(), temp_dir.url().join("deno.json").unwrap(),
&Default::default(), &Default::default(),
) )
.unwrap(), .unwrap(),
@ -5423,7 +5571,7 @@ mod tests {
let mut documents = Documents::default(); let mut documents = Documents::default();
documents.update_config(&config, &resolver, &cache, &Default::default()); documents.update_config(&config, &resolver, &cache, &Default::default());
for (relative_specifier, source, version, language_id) in sources { for (relative_specifier, source, version, language_id) in sources {
let specifier = temp_dir.uri().join(relative_specifier).unwrap(); let specifier = temp_dir.url().join(relative_specifier).unwrap();
documents.open(specifier, *version, *language_id, (*source).into(), None); documents.open(specifier, *version, *language_id, (*source).into(), None);
} }
let snapshot = Arc::new(StateSnapshot { let snapshot = Arc::new(StateSnapshot {
@ -5431,6 +5579,7 @@ mod tests {
documents: Arc::new(documents), documents: Arc::new(documents),
assets: Default::default(), assets: Default::default(),
config: Arc::new(config), config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver, resolver,
}); });
let performance = Arc::new(Performance::default()); let performance = Arc::new(Performance::default());
@ -5456,7 +5605,7 @@ mod tests {
let (_tx, rx) = mpsc::unbounded_channel(); let (_tx, rx) = mpsc::unbounded_channel();
let state = let state =
State::new(state_snapshot, Default::default(), Default::default(), rx); State::new(state_snapshot, Default::default(), Default::default(), rx);
let mut op_state = OpState::new(None); let mut op_state = OpState::new(None, None);
op_state.put(state); op_state.put(state);
op_state op_state
} }
@ -5481,7 +5630,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"noEmit": true, "noEmit": true,
"lib": [], "lib": [],
}), }),
@ -5493,7 +5641,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5527,7 +5675,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"jsx": "react", "jsx": "react",
"lib": ["esnext", "dom", "deno.ns"], "lib": ["esnext", "dom", "deno.ns"],
"noEmit": true, "noEmit": true,
@ -5540,7 +5687,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5553,7 +5700,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5571,7 +5717,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5584,7 +5730,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5598,7 +5743,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5630,7 +5775,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5648,7 +5792,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5661,7 +5805,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5682,7 +5825,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5728,7 +5871,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5740,7 +5882,7 @@ mod tests {
)], )],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await .await
@ -5806,7 +5948,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, cache) = setup( let (temp_dir, ts_server, snapshot, cache) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -5833,7 +5974,7 @@ mod tests {
b"export const b = \"b\";\n", b"export const b = \"b\";\n",
) )
.unwrap(); .unwrap();
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics( .get_diagnostics(
snapshot.clone(), snapshot.clone(),
@ -5883,7 +6024,7 @@ mod tests {
[(&specifier_dep, ChangeKind::Opened)], [(&specifier_dep, ChangeKind::Opened)],
None, None,
); );
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let diagnostics = ts_server let diagnostics = ts_server
.get_diagnostics( .get_diagnostics(
snapshot.clone(), snapshot.clone(),
@ -5948,14 +6089,13 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
&[("a.ts", fixture, 1, LanguageId::TypeScript)], &[("a.ts", fixture, 1, LanguageId::TypeScript)],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let info = ts_server let info = ts_server
.get_completions( .get_completions(
snapshot.clone(), snapshot.clone(),
@ -5970,9 +6110,10 @@ mod tests {
trigger_kind: None, trigger_kind: None,
}, },
Default::default(), Default::default(),
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap()
.unwrap(); .unwrap();
assert_eq!(info.entries.len(), 22); assert_eq!(info.entries.len(), 22);
let details = ts_server let details = ts_server
@ -5987,7 +6128,7 @@ mod tests {
preferences: None, preferences: None,
data: None, data: None,
}, },
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap() .unwrap()
@ -6099,7 +6240,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -6109,7 +6249,7 @@ mod tests {
], ],
) )
.await; .await;
let specifier = temp_dir.uri().join("a.ts").unwrap(); let specifier = temp_dir.url().join("a.ts").unwrap();
let fmt_options_config = FmtOptionsConfig { let fmt_options_config = FmtOptionsConfig {
semi_colons: Some(false), semi_colons: Some(false),
single_quote: Some(true), single_quote: Some(true),
@ -6130,9 +6270,10 @@ mod tests {
..Default::default() ..Default::default()
}, },
FormatCodeSettings::from(&fmt_options_config), FormatCodeSettings::from(&fmt_options_config),
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap()
.unwrap(); .unwrap();
let entry = info let entry = info
.entries .entries
@ -6156,7 +6297,7 @@ mod tests {
}), }),
data: entry.data.clone(), data: entry.data.clone(),
}, },
Some(temp_dir.uri()), Some(temp_dir.url()),
) )
.await .await
.unwrap() .unwrap()
@ -6208,7 +6349,6 @@ mod tests {
let (temp_dir, ts_server, snapshot, _) = setup( let (temp_dir, ts_server, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -6221,8 +6361,8 @@ mod tests {
let changes = ts_server let changes = ts_server
.get_edits_for_file_rename( .get_edits_for_file_rename(
snapshot, snapshot,
temp_dir.uri().join("b.ts").unwrap(), temp_dir.url().join("b.ts").unwrap(),
temp_dir.uri().join("🦕.ts").unwrap(), temp_dir.url().join("🦕.ts").unwrap(),
FormatCodeSettings::default(), FormatCodeSettings::default(),
UserPreferences::default(), UserPreferences::default(),
) )
@ -6231,7 +6371,7 @@ mod tests {
assert_eq!( assert_eq!(
changes, changes,
vec![FileTextChanges { vec![FileTextChanges {
file_name: temp_dir.uri().join("a.ts").unwrap().to_string(), file_name: temp_dir.url().join("a.ts").unwrap().to_string(),
text_changes: vec![TextChange { text_changes: vec![TextChange {
span: TextSpan { span: TextSpan {
start: 8, start: 8,
@ -6279,7 +6419,6 @@ mod tests {
let (temp_dir, _, snapshot, _) = setup( let (temp_dir, _, snapshot, _) = setup(
json!({ json!({
"target": "esnext", "target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"], "lib": ["deno.ns", "deno.window"],
"noEmit": true, "noEmit": true,
}), }),
@ -6290,7 +6429,7 @@ mod tests {
let resolved = op_resolve_inner( let resolved = op_resolve_inner(
&mut state, &mut state,
ResolveArgs { ResolveArgs {
base: temp_dir.uri().join("a.ts").unwrap().to_string(), base: temp_dir.url().join("a.ts").unwrap().to_string(),
is_base_cjs: false, is_base_cjs: false,
specifiers: vec!["./b.ts".to_string()], specifiers: vec!["./b.ts".to_string()],
}, },
@ -6299,7 +6438,7 @@ mod tests {
assert_eq!( assert_eq!(
resolved, resolved,
vec![Some(( vec![Some((
temp_dir.uri().join("b.ts").unwrap().to_string(), temp_dir.url().join("b.ts").unwrap().to_string(),
MediaType::TypeScript.as_ts_extension().to_string() MediaType::TypeScript.as_ts_extension().to_string()
))] ))]
); );

View file

@ -6,17 +6,25 @@ use deno_core::parking_lot::Mutex;
use deno_core::url::Position; use deno_core::url::Position;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use lsp_types::Uri;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use std::collections::HashMap; use std::collections::HashMap;
use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use super::cache::LspCache; use super::cache::LspCache;
use super::logging::lsp_warn;
/// Used in situations where a default URL needs to be used where otherwise a /// Used in situations where a default URL needs to be used where otherwise a
/// panic is undesired. /// panic is undesired.
pub static INVALID_SPECIFIER: Lazy<ModuleSpecifier> = pub static INVALID_SPECIFIER: Lazy<ModuleSpecifier> =
Lazy::new(|| ModuleSpecifier::parse("deno://invalid").unwrap()); Lazy::new(|| ModuleSpecifier::parse("deno://invalid").unwrap());
/// Used in situations where a default URL needs to be used where otherwise a
/// panic is undesired.
pub static INVALID_URI: Lazy<Uri> =
Lazy::new(|| Uri::from_str("deno://invalid").unwrap());
/// Matches the `encodeURIComponent()` encoding from JavaScript, which matches /// Matches the `encodeURIComponent()` encoding from JavaScript, which matches
/// the component percent encoding set. /// the component percent encoding set.
/// ///
@ -47,6 +55,25 @@ const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS
.add(b'+') .add(b'+')
.add(b','); .add(b',');
/// Characters that may be left unencoded in a `Url` path but not valid in a
/// `Uri` path.
const URL_TO_URI_PATH: &percent_encoding::AsciiSet =
&percent_encoding::CONTROLS
.add(b'[')
.add(b']')
.add(b'^')
.add(b'|');
/// Characters that may be left unencoded in a `Url` query but not valid in a
/// `Uri` query.
const URL_TO_URI_QUERY: &percent_encoding::AsciiSet =
&URL_TO_URI_PATH.add(b'\\').add(b'`').add(b'{').add(b'}');
/// Characters that may be left unencoded in a `Url` fragment but not valid in
/// a `Uri` fragment.
const URL_TO_URI_FRAGMENT: &percent_encoding::AsciiSet =
&URL_TO_URI_PATH.add(b'#').add(b'\\').add(b'{').add(b'}');
fn hash_data_specifier(specifier: &ModuleSpecifier) -> String { fn hash_data_specifier(specifier: &ModuleSpecifier) -> String {
let mut file_name_str = specifier.path().to_string(); let mut file_name_str = specifier.path().to_string();
if let Some(query) = specifier.query() { if let Some(query) = specifier.query() {
@ -56,7 +83,7 @@ fn hash_data_specifier(specifier: &ModuleSpecifier) -> String {
crate::util::checksum::gen(&[file_name_str.as_bytes()]) crate::util::checksum::gen(&[file_name_str.as_bytes()])
} }
fn to_deno_url(specifier: &Url) -> String { fn to_deno_uri(specifier: &Url) -> String {
let mut string = String::with_capacity(specifier.as_str().len() + 6); let mut string = String::with_capacity(specifier.as_str().len() + 6);
string.push_str("deno:/"); string.push_str("deno:/");
string.push_str(specifier.scheme()); string.push_str(specifier.scheme());
@ -93,58 +120,62 @@ fn from_deno_url(url: &Url) -> Option<Url> {
Url::parse(&string).ok() Url::parse(&string).ok()
} }
/// This exists to make it a little bit harder to accidentally use a `Url`
/// in the wrong place where a client url should be used.
#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct LspClientUrl(Url);
impl LspClientUrl {
pub fn new(url: Url) -> Self {
Self(url)
}
pub fn as_url(&self) -> &Url {
&self.0
}
pub fn into_url(self) -> Url {
self.0
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
}
impl std::fmt::Display for LspClientUrl {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
struct LspUrlMapInner { struct LspUrlMapInner {
specifier_to_url: HashMap<ModuleSpecifier, LspClientUrl>, specifier_to_uri: HashMap<ModuleSpecifier, Uri>,
url_to_specifier: HashMap<Url, ModuleSpecifier>, uri_to_specifier: HashMap<Uri, ModuleSpecifier>,
} }
impl LspUrlMapInner { impl LspUrlMapInner {
fn put(&mut self, specifier: ModuleSpecifier, url: LspClientUrl) { fn put(&mut self, specifier: ModuleSpecifier, uri: Uri) {
self self.uri_to_specifier.insert(uri.clone(), specifier.clone());
.url_to_specifier self.specifier_to_uri.insert(specifier, uri);
.insert(url.as_url().clone(), specifier.clone());
self.specifier_to_url.insert(specifier, url);
} }
fn get_url(&self, specifier: &ModuleSpecifier) -> Option<&LspClientUrl> { fn get_uri(&self, specifier: &ModuleSpecifier) -> Option<&Uri> {
self.specifier_to_url.get(specifier) self.specifier_to_uri.get(specifier)
} }
fn get_specifier(&self, url: &Url) -> Option<&ModuleSpecifier> { fn get_specifier(&self, uri: &Uri) -> Option<&ModuleSpecifier> {
self.url_to_specifier.get(url) self.uri_to_specifier.get(uri)
} }
} }
pub fn uri_parse_unencoded(s: &str) -> Result<Uri, AnyError> {
url_to_uri(&Url::parse(s)?)
}
pub fn url_to_uri(url: &Url) -> Result<Uri, AnyError> {
let components = deno_core::url::quirks::internal_components(url);
let mut input = String::with_capacity(url.as_str().len());
input.push_str(&url.as_str()[..components.path_start as usize]);
input.push_str(
&percent_encoding::utf8_percent_encode(url.path(), URL_TO_URI_PATH)
.to_string(),
);
if let Some(query) = url.query() {
input.push('?');
input.push_str(
&percent_encoding::utf8_percent_encode(query, URL_TO_URI_QUERY)
.to_string(),
);
}
if let Some(fragment) = url.fragment() {
input.push('#');
input.push_str(
&percent_encoding::utf8_percent_encode(fragment, URL_TO_URI_FRAGMENT)
.to_string(),
);
}
Ok(Uri::from_str(&input).inspect_err(|err| {
lsp_warn!("Could not convert URL \"{url}\" to URI: {err}")
})?)
}
pub fn uri_to_url(uri: &Uri) -> Url {
Url::parse(uri.as_str()).unwrap()
}
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum LspUrlKind { pub enum LspUrlKind {
File, File,
@ -167,24 +198,24 @@ impl LspUrlMap {
/// Normalize a specifier that is used internally within Deno (or tsc) to a /// Normalize a specifier that is used internally within Deno (or tsc) to a
/// URL that can be handled as a "virtual" document by an LSP client. /// URL that can be handled as a "virtual" document by an LSP client.
pub fn normalize_specifier( pub fn specifier_to_uri(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Result<LspClientUrl, AnyError> { ) -> Result<Uri, AnyError> {
if let Some(file_url) = if let Some(file_url) =
self.cache.vendored_specifier(specifier, file_referrer) self.cache.vendored_specifier(specifier, file_referrer)
{ {
return Ok(LspClientUrl(file_url)); return url_to_uri(&file_url);
} }
let mut inner = self.inner.lock(); let mut inner = self.inner.lock();
if let Some(url) = inner.get_url(specifier).cloned() { if let Some(uri) = inner.get_uri(specifier).cloned() {
Ok(url) Ok(uri)
} else { } else {
let url = if specifier.scheme() == "file" { let uri = if specifier.scheme() == "file" {
LspClientUrl(specifier.clone()) url_to_uri(specifier)?
} else { } else {
let specifier_str = if specifier.scheme() == "asset" { let uri_str = if specifier.scheme() == "asset" {
format!("deno:/asset{}", specifier.path()) format!("deno:/asset{}", specifier.path())
} else if specifier.scheme() == "data" { } else if specifier.scheme() == "data" {
let data_url = deno_graph::source::RawDataUrl::parse(specifier)?; let data_url = deno_graph::source::RawDataUrl::parse(specifier)?;
@ -200,13 +231,13 @@ impl LspUrlMap {
extension extension
) )
} else { } else {
to_deno_url(specifier) to_deno_uri(specifier)
}; };
let url = LspClientUrl(Url::parse(&specifier_str)?); let uri = uri_parse_unencoded(&uri_str)?;
inner.put(specifier.clone(), url.clone()); inner.put(specifier.clone(), uri.clone());
url uri
}; };
Ok(url) Ok(uri)
} }
} }
@ -218,12 +249,17 @@ impl LspUrlMap {
/// Note: Sometimes the url provided by the client may not have a trailing slash, /// Note: Sometimes the url provided by the client may not have a trailing slash,
/// so we need to force it to in the mapping and nee to explicitly state whether /// so we need to force it to in the mapping and nee to explicitly state whether
/// this is a file or directory url. /// this is a file or directory url.
pub fn normalize_url(&self, url: &Url, kind: LspUrlKind) -> ModuleSpecifier { pub fn uri_to_specifier(
if let Some(remote_url) = self.cache.unvendored_specifier(url) { &self,
uri: &Uri,
kind: LspUrlKind,
) -> ModuleSpecifier {
let url = uri_to_url(uri);
if let Some(remote_url) = self.cache.unvendored_specifier(&url) {
return remote_url; return remote_url;
} }
let mut inner = self.inner.lock(); let mut inner = self.inner.lock();
if let Some(specifier) = inner.get_specifier(url).cloned() { if let Some(specifier) = inner.get_specifier(uri).cloned() {
return specifier; return specifier;
} }
let mut specifier = None; let mut specifier = None;
@ -234,13 +270,13 @@ impl LspUrlMap {
LspUrlKind::File => Url::from_file_path(path).unwrap(), LspUrlKind::File => Url::from_file_path(path).unwrap(),
}); });
} }
} else if let Some(s) = file_like_to_file_specifier(url) { } else if let Some(s) = file_like_to_file_specifier(&url) {
specifier = Some(s); specifier = Some(s);
} else if let Some(s) = from_deno_url(url) { } else if let Some(s) = from_deno_url(&url) {
specifier = Some(s); specifier = Some(s);
} }
let specifier = specifier.unwrap_or_else(|| url.clone()); let specifier = specifier.unwrap_or_else(|| url.clone());
inner.put(specifier.clone(), LspClientUrl(url.clone())); inner.put(specifier.clone(), uri.clone());
specifier specifier
} }
} }
@ -288,15 +324,14 @@ mod tests {
fn test_lsp_url_map() { fn test_lsp_url_map() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap(); let fixture = resolve_url("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = assert_eq!(
Url::parse("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap(); actual_uri.as_str(),
assert_eq!(actual_url.as_url(), &expected_url); "deno:/https/deno.land/x/pkg%401.0.0/mod.ts"
);
let actual_specifier = let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -304,18 +339,14 @@ mod tests {
fn test_lsp_url_reverse() { fn test_lsp_url_reverse() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = let fixture =
resolve_url("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap(); Uri::from_str("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap();
let actual_specifier = map.normalize_url(&fixture, LspUrlKind::File); let actual_specifier = map.uri_to_specifier(&fixture, LspUrlKind::File);
let expected_specifier = let expected_specifier =
Url::parse("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap(); Url::parse("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap();
assert_eq!(&actual_specifier, &expected_specifier); assert_eq!(&actual_specifier, &expected_specifier);
let actual_url = map let actual_uri = map.specifier_to_uri(&actual_specifier, None).unwrap();
.normalize_specifier(&actual_specifier, None) assert_eq!(actual_uri, fixture);
.unwrap()
.as_url()
.clone();
assert_eq!(actual_url, fixture);
} }
#[test] #[test]
@ -323,14 +354,11 @@ mod tests {
// Test fix for #9741 - not properly encoding certain URLs // Test fix for #9741 - not properly encoding certain URLs
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("https://cdn.skypack.dev/-/postcss@v8.2.9-E4SktPp9c0AtxrJHp8iV/dist=es2020,mode=types/lib/postcss.d.ts").unwrap(); let fixture = resolve_url("https://cdn.skypack.dev/-/postcss@v8.2.9-E4SktPp9c0AtxrJHp8iV/dist=es2020,mode=types/lib/postcss.d.ts").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = Url::parse("deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts").unwrap(); assert_eq!(actual_uri.as_str(), "deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts");
assert_eq!(actual_url.as_url(), &expected_url); let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
let actual_specifier =
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -338,14 +366,13 @@ mod tests {
fn test_lsp_url_map_data() { fn test_lsp_url_map_data() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap(); let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = Url::parse("deno:/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37/data_url.ts").unwrap(); let expected_url = Url::parse("deno:/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37/data_url.ts").unwrap();
assert_eq!(actual_url.as_url(), &expected_url); assert_eq!(&uri_to_url(&actual_uri), &expected_url);
let actual_specifier = let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -353,15 +380,11 @@ mod tests {
fn test_lsp_url_map_host_with_port() { fn test_lsp_url_map_host_with_port() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("http://localhost:8000/mod.ts").unwrap(); let fixture = resolve_url("http://localhost:8000/mod.ts").unwrap();
let actual_url = map let actual_uri = map
.normalize_specifier(&fixture, None) .specifier_to_uri(&fixture, None)
.expect("could not handle specifier"); .expect("could not handle specifier");
let expected_url = assert_eq!(actual_uri.as_str(), "deno:/http/localhost%3A8000/mod.ts");
Url::parse("deno:/http/localhost%3A8000/mod.ts").unwrap(); let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
assert_eq!(actual_url.as_url(), &expected_url);
let actual_specifier =
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
assert_eq!(actual_specifier, fixture); assert_eq!(actual_specifier, fixture);
} }
@ -369,11 +392,11 @@ mod tests {
#[test] #[test]
fn test_normalize_windows_path() { fn test_normalize_windows_path() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url( let fixture = Uri::from_str(
"file:///c%3A/Users/deno/Desktop/file%20with%20spaces%20in%20name.txt", "file:///c%3A/Users/deno/Desktop/file%20with%20spaces%20in%20name.txt",
) )
.unwrap(); .unwrap();
let actual = map.normalize_url(&fixture, LspUrlKind::File); let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
let expected = let expected =
Url::parse("file:///C:/Users/deno/Desktop/file with spaces in name.txt") Url::parse("file:///C:/Users/deno/Desktop/file with spaces in name.txt")
.unwrap(); .unwrap();
@ -384,11 +407,11 @@ mod tests {
#[test] #[test]
fn test_normalize_percent_encoded_path() { fn test_normalize_percent_encoded_path() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url( let fixture = Uri::from_str(
"file:///Users/deno/Desktop/file%20with%20spaces%20in%20name.txt", "file:///Users/deno/Desktop/file%20with%20spaces%20in%20name.txt",
) )
.unwrap(); .unwrap();
let actual = map.normalize_url(&fixture, LspUrlKind::File); let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
let expected = let expected =
Url::parse("file:///Users/deno/Desktop/file with spaces in name.txt") Url::parse("file:///Users/deno/Desktop/file with spaces in name.txt")
.unwrap(); .unwrap();
@ -398,9 +421,9 @@ mod tests {
#[test] #[test]
fn test_normalize_deno_status() { fn test_normalize_deno_status() {
let map = LspUrlMap::default(); let map = LspUrlMap::default();
let fixture = resolve_url("deno:/status.md").unwrap(); let fixture = Uri::from_str("deno:/status.md").unwrap();
let actual = map.normalize_url(&fixture, LspUrlKind::File); let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
assert_eq!(actual, fixture); assert_eq!(actual.as_str(), fixture.as_str());
} }
#[test] #[test]

View file

@ -15,11 +15,11 @@ mod js;
mod jsr; mod jsr;
mod lsp; mod lsp;
mod module_loader; mod module_loader;
mod napi;
mod node; mod node;
mod npm; mod npm;
mod ops; mod ops;
mod resolver; mod resolver;
mod shared;
mod standalone; mod standalone;
mod task_runner; mod task_runner;
mod tools; mod tools;
@ -31,12 +31,13 @@ mod worker;
use crate::args::flags_from_vec; use crate::args::flags_from_vec;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::Flags; use crate::args::Flags;
use crate::args::DENO_FUTURE;
use crate::graph_container::ModuleGraphContainer;
use crate::util::display; use crate::util::display;
use crate::util::v8::get_v8_flags_from_env; use crate::util::v8::get_v8_flags_from_env;
use crate::util::v8::init_v8_flags; use crate::util::v8::init_v8_flags;
use args::TaskFlags;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS; pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
@ -50,11 +51,19 @@ use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_terminal::colors; use deno_terminal::colors;
use factory::CliFactory; use factory::CliFactory;
use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;
use std::env; use std::env;
use std::future::Future; use std::future::Future;
use std::io::IsTerminal;
use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
/// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type. /// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type.
trait SubcommandOutput { trait SubcommandOutput {
fn output(self) -> Result<i32, AnyError>; fn output(self) -> Result<i32, AnyError>;
@ -94,7 +103,10 @@ fn spawn_subcommand<F: Future<Output = T> + 'static, T: SubcommandOutput>(
async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> { async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
let handle = match flags.subcommand.clone() { let handle = match flags.subcommand.clone() {
DenoSubcommand::Add(add_flags) => spawn_subcommand(async { DenoSubcommand::Add(add_flags) => spawn_subcommand(async {
tools::registry::add(flags, add_flags).await tools::registry::add(flags, add_flags, tools::registry::AddCommandName::Add).await
}),
DenoSubcommand::Remove(remove_flags) => spawn_subcommand(async {
tools::registry::remove(flags, remove_flags).await
}), }),
DenoSubcommand::Bench(bench_flags) => spawn_subcommand(async { DenoSubcommand::Bench(bench_flags) => spawn_subcommand(async {
if bench_flags.watch.is_some() { if bench_flags.watch.is_some() {
@ -103,9 +115,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::bench::run_benchmarks(flags, bench_flags).await tools::bench::run_benchmarks(flags, bench_flags).await
} }
}), }),
DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async { DenoSubcommand::Bundle => exit_with_message("⚠️ `deno bundle` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations", 1),
tools::bundle::bundle(flags, bundle_flags).await
}),
DenoSubcommand::Doc(doc_flags) => { DenoSubcommand::Doc(doc_flags) => {
spawn_subcommand(async { tools::doc::doc(flags, doc_flags).await }) spawn_subcommand(async { tools::doc::doc(flags, doc_flags).await })
} }
@ -113,28 +123,19 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::run::eval_command(flags, eval_flags).await tools::run::eval_command(flags, eval_flags).await
}), }),
DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move { DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags); tools::installer::install_from_entrypoints(flags, &cache_flags.files).await
let emitter = factory.emitter()?;
let main_graph_container =
factory.main_module_graph_container().await?;
main_graph_container
.load_and_type_check_files(&cache_flags.files)
.await?;
emitter.cache_module_emits(&main_graph_container.graph()).await
}), }),
DenoSubcommand::Check(check_flags) => spawn_subcommand(async move { DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags); tools::check::check(flags, check_flags).await
let main_graph_container = }),
factory.main_module_graph_container().await?; DenoSubcommand::Clean => spawn_subcommand(async move {
main_graph_container tools::clean::clean()
.load_and_type_check_files(&check_flags.files)
.await
}), }),
DenoSubcommand::Compile(compile_flags) => spawn_subcommand(async { DenoSubcommand::Compile(compile_flags) => spawn_subcommand(async {
tools::compile::compile(flags, compile_flags).await tools::compile::compile(flags, compile_flags).await
}), }),
DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async { DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async {
tools::coverage::cover_files(flags, coverage_flags).await tools::coverage::cover_files(flags, coverage_flags)
}), }),
DenoSubcommand::Fmt(fmt_flags) => { DenoSubcommand::Fmt(fmt_flags) => {
spawn_subcommand( spawn_subcommand(
@ -143,9 +144,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
} }
DenoSubcommand::Init(init_flags) => { DenoSubcommand::Init(init_flags) => {
spawn_subcommand(async { spawn_subcommand(async {
// make compiler happy since init_project is sync tools::init::init_project(init_flags).await
tokio::task::yield_now().await;
tools::init::init_project(init_flags)
}) })
} }
DenoSubcommand::Info(info_flags) => { DenoSubcommand::Info(info_flags) => {
@ -154,13 +153,28 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
DenoSubcommand::Install(install_flags) => spawn_subcommand(async { DenoSubcommand::Install(install_flags) => spawn_subcommand(async {
tools::installer::install_command(flags, install_flags).await tools::installer::install_command(flags, install_flags).await
}), }),
DenoSubcommand::JSONReference(json_reference) => spawn_subcommand(async move {
display::write_to_stdout_ignore_sigpipe(&deno_core::serde_json::to_vec_pretty(&json_reference.json).unwrap())
}),
DenoSubcommand::Jupyter(jupyter_flags) => spawn_subcommand(async { DenoSubcommand::Jupyter(jupyter_flags) => spawn_subcommand(async {
tools::jupyter::kernel(flags, jupyter_flags).await tools::jupyter::kernel(flags, jupyter_flags).await
}), }),
DenoSubcommand::Uninstall(uninstall_flags) => spawn_subcommand(async { DenoSubcommand::Uninstall(uninstall_flags) => spawn_subcommand(async {
tools::installer::uninstall(uninstall_flags) tools::installer::uninstall(flags, uninstall_flags).await
}),
DenoSubcommand::Lsp => spawn_subcommand(async {
if std::io::stderr().is_terminal() {
log::warn!(
"{} command is intended to be run by text editors and IDEs and shouldn't be run manually.
Visit https://docs.deno.com/runtime/getting_started/setup_your_environment/ for instruction
how to setup your favorite text editor.
Press Ctrl+C to exit.
", colors::cyan("deno lsp"));
}
lsp::start().await
}), }),
DenoSubcommand::Lsp => spawn_subcommand(async { lsp::start().await }),
DenoSubcommand::Lint(lint_flags) => spawn_subcommand(async { DenoSubcommand::Lint(lint_flags) => spawn_subcommand(async {
if lint_flags.rules { if lint_flags.rules {
tools::lint::print_rules_list( tools::lint::print_rules_list(
@ -172,18 +186,84 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::lint::lint(flags, lint_flags).await tools::lint::lint(flags, lint_flags).await
} }
}), }),
DenoSubcommand::Outdated(update_flags) => {
spawn_subcommand(async move {
tools::registry::outdated(flags, update_flags).await
})
}
DenoSubcommand::Repl(repl_flags) => { DenoSubcommand::Repl(repl_flags) => {
spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await }) spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await })
} }
DenoSubcommand::Run(run_flags) => spawn_subcommand(async move { DenoSubcommand::Run(run_flags) => spawn_subcommand(async move {
if run_flags.is_stdin() { if run_flags.is_stdin() {
tools::run::run_from_stdin(flags).await tools::run::run_from_stdin(flags.clone()).await
} else { } else {
tools::run::run_script(WorkerExecutionMode::Run, flags, run_flags.watch).await let result = tools::run::run_script(WorkerExecutionMode::Run, flags.clone(), run_flags.watch).await;
match result {
Ok(v) => Ok(v),
Err(script_err) => {
if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = script_err.downcast_ref::<ResolvePkgFolderFromDenoReqError>() {
if flags.node_modules_dir.is_none() {
let mut flags = flags.deref().clone();
let watch = match &flags.subcommand {
DenoSubcommand::Run(run_flags) => run_flags.watch.clone(),
_ => unreachable!(),
};
flags.node_modules_dir = Some(deno_config::deno_json::NodeModulesDirMode::None);
// use the current lockfile, but don't write it out
if flags.frozen_lockfile.is_none() {
flags.internal.lockfile_skip_write = true;
}
return tools::run::run_script(WorkerExecutionMode::Run, Arc::new(flags), watch).await;
}
}
let script_err_msg = script_err.to_string();
if script_err_msg.starts_with(MODULE_NOT_FOUND) || script_err_msg.starts_with(UNSUPPORTED_SCHEME) {
if run_flags.bare {
let mut cmd = args::clap_root();
cmd.build();
let command_names = cmd.get_subcommands().map(|command| command.get_name()).collect::<Vec<_>>();
let suggestions = args::did_you_mean(&run_flags.script, command_names);
if !suggestions.is_empty() {
let mut error = clap::error::Error::<clap::error::DefaultFormatter>::new(clap::error::ErrorKind::InvalidSubcommand).with_cmd(&cmd);
error.insert(
clap::error::ContextKind::SuggestedSubcommand,
clap::error::ContextValue::Strings(suggestions),
);
Err(error.into())
} else {
Err(script_err)
}
} else {
let mut new_flags = flags.deref().clone();
let task_flags = TaskFlags {
cwd: None,
task: Some(run_flags.script.clone()),
is_run: true,
recursive: false,
filter: None,
eval: false,
};
new_flags.subcommand = DenoSubcommand::Task(task_flags.clone());
let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone()).await;
match result {
Ok(v) => Ok(v),
Err(_) => {
// Return script error for backwards compatibility.
Err(script_err)
}
}
}
} else {
Err(script_err)
}
}
}
} }
}), }),
DenoSubcommand::Serve(serve_flags) => spawn_subcommand(async move { DenoSubcommand::Serve(serve_flags) => spawn_subcommand(async move {
tools::run::run_script(WorkerExecutionMode::Serve, flags, serve_flags.watch).await tools::serve::serve(flags, serve_flags).await
}), }),
DenoSubcommand::Task(task_flags) => spawn_subcommand(async { DenoSubcommand::Task(task_flags) => spawn_subcommand(async {
tools::task::execute_script(flags, task_flags).await tools::task::execute_script(flags, task_flags).await
@ -229,12 +309,27 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
"This deno was built without the \"upgrade\" feature. Please upgrade using the installation method originally used to install Deno.", "This deno was built without the \"upgrade\" feature. Please upgrade using the installation method originally used to install Deno.",
1, 1,
), ),
DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async { DenoSubcommand::Vendor => exit_with_message("⚠️ `deno vendor` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations", 1),
tools::vendor::vendor(flags, vendor_flags).await
}),
DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async { DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
tools::registry::publish(flags, publish_flags).await tools::registry::publish(flags, publish_flags).await
}), }),
DenoSubcommand::Help(help_flags) => spawn_subcommand(async move {
use std::io::Write;
let mut stream = anstream::AutoStream::new(std::io::stdout(), if colors::use_color() {
anstream::ColorChoice::Auto
} else {
anstream::ColorChoice::Never
});
match stream.write_all(help_flags.help.ansi().to_string().as_bytes()) {
Ok(()) => Ok(()),
Err(e) => match e.kind() {
std::io::ErrorKind::BrokenPipe => Ok(()),
_ => Err(e),
},
}
}),
}; };
handle.await? handle.await?
@ -257,22 +352,21 @@ fn setup_panic_hook() {
eprintln!("var set and include the backtrace in your report."); eprintln!("var set and include the backtrace in your report.");
eprintln!(); eprintln!();
eprintln!("Platform: {} {}", env::consts::OS, env::consts::ARCH); eprintln!("Platform: {} {}", env::consts::OS, env::consts::ARCH);
eprintln!("Version: {}", version::deno()); eprintln!("Version: {}", version::DENO_VERSION_INFO.deno);
eprintln!("Args: {:?}", env::args().collect::<Vec<_>>()); eprintln!("Args: {:?}", env::args().collect::<Vec<_>>());
eprintln!(); eprintln!();
orig_hook(panic_info); orig_hook(panic_info);
std::process::exit(1); deno_runtime::exit(1);
})); }));
} }
#[allow(clippy::print_stderr)]
fn exit_with_message(message: &str, code: i32) -> ! { fn exit_with_message(message: &str, code: i32) -> ! {
eprintln!( log::error!(
"{}: {}", "{}: {}",
colors::red_bold("error"), colors::red_bold("error"),
message.trim_start_matches("error: ") message.trim_start_matches("error: ")
); );
std::process::exit(code); deno_runtime::exit(code);
} }
fn exit_for_error(error: AnyError) -> ! { fn exit_for_error(error: AnyError) -> ! {
@ -291,28 +385,18 @@ fn exit_for_error(error: AnyError) -> ! {
exit_with_message(&error_string, error_code); exit_with_message(&error_string, error_code);
} }
#[allow(clippy::print_stderr)]
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
eprintln!( log::error!(
"Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.", "Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.",
feature feature
); );
std::process::exit(70); deno_runtime::exit(70);
}
// TODO(bartlomieju): remove when `--unstable` flag is removed.
#[allow(clippy::print_stderr)]
pub(crate) fn unstable_warn_cb(feature: &str, api_name: &str) {
eprintln!(
"⚠️ {}",
colors::yellow(format!(
"The `{}` API was used with `--unstable` flag. The `--unstable` flag is deprecated and will be removed in Deno 2.0. Use granular `--unstable-{}` instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags",
api_name, feature
))
);
} }
pub fn main() { pub fn main() {
#[cfg(feature = "dhat-heap")]
let profiler = dhat::Profiler::new_heap();
setup_panic_hook(); setup_panic_hook();
util::unix::raise_fd_limit(); util::unix::raise_fd_limit();
@ -333,8 +417,13 @@ pub fn main() {
run_subcommand(Arc::new(flags)).await run_subcommand(Arc::new(flags)).await
}; };
match create_and_run_current_thread_with_maybe_metrics(future) { let result = create_and_run_current_thread_with_maybe_metrics(future);
Ok(exit_code) => std::process::exit(exit_code),
#[cfg(feature = "dhat-heap")]
drop(profiler);
match result {
Ok(exit_code) => deno_runtime::exit(exit_code),
Err(err) => exit_for_error(err), Err(err) => exit_for_error(err),
} }
} }
@ -345,35 +434,32 @@ fn resolve_flags_and_init(
let flags = match flags_from_vec(args) { let flags = match flags_from_vec(args) {
Ok(flags) => flags, Ok(flags) => flags,
Err(err @ clap::Error { .. }) Err(err @ clap::Error { .. })
if err.kind() == clap::error::ErrorKind::DisplayHelp if err.kind() == clap::error::ErrorKind::DisplayVersion =>
|| err.kind() == clap::error::ErrorKind::DisplayVersion =>
{ {
// Ignore results to avoid BrokenPipe errors. // Ignore results to avoid BrokenPipe errors.
util::logger::init(None);
let _ = err.print(); let _ = err.print();
std::process::exit(0); deno_runtime::exit(0);
}
Err(err) => {
util::logger::init(None);
exit_for_error(AnyError::from(err))
} }
Err(err) => exit_for_error(AnyError::from(err)),
}; };
// TODO(bartlomieju): remove when `--unstable` flag is removed. if let Some(otel_config) = flags.otel_config() {
deno_runtime::ops::otel::init(otel_config)?;
}
util::logger::init(flags.log_level);
// TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled { if flags.unstable_config.legacy_flag_enabled {
#[allow(clippy::print_stderr)] log::warn!(
if matches!(flags.subcommand, DenoSubcommand::Check(_)) { "⚠️ {}",
// can't use log crate because that's not setup yet colors::yellow(
eprintln!( "The `--unstable` flag has been removed in Deno 2.0. Use granular `--unstable-*` flags instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags"
"⚠️ {}", )
colors::yellow( );
"The `--unstable` flag is not needed for `deno check` anymore."
)
);
} else {
eprintln!(
"⚠️ {}",
colors::yellow(
"The `--unstable` flag is deprecated and will be removed in Deno 2.0. Use granular `--unstable-*` flags instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags"
)
);
}
} }
let default_v8_flags = match flags.subcommand { let default_v8_flags = match flags.subcommand {
@ -381,26 +467,19 @@ fn resolve_flags_and_init(
// https://github.com/microsoft/vscode/blob/48d4ba271686e8072fc6674137415bc80d936bc7/extensions/typescript-language-features/src/configuration/configuration.ts#L213-L214 // https://github.com/microsoft/vscode/blob/48d4ba271686e8072fc6674137415bc80d936bc7/extensions/typescript-language-features/src/configuration/configuration.ts#L213-L214
DenoSubcommand::Lsp => vec!["--max-old-space-size=3072".to_string()], DenoSubcommand::Lsp => vec!["--max-old-space-size=3072".to_string()],
_ => { _ => {
if *DENO_FUTURE { // TODO(bartlomieju): I think this can be removed as it's handled by `deno_core`
// deno_ast removes TypeScript `assert` keywords, so this flag only affects JavaScript // and its settings.
// TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast // deno_ast removes TypeScript `assert` keywords, so this flag only affects JavaScript
vec!["--no-harmony-import-assertions".to_string()] // TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast
} else { vec!["--no-harmony-import-assertions".to_string()]
vec![
// If we're still in v1.X version we want to support import assertions.
// V8 12.6 unshipped the support by default, so force it by passing a
// flag.
"--harmony-import-assertions".to_string(),
// Verify with DENO_FUTURE for now.
"--no-maglev".to_string(),
]
}
} }
}; };
init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env()); init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env());
deno_core::JsRuntime::init_platform(None); // TODO(bartlomieju): remove last argument once Deploy no longer needs it
util::logger::init(flags.log_level); deno_core::JsRuntime::init_platform(
None, /* import assertions enabled */ false,
);
Ok(flags) Ok(flags)
} }

Some files were not shown because too many files have changed in this diff Show more