0
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2025-02-01 12:16:11 -05:00

Merge branch 'main' into premature_stdin_closure

This commit is contained in:
Yoshiya Hinosawa 2024-11-27 14:16:25 +09:00 committed by GitHub
commit 2a0830d0d6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2818 changed files with 33869 additions and 16171 deletions

View file

@ -31,6 +31,8 @@
"cli/tsc/dts/lib.scripthost.d.ts", "cli/tsc/dts/lib.scripthost.d.ts",
"cli/tsc/dts/lib.webworker*.d.ts", "cli/tsc/dts/lib.webworker*.d.ts",
"cli/tsc/dts/typescript.d.ts", "cli/tsc/dts/typescript.d.ts",
"cli/tools/doc/prism.css",
"cli/tools/doc/prism.js",
"ext/websocket/autobahn/reports", "ext/websocket/autobahn/reports",
"gh-pages", "gh-pages",
"target", "target",
@ -65,10 +67,14 @@
"tests/wpt/runner/expectation.json", "tests/wpt/runner/expectation.json",
"tests/wpt/runner/manifest.json", "tests/wpt/runner/manifest.json",
"tests/wpt/suite", "tests/wpt/suite",
"third_party" "third_party",
"tests/specs/run/shebang_with_json_imports_tsc",
"tests/specs/run/shebang_with_json_imports_swc",
"tests/specs/run/ext_flag_takes_precedence_over_extension",
"tests/specs/run/error_syntax_empty_trailing_line/error_syntax_empty_trailing_line.mjs"
], ],
"plugins": [ "plugins": [
"https://plugins.dprint.dev/typescript-0.93.0.wasm", "https://plugins.dprint.dev/typescript-0.93.2.wasm",
"https://plugins.dprint.dev/json-0.19.4.wasm", "https://plugins.dprint.dev/json-0.19.4.wasm",
"https://plugins.dprint.dev/markdown-0.17.8.wasm", "https://plugins.dprint.dev/markdown-0.17.8.wasm",
"https://plugins.dprint.dev/toml-0.6.3.wasm", "https://plugins.dprint.dev/toml-0.6.3.wasm",

View file

@ -10,7 +10,7 @@ concurrency:
jobs: jobs:
build: build:
name: cargo publish name: cargo publish
runs-on: ubuntu-20.04-xl runs-on: ubuntu-24.04-xl
timeout-minutes: 90 timeout-minutes: 90
env: env:

View file

@ -5,15 +5,16 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache. // Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version // Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format. // automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 22; const cacheVersion = 27;
const ubuntuX86Runner = "ubuntu-22.04"; const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl"; const ubuntuX86XlRunner = "ubuntu-24.04-xl";
const ubuntuARMRunner = "ubicloud-standard-16-arm"; const ubuntuARMRunner = "ubicloud-standard-16-arm";
const windowsX86Runner = "windows-2022"; const windowsX86Runner = "windows-2022";
const windowsX86XlRunner = "windows-2022-xl"; const windowsX86XlRunner = "windows-2022-xl";
const macosX86Runner = "macos-13"; const macosX86Runner = "macos-13";
const macosArmRunner = "macos-14"; const macosArmRunner = "macos-14";
const selfHostedMacosArmRunner = "self-hosted";
const Runners = { const Runners = {
linuxX86: { linuxX86: {
@ -40,7 +41,8 @@ const Runners = {
macosArm: { macosArm: {
os: "macos", os: "macos",
arch: "aarch64", arch: "aarch64",
runner: macosArmRunner, runner:
`\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
}, },
windowsX86: { windowsX86: {
os: "windows", os: "windows",
@ -59,7 +61,7 @@ const prCacheKeyPrefix =
`${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`; `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
// Note that you may need to add more version to the `apt-get remove` line below if you change this // Note that you may need to add more version to the `apt-get remove` line below if you change this
const llvmVersion = 18; const llvmVersion = 19;
const installPkgsCommand = const installPkgsCommand =
`sudo apt-get install --no-install-recommends clang-${llvmVersion} lld-${llvmVersion} clang-tools-${llvmVersion} clang-format-${llvmVersion} clang-tidy-${llvmVersion}`; `sudo apt-get install --no-install-recommends clang-${llvmVersion} lld-${llvmVersion} clang-tools-${llvmVersion} clang-format-${llvmVersion} clang-tidy-${llvmVersion}`;
const sysRootStep = { const sysRootStep = {
@ -71,7 +73,7 @@ export DEBIAN_FRONTEND=noninteractive
sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
# Remove older clang before we install # Remove older clang before we install
sudo apt-get -qq remove \ sudo apt-get -qq remove \
'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
# Install clang-XXX, lld-XXX, and debootstrap. # Install clang-XXX, lld-XXX, and debootstrap.
echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" | echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" |
@ -86,7 +88,7 @@ ${installPkgsCommand} || echo 'Failed. Trying again.' && sudo apt-get clean && s
(yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
echo "Decompressing sysroot..." echo "Decompressing sysroot..."
wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
cd / cd /
xzcat /tmp/sysroot.tar.xz | sudo tar -x xzcat /tmp/sysroot.tar.xz | sudo tar -x
sudo mount --rbind /dev /sysroot/dev sudo mount --rbind /dev /sysroot/dev

View file

@ -62,18 +62,18 @@ jobs:
profile: debug profile: debug
- os: macos - os: macos
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-13'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || ''macos-13'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
- os: macos - os: macos
arch: aarch64 arch: aarch64
runner: macos-14 runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test job: test
profile: debug profile: debug
- os: macos - os: macos
arch: aarch64 arch: aarch64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-14'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
@ -84,33 +84,33 @@ jobs:
profile: debug profile: debug
- os: windows - os: windows
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}' runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
job: test job: test
profile: release profile: release
use_sysroot: true use_sysroot: true
wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}' wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
job: bench job: bench
profile: release profile: release
use_sysroot: true use_sysroot: true
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: ubuntu-22.04 runner: ubuntu-24.04
job: test job: test
profile: debug profile: debug
use_sysroot: true use_sysroot: true
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: ubuntu-22.04 runner: ubuntu-24.04
job: lint job: lint
profile: debug profile: debug
- os: linux - os: linux
@ -252,22 +252,22 @@ jobs:
# to complete. # to complete.
sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
# Remove older clang before we install # Remove older clang before we install
sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
# Install clang-XXX, lld-XXX, and debootstrap. # Install clang-XXX, lld-XXX, and debootstrap.
echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-18 main" | echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" |
sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-18.list sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-19.list
curl https://apt.llvm.org/llvm-snapshot.gpg.key | curl https://apt.llvm.org/llvm-snapshot.gpg.key |
gpg --dearmor | gpg --dearmor |
sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg
sudo apt-get update sudo apt-get update
# this was unreliable sometimes, so try again if it fails # this was unreliable sometimes, so try again if it fails
sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19
# Fix alternatives # Fix alternatives
(yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
echo "Decompressing sysroot..." echo "Decompressing sysroot..."
wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
cd / cd /
xzcat /tmp/sysroot.tar.xz | sudo tar -x xzcat /tmp/sysroot.tar.xz | sudo tar -x
sudo mount --rbind /dev /sysroot/dev sudo mount --rbind /dev /sysroot/dev
@ -299,8 +299,8 @@ jobs:
CARGO_PROFILE_RELEASE_LTO=false CARGO_PROFILE_RELEASE_LTO=false
RUSTFLAGS<<__1 RUSTFLAGS<<__1
-C linker-plugin-lto=true -C linker-plugin-lto=true
-C linker=clang-18 -C linker=clang-19
-C link-arg=-fuse-ld=lld-18 -C link-arg=-fuse-ld=lld-19
-C link-arg=-ldl -C link-arg=-ldl
-C link-arg=-Wl,--allow-shlib-undefined -C link-arg=-Wl,--allow-shlib-undefined
-C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@ -310,8 +310,8 @@ jobs:
__1 __1
RUSTDOCFLAGS<<__1 RUSTDOCFLAGS<<__1
-C linker-plugin-lto=true -C linker-plugin-lto=true
-C linker=clang-18 -C linker=clang-19
-C link-arg=-fuse-ld=lld-18 -C link-arg=-fuse-ld=lld-19
-C link-arg=-ldl -C link-arg=-ldl
-C link-arg=-Wl,--allow-shlib-undefined -C link-arg=-Wl,--allow-shlib-undefined
-C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@ -319,7 +319,7 @@ jobs:
--cfg tokio_unstable --cfg tokio_unstable
$RUSTFLAGS $RUSTFLAGS
__1 __1
CC=/usr/bin/clang-18 CC=/usr/bin/clang-19
CFLAGS=-flto=thin $CFLAGS CFLAGS=-flto=thin $CFLAGS
" > $GITHUB_ENV " > $GITHUB_ENV
- name: Remove macOS cURL --ipv4 flag - name: Remove macOS cURL --ipv4 flag
@ -361,8 +361,8 @@ jobs:
path: |- path: |-
~/.cargo/registry/index ~/.cargo/registry/index
~/.cargo/registry/cache ~/.cargo/registry/cache
key: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' key: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}' restore-keys: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)' if: '!(matrix.skip)'
- name: Restore cache build output (PR) - name: Restore cache build output (PR)
uses: actions/cache/restore@v4 uses: actions/cache/restore@v4
@ -375,7 +375,7 @@ jobs:
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: never_saved key: never_saved
restore-keys: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' restore-keys: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache - name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))' if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache uses: ./.github/mtime_cache
@ -685,10 +685,10 @@ jobs:
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.sha256sum !./target/*/*.sha256sum
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' key: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary: publish-canary:
name: publish canary name: publish canary
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
needs: needs:
- build - build
if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main' if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main'

View file

@ -7,7 +7,7 @@ on:
jobs: jobs:
update-dl-version: update-dl-version:
name: update dl.deno.land version name: update dl.deno.land version
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
if: github.repository == 'denoland/deno' if: github.repository == 'denoland/deno'
steps: steps:
- name: Authenticate with Google Cloud - name: Authenticate with Google Cloud

View file

@ -16,7 +16,7 @@ on:
jobs: jobs:
build: build:
name: start release name: start release
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30
env: env:

View file

@ -16,7 +16,7 @@ on:
jobs: jobs:
build: build:
name: version bump name: version bump
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
timeout-minutes: 90 timeout-minutes: 90
env: env:

View file

@ -20,7 +20,7 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
deno-version: [v1.x, canary] deno-version: [v1.x, canary]
os: [ubuntu-22.04-xl] os: [ubuntu-24.04-xl]
steps: steps:
- name: Clone repository - name: Clone repository

1244
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -21,6 +21,7 @@ members = [
"ext/napi/sym", "ext/napi/sym",
"ext/net", "ext/net",
"ext/node", "ext/node",
"ext/telemetry",
"ext/url", "ext/url",
"ext/web", "ext/web",
"ext/webgpu", "ext/webgpu",
@ -45,53 +46,55 @@ license = "MIT"
repository = "https://github.com/denoland/deno" repository = "https://github.com/denoland/deno"
[workspace.dependencies] [workspace.dependencies]
deno_ast = { version = "=0.42.2", features = ["transpiling"] } deno_ast = { version = "=0.43.3", features = ["transpiling"] }
deno_core = { version = "0.314.2" } deno_core = { version = "0.322.0" }
deno_bench_util = { version = "0.168.0", path = "./bench_util" } deno_bench_util = { version = "0.173.0", path = "./bench_util" }
deno_config = { version = "=0.39.2", features = ["workspace", "sync"] }
deno_lockfile = "=0.23.1" deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] } deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
deno_npm = "=0.25.4" deno_npm = "=0.25.4"
deno_path_util = "=0.2.1" deno_path_util = "=0.2.1"
deno_permissions = { version = "0.34.0", path = "./runtime/permissions" } deno_permissions = { version = "0.39.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.183.0", path = "./runtime" } deno_runtime = { version = "0.188.0", path = "./runtime" }
deno_semver = "=0.5.16" deno_semver = "=0.5.16"
deno_terminal = "0.2.0" deno_terminal = "0.2.0"
napi_sym = { version = "0.104.0", path = "./ext/napi/sym" } napi_sym = { version = "0.109.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" } test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.1" denokv_proto = "0.8.4"
denokv_remote = "0.8.1" denokv_remote = "0.8.4"
# denokv_sqlite brings in bundled sqlite if we don't disable the default features # denokv_sqlite brings in bundled sqlite if we don't disable the default features
denokv_sqlite = { default-features = false, version = "0.8.2" } denokv_sqlite = { default-features = false, version = "0.8.4" }
# exts # exts
deno_broadcast_channel = { version = "0.168.0", path = "./ext/broadcast_channel" } deno_broadcast_channel = { version = "0.173.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.106.0", path = "./ext/cache" } deno_cache = { version = "0.111.0", path = "./ext/cache" }
deno_canvas = { version = "0.43.0", path = "./ext/canvas" } deno_canvas = { version = "0.48.0", path = "./ext/canvas" }
deno_console = { version = "0.174.0", path = "./ext/console" } deno_console = { version = "0.179.0", path = "./ext/console" }
deno_cron = { version = "0.54.0", path = "./ext/cron" } deno_cron = { version = "0.59.0", path = "./ext/cron" }
deno_crypto = { version = "0.188.0", path = "./ext/crypto" } deno_crypto = { version = "0.193.0", path = "./ext/crypto" }
deno_fetch = { version = "0.198.0", path = "./ext/fetch" } deno_fetch = { version = "0.203.0", path = "./ext/fetch" }
deno_ffi = { version = "0.161.0", path = "./ext/ffi" } deno_ffi = { version = "0.166.0", path = "./ext/ffi" }
deno_fs = { version = "0.84.0", path = "./ext/fs" } deno_fs = { version = "0.89.0", path = "./ext/fs" }
deno_http = { version = "0.172.0", path = "./ext/http" } deno_http = { version = "0.177.0", path = "./ext/http" }
deno_io = { version = "0.84.0", path = "./ext/io" } deno_io = { version = "0.89.0", path = "./ext/io" }
deno_kv = { version = "0.82.0", path = "./ext/kv" } deno_kv = { version = "0.87.0", path = "./ext/kv" }
deno_napi = { version = "0.105.0", path = "./ext/napi" } deno_napi = { version = "0.110.0", path = "./ext/napi" }
deno_net = { version = "0.166.0", path = "./ext/net" } deno_net = { version = "0.171.0", path = "./ext/net" }
deno_node = { version = "0.111.0", path = "./ext/node" } deno_node = { version = "0.116.0", path = "./ext/node" }
deno_tls = { version = "0.161.0", path = "./ext/tls" } deno_telemetry = { version = "0.1.0", path = "./ext/telemetry" }
deno_url = { version = "0.174.0", path = "./ext/url" } deno_tls = { version = "0.166.0", path = "./ext/tls" }
deno_web = { version = "0.205.0", path = "./ext/web" } deno_url = { version = "0.179.0", path = "./ext/url" }
deno_webgpu = { version = "0.141.0", path = "./ext/webgpu" } deno_web = { version = "0.210.0", path = "./ext/web" }
deno_webidl = { version = "0.174.0", path = "./ext/webidl" } deno_webgpu = { version = "0.146.0", path = "./ext/webgpu" }
deno_websocket = { version = "0.179.0", path = "./ext/websocket" } deno_webidl = { version = "0.179.0", path = "./ext/webidl" }
deno_webstorage = { version = "0.169.0", path = "./ext/webstorage" } deno_websocket = { version = "0.184.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.174.0", path = "./ext/webstorage" }
# resolvers # resolvers
deno_resolver = { version = "0.6.0", path = "./resolvers/deno" } deno_resolver = { version = "0.11.0", path = "./resolvers/deno" }
node_resolver = { version = "0.13.0", path = "./resolvers/node" } node_resolver = { version = "0.18.0", path = "./resolvers/node" }
aes = "=0.8.3" aes = "=0.8.3"
anyhow = "1.0.57" anyhow = "1.0.57"
@ -99,6 +102,7 @@ async-trait = "0.1.73"
base32 = "=0.5.1" base32 = "=0.5.1"
base64 = "0.21.7" base64 = "0.21.7"
bencher = "0.1" bencher = "0.1"
boxed_error = "0.2.2"
brotli = "6.0.0" brotli = "6.0.0"
bytes = "1.4.0" bytes = "1.4.0"
cache_control = "=0.2.0" cache_control = "=0.2.0"
@ -111,7 +115,7 @@ console_static_text = "=0.8.1"
dashmap = "5.5.3" dashmap = "5.5.3"
data-encoding = "2.3.3" data-encoding = "2.3.3"
data-url = "=0.3.0" data-url = "=0.3.0"
deno_cache_dir = "=0.13.0" deno_cache_dir = "=0.13.2"
deno_package_json = { version = "0.1.2", default-features = false } deno_package_json = { version = "0.1.2", default-features = false }
dlopen2 = "0.6.1" dlopen2 = "0.6.1"
ecb = "=0.1.2" ecb = "=0.1.2"
@ -126,6 +130,7 @@ fs3 = "0.5.0"
futures = "0.3.21" futures = "0.3.21"
glob = "0.3.1" glob = "0.3.1"
h2 = "0.4.4" h2 = "0.4.4"
hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
http = "1.0" http = "1.0"
http-body = "1.0" http-body = "1.0"
http-body-util = "0.1.2" http-body-util = "0.1.2"
@ -141,7 +146,7 @@ jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3" lazy-regex = "3"
libc = "0.2.126" libc = "0.2.126"
libz-sys = { version = "1.1.20", default-features = false } libz-sys = { version = "1.1.20", default-features = false }
log = "0.4.20" log = { version = "0.4.20", features = ["kv"] }
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
memmem = "0.1.1" memmem = "0.1.1"
monch = "=0.5.0" monch = "=0.5.0"
@ -157,8 +162,8 @@ percent-encoding = "2.3.0"
phf = { version = "0.11", features = ["macros"] } phf = { version = "0.11", features = ["macros"] }
pin-project = "1.0.11" # don't pin because they yank crates from cargo pin-project = "1.0.11" # don't pin because they yank crates from cargo
pretty_assertions = "=1.4.0" pretty_assertions = "=1.4.0"
prost = "0.11" prost = "0.13"
prost-build = "0.11" prost-build = "0.13"
rand = "=0.8.5" rand = "=0.8.5"
regex = "^1.7.0" regex = "^1.7.0"
reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955 reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
@ -197,16 +202,21 @@ tower-http = { version = "0.6.1", features = ["decompression-br", "decompression
tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] } tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
tower-service = "0.3.2" tower-service = "0.3.2"
twox-hash = "=1.6.3" twox-hash = "=1.6.3"
# Upgrading past 2.4.1 may cause WPT failures url = { version = "2.5", features = ["serde", "expose_internals"] }
url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] } uuid = { version = "1.3.0", features = ["v4"] }
webpki-root-certs = "0.26.5" webpki-root-certs = "0.26.5"
webpki-roots = "0.26" webpki-roots = "0.26"
which = "4.2.5" which = "4.2.5"
yoke = { version = "0.7.4", features = ["derive"] } yoke = { version = "0.7.4", features = ["derive"] }
zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] } zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4" zstd = "=0.12.4"
opentelemetry = "0.27.0"
opentelemetry-http = "0.27.0"
opentelemetry-otlp = { version = "0.27.0", features = ["logs", "http-proto", "http-json"] }
opentelemetry-semantic-conventions = { version = "0.27.0", features = ["semconv_experimental"] }
opentelemetry_sdk = "0.27.0"
# crypto # crypto
hkdf = "0.12.3" hkdf = "0.12.3"
rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node

View file

@ -46,6 +46,12 @@ brew install deno
choco install deno choco install deno
``` ```
[WinGet](https://winstall.app/apps/DenoLand.Deno) (Windows):
```powershell
winget install --id=DenoLand.Deno
```
### Build and install from source ### Build and install from source
Complete instructions for building Deno from source can be found in the manual Complete instructions for building Deno from source can be found in the manual

View file

@ -6,6 +6,166 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at: We also have one-line install commands at:
https://github.com/denoland/deno_install https://github.com/denoland/deno_install
### 2.1.1 / 2024.11.21
- docs(add): clarification to add command (#26968)
- docs(doc): fix typo in doc subcommand help output (#26321)
- fix(node): regression where ts files were sometimes resolved instead of js
(#26971)
- fix(task): ensure root config always looks up dependencies in root (#26959)
- fix(watch): don't panic if there's no path provided (#26972)
- fix: Buffer global in --unstable-node-globals (#26973)
### 2.1.0 / 2024.11.21
- feat(cli): add `--unstable-node-globals` flag (#26617)
- feat(cli): support multiple env file argument (#26527)
- feat(compile): ability to embed directory in executable (#26939)
- feat(compile): ability to embed local data files (#26934)
- feat(ext/fetch): Make fetch client parameters configurable (#26909)
- feat(ext/fetch): allow embedders to use `hickory_dns_resolver` instead of
default `GaiResolver` (#26740)
- feat(ext/fs): add ctime to Deno.stats and use it in node compat layer (#24801)
- feat(ext/http): Make http server parameters configurable (#26785)
- feat(ext/node): perf_hooks.monitorEventLoopDelay() (#26905)
- feat(fetch): accept async iterables for body (#26882)
- feat(fmt): support SQL (#26750)
- feat(info): show location for Web Cache (#26205)
- feat(init): add --npm flag to initialize npm projects (#26896)
- feat(jupyter): Add `Deno.jupyter.image` API (#26284)
- feat(lint): Add checked files list to the JSON output(#26936)
- feat(lsp): auto-imports with @deno-types directives (#26821)
- feat(node): stabilize detecting if CJS via `"type": "commonjs"` in a
package.json (#26439)
- feat(permission): support suffix wildcards in `--allow-env` flag (#25255)
- feat(publish): add `--set-version <version>` flag (#26141)
- feat(runtime): remove public OTEL trace API (#26854)
- feat(task): add --eval flag (#26943)
- feat(task): dependencies (#26467)
- feat(task): support object notation, remove support for JSDocs (#26886)
- feat(task): workspace support with --filter and --recursive (#26949)
- feat(watch): log which file changed on HMR or watch change (#25801)
- feat: OpenTelemetry Tracing API and Exporting (#26710)
- feat: Wasm module support (#26668)
- feat: fmt and lint respect .gitignore file (#26897)
- feat: permission stack traces in ops (#26938)
- feat: subcommand to view and update outdated dependencies (#26942)
- feat: upgrade V8 to 13.0 (#26851)
- fix(cli): preserve comments in doc tests (#26828)
- fix(cli): show prefix hint when installing a package globally (#26629)
- fix(ext/cache): gracefully error when cache creation failed (#26895)
- fix(ext/http): prefer brotli for `accept-encoding: gzip, deflate, br, zstd`
(#26814)
- fix(ext/node): New async setInterval function to improve the nodejs
compatibility (#26703)
- fix(ext/node): add autoSelectFamily option to net.createConnection (#26661)
- fix(ext/node): handle `--allow-sys=inspector` (#26836)
- fix(ext/node): increase tolerance for interval test (#26899)
- fix(ext/node): process.getBuiltinModule (#26833)
- fix(ext/node): use ERR_NOT_IMPLEMENTED for notImplemented (#26853)
- fix(ext/node): zlib.crc32() (#26856)
- fix(ext/webgpu): Create GPUQuerySet converter before usage (#26883)
- fix(ext/websocket): initialize `error` attribute of WebSocket ErrorEvent
(#26796)
- fix(ext/webstorage): use error class for sqlite error case (#26806)
- fix(fmt): error instead of panic on unstable format (#26859)
- fix(fmt): formatting of .svelte files (#26948)
- fix(install): percent encodings in interactive progress bar (#26600)
- fix(install): re-setup bin entries after running lifecycle scripts (#26752)
- fix(lockfile): track dependencies specified in TypeScript compiler options
(#26551)
- fix(lsp): ignore editor indent settings if deno.json is present (#26912)
- fix(lsp): skip code action edits that can't be converted (#26831)
- fix(node): handle resolving ".//<something>" in npm packages (#26920)
- fix(node/crypto): support promisify on generateKeyPair (#26913)
- fix(permissions): say to use --allow-run instead of --allow-all (#26842)
- fix(publish): improve error message when missing exports (#26945)
- fix: otel resiliency (#26857)
- fix: update message for unsupported schemes with npm and jsr (#26884)
- perf(compile): code cache (#26528)
- perf(windows): delay load webgpu and some other dlls (#26917)
- perf: use available system memory for v8 isolate memory limit (#26868)
### 2.0.6 / 2024.11.10
- feat(ext/http): abort event when request is cancelled (#26781)
- feat(ext/http): abort signal when request is cancelled (#26761)
- feat(lsp): auto-import completions from byonm dependencies (#26680)
- fix(ext/cache): don't panic when creating cache (#26780)
- fix(ext/node): better inspector support (#26471)
- fix(fmt): don't use self-closing tags in HTML (#26754)
- fix(install): cache jsr deps from all workspace config files (#26779)
- fix(node:zlib): gzip & gzipSync should accept ArrayBuffer (#26762)
- fix: performance.timeOrigin (#26787)
### 2.0.5 / 2024.11.05
- fix(add): better error message when adding package that only has pre-release
versions (#26724)
- fix(add): only add npm deps to package.json if it's at least as close as
deno.json (#26683)
- fix(cli): set `npm_config_user_agent` when running npm packages or tasks
(#26639)
- fix(coverage): exclude comment lines from coverage reports (#25939)
- fix(ext/node): add `findSourceMap` to the default export of `node:module`
(#26720)
- fix(ext/node): convert errors from `fs.readFile/fs.readFileSync` to node
format (#26632)
- fix(ext/node): resolve exports even if parent module filename isn't present
(#26553)
- fix(ext/node): return `this` from `http.Server.ref/unref()` (#26647)
- fix(fmt): do not panic for jsx ignore container followed by jsx text (#26723)
- fix(fmt): fix several HTML and components issues (#26654)
- fix(fmt): ignore file directive for YAML files (#26717)
- fix(install): handle invalid function error, and fallback to junctions
regardless of the error (#26730)
- fix(lsp): include unstable features from editor settings (#26655)
- fix(lsp): scope attribution for lazily loaded assets (#26699)
- fix(node): Implement `os.userInfo` properly, add missing `toPrimitive`
(#24702)
- fix(serve): support serve hmr (#26078)
- fix(types): missing `import` permission on `PermissionOptionsObject` (#26627)
- fix(workspace): support wildcard packages (#26568)
- fix: clamp smi in fast calls by default (#26506)
- fix: improved support for cjs and cts modules (#26558)
- fix: op_run_microtasks crash (#26718)
- fix: panic_hook hangs without procfs (#26732)
- fix: remove permission check in op_require_node_module_paths (#26645)
- fix: surface package.json location on dep parse failure (#26665)
- perf(lsp): don't walk coverage directory (#26715)
### 2.0.4 / 2024.10.29
- Revert "fix(ext/node): fix dns.lookup result ordering (#26264)" (#26621)
- Revert "fix(ext/node): use primordials in `ext/node/polyfills/https.ts`
(#26323)" (#26613)
- feat(lsp): "typescript.preferences.preferTypeOnlyAutoImports" setting (#26546)
- fix(check): expose more globals from @types/node (#26603)
- fix(check): ignore resolving `jsxImportSource` when jsx is not used in graph
(#26548)
- fix(cli): Make --watcher CLEAR_SCREEN clear scrollback buffer as well as
visible screen (#25997)
- fix(compile): regression handling redirects (#26586)
- fix(ext/napi): export dynamic symbols list for {Free,Open}BSD (#26605)
- fix(ext/node): add path to `fs.stat` and `fs.statSync` error (#26037)
- fix(ext/node): compatibility with {Free,Open}BSD (#26604)
- fix(ext/node): use primordials in
ext\node\polyfills\internal\crypto\_randomInt.ts (#26534)
- fix(install): cache json exports of JSR packages (#26552)
- fix(install): regression - do not panic when config file contains \r\n
newlines (#26547)
- fix(lsp): make missing import action fix infallible (#26539)
- fix(npm): match npm bearer token generation (#26544)
- fix(upgrade): stop running `deno lsp` processes on windows before attempting
to replace executable (#26542)
- fix(watch): don't panic on invalid file specifiers (#26577)
- fix: do not panic when failing to write to http cache (#26591)
- fix: provide hints in terminal errors for Node.js globals (#26610)
- fix: report exceptions from nextTick (#26579)
- fix: support watch flag to enable watching other files than the main module on
serve subcommand (#26622)
- perf: pass transpiled module to deno_core as known string (#26555)
### 2.0.3 / 2024.10.25 ### 2.0.3 / 2024.10.25
- feat(lsp): interactive inlay hints (#26382) - feat(lsp): interactive inlay hints (#26382)

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_bench_util" name = "deno_bench_util"
version = "0.168.0" version = "0.173.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno" name = "deno"
version = "2.0.3" version = "2.1.1"
authors.workspace = true authors.workspace = true
default-run = "deno" default-run = "deno"
edition.workspace = true edition.workspace = true
@ -69,12 +69,12 @@ winres.workspace = true
[dependencies] [dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true } deno_cache_dir.workspace = true
deno_config = { version = "=0.37.2", features = ["workspace", "sync"] } deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] } deno_doc = { version = "=0.161.1", features = ["rust", "comrak"] }
deno_graph = { version = "=0.83.4" } deno_graph = { version = "=0.86.2" }
deno_lint = { version = "=0.67.0", features = ["docs"] } deno_lint = { version = "=0.68.0", features = ["docs"] }
deno_lockfile.workspace = true deno_lockfile.workspace = true
deno_npm.workspace = true deno_npm.workspace = true
deno_package_json.workspace = true deno_package_json.workspace = true
@ -83,8 +83,9 @@ deno_resolver.workspace = true
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true deno_semver.workspace = true
deno_task_shell = "=0.18.1" deno_task_shell = "=0.18.1"
deno_telemetry.workspace = true
deno_terminal.workspace = true deno_terminal.workspace = true
libsui = "0.4.0" libsui = "0.5.0"
node_resolver.workspace = true node_resolver.workspace = true
anstream = "0.6.14" anstream = "0.6.14"
@ -107,7 +108,7 @@ dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.4" dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.5" dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8" dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.0" dprint-plugin-typescript = "=0.93.2"
env_logger = "=0.10.0" env_logger = "=0.10.0"
fancy-regex = "=0.10.0" fancy-regex = "=0.10.0"
faster-hex.workspace = true faster-hex.workspace = true
@ -122,14 +123,14 @@ hyper-util.workspace = true
import_map = { version = "=0.20.1", features = ["ext"] } import_map = { version = "=0.20.1", features = ["ext"] }
indexmap.workspace = true indexmap.workspace = true
jsonc-parser = { workspace = true, features = ["cst", "serde"] } jsonc-parser = { workspace = true, features = ["cst", "serde"] }
jupyter_runtime = { package = "runtimelib", version = "=0.14.0" } jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] }
lazy-regex.workspace = true lazy-regex.workspace = true
libc.workspace = true libc.workspace = true
libz-sys.workspace = true libz-sys.workspace = true
log = { workspace = true, features = ["serde"] } log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true lsp-types.workspace = true
malva = "=0.11.0" malva = "=0.11.0"
markup_fmt = "=0.14.0" markup_fmt = "=0.16.0"
memmem.workspace = true memmem.workspace = true
monch.workspace = true monch.workspace = true
notify.workspace = true notify.workspace = true
@ -151,6 +152,7 @@ serde_repr.workspace = true
sha2.workspace = true sha2.workspace = true
shell-escape = "=0.1.5" shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] } spki = { version = "0.7", features = ["pem"] }
sqlformat = "=0.3.2"
strsim = "0.11.1" strsim = "0.11.1"
tar.workspace = true tar.workspace = true
tempfile.workspace = true tempfile.workspace = true

View file

@ -70,7 +70,41 @@ pub fn deno_json_deps(
let values = imports_values(config.json.imports.as_ref()) let values = imports_values(config.json.imports.as_ref())
.into_iter() .into_iter()
.chain(scope_values(config.json.scopes.as_ref())); .chain(scope_values(config.json.scopes.as_ref()));
values_to_set(values) let mut set = values_to_set(values);
if let Some(serde_json::Value::Object(compiler_options)) =
&config.json.compiler_options
{
// add jsxImportSource
if let Some(serde_json::Value::String(value)) =
compiler_options.get("jsxImportSource")
{
if let Some(dep_req) = value_to_dep_req(value) {
set.insert(dep_req);
}
}
// add jsxImportSourceTypes
if let Some(serde_json::Value::String(value)) =
compiler_options.get("jsxImportSourceTypes")
{
if let Some(dep_req) = value_to_dep_req(value) {
set.insert(dep_req);
}
}
// add the dependencies in the types array
if let Some(serde_json::Value::Array(types)) = compiler_options.get("types")
{
for value in types {
if let serde_json::Value::String(value) = value {
if let Some(dep_req) = value_to_dep_req(value) {
set.insert(dep_req);
}
}
}
}
}
set
} }
fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> { fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> {
@ -98,15 +132,23 @@ fn values_to_set<'a>(
) -> HashSet<JsrDepPackageReq> { ) -> HashSet<JsrDepPackageReq> {
let mut entries = HashSet::new(); let mut entries = HashSet::new();
for value in values { for value in values {
if let Ok(req_ref) = JsrPackageReqReference::from_str(value) { if let Some(dep_req) = value_to_dep_req(value) {
entries.insert(JsrDepPackageReq::jsr(req_ref.into_inner().req)); entries.insert(dep_req);
} else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
entries.insert(JsrDepPackageReq::npm(req_ref.into_inner().req));
} }
} }
entries entries
} }
fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
if let Ok(req_ref) = JsrPackageReqReference::from_str(value) {
Some(JsrDepPackageReq::jsr(req_ref.into_inner().req))
} else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
Some(JsrDepPackageReq::npm(req_ref.into_inner().req))
} else {
None
}
}
pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) { pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) {
if let Some(ignored_options) = &ts_config.maybe_ignored_options { if let Some(ignored_options) = &ts_config.maybe_ignored_options {
log::warn!("{}", ignored_options); log::warn!("{}", ignored_options);

File diff suppressed because it is too large Load diff

View file

@ -51,7 +51,7 @@ pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
} }
} else { } else {
NetDescriptor::parse(&host_and_port).map_err(|e| { NetDescriptor::parse(&host_and_port).map_err(|e| {
clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}")) clap::Error::raw(clap::error::ErrorKind::InvalidValue, e.to_string())
})?; })?;
out.push(host_and_port) out.push(host_and_port)
} }

View file

@ -126,11 +126,7 @@ impl CliLockfile {
maybe_deno_json: Option<&ConfigFile>, maybe_deno_json: Option<&ConfigFile>,
) -> HashSet<JsrDepPackageReq> { ) -> HashSet<JsrDepPackageReq> {
maybe_deno_json maybe_deno_json
.map(|c| { .map(crate::args::deno_json::deno_json_deps)
crate::args::deno_json::deno_json_deps(c)
.into_iter()
.collect()
})
.unwrap_or_default() .unwrap_or_default()
} }

View file

@ -7,6 +7,7 @@ mod import_map;
mod lockfile; mod lockfile;
mod package_json; mod package_json;
use deno_ast::MediaType;
use deno_ast::SourceMapOption; use deno_ast::SourceMapOption;
use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::CreateResolverOptions; use deno_config::workspace::CreateResolverOptions;
@ -28,12 +29,12 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_path_util::normalize_path; use deno_path_util::normalize_path;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_telemetry::OtelConfig;
use import_map::resolve_import_map_value_from_specifier; use import_map::resolve_import_map_value_from_specifier;
pub use deno_config::deno_json::BenchConfig; pub use deno_config::deno_json::BenchConfig;
pub use deno_config::deno_json::ConfigFile; pub use deno_config::deno_json::ConfigFile;
pub use deno_config::deno_json::FmtOptionsConfig; pub use deno_config::deno_json::FmtOptionsConfig;
pub use deno_config::deno_json::JsxImportSourceConfig;
pub use deno_config::deno_json::LintRulesConfig; pub use deno_config::deno_json::LintRulesConfig;
pub use deno_config::deno_json::ProseWrap; pub use deno_config::deno_json::ProseWrap;
pub use deno_config::deno_json::TsConfig; pub use deno_config::deno_json::TsConfig;
@ -46,6 +47,7 @@ pub use flags::*;
pub use lockfile::CliLockfile; pub use lockfile::CliLockfile;
pub use lockfile::CliLockfileReadFromPathOptions; pub use lockfile::CliLockfileReadFromPathOptions;
pub use package_json::NpmInstallDepsProvider; pub use package_json::NpmInstallDepsProvider;
pub use package_json::PackageJsonDepValueParseWithLocationError;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
@ -200,6 +202,8 @@ pub fn ts_config_to_transpile_and_emit_options(
precompile_jsx_dynamic_props: None, precompile_jsx_dynamic_props: None,
transform_jsx, transform_jsx,
var_decl_imports: false, var_decl_imports: false,
// todo(dsherret): support verbatim_module_syntax here properly
verbatim_module_syntax: false,
}, },
deno_ast::EmitOptions { deno_ast::EmitOptions {
inline_sources: options.inline_sources, inline_sources: options.inline_sources,
@ -285,6 +289,7 @@ impl BenchOptions {
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)] #[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct UnstableFmtOptions { pub struct UnstableFmtOptions {
pub component: bool, pub component: bool,
pub sql: bool,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -318,6 +323,7 @@ impl FmtOptions {
options: resolve_fmt_options(fmt_flags, fmt_config.options), options: resolve_fmt_options(fmt_flags, fmt_config.options),
unstable: UnstableFmtOptions { unstable: UnstableFmtOptions {
component: unstable.component || fmt_flags.unstable_component, component: unstable.component || fmt_flags.unstable_component,
sql: unstable.sql || fmt_flags.unstable_sql,
}, },
files: fmt_config.files, files: fmt_config.files,
} }
@ -819,10 +825,8 @@ impl CliOptions {
}; };
let msg = let msg =
format!("DANGER: TLS certificate validation is disabled {}", domains); format!("DANGER: TLS certificate validation is disabled {}", domains);
#[allow(clippy::print_stderr)]
{ {
// use eprintln instead of log::warn so this always gets shown log::error!("{}", colors::yellow(msg));
eprintln!("{}", colors::yellow(msg));
} }
} }
@ -866,12 +870,8 @@ impl CliOptions {
} else { } else {
&[] &[]
}; };
let config_parse_options = deno_config::deno_json::ConfigParseOptions { let config_parse_options =
include_task_comments: matches!( deno_config::deno_json::ConfigParseOptions::default();
flags.subcommand,
DenoSubcommand::Task(..)
),
};
let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled
&& !flags.no_npm && !flags.no_npm
&& !has_flag_env_var("DENO_NO_PACKAGE_JSON"); && !has_flag_env_var("DENO_NO_PACKAGE_JSON");
@ -1126,7 +1126,11 @@ impl CliOptions {
} }
} }
pub fn env_file_name(&self) -> Option<&String> { pub fn otel_config(&self) -> Option<OtelConfig> {
self.flags.otel_config()
}
pub fn env_file_name(&self) -> Option<&Vec<String>> {
self.flags.env_file.as_ref() self.flags.env_file.as_ref()
} }
@ -1134,21 +1138,34 @@ impl CliOptions {
self self
.main_module_cell .main_module_cell
.get_or_init(|| { .get_or_init(|| {
let main_module = match &self.flags.subcommand { Ok(match &self.flags.subcommand {
DenoSubcommand::Compile(compile_flags) => { DenoSubcommand::Compile(compile_flags) => {
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())? resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
} }
DenoSubcommand::Eval(_) => { DenoSubcommand::Eval(_) => {
resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())? resolve_url_or_path("./$deno$eval.mts", self.initial_cwd())?
} }
DenoSubcommand::Repl(_) => { DenoSubcommand::Repl(_) => {
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())? resolve_url_or_path("./$deno$repl.mts", self.initial_cwd())?
} }
DenoSubcommand::Run(run_flags) => { DenoSubcommand::Run(run_flags) => {
if run_flags.is_stdin() { if run_flags.is_stdin() {
resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())? resolve_url_or_path("./$deno$stdin.mts", self.initial_cwd())?
} else { } else {
resolve_url_or_path(&run_flags.script, self.initial_cwd())? let url =
resolve_url_or_path(&run_flags.script, self.initial_cwd())?;
if self.is_node_main()
&& url.scheme() == "file"
&& MediaType::from_specifier(&url) == MediaType::Unknown
{
try_resolve_node_binary_main_entrypoint(
&run_flags.script,
self.initial_cwd(),
)?
.unwrap_or(url)
} else {
url
}
} }
} }
DenoSubcommand::Serve(run_flags) => { DenoSubcommand::Serve(run_flags) => {
@ -1157,9 +1174,7 @@ impl CliOptions {
_ => { _ => {
bail!("No main module.") bail!("No main module.")
} }
}; })
Ok(main_module)
}) })
.as_ref() .as_ref()
.map_err(|err| deno_core::anyhow::anyhow!("{}", err)) .map_err(|err| deno_core::anyhow::anyhow!("{}", err))
@ -1208,7 +1223,7 @@ impl CliOptions {
// This is triggered via a secret environment variable which is used // This is triggered via a secret environment variable which is used
// for functionality like child_process.fork. Users should NOT depend // for functionality like child_process.fork. Users should NOT depend
// on this functionality. // on this functionality.
pub fn is_npm_main(&self) -> bool { pub fn is_node_main(&self) -> bool {
NPM_PROCESS_STATE.is_some() NPM_PROCESS_STATE.is_some()
} }
@ -1306,6 +1321,7 @@ impl CliOptions {
let workspace = self.workspace(); let workspace = self.workspace();
UnstableFmtOptions { UnstableFmtOptions {
component: workspace.has_unstable("fmt-component"), component: workspace.has_unstable("fmt-component"),
sql: workspace.has_unstable("fmt-sql"),
} }
} }
@ -1452,6 +1468,12 @@ impl CliOptions {
watch: Some(WatchFlagsWithPaths { hmr, .. }), watch: Some(WatchFlagsWithPaths { hmr, .. }),
.. ..
}) = &self.flags.subcommand }) = &self.flags.subcommand
{
*hmr
} else if let DenoSubcommand::Serve(ServeFlags {
watch: Some(WatchFlagsWithPaths { hmr, .. }),
..
}) = &self.flags.subcommand
{ {
*hmr *hmr
} else { } else {
@ -1526,6 +1548,10 @@ impl CliOptions {
}) => Url::parse(&flags.module_url) }) => Url::parse(&flags.module_url)
.ok() .ok()
.map(|url| vec![Cow::Owned(url)]), .map(|url| vec![Cow::Owned(url)]),
DenoSubcommand::Doc(DocFlags {
source_files: DocSourceFileFlag::Paths(paths),
..
}) => Some(files_to_urls(paths)),
_ => None, _ => None,
}) })
.unwrap_or_default(); .unwrap_or_default();
@ -1580,9 +1606,11 @@ impl CliOptions {
|| self.workspace().has_unstable("bare-node-builtins") || self.workspace().has_unstable("bare-node-builtins")
} }
pub fn unstable_detect_cjs(&self) -> bool { pub fn detect_cjs(&self) -> bool {
self.flags.unstable_config.detect_cjs // only enabled when there's a package.json in order to not have a
|| self.workspace().has_unstable("detect-cjs") // perf penalty for non-npm Deno projects of searching for the closest
// package.json beside each module
self.workspace().package_jsons().next().is_some() || self.is_node_main()
} }
fn byonm_enabled(&self) -> bool { fn byonm_enabled(&self) -> bool {
@ -1595,6 +1623,17 @@ impl CliOptions {
} }
pub fn use_byonm(&self) -> bool { pub fn use_byonm(&self) -> bool {
if matches!(
self.sub_command(),
DenoSubcommand::Install(_)
| DenoSubcommand::Add(_)
| DenoSubcommand::Remove(_)
| DenoSubcommand::Init(_)
| DenoSubcommand::Outdated(_)
) {
// For `deno install/add/remove/init` we want to force the managed resolver so it can set up `node_modules/` directory.
return false;
}
if self.node_modules_dir().ok().flatten().is_none() if self.node_modules_dir().ok().flatten().is_none()
&& self.maybe_node_modules_folder.is_some() && self.maybe_node_modules_folder.is_some()
&& self && self
@ -1637,7 +1676,7 @@ impl CliOptions {
"byonm", "byonm",
"bare-node-builtins", "bare-node-builtins",
"fmt-component", "fmt-component",
"detect-cjs", "fmt-sql",
]) ])
.collect(); .collect();
@ -1672,6 +1711,10 @@ impl CliOptions {
if let DenoSubcommand::Run(RunFlags { if let DenoSubcommand::Run(RunFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }), watch: Some(WatchFlagsWithPaths { paths, .. }),
.. ..
})
| DenoSubcommand::Serve(ServeFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }),
..
}) = &self.flags.subcommand }) = &self.flags.subcommand
{ {
full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path))); full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path)));
@ -1771,6 +1814,36 @@ fn resolve_node_modules_folder(
Ok(Some(canonicalize_path_maybe_not_exists(&path)?)) Ok(Some(canonicalize_path_maybe_not_exists(&path)?))
} }
fn try_resolve_node_binary_main_entrypoint(
specifier: &str,
initial_cwd: &Path,
) -> Result<Option<Url>, AnyError> {
// node allows running files at paths without a `.js` extension
// or at directories with an index.js file
let path = deno_core::normalize_path(initial_cwd.join(specifier));
if path.is_dir() {
let index_file = path.join("index.js");
Ok(if index_file.is_file() {
Some(deno_path_util::url_from_file_path(&index_file)?)
} else {
None
})
} else {
let path = path.with_extension(
path
.extension()
.and_then(|s| s.to_str())
.map(|s| format!("{}.js", s))
.unwrap_or("js".to_string()),
);
if path.is_file() {
Ok(Some(deno_path_util::url_from_file_path(&path)?))
} else {
Ok(None)
}
}
}
fn resolve_import_map_specifier( fn resolve_import_map_specifier(
maybe_import_map_path: Option<&str>, maybe_import_map_path: Option<&str>,
maybe_config_file: Option<&ConfigFile>, maybe_config_file: Option<&ConfigFile>,
@ -1841,6 +1914,10 @@ pub fn resolve_no_prompt(flags: &PermissionFlags) -> bool {
flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT") flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT")
} }
pub fn has_trace_permissions_enabled() -> bool {
has_flag_env_var("DENO_TRACE_PERMISSIONS")
}
pub fn has_flag_env_var(name: &str) -> bool { pub fn has_flag_env_var(name: &str) -> bool {
let value = env::var(name); let value = env::var(name);
matches!(value.as_ref().map(|s| s.as_str()), Ok("1")) matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
@ -1872,19 +1949,22 @@ pub fn config_to_deno_graph_workspace_member(
}) })
} }
fn load_env_variables_from_env_file(filename: Option<&String>) { fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) {
let Some(env_file_name) = filename else { let Some(env_file_names) = filename else {
return; return;
}; };
match from_filename(env_file_name) {
Ok(_) => (), for env_file_name in env_file_names.iter().rev() {
Err(error) => { match from_filename(env_file_name) {
match error { Ok(_) => (),
Err(error) => {
match error {
dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",colors::yellow("Warning"), env_file_name, index, line), dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",colors::yellow("Warning"), env_file_name, index, line),
dotenvy::Error::Io(_)=> log::info!("{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",colors::yellow("Warning"),env_file_name), dotenvy::Error::Io(_)=> log::info!("{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",colors::yellow("Warning"),env_file_name),
dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",colors::yellow("Warning"),env_file_name), dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",colors::yellow("Warning"),env_file_name),
_ => log::info!("{} Unknown failure occurred with the specified environment file: {}", colors::yellow("Warning"), env_file_name), _ => log::info!("{} Unknown failure occurred with the specified environment file: {}", colors::yellow("Warning"), env_file_name),
} }
}
} }
} }
} }

View file

@ -5,10 +5,12 @@ use std::sync::Arc;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError; use deno_package_json::PackageJsonDepValueParseError;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use thiserror::Error;
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmRemotePkg { pub struct InstallNpmRemotePkg {
@ -23,11 +25,20 @@ pub struct InstallNpmWorkspacePkg {
pub target_dir: PathBuf, pub target_dir: PathBuf,
} }
#[derive(Debug, Error, Clone)]
#[error("Failed to install '{}'\n at {}", alias, location)]
pub struct PackageJsonDepValueParseWithLocationError {
pub location: Url,
pub alias: String,
#[source]
pub source: PackageJsonDepValueParseError,
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct NpmInstallDepsProvider { pub struct NpmInstallDepsProvider {
remote_pkgs: Vec<InstallNpmRemotePkg>, remote_pkgs: Vec<InstallNpmRemotePkg>,
workspace_pkgs: Vec<InstallNpmWorkspacePkg>, workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
pkg_json_dep_errors: Vec<PackageJsonDepValueParseError>, pkg_json_dep_errors: Vec<PackageJsonDepValueParseWithLocationError>,
} }
impl NpmInstallDepsProvider { impl NpmInstallDepsProvider {
@ -89,7 +100,13 @@ impl NpmInstallDepsProvider {
let dep = match dep { let dep = match dep {
Ok(dep) => dep, Ok(dep) => dep,
Err(err) => { Err(err) => {
pkg_json_dep_errors.push(err); pkg_json_dep_errors.push(
PackageJsonDepValueParseWithLocationError {
location: pkg_json.specifier(),
alias,
source: err,
},
);
continue; continue;
} }
}; };
@ -150,7 +167,9 @@ impl NpmInstallDepsProvider {
&self.workspace_pkgs &self.workspace_pkgs
} }
pub fn pkg_json_dep_errors(&self) -> &[PackageJsonDepValueParseError] { pub fn pkg_json_dep_errors(
&self,
) -> &[PackageJsonDepValueParseWithLocationError] {
&self.pkg_json_dep_errors &self.pkg_json_dep_errors
} }
} }

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -400,6 +400,24 @@ fn main() {
println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap()); println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap()); println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
if cfg!(windows) {
// these dls load slowly, so delay loading them
let dlls = [
// webgpu
"d3dcompiler_47",
"OPENGL32",
// network related functions
"iphlpapi",
];
for dll in dlls {
println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
}
// enable delay loading
println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
}
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let o = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());

View file

@ -57,7 +57,7 @@ impl rusqlite::types::FromSql for CacheDBHash {
} }
/// What should the cache should do on failure? /// What should the cache should do on failure?
#[derive(Default)] #[derive(Debug, Default)]
pub enum CacheFailure { pub enum CacheFailure {
/// Return errors if failure mode otherwise unspecified. /// Return errors if failure mode otherwise unspecified.
#[default] #[default]
@ -69,6 +69,7 @@ pub enum CacheFailure {
} }
/// Configuration SQL and other parameters for a [`CacheDB`]. /// Configuration SQL and other parameters for a [`CacheDB`].
#[derive(Debug)]
pub struct CacheDBConfiguration { pub struct CacheDBConfiguration {
/// SQL to run for a new database. /// SQL to run for a new database.
pub table_initializer: &'static str, pub table_initializer: &'static str,
@ -98,6 +99,7 @@ impl CacheDBConfiguration {
} }
} }
#[derive(Debug)]
enum ConnectionState { enum ConnectionState {
Connected(Connection), Connected(Connection),
Blackhole, Blackhole,
@ -106,7 +108,7 @@ enum ConnectionState {
/// A cache database that eagerly initializes itself off-thread, preventing initialization operations /// A cache database that eagerly initializes itself off-thread, preventing initialization operations
/// from blocking the main thread. /// from blocking the main thread.
#[derive(Clone)] #[derive(Debug, Clone)]
pub struct CacheDB { pub struct CacheDB {
// TODO(mmastrac): We can probably simplify our thread-safe implementation here // TODO(mmastrac): We can probably simplify our thread-safe implementation here
conn: Arc<Mutex<OnceCell<ConnectionState>>>, conn: Arc<Mutex<OnceCell<ConnectionState>>>,

View file

@ -1,10 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_runtime::code_cache; use deno_runtime::code_cache;
use deno_runtime::deno_webstorage::rusqlite::params; use deno_runtime::deno_webstorage::rusqlite::params;
use crate::worker::CliCodeCache;
use super::cache_db::CacheDB; use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash; use super::cache_db::CacheDBHash;
@ -82,6 +86,12 @@ impl CodeCache {
} }
} }
impl CliCodeCache for CodeCache {
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
self
}
}
impl code_cache::CodeCache for CodeCache { impl code_cache::CodeCache for CodeCache {
fn get_sync( fn get_sync(
&self, &self,

2
cli/cache/emit.rs vendored
View file

@ -10,6 +10,7 @@ use deno_core::unsync::sync::AtomicFlag;
use super::DiskCache; use super::DiskCache;
/// The cache that stores previously emitted files. /// The cache that stores previously emitted files.
#[derive(Debug)]
pub struct EmitCache { pub struct EmitCache {
disk_cache: DiskCache, disk_cache: DiskCache,
emit_failed_flag: AtomicFlag, emit_failed_flag: AtomicFlag,
@ -91,6 +92,7 @@ impl EmitCache {
const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata="; const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";
#[derive(Debug)]
struct EmitFileSerializer { struct EmitFileSerializer {
cli_version: &'static str, cli_version: &'static str,
} }

101
cli/cache/mod.rs vendored
View file

@ -8,14 +8,9 @@ use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef; use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect; use crate::file_fetcher::FileOrRedirect;
use crate::npm::CliNpmResolver;
use crate::resolver::CliNodeResolver;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs; use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter; use crate::util::fs::AtomicWriteFileFsAdapter;
use crate::util::path::specifier_has_extension;
use crate::util::text_encoding::arc_str_to_bytes;
use crate::util::text_encoding::from_utf8_lossy_owned;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_core::futures; use deno_core::futures;
@ -25,7 +20,9 @@ use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture; use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse; use deno_graph::source::LoadResponse;
use deno_graph::source::Loader; use deno_graph::source::Loader;
use deno_runtime::deno_fs;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use node_resolver::InNpmPackageChecker;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@ -60,7 +57,6 @@ pub use fast_check::FastCheckCache;
pub use incremental::IncrementalCache; pub use incremental::IncrementalCache;
pub use module_info::ModuleInfoCache; pub use module_info::ModuleInfoCache;
pub use node::NodeAnalysisCache; pub use node::NodeAnalysisCache;
pub use parsed_source::EsmOrCjsChecker;
pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::LazyGraphSourceParser;
pub use parsed_source::ParsedSourceCache; pub use parsed_source::ParsedSourceCache;
@ -181,46 +177,40 @@ pub struct FetchCacherOptions {
pub permissions: PermissionsContainer, pub permissions: PermissionsContainer,
/// If we're publishing for `deno publish`. /// If we're publishing for `deno publish`.
pub is_deno_publish: bool, pub is_deno_publish: bool,
pub unstable_detect_cjs: bool,
} }
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs. /// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher { pub struct FetchCacher {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>, pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer, permissions: PermissionsContainer,
is_deno_publish: bool, is_deno_publish: bool,
unstable_detect_cjs: bool,
cache_info_enabled: bool, cache_info_enabled: bool,
} }
impl FetchCacher { impl FetchCacher {
pub fn new( pub fn new(
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
options: FetchCacherOptions, options: FetchCacherOptions,
) -> Self { ) -> Self {
Self { Self {
file_fetcher, file_fetcher,
esm_or_cjs_checker, fs,
global_http_cache, global_http_cache,
node_resolver, in_npm_pkg_checker,
npm_resolver,
module_info_cache, module_info_cache,
file_header_overrides: options.file_header_overrides, file_header_overrides: options.file_header_overrides,
permissions: options.permissions, permissions: options.permissions,
is_deno_publish: options.is_deno_publish, is_deno_publish: options.is_deno_publish,
unstable_detect_cjs: options.unstable_detect_cjs,
cache_info_enabled: false, cache_info_enabled: false,
} }
} }
@ -271,70 +261,23 @@ impl Loader for FetchCacher {
) -> LoadFuture { ) -> LoadFuture {
use deno_graph::source::CacheSetting as LoaderCacheSetting; use deno_graph::source::CacheSetting as LoaderCacheSetting;
if specifier.scheme() == "file" { if specifier.scheme() == "file"
if specifier.path().contains("/node_modules/") { && specifier.path().contains("/node_modules/")
// The specifier might be in a completely different symlinked tree than {
// what the node_modules url is in (ex. `/my-project-1/node_modules` // The specifier might be in a completely different symlinked tree than
// symlinked to `/my-project-2/node_modules`), so first we checked if the path // what the node_modules url is in (ex. `/my-project-1/node_modules`
// is in a node_modules dir to avoid needlessly canonicalizing, then now compare // symlinked to `/my-project-2/node_modules`), so first we checked if the path
// against the canonicalized specifier. // is in a node_modules dir to avoid needlessly canonicalizing, then now compare
let specifier = // against the canonicalized specifier.
crate::node::resolve_specifier_into_node_modules(specifier); let specifier = crate::node::resolve_specifier_into_node_modules(
if self.npm_resolver.in_npm_package(&specifier) { specifier,
return Box::pin(futures::future::ready(Ok(Some( self.fs.as_ref(),
LoadResponse::External { specifier }, );
)))); if self.in_npm_pkg_checker.in_npm_package(&specifier) {
}
}
// make local CJS modules external to the graph
if specifier_has_extension(specifier, "cjs") {
return Box::pin(futures::future::ready(Ok(Some( return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External { LoadResponse::External { specifier },
specifier: specifier.clone(),
},
)))); ))));
} }
if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") {
if let Ok(Some(pkg_json)) =
self.node_resolver.get_closest_package_json(specifier)
{
if pkg_json.typ == "commonjs" {
if let Ok(path) = specifier.to_file_path() {
if let Ok(bytes) = std::fs::read(&path) {
let text: Arc<str> = from_utf8_lossy_owned(bytes).into();
let is_es_module = match self.esm_or_cjs_checker.is_esm(
specifier,
text.clone(),
MediaType::JavaScript,
) {
Ok(value) => value,
Err(err) => {
return Box::pin(futures::future::ready(Err(err.into())));
}
};
if !is_es_module {
self.node_resolver.mark_cjs_resolution(specifier.clone());
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
} else {
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::Module {
specifier: specifier.clone(),
content: arc_str_to_bytes(text),
maybe_headers: None,
},
))));
}
}
}
}
}
}
} }
if self.is_deno_publish if self.is_deno_publish

View file

@ -44,18 +44,32 @@ pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
/// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable /// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable
/// performance improvement because when it exists we can skip parsing a module for /// performance improvement because when it exists we can skip parsing a module for
/// deno_graph. /// deno_graph.
#[derive(Debug)]
pub struct ModuleInfoCache { pub struct ModuleInfoCache {
conn: CacheDB, conn: CacheDB,
parsed_source_cache: Arc<ParsedSourceCache>,
} }
impl ModuleInfoCache { impl ModuleInfoCache {
#[cfg(test)] #[cfg(test)]
pub fn new_in_memory(version: &'static str) -> Self { pub fn new_in_memory(
Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version)) version: &'static str,
parsed_source_cache: Arc<ParsedSourceCache>,
) -> Self {
Self::new(
CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version),
parsed_source_cache,
)
} }
pub fn new(conn: CacheDB) -> Self { pub fn new(
Self { conn } conn: CacheDB,
parsed_source_cache: Arc<ParsedSourceCache>,
) -> Self {
Self {
conn,
parsed_source_cache,
}
} }
/// Useful for testing: re-create this cache DB with a different current version. /// Useful for testing: re-create this cache DB with a different current version.
@ -63,6 +77,7 @@ impl ModuleInfoCache {
pub(crate) fn recreate_with_version(self, version: &'static str) -> Self { pub(crate) fn recreate_with_version(self, version: &'static str) -> Self {
Self { Self {
conn: self.conn.recreate_with_version(version), conn: self.conn.recreate_with_version(version),
parsed_source_cache: self.parsed_source_cache,
} }
} }
@ -113,13 +128,10 @@ impl ModuleInfoCache {
Ok(()) Ok(())
} }
pub fn as_module_analyzer<'a>( pub fn as_module_analyzer(&self) -> ModuleInfoCacheModuleAnalyzer {
&'a self,
parsed_source_cache: &'a Arc<ParsedSourceCache>,
) -> ModuleInfoCacheModuleAnalyzer<'a> {
ModuleInfoCacheModuleAnalyzer { ModuleInfoCacheModuleAnalyzer {
module_info_cache: self, module_info_cache: self,
parsed_source_cache, parsed_source_cache: &self.parsed_source_cache,
} }
} }
} }
@ -129,6 +141,84 @@ pub struct ModuleInfoCacheModuleAnalyzer<'a> {
parsed_source_cache: &'a Arc<ParsedSourceCache>, parsed_source_cache: &'a Arc<ParsedSourceCache>,
} }
impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
fn load_cached_module_info(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: CacheDBHash,
) -> Option<ModuleInfo> {
match self.module_info_cache.get_module_info(
specifier,
media_type,
source_hash,
) {
Ok(Some(info)) => Some(info),
Ok(None) => None,
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
None
}
}
}
fn save_module_info_to_cache(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: CacheDBHash,
module_info: &ModuleInfo,
) {
if let Err(err) = self.module_info_cache.set_module_info(
specifier,
media_type,
source_hash,
module_info,
) {
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
);
}
}
pub fn analyze_sync(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source: &Arc<str>,
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache
let source_hash = CacheDBHash::from_source(source);
if let Some(info) =
self.load_cached_module_info(specifier, media_type, source_hash)
{
return Ok(info);
}
// otherwise, get the module info from the parsed source cache
let parser = self.parsed_source_cache.as_capturing_parser();
let analyzer = ParserModuleAnalyzer::new(&parser);
let module_info =
analyzer.analyze_sync(specifier, source.clone(), media_type)?;
// then attempt to cache it
self.save_module_info_to_cache(
specifier,
media_type,
source_hash,
&module_info,
);
Ok(module_info)
}
}
#[async_trait::async_trait(?Send)] #[async_trait::async_trait(?Send)]
impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
async fn analyze( async fn analyze(
@ -139,20 +229,10 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache // attempt to load from the cache
let source_hash = CacheDBHash::from_source(&source); let source_hash = CacheDBHash::from_source(&source);
match self.module_info_cache.get_module_info( if let Some(info) =
specifier, self.load_cached_module_info(specifier, media_type, source_hash)
media_type, {
source_hash, return Ok(info);
) {
Ok(Some(info)) => return Ok(info),
Ok(None) => {}
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
}
} }
// otherwise, get the module info from the parsed source cache // otherwise, get the module info from the parsed source cache
@ -169,18 +249,12 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
.unwrap()?; .unwrap()?;
// then attempt to cache it // then attempt to cache it
if let Err(err) = self.module_info_cache.set_module_info( self.save_module_info_to_cache(
specifier, specifier,
media_type, media_type,
source_hash, source_hash,
&module_info, &module_info,
) { );
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
);
}
Ok(module_info) Ok(module_info)
} }
@ -202,7 +276,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 {
Tsx => 11, Tsx => 11,
Json => 12, Json => 12,
Wasm => 13, Wasm => 13,
TsBuildInfo => 14, Css => 14,
SourceMap => 15, SourceMap => 15,
Unknown => 16, Unknown => 16,
} }
@ -210,6 +284,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use deno_graph::JsDocImportInfo;
use deno_graph::PositionRange; use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange; use deno_graph::SpecifierWithRange;
@ -217,7 +292,7 @@ mod test {
#[test] #[test]
pub fn module_info_cache_general_use() { pub fn module_info_cache_general_use() {
let cache = ModuleInfoCache::new_in_memory("1.0.0"); let cache = ModuleInfoCache::new_in_memory("1.0.0", Default::default());
let specifier1 = let specifier1 =
ModuleSpecifier::parse("https://localhost/mod.ts").unwrap(); ModuleSpecifier::parse("https://localhost/mod.ts").unwrap();
let specifier2 = let specifier2 =
@ -234,18 +309,21 @@ mod test {
); );
let mut module_info = ModuleInfo::default(); let mut module_info = ModuleInfo::default();
module_info.jsdoc_imports.push(SpecifierWithRange { module_info.jsdoc_imports.push(JsDocImportInfo {
range: PositionRange { specifier: SpecifierWithRange {
start: deno_graph::Position { range: PositionRange {
line: 0, start: deno_graph::Position {
character: 3, line: 0,
}, character: 3,
end: deno_graph::Position { },
line: 1, end: deno_graph::Position {
character: 2, line: 1,
character: 2,
},
}, },
text: "test".to_string(),
}, },
text: "test".to_string(), resolution_mode: None,
}); });
cache cache
.set_module_info( .set_module_info(

View file

@ -5,12 +5,11 @@ use std::sync::Arc;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnostic;
use deno_ast::ParsedSource; use deno_ast::ParsedSource;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_graph::CapturingModuleParser; use deno_graph::CapturingEsParser;
use deno_graph::DefaultModuleParser; use deno_graph::DefaultEsParser;
use deno_graph::ModuleParser; use deno_graph::EsParser;
use deno_graph::ParseOptions; use deno_graph::ParseOptions;
use deno_graph::ParsedSourceStore; use deno_graph::ParsedSourceStore;
@ -47,7 +46,7 @@ impl<'a> LazyGraphSourceParser<'a> {
} }
} }
#[derive(Default)] #[derive(Debug, Default)]
pub struct ParsedSourceCache { pub struct ParsedSourceCache {
sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>, sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>,
} }
@ -58,12 +57,11 @@ impl ParsedSourceCache {
module: &deno_graph::JsModule, module: &deno_graph::JsModule,
) -> Result<ParsedSource, deno_ast::ParseDiagnostic> { ) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
let parser = self.as_capturing_parser(); let parser = self.as_capturing_parser();
// this will conditionally parse because it's using a CapturingModuleParser // this will conditionally parse because it's using a CapturingEsParser
parser.parse_module(ParseOptions { parser.parse_program(ParseOptions {
specifier: &module.specifier, specifier: &module.specifier,
source: module.source.clone(), source: module.source.clone(),
media_type: module.media_type, media_type: module.media_type,
// don't bother enabling because this method is currently only used for vendoring
scope_analysis: false, scope_analysis: false,
}) })
} }
@ -87,10 +85,9 @@ impl ParsedSourceCache {
specifier, specifier,
source, source,
media_type, media_type,
// don't bother enabling because this method is currently only used for emitting
scope_analysis: false, scope_analysis: false,
}; };
DefaultModuleParser.parse_module(options) DefaultEsParser.parse_program(options)
} }
/// Frees the parsed source from memory. /// Frees the parsed source from memory.
@ -100,8 +97,8 @@ impl ParsedSourceCache {
/// Creates a parser that will reuse a ParsedSource from the store /// Creates a parser that will reuse a ParsedSource from the store
/// if it exists, or else parse. /// if it exists, or else parse.
pub fn as_capturing_parser(&self) -> CapturingModuleParser { pub fn as_capturing_parser(&self) -> CapturingEsParser {
CapturingModuleParser::new(None, self) CapturingEsParser::new(None, self)
} }
} }
@ -150,42 +147,3 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCache {
} }
} }
} }
pub struct EsmOrCjsChecker {
parsed_source_cache: Arc<ParsedSourceCache>,
}
impl EsmOrCjsChecker {
pub fn new(parsed_source_cache: Arc<ParsedSourceCache>) -> Self {
Self {
parsed_source_cache,
}
}
pub fn is_esm(
&self,
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> Result<bool, ParseDiagnostic> {
// todo(dsherret): add a file cache here to avoid parsing with swc on each run
let source = match self.parsed_source_cache.get_parsed_source(specifier) {
Some(source) => source.clone(),
None => {
let source = deno_ast::parse_program(deno_ast::ParseParams {
specifier: specifier.clone(),
text: source,
media_type,
capture_tokens: true, // capture because it's used for cjs export analysis
scope_analysis: false,
maybe_syntax: None,
})?;
self
.parsed_source_cache
.set_parsed_source(specifier.clone(), source.clone());
source
}
};
Ok(source.is_module())
}
}

View file

@ -1,5 +1,6 @@
disallowed-methods = [ disallowed-methods = [
{ path = "reqwest::Client::new", reason = "create an HttpClient via an HttpClientProvider instead" }, { path = "reqwest::Client::new", reason = "create an HttpClient via an HttpClientProvider instead" },
{ path = "std::process::exit", reason = "use deno_runtime::exit instead" },
] ]
disallowed-types = [ disallowed-types = [
{ path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" }, { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },

View file

@ -3,11 +3,14 @@
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use crate::resolver::CjsTracker;
use deno_ast::ModuleKind;
use deno_ast::SourceMapOption; use deno_ast::SourceMapOption;
use deno_ast::SourceRange; use deno_ast::SourceRange;
use deno_ast::SourceRanged; use deno_ast::SourceRanged;
use deno_ast::SourceRangedForSpanned; use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileModuleOptions;
use deno_ast::TranspileResult; use deno_ast::TranspileResult;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
@ -19,7 +22,9 @@ use deno_graph::Module;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use std::sync::Arc; use std::sync::Arc;
#[derive(Debug)]
pub struct Emitter { pub struct Emitter {
cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>, emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_and_emit_options: transpile_and_emit_options:
@ -30,6 +35,7 @@ pub struct Emitter {
impl Emitter { impl Emitter {
pub fn new( pub fn new(
cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>, emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions, transpile_options: deno_ast::TranspileOptions,
@ -42,6 +48,7 @@ impl Emitter {
hasher.finish() hasher.finish()
}; };
Self { Self {
cjs_tracker,
emit_cache, emit_cache,
parsed_source_cache, parsed_source_cache,
transpile_and_emit_options: Arc::new((transpile_options, emit_options)), transpile_and_emit_options: Arc::new((transpile_options, emit_options)),
@ -59,21 +66,19 @@ impl Emitter {
continue; continue;
}; };
// todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable() if module.media_type.is_emittable() {
let is_emittable = matches!(
module.media_type,
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx
);
if is_emittable {
futures.push( futures.push(
self self
.emit_parsed_source( .emit_parsed_source(
&module.specifier, &module.specifier,
module.media_type, module.media_type,
ModuleKind::from_is_cjs(
self.cjs_tracker.is_cjs_with_known_is_script(
&module.specifier,
module.media_type,
module.is_script,
)?,
),
&module.source, &module.source,
) )
.boxed_local(), .boxed_local(),
@ -92,9 +97,10 @@ impl Emitter {
pub fn maybe_cached_emit( pub fn maybe_cached_emit(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
source: &str, source: &str,
) -> Option<String> { ) -> Option<String> {
let source_hash = self.get_source_hash(source); let source_hash = self.get_source_hash(module_kind, source);
self.emit_cache.get_emit_code(specifier, source_hash) self.emit_cache.get_emit_code(specifier, source_hash)
} }
@ -102,11 +108,12 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<String, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the sync version below // Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, module_kind, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let parsed_source_cache = self.parsed_source_cache.clone(); let parsed_source_cache = self.parsed_source_cache.clone();
@ -119,8 +126,9 @@ impl Emitter {
EmitParsedSourceHelper::transpile( EmitParsedSourceHelper::transpile(
&parsed_source_cache, &parsed_source_cache,
&specifier, &specifier,
source.clone(),
media_type, media_type,
module_kind,
source.clone(),
&transpile_and_emit_options.0, &transpile_and_emit_options.0,
&transpile_and_emit_options.1, &transpile_and_emit_options.1,
) )
@ -142,18 +150,20 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<String, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the async version above // Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, module_kind, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let transpiled_source = EmitParsedSourceHelper::transpile( let transpiled_source = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache, &self.parsed_source_cache,
specifier, specifier,
source.clone(),
media_type, media_type,
module_kind,
source.clone(),
&self.transpile_and_emit_options.0, &self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1, &self.transpile_and_emit_options.1,
)?; )?;
@ -192,10 +202,20 @@ impl Emitter {
// this statement is probably wrong) // this statement is probably wrong)
let mut options = self.transpile_and_emit_options.1.clone(); let mut options = self.transpile_and_emit_options.1.clone();
options.source_map = SourceMapOption::None; options.source_map = SourceMapOption::None;
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
specifier,
media_type,
parsed_source.compute_is_script(),
)?;
let transpiled_source = parsed_source let transpiled_source = parsed_source
.transpile(&self.transpile_and_emit_options.0, &options)? .transpile(
.into_source() &self.transpile_and_emit_options.0,
.into_string()?; &deno_ast::TranspileModuleOptions {
module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
},
&options,
)?
.into_source();
Ok(transpiled_source.text) Ok(transpiled_source.text)
} }
MediaType::JavaScript MediaType::JavaScript
@ -206,7 +226,7 @@ impl Emitter {
| MediaType::Dcts | MediaType::Dcts
| MediaType::Json | MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => { | MediaType::Unknown => {
// clear this specifier from the parsed source cache as it's now out of date // clear this specifier from the parsed source cache as it's now out of date
@ -219,10 +239,11 @@ impl Emitter {
/// A hashing function that takes the source code and uses the global emit /// A hashing function that takes the source code and uses the global emit
/// options then generates a string hash which can be stored to /// options then generates a string hash which can be stored to
/// determine if the cached emit is valid or not. /// determine if the cached emit is valid or not.
fn get_source_hash(&self, source_text: &str) -> u64 { fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 {
FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash
.write_str(source_text) .write_str(source_text)
.write_u64(self.transpile_and_emit_options_hash) .write_u64(self.transpile_and_emit_options_hash)
.write_hashable(module_kind)
.finish() .finish()
} }
} }
@ -239,9 +260,10 @@ impl<'a> EmitParsedSourceHelper<'a> {
pub fn pre_emit_parsed_source( pub fn pre_emit_parsed_source(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> PreEmitResult { ) -> PreEmitResult {
let source_hash = self.0.get_source_hash(source); let source_hash = self.0.get_source_hash(module_kind, source);
if let Some(emit_code) = if let Some(emit_code) =
self.0.emit_cache.get_emit_code(specifier, source_hash) self.0.emit_cache.get_emit_code(specifier, source_hash)
@ -255,8 +277,9 @@ impl<'a> EmitParsedSourceHelper<'a> {
pub fn transpile( pub fn transpile(
parsed_source_cache: &ParsedSourceCache, parsed_source_cache: &ParsedSourceCache,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: Arc<str>,
transpile_options: &deno_ast::TranspileOptions, transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions, emit_options: &deno_ast::EmitOptions,
) -> Result<String, AnyError> { ) -> Result<String, AnyError> {
@ -265,8 +288,13 @@ impl<'a> EmitParsedSourceHelper<'a> {
let parsed_source = parsed_source_cache let parsed_source = parsed_source_cache
.remove_or_parse_module(specifier, source, media_type)?; .remove_or_parse_module(specifier, source, media_type)?;
ensure_no_import_assertion(&parsed_source)?; ensure_no_import_assertion(&parsed_source)?;
let transpile_result = let transpile_result = parsed_source.transpile(
parsed_source.transpile(transpile_options, emit_options)?; transpile_options,
&TranspileModuleOptions {
module_kind: Some(module_kind),
},
emit_options,
)?;
let transpiled_source = match transpile_result { let transpiled_source = match transpile_result {
TranspileResult::Owned(source) => source, TranspileResult::Owned(source) => source,
TranspileResult::Cloned(source) => { TranspileResult::Cloned(source) => {
@ -275,8 +303,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
} }
}; };
debug_assert!(transpiled_source.source_map.is_none()); debug_assert!(transpiled_source.source_map.is_none());
let text = String::from_utf8(transpiled_source.source)?; Ok(transpiled_source.text)
Ok(text)
} }
pub fn post_emit_parsed_source( pub fn post_emit_parsed_source(
@ -321,7 +348,7 @@ fn ensure_no_import_assertion(
deno_core::anyhow::anyhow!("{}", msg) deno_core::anyhow::anyhow!("{}", msg)
} }
let Some(module) = parsed_source.program_ref().as_module() else { let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else {
return Ok(()); return Ok(());
}; };

View file

@ -38,6 +38,7 @@ fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
ModuleGraphError::ModuleError(err) => match err { ModuleGraphError::ModuleError(err) => match err {
ModuleError::InvalidTypeAssertion { .. } => "SyntaxError", ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic), ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
ModuleError::WasmParseErr(..) => "SyntaxError",
ModuleError::UnsupportedMediaType { .. } ModuleError::UnsupportedMediaType { .. }
| ModuleError::UnsupportedImportAttributeType { .. } => "TypeError", | ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => { ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
@ -88,6 +89,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
} }
} }
fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
"TypeError"
}
pub fn get_error_class_name(e: &AnyError) -> &'static str { pub fn get_error_class_name(e: &AnyError) -> &'static str {
deno_runtime::errors::get_error_class_name(e) deno_runtime::errors::get_error_class_name(e)
.or_else(|| { .or_else(|| {
@ -106,5 +111,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
e.downcast_ref::<ResolutionError>() e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class) .map(get_resolution_error_class)
}) })
.or_else(|| {
e.downcast_ref::<std::num::TryFromIntError>()
.map(get_try_from_int_error_class)
})
.unwrap_or("Error") .unwrap_or("Error")
} }

View file

@ -11,10 +11,10 @@ use crate::args::StorageKeyResolver;
use crate::args::TsConfigType; use crate::args::TsConfigType;
use crate::cache::Caches; use crate::cache::Caches;
use crate::cache::CodeCache; use crate::cache::CodeCache;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::DenoDir; use crate::cache::DenoDir;
use crate::cache::DenoDirProvider; use crate::cache::DenoDirProvider;
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache; use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache; use crate::cache::HttpCache;
use crate::cache::LocalHttpCache; use crate::cache::LocalHttpCache;
@ -33,17 +33,22 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::node::CliCjsCodeAnalyzer; use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeCodeTranslator;
use crate::npm::create_cli_npm_resolver; use crate::npm::create_cli_npm_resolver;
use crate::npm::create_in_npm_pkg_checker;
use crate::npm::CliByonmNpmResolverCreateOptions; use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::resolver::CjsResolutionStore; use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliDenoResolverFs; use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver; use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliResolver;
use crate::resolver::CliNodeResolver; use crate::resolver::CliResolverOptions;
use crate::resolver::CliSloppyImportsResolver; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader; use crate::resolver::NpmModuleLoader;
use crate::resolver::SloppyImportsCachedFs; use crate::resolver::SloppyImportsCachedFs;
use crate::standalone::DenoCompileBinaryWriter; use crate::standalone::DenoCompileBinaryWriter;
@ -51,6 +56,7 @@ use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector; use crate::tools::coverage::CoverageCollector;
use crate::tools::lint::LintRuleProvider; use crate::tools::lint::LintRuleProvider;
use crate::tools::run::hmr::HmrRunner; use crate::tools::run::hmr::HmrRunner;
use crate::tsc::TypeCheckingCjsTracker;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
@ -59,15 +65,20 @@ use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions; use crate::worker::CliMainWorkerOptions;
use std::path::PathBuf; use std::path::PathBuf;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::FeatureChecker; use deno_core::FeatureChecker;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::rustls::RootCertStore;
@ -77,6 +88,7 @@ use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser; use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use log::warn; use log::warn;
use node_resolver::analyze::NodeCodeTranslator; use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::InNpmPackageChecker;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use std::future::Future; use std::future::Future;
use std::sync::Arc; use std::sync::Arc;
@ -118,7 +130,7 @@ impl RootCertStoreProvider for CliRootCertStoreProvider {
} }
} }
struct Deferred<T>(once_cell::unsync::OnceCell<T>); pub struct Deferred<T>(once_cell::unsync::OnceCell<T>);
impl<T> Default for Deferred<T> { impl<T> Default for Deferred<T> {
fn default() -> Self { fn default() -> Self {
@ -164,39 +176,42 @@ impl<T> Deferred<T> {
#[derive(Default)] #[derive(Default)]
struct CliFactoryServices { struct CliFactoryServices {
cli_options: Deferred<Arc<CliOptions>>, blob_store: Deferred<Arc<BlobStore>>,
caches: Deferred<Arc<Caches>>, caches: Deferred<Arc<Caches>>,
cjs_tracker: Deferred<Arc<CjsTracker>>,
cli_options: Deferred<Arc<CliOptions>>,
code_cache: Deferred<Arc<CodeCache>>,
deno_resolver: Deferred<Arc<CliDenoResolver>>,
emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
file_fetcher: Deferred<Arc<FileFetcher>>, file_fetcher: Deferred<Arc<FileFetcher>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
global_http_cache: Deferred<Arc<GlobalHttpCache>>, global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>, http_cache: Deferred<Arc<dyn HttpCache>>,
http_client_provider: Deferred<Arc<HttpClientProvider>>, http_client_provider: Deferred<Arc<HttpClientProvider>>,
emit_cache: Deferred<Arc<EmitCache>>, in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
emitter: Deferred<Arc<Emitter>>,
esm_or_cjs_checker: Deferred<Arc<EsmOrCjsChecker>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>, main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,
module_info_cache: Deferred<Arc<ModuleInfoCache>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
resolver: Deferred<Arc<CliGraphResolver>>,
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>, maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>, module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>,
module_graph_creator: Deferred<Arc<ModuleGraphCreator>>, module_graph_creator: Deferred<Arc<ModuleGraphCreator>>,
module_info_cache: Deferred<Arc<ModuleInfoCache>>,
module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>, module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>, node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>, node_resolver: Deferred<Arc<NodeResolver>>,
npm_cache_dir: Deferred<Arc<NpmCacheDir>>,
npm_req_resolver: Deferred<Arc<CliNpmReqResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>, npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>, permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
pkg_json_resolver: Deferred<Arc<PackageJsonResolver>>,
resolver: Deferred<Arc<CliResolver>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
root_permissions_container: Deferred<PermissionsContainer>, root_permissions_container: Deferred<PermissionsContainer>,
sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>, sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,
text_only_progress_bar: Deferred<ProgressBar>, text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>, type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
code_cache: Deferred<Arc<CodeCache>>,
workspace_resolver: Deferred<Arc<WorkspaceResolver>>, workspace_resolver: Deferred<Arc<WorkspaceResolver>>,
} }
@ -300,12 +315,6 @@ impl CliFactory {
.get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly))
} }
pub fn esm_or_cjs_checker(&self) -> &Arc<EsmOrCjsChecker> {
self.services.esm_or_cjs_checker.get_or_init(|| {
Arc::new(EsmOrCjsChecker::new(self.parsed_source_cache().clone()))
})
}
pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> { pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
self.services.global_http_cache.get_or_try_init(|| { self.services.global_http_cache.get_or_try_init(|| {
Ok(Arc::new(GlobalHttpCache::new( Ok(Arc::new(GlobalHttpCache::new(
@ -359,56 +368,112 @@ impl CliFactory {
self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs))
} }
pub fn in_npm_pkg_checker(
&self,
) -> Result<&Arc<dyn InNpmPackageChecker>, AnyError> {
self.services.in_npm_pkg_checker.get_or_try_init(|| {
let cli_options = self.cli_options()?;
let options = if cli_options.use_byonm() {
CreateInNpmPkgCheckerOptions::Byonm
} else {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(),
maybe_node_modules_path: cli_options
.node_modules_dir_path()
.map(|p| p.as_path()),
},
)
};
Ok(create_in_npm_pkg_checker(options))
})
}
pub fn npm_cache_dir(&self) -> Result<&Arc<NpmCacheDir>, AnyError> {
self.services.npm_cache_dir.get_or_try_init(|| {
let fs = self.fs();
let global_path = self.deno_dir()?.npm_folder_path();
let cli_options = self.cli_options()?;
Ok(Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(fs.as_ref()),
global_path,
cli_options.npmrc().get_all_known_registries_urls(),
)))
})
}
pub async fn npm_resolver( pub async fn npm_resolver(
&self, &self,
) -> Result<&Arc<dyn CliNpmResolver>, AnyError> { ) -> Result<&Arc<dyn CliNpmResolver>, AnyError> {
self self
.services .services
.npm_resolver .npm_resolver
.get_or_try_init_async(async { .get_or_try_init_async(
let fs = self.fs(); async {
let cli_options = self.cli_options()?; let fs = self.fs();
// For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory. let cli_options = self.cli_options()?;
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) { create_cli_npm_resolver(if cli_options.use_byonm() {
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { CliNpmResolverCreateOptions::Byonm(
fs: CliDenoResolverFs(fs.clone()), CliByonmNpmResolverCreateOptions {
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() { fs: CliDenoResolverFs(fs.clone()),
Some(node_modules_path) => node_modules_path.to_path_buf(), pkg_json_resolver: self.pkg_json_resolver().clone(),
// path needs to be canonicalized for node resolution root_node_modules_dir: Some(
// (node_modules_dir_path above is already canonicalized) match cli_options.node_modules_dir_path() {
None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())? Some(node_modules_path) => node_modules_path.to_path_buf(),
.join("node_modules"), // path needs to be canonicalized for node resolution
}), // (node_modules_dir_path above is already canonicalized)
}) None => canonicalize_path_maybe_not_exists(
} else { cli_options.initial_cwd(),
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { )?
snapshot: match cli_options.resolve_npm_resolution_snapshot()? { .join("node_modules"),
Some(snapshot) => { },
CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)) ),
}
None => match cli_options.maybe_lockfile() {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
}, },
}, )
maybe_lockfile: cli_options.maybe_lockfile().cloned(), } else {
fs: fs.clone(), CliNpmResolverCreateOptions::Managed(
http_client_provider: self.http_client_provider().clone(), CliManagedNpmResolverCreateOptions {
npm_global_cache_dir: self.deno_dir()?.npm_folder_path(), snapshot: match cli_options.resolve_npm_resolution_snapshot()? {
cache_setting: cli_options.cache_setting(), Some(snapshot) => {
text_only_progress_bar: self.text_only_progress_bar().clone(), CliNpmResolverManagedSnapshotOption::Specified(Some(
maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(), snapshot,
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::from_workspace(cli_options.workspace())), ))
npm_system_info: cli_options.npm_system_info(), }
npmrc: cli_options.npmrc().clone(), None => match cli_options.maybe_lockfile() {
lifecycle_scripts: cli_options.lifecycle_scripts_config(), Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => {
CliNpmResolverManagedSnapshotOption::Specified(None)
}
},
},
maybe_lockfile: cli_options.maybe_lockfile().cloned(),
fs: fs.clone(),
http_client_provider: self.http_client_provider().clone(),
npm_cache_dir: self.npm_cache_dir()?.clone(),
cache_setting: cli_options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: cli_options
.node_modules_dir_path()
.cloned(),
npm_install_deps_provider: Arc::new(
NpmInstallDepsProvider::from_workspace(
cli_options.workspace(),
),
),
npm_system_info: cli_options.npm_system_info(),
npmrc: cli_options.npmrc().clone(),
lifecycle_scripts: cli_options.lifecycle_scripts_config(),
},
)
}) })
}).await .await
}.boxed_local()) }
.boxed_local(),
)
.await .await
} }
@ -463,28 +528,47 @@ impl CliFactory {
.await .await
} }
pub async fn resolver(&self) -> Result<&Arc<CliGraphResolver>, AnyError> { pub async fn deno_resolver(&self) -> Result<&Arc<CliDenoResolver>, AnyError> {
self
.services
.deno_resolver
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliDenoResolver::new(DenoResolverOptions {
in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
node_and_req_resolver: if cli_options.no_npm() {
None
} else {
Some(NodeAndNpmReqResolver {
node_resolver: self.node_resolver().await?.clone(),
npm_req_resolver: self.npm_req_resolver().await?.clone(),
})
},
sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
workspace_resolver: self.workspace_resolver().await?.clone(),
is_byonm: cli_options.use_byonm(),
maybe_vendor_dir: cli_options.vendor_dir_path(),
})))
})
.await
}
pub async fn resolver(&self) -> Result<&Arc<CliResolver>, AnyError> {
self self
.services .services
.resolver .resolver
.get_or_try_init_async( .get_or_try_init_async(
async { async {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions { Ok(Arc::new(CliResolver::new(CliResolverOptions {
sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
node_resolver: Some(self.cli_node_resolver().await?.clone()),
npm_resolver: if cli_options.no_npm() { npm_resolver: if cli_options.no_npm() {
None None
} else { } else {
Some(self.npm_resolver().await?.clone()) Some(self.npm_resolver().await?.clone())
}, },
workspace_resolver: self.workspace_resolver().await?.clone(),
bare_node_builtins_enabled: cli_options bare_node_builtins_enabled: cli_options
.unstable_bare_node_builtins(), .unstable_bare_node_builtins(),
maybe_jsx_import_source_config: cli_options deno_resolver: self.deno_resolver().await?.clone(),
.workspace()
.to_maybe_jsx_import_source_config()?,
maybe_vendor_dir: cli_options.vendor_dir_path(),
}))) })))
} }
.boxed_local(), .boxed_local(),
@ -513,6 +597,7 @@ impl CliFactory {
self.services.module_info_cache.get_or_try_init(|| { self.services.module_info_cache.get_or_try_init(|| {
Ok(Arc::new(ModuleInfoCache::new( Ok(Arc::new(ModuleInfoCache::new(
self.caches()?.dep_analysis_db(), self.caches()?.dep_analysis_db(),
self.parsed_source_cache().clone(),
))) )))
}) })
} }
@ -541,6 +626,7 @@ impl CliFactory {
ts_config_result.ts_config, ts_config_result.ts_config,
)?; )?;
Ok(Arc::new(Emitter::new( Ok(Arc::new(Emitter::new(
self.cjs_tracker()?.clone(),
self.emit_cache()?.clone(), self.emit_cache()?.clone(),
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
transpile_options, transpile_options,
@ -564,7 +650,13 @@ impl CliFactory {
async { async {
Ok(Arc::new(NodeResolver::new( Ok(Arc::new(NodeResolver::new(
DenoFsNodeResolverEnv::new(self.fs().clone()), DenoFsNodeResolverEnv::new(self.fs().clone()),
self.npm_resolver().await?.clone().into_npm_resolver(), self.in_npm_pkg_checker()?.clone(),
self
.npm_resolver()
.await?
.clone()
.into_npm_pkg_folder_resolver(),
self.pkg_json_resolver().clone(),
))) )))
} }
.boxed_local(), .boxed_local(),
@ -582,24 +674,57 @@ impl CliFactory {
let caches = self.caches()?; let caches = self.caches()?;
let node_analysis_cache = let node_analysis_cache =
NodeAnalysisCache::new(caches.node_analysis_db()); NodeAnalysisCache::new(caches.node_analysis_db());
let node_resolver = self.cli_node_resolver().await?.clone(); let node_resolver = self.node_resolver().await?.clone();
let cjs_esm_analyzer = CliCjsCodeAnalyzer::new( let cjs_esm_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache, node_analysis_cache,
self.cjs_tracker()?.clone(),
self.fs().clone(), self.fs().clone(),
node_resolver,
Some(self.parsed_source_cache().clone()), Some(self.parsed_source_cache().clone()),
); );
Ok(Arc::new(NodeCodeTranslator::new( Ok(Arc::new(NodeCodeTranslator::new(
cjs_esm_analyzer, cjs_esm_analyzer,
DenoFsNodeResolverEnv::new(self.fs().clone()), DenoFsNodeResolverEnv::new(self.fs().clone()),
self.node_resolver().await?.clone(), self.in_npm_pkg_checker()?.clone(),
self.npm_resolver().await?.clone().into_npm_resolver(), node_resolver,
self
.npm_resolver()
.await?
.clone()
.into_npm_pkg_folder_resolver(),
self.pkg_json_resolver().clone(),
))) )))
}) })
.await .await
} }
pub async fn npm_req_resolver(
&self,
) -> Result<&Arc<CliNpmReqResolver>, AnyError> {
self
.services
.npm_req_resolver
.get_or_try_init_async(async {
let npm_resolver = self.npm_resolver().await?;
Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
fs: CliDenoResolverFs(self.fs().clone()),
in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
node_resolver: self.node_resolver().await?.clone(),
npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
})))
})
.await
}
pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
self.services.pkg_json_resolver.get_or_init(|| {
Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new(
self.fs().clone(),
)))
})
}
pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> { pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> {
self self
.services .services
@ -608,6 +733,10 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(TypeChecker::new( Ok(Arc::new(TypeChecker::new(
self.caches()?.clone(), self.caches()?.clone(),
Arc::new(TypeCheckingCjsTracker::new(
self.cjs_tracker()?.clone(),
self.module_info_cache()?.clone(),
)),
cli_options.clone(), cli_options.clone(),
self.module_graph_builder().await?.clone(), self.module_graph_builder().await?.clone(),
self.node_resolver().await?.clone(), self.node_resolver().await?.clone(),
@ -626,19 +755,19 @@ impl CliFactory {
.get_or_try_init_async(async { .get_or_try_init_async(async {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphBuilder::new( Ok(Arc::new(ModuleGraphBuilder::new(
cli_options.clone(),
self.caches()?.clone(), self.caches()?.clone(),
self.esm_or_cjs_checker().clone(), self.cjs_tracker()?.clone(),
cli_options.clone(),
self.file_fetcher()?.clone(),
self.fs().clone(), self.fs().clone(),
self.resolver().await?.clone(), self.global_http_cache()?.clone(),
self.cli_node_resolver().await?.clone(), self.in_npm_pkg_checker()?.clone(),
self.npm_resolver().await?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
cli_options.maybe_lockfile().cloned(), cli_options.maybe_lockfile().cloned(),
self.maybe_file_watcher_reporter().clone(), self.maybe_file_watcher_reporter().clone(),
self.file_fetcher()?.clone(), self.module_info_cache()?.clone(),
self.global_http_cache()?.clone(), self.npm_resolver().await?.clone(),
self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
self.root_permissions_container()?.clone(), self.root_permissions_container()?.clone(),
))) )))
}) })
@ -710,25 +839,18 @@ impl CliFactory {
.await .await
} }
pub fn cjs_resolutions(&self) -> &Arc<CjsResolutionStore> { pub fn cjs_tracker(&self) -> Result<&Arc<CjsTracker>, AnyError> {
self.services.cjs_resolutions.get_or_init(Default::default) self.services.cjs_tracker.get_or_try_init(|| {
} let options = self.cli_options()?;
Ok(Arc::new(CjsTracker::new(
pub async fn cli_node_resolver( self.in_npm_pkg_checker()?.clone(),
&self, self.pkg_json_resolver().clone(),
) -> Result<&Arc<CliNodeResolver>, AnyError> { IsCjsResolverOptions {
self detect_cjs: options.detect_cjs(),
.services is_node_main: options.is_node_main(),
.cli_node_resolver },
.get_or_try_init_async(async { )))
Ok(Arc::new(CliNodeResolver::new( })
self.cjs_resolutions().clone(),
self.fs().clone(),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),
)))
})
.await
} }
pub fn permission_desc_parser( pub fn permission_desc_parser(
@ -761,6 +883,8 @@ impl CliFactory {
) -> Result<DenoCompileBinaryWriter, AnyError> { ) -> Result<DenoCompileBinaryWriter, AnyError> {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new( Ok(DenoCompileBinaryWriter::new(
self.cjs_tracker()?,
self.cli_options()?,
self.deno_dir()?, self.deno_dir()?,
self.emitter()?, self.emitter()?,
self.file_fetcher()?, self.file_fetcher()?,
@ -791,58 +915,67 @@ impl CliFactory {
&self, &self,
) -> Result<CliMainWorkerFactory, AnyError> { ) -> Result<CliMainWorkerFactory, AnyError> {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
let fs = self.fs();
let node_resolver = self.node_resolver().await?; let node_resolver = self.node_resolver().await?;
let npm_resolver = self.npm_resolver().await?; let npm_resolver = self.npm_resolver().await?;
let fs = self.fs();
let cli_node_resolver = self.cli_node_resolver().await?;
let cli_npm_resolver = self.npm_resolver().await?.clone(); let cli_npm_resolver = self.npm_resolver().await?.clone();
let in_npm_pkg_checker = self.in_npm_pkg_checker()?;
let maybe_file_watcher_communicator = if cli_options.has_hmr() { let maybe_file_watcher_communicator = if cli_options.has_hmr() {
Some(self.watcher_communicator.clone().unwrap()) Some(self.watcher_communicator.clone().unwrap())
} else { } else {
None None
}; };
let node_code_translator = self.node_code_translator().await?;
let cjs_tracker = self.cjs_tracker()?.clone();
let pkg_json_resolver = self.pkg_json_resolver().clone();
let npm_req_resolver = self.npm_req_resolver().await?;
Ok(CliMainWorkerFactory::new( Ok(CliMainWorkerFactory::new(
self.blob_store().clone(), self.blob_store().clone(),
self.cjs_resolutions().clone(),
if cli_options.code_cache_enabled() { if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone()) Some(self.code_cache()?.clone())
} else { } else {
None None
}, },
self.feature_checker()?.clone(), self.feature_checker()?.clone(),
self.fs().clone(), fs.clone(),
maybe_file_watcher_communicator, maybe_file_watcher_communicator,
self.maybe_inspector_server()?.clone(), self.maybe_inspector_server()?.clone(),
cli_options.maybe_lockfile().cloned(), cli_options.maybe_lockfile().cloned(),
Box::new(CliModuleLoaderFactory::new( Box::new(CliModuleLoaderFactory::new(
cli_options, cli_options,
cjs_tracker,
if cli_options.code_cache_enabled() { if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone()) Some(self.code_cache()?.clone())
} else { } else {
None None
}, },
self.emitter()?.clone(), self.emitter()?.clone(),
fs.clone(),
in_npm_pkg_checker.clone(),
self.main_module_graph_container().await?.clone(), self.main_module_graph_container().await?.clone(),
self.module_load_preparer().await?.clone(), self.module_load_preparer().await?.clone(),
cli_node_resolver.clone(), node_code_translator.clone(),
node_resolver.clone(),
npm_req_resolver.clone(),
cli_npm_resolver.clone(), cli_npm_resolver.clone(),
NpmModuleLoader::new( NpmModuleLoader::new(
self.cjs_resolutions().clone(), self.cjs_tracker()?.clone(),
self.node_code_translator().await?.clone(),
fs.clone(), fs.clone(),
cli_node_resolver.clone(), node_code_translator.clone(),
), ),
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
self.resolver().await?.clone(), self.resolver().await?.clone(),
)), )),
node_resolver.clone(), node_resolver.clone(),
npm_resolver.clone(), npm_resolver.clone(),
pkg_json_resolver,
self.root_cert_store_provider().clone(), self.root_cert_store_provider().clone(),
self.root_permissions_container()?.clone(), self.root_permissions_container()?.clone(),
StorageKeyResolver::from_options(cli_options), StorageKeyResolver::from_options(cli_options),
cli_options.sub_command().clone(), cli_options.sub_command().clone(),
self.create_cli_main_worker_options()?, self.create_cli_main_worker_options()?,
self.cli_options()?.otel_config(),
)) ))
} }
@ -891,7 +1024,6 @@ impl CliFactory {
inspect_wait: cli_options.inspect_wait().is_some(), inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(), strace_ops: cli_options.strace_ops().clone(),
is_inspecting: cli_options.is_inspecting(), is_inspecting: cli_options.is_inspecting(),
is_npm_main: cli_options.is_npm_main(),
location: cli_options.location_flag().clone(), location: cli_options.location_flag().clone(),
// if the user ran a binary command, we'll need to set process.argv[0] // if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno // to be the name of the binary command instead of deno
@ -909,7 +1041,6 @@ impl CliFactory {
node_ipc: cli_options.node_ipc_fd(), node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(), serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(), serve_host: cli_options.serve_host(),
unstable_detect_cjs: cli_options.unstable_detect_cjs(),
}) })
} }
} }

View file

@ -164,8 +164,19 @@ fn get_validated_scheme(
) -> Result<String, AnyError> { ) -> Result<String, AnyError> {
let scheme = specifier.scheme(); let scheme = specifier.scheme();
if !SUPPORTED_SCHEMES.contains(&scheme) { if !SUPPORTED_SCHEMES.contains(&scheme) {
// NOTE(bartlomieju): this message list additional `npm` and `jsr` schemes, but they should actually be handled
// before `file_fetcher.rs` APIs are even hit.
let mut all_supported_schemes = SUPPORTED_SCHEMES.to_vec();
all_supported_schemes.extend_from_slice(&["npm", "jsr"]);
all_supported_schemes.sort();
let scheme_list = all_supported_schemes
.iter()
.map(|scheme| format!(" - \"{}\"", scheme))
.collect::<Vec<_>>()
.join("\n");
Err(generic_error(format!( Err(generic_error(format!(
"Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes: {SUPPORTED_SCHEMES:#?}" "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes:\n{}",
scheme_list
))) )))
} else { } else {
Ok(scheme.to_string()) Ok(scheme.to_string())

View file

@ -6,7 +6,6 @@ use crate::args::CliLockfile;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::cache; use crate::cache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache; use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache; use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
@ -14,17 +13,19 @@ use crate::colors;
use crate::errors::get_error_class_name; use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver; use crate::resolver::CjsTracker;
use crate::resolver::CliNodeResolver; use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs; use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check; use crate::tools::check;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path; use crate::util::fs::canonicalize_path;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::JsrPackageConfig; use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_graph::source::LoaderChecksum; use deno_graph::source::LoaderChecksum;
use deno_graph::source::ResolutionKind;
use deno_graph::FillFromLockfileOptions; use deno_graph::FillFromLockfileOptions;
use deno_graph::JsrLoadError; use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError; use deno_graph::ModuleLoadError;
@ -43,13 +44,14 @@ use deno_graph::ModuleGraphError;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use deno_graph::SpecifierError; use deno_graph::SpecifierError;
use deno_path_util::url_to_file_path; use deno_path_util::url_to_file_path;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode; use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq; use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use import_map::ImportMapError; use import_map::ImportMapError;
use node_resolver::InNpmPackageChecker;
use std::collections::HashSet; use std::collections::HashSet;
use std::error::Error; use std::error::Error;
use std::ops::Deref; use std::ops::Deref;
@ -186,7 +188,7 @@ pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
fn exit_for_integrity_error(err: &ModuleError) { fn exit_for_integrity_error(err: &ModuleError) {
if let Some(err_message) = enhanced_integrity_error_message(err) { if let Some(err_message) = enhanced_integrity_error_message(err) {
log::error!("{} {}", colors::red("error:"), err_message); log::error!("{} {}", colors::red("error:"), err_message);
std::process::exit(10); deno_runtime::exit(10);
} }
} }
@ -379,54 +381,54 @@ pub struct BuildFastCheckGraphOptions<'a> {
} }
pub struct ModuleGraphBuilder { pub struct ModuleGraphBuilder {
options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>, cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
file_fetcher: Arc<FileFetcher>, module_info_cache: Arc<ModuleInfoCache>,
global_http_cache: Arc<GlobalHttpCache>, npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer, root_permissions_container: PermissionsContainer,
} }
impl ModuleGraphBuilder { impl ModuleGraphBuilder {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>, cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
file_fetcher: Arc<FileFetcher>, module_info_cache: Arc<ModuleInfoCache>,
global_http_cache: Arc<GlobalHttpCache>, npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer, root_permissions_container: PermissionsContainer,
) -> Self { ) -> Self {
Self { Self {
options,
caches, caches,
esm_or_cjs_checker, cjs_tracker,
cli_options,
file_fetcher,
fs, fs,
resolver, global_http_cache,
node_resolver, in_npm_pkg_checker,
npm_resolver,
module_info_cache,
parsed_source_cache,
lockfile, lockfile,
maybe_file_watcher_reporter, maybe_file_watcher_reporter,
file_fetcher, module_info_cache,
global_http_cache, npm_resolver,
parsed_source_cache,
resolver,
root_permissions_container, root_permissions_container,
} }
} }
@ -512,19 +514,17 @@ impl ModuleGraphBuilder {
} }
let maybe_imports = if options.graph_kind.include_types() { let maybe_imports = if options.graph_kind.include_types() {
self.options.to_compiler_option_types()? self.cli_options.to_compiler_option_types()?
} else { } else {
Vec::new() Vec::new()
}; };
let analyzer = self let analyzer = self.module_info_cache.as_module_analyzer();
.module_info_cache
.as_module_analyzer(&self.parsed_source_cache);
let mut loader = match options.loader { let mut loader = match options.loader {
Some(loader) => MutLoaderRef::Borrowed(loader), Some(loader) => MutLoaderRef::Borrowed(loader),
None => MutLoaderRef::Owned(self.create_graph_loader()), None => MutLoaderRef::Owned(self.create_graph_loader()),
}; };
let cli_resolver = &self.resolver; let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver(); let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
let maybe_file_watcher_reporter = self let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter .maybe_file_watcher_reporter
@ -549,7 +549,7 @@ impl ModuleGraphBuilder {
npm_resolver: Some(&graph_npm_resolver), npm_resolver: Some(&graph_npm_resolver),
module_analyzer: &analyzer, module_analyzer: &analyzer,
reporter: maybe_file_watcher_reporter, reporter: maybe_file_watcher_reporter,
resolver: Some(graph_resolver), resolver: Some(&graph_resolver),
locker: locker.as_mut().map(|l| l as _), locker: locker.as_mut().map(|l| l as _),
}, },
) )
@ -566,7 +566,7 @@ impl ModuleGraphBuilder {
// ensure an "npm install" is done if the user has explicitly // ensure an "npm install" is done if the user has explicitly
// opted into using a node_modules directory // opted into using a node_modules directory
if self if self
.options .cli_options
.node_modules_dir()? .node_modules_dir()?
.map(|m| m.uses_node_modules_dir()) .map(|m| m.uses_node_modules_dir())
.unwrap_or(false) .unwrap_or(false)
@ -672,16 +672,16 @@ impl ModuleGraphBuilder {
}; };
let parser = self.parsed_source_cache.as_capturing_parser(); let parser = self.parsed_source_cache.as_capturing_parser();
let cli_resolver = &self.resolver; let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver(); let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
graph.build_fast_check_type_graph( graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions { deno_graph::BuildFastCheckTypeGraphOptions {
jsr_url_provider: &CliJsrUrlProvider, es_parser: Some(&parser),
fast_check_cache: fast_check_cache.as_ref().map(|c| c as _), fast_check_cache: fast_check_cache.as_ref().map(|c| c as _),
fast_check_dts: false, fast_check_dts: false,
module_parser: Some(&parser), jsr_url_provider: &CliJsrUrlProvider,
resolver: Some(graph_resolver), resolver: Some(&graph_resolver),
npm_resolver: Some(&graph_npm_resolver), npm_resolver: Some(&graph_npm_resolver),
workspace_fast_check: options.workspace_fast_check, workspace_fast_check: options.workspace_fast_check,
}, },
@ -699,20 +699,18 @@ impl ModuleGraphBuilder {
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> cache::FetchCacher { ) -> cache::FetchCacher {
cache::FetchCacher::new( cache::FetchCacher::new(
self.esm_or_cjs_checker.clone(),
self.file_fetcher.clone(), self.file_fetcher.clone(),
self.fs.clone(),
self.global_http_cache.clone(), self.global_http_cache.clone(),
self.node_resolver.clone(), self.in_npm_pkg_checker.clone(),
self.npm_resolver.clone(),
self.module_info_cache.clone(), self.module_info_cache.clone(),
cache::FetchCacherOptions { cache::FetchCacherOptions {
file_header_overrides: self.options.resolve_file_header_overrides(), file_header_overrides: self.cli_options.resolve_file_header_overrides(),
permissions, permissions,
is_deno_publish: matches!( is_deno_publish: matches!(
self.options.sub_command(), self.cli_options.sub_command(),
crate::args::DenoSubcommand::Publish { .. } crate::args::DenoSubcommand::Publish { .. }
), ),
unstable_detect_cjs: self.options.unstable_detect_cjs(),
}, },
) )
} }
@ -737,16 +735,28 @@ impl ModuleGraphBuilder {
&self.fs, &self.fs,
roots, roots,
GraphValidOptions { GraphValidOptions {
kind: if self.options.type_check_mode().is_true() { kind: if self.cli_options.type_check_mode().is_true() {
GraphKind::All GraphKind::All
} else { } else {
GraphKind::CodeOnly GraphKind::CodeOnly
}, },
check_js: self.options.check_js(), check_js: self.cli_options.check_js(),
exit_integrity_errors: true, exit_integrity_errors: true,
}, },
) )
} }
fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> {
let jsx_import_source_config = self
.cli_options
.workspace()
.to_maybe_jsx_import_source_config()?;
Ok(CliGraphResolver {
cjs_tracker: &self.cjs_tracker,
resolver: &self.resolver,
jsx_import_source_config,
})
}
} }
/// Adds more explanatory information to a resolution error. /// Adds more explanatory information to a resolution error.
@ -785,7 +795,7 @@ fn enhanced_sloppy_imports_error_message(
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => { | ModuleError::Missing(specifier, _) => {
let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone())) let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
.resolve(specifier, SloppyImportsResolutionMode::Execution)? .resolve(specifier, SloppyImportsResolutionKind::Execution)?
.as_suggestion_message(); .as_suggestion_message();
Some(format!( Some(format!(
"{} {} or run with --unstable-sloppy-imports", "{} {} or run with --unstable-sloppy-imports",
@ -1009,7 +1019,11 @@ impl deno_graph::source::Reporter for FileWatcherReporter {
) { ) {
let mut file_paths = self.file_paths.lock(); let mut file_paths = self.file_paths.lock();
if specifier.scheme() == "file" { if specifier.scheme() == "file" {
file_paths.push(specifier.to_file_path().unwrap()); // Don't trust that the path is a valid path at this point:
// https://github.com/denoland/deno/issues/26209.
if let Ok(file_path) = specifier.to_file_path() {
file_paths.push(file_path);
}
} }
if modules_done == modules_total { if modules_done == modules_total {
@ -1086,12 +1100,12 @@ impl<'a> deno_graph::source::FileSystem for DenoGraphFsAdapter<'a> {
} }
} }
pub fn format_range_with_colors(range: &deno_graph::Range) -> String { pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String {
format!( format!(
"{}:{}:{}", "{}:{}:{}",
colors::cyan(range.specifier.as_str()), colors::cyan(referrer.specifier.as_str()),
colors::yellow(&(range.start.line + 1).to_string()), colors::yellow(&(referrer.range.start.line + 1).to_string()),
colors::yellow(&(range.start.character + 1).to_string()) colors::yellow(&(referrer.range.start.character + 1).to_string())
) )
} }
@ -1147,13 +1161,88 @@ fn format_deno_graph_error(err: &dyn Error) -> String {
message message
} }
#[derive(Debug)]
struct CliGraphResolver<'a> {
cjs_tracker: &'a CjsTracker,
resolver: &'a CliResolver,
jsx_import_source_config: Option<JsxImportSourceConfig>,
}
impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
fn default_jsx_import_source(&self) -> Option<String> {
self
.jsx_import_source_config
.as_ref()
.and_then(|c| c.default_specifier.clone())
}
fn default_jsx_import_source_types(&self) -> Option<String> {
self
.jsx_import_source_config
.as_ref()
.and_then(|c| c.default_types_specifier.clone())
}
fn jsx_import_source_module(&self) -> &str {
self
.jsx_import_source_config
.as_ref()
.map(|c| c.module.as_str())
.unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
}
fn resolve(
&self,
raw_specifier: &str,
referrer_range: &deno_graph::Range,
resolution_kind: ResolutionKind,
) -> Result<ModuleSpecifier, ResolveError> {
self.resolver.resolve(
raw_specifier,
&referrer_range.specifier,
referrer_range.range.start,
referrer_range
.resolution_mode
.map(to_node_resolution_mode)
.unwrap_or_else(|| {
self
.cjs_tracker
.get_referrer_kind(&referrer_range.specifier)
}),
to_node_resolution_kind(resolution_kind),
)
}
}
pub fn to_node_resolution_kind(
kind: ResolutionKind,
) -> node_resolver::NodeResolutionKind {
match kind {
ResolutionKind::Execution => node_resolver::NodeResolutionKind::Execution,
ResolutionKind::Types => node_resolver::NodeResolutionKind::Types,
}
}
pub fn to_node_resolution_mode(
mode: deno_graph::source::ResolutionMode,
) -> node_resolver::ResolutionMode {
match mode {
deno_graph::source::ResolutionMode::Import => {
node_resolver::ResolutionMode::Import
}
deno_graph::source::ResolutionMode::Require => {
node_resolver::ResolutionMode::Require
}
}
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_graph::source::ResolveError; use deno_graph::source::ResolveError;
use deno_graph::Position; use deno_graph::PositionRange;
use deno_graph::Range; use deno_graph::Range;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use deno_graph::SpecifierError; use deno_graph::SpecifierError;
@ -1174,8 +1263,8 @@ mod test {
specifier: input.to_string(), specifier: input.to_string(),
range: Range { range: Range {
specifier, specifier,
start: Position::zeroed(), resolution_mode: None,
end: Position::zeroed(), range: PositionRange::zeroed(),
}, },
}; };
assert_eq!(get_resolution_error_bare_node_specifier(&err), output); assert_eq!(get_resolution_error_bare_node_specifier(&err), output);
@ -1190,8 +1279,8 @@ mod test {
let err = ResolutionError::InvalidSpecifier { let err = ResolutionError::InvalidSpecifier {
range: Range { range: Range {
specifier, specifier,
start: Position::zeroed(), resolution_mode: None,
end: Position::zeroed(), range: PositionRange::zeroed(),
}, },
error: SpecifierError::ImportPrefixMissing { error: SpecifierError::ImportPrefixMissing {
specifier: input.to_string(), specifier: input.to_string(),

View file

@ -177,6 +177,52 @@ function isCanvasLike(obj) {
return obj !== null && typeof obj === "object" && "toDataURL" in obj; return obj !== null && typeof obj === "object" && "toDataURL" in obj;
} }
function isJpg(obj) {
// Check if obj is a Uint8Array
if (!(obj instanceof Uint8Array)) {
return false;
}
// JPG files start with the magic bytes FF D8
if (obj.length < 2 || obj[0] !== 0xFF || obj[1] !== 0xD8) {
return false;
}
// JPG files end with the magic bytes FF D9
if (
obj.length < 2 || obj[obj.length - 2] !== 0xFF ||
obj[obj.length - 1] !== 0xD9
) {
return false;
}
return true;
}
function isPng(obj) {
// Check if obj is a Uint8Array
if (!(obj instanceof Uint8Array)) {
return false;
}
// PNG files start with a specific 8-byte signature
const pngSignature = [137, 80, 78, 71, 13, 10, 26, 10];
// Check if the array is at least as long as the signature
if (obj.length < pngSignature.length) {
return false;
}
// Check each byte of the signature
for (let i = 0; i < pngSignature.length; i++) {
if (obj[i] !== pngSignature[i]) {
return false;
}
}
return true;
}
/** Possible HTML and SVG Elements */ /** Possible HTML and SVG Elements */
function isSVGElementLike(obj) { function isSVGElementLike(obj) {
return obj !== null && typeof obj === "object" && "outerHTML" in obj && return obj !== null && typeof obj === "object" && "outerHTML" in obj &&
@ -233,6 +279,16 @@ async function format(obj) {
if (isDataFrameLike(obj)) { if (isDataFrameLike(obj)) {
return extractDataFrame(obj); return extractDataFrame(obj);
} }
if (isJpg(obj)) {
return {
"image/jpeg": core.ops.op_base64_encode(obj),
};
}
if (isPng(obj)) {
return {
"image/png": core.ops.op_base64_encode(obj),
};
}
if (isSVGElementLike(obj)) { if (isSVGElementLike(obj)) {
return { return {
"image/svg+xml": obj.outerHTML, "image/svg+xml": obj.outerHTML,
@ -314,6 +370,28 @@ const html = createTaggedTemplateDisplayable("text/html");
*/ */
const svg = createTaggedTemplateDisplayable("image/svg+xml"); const svg = createTaggedTemplateDisplayable("image/svg+xml");
function image(obj) {
if (typeof obj === "string") {
try {
obj = Deno.readFileSync(obj);
} catch {
// pass
}
}
if (isJpg(obj)) {
return makeDisplayable({ "image/jpeg": core.ops.op_base64_encode(obj) });
}
if (isPng(obj)) {
return makeDisplayable({ "image/png": core.ops.op_base64_encode(obj) });
}
throw new TypeError(
"Object is not a valid image or a path to an image. `Deno.jupyter.image` supports displaying JPG or PNG images.",
);
}
function isMediaBundle(obj) { function isMediaBundle(obj) {
if (obj == null || typeof obj !== "object" || Array.isArray(obj)) { if (obj == null || typeof obj !== "object" || Array.isArray(obj)) {
return false; return false;
@ -465,6 +543,7 @@ function enableJupyter() {
md, md,
html, html,
svg, svg,
image,
$display, $display,
}; };
} }

View file

@ -10,8 +10,10 @@ use super::tsc;
use super::urls::url_to_uri; use super::urls::url_to_uri;
use crate::args::jsr_url; use crate::args::jsr_url;
use crate::lsp::logging::lsp_warn;
use crate::lsp::search::PackageSearchApi; use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use crate::util::path::relative_specifier;
use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolution;
use deno_lint::diagnostic::LintDiagnosticRange; use deno_lint::diagnostic::LintDiagnosticRange;
@ -36,7 +38,8 @@ use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference; use deno_semver::package::PackageReqReference;
use deno_semver::Version; use deno_semver::Version;
use import_map::ImportMap; use import_map::ImportMap;
use node_resolver::NpmResolver; use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use std::borrow::Cow; use std::borrow::Cow;
@ -229,6 +232,7 @@ pub struct TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: ModuleSpecifier, file_referrer: ModuleSpecifier,
} }
@ -237,12 +241,14 @@ impl<'a> TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: &ModuleSpecifier, file_referrer: &ModuleSpecifier,
) -> Self { ) -> Self {
Self { Self {
documents, documents,
maybe_import_map, maybe_import_map,
resolver, resolver,
tsc_specifier_map,
file_referrer: file_referrer.clone(), file_referrer: file_referrer.clone(),
} }
} }
@ -336,7 +342,11 @@ impl<'a> TsResponseImportMapper<'a> {
.resolver .resolver
.maybe_managed_npm_resolver(Some(&self.file_referrer)) .maybe_managed_npm_resolver(Some(&self.file_referrer))
{ {
if npm_resolver.in_npm_package(specifier) { let in_npm_pkg = self
.resolver
.in_npm_pkg_checker(Some(&self.file_referrer))
.in_npm_package(specifier);
if in_npm_pkg {
if let Ok(Some(pkg_id)) = if let Ok(Some(pkg_id)) =
npm_resolver.resolve_pkg_id_from_specifier(specifier) npm_resolver.resolve_pkg_id_from_specifier(specifier)
{ {
@ -383,6 +393,11 @@ impl<'a> TsResponseImportMapper<'a> {
} }
} }
} }
} else if let Some(dep_name) = self
.resolver
.file_url_to_package_json_dep(specifier, Some(&self.file_referrer))
{
return Some(dep_name);
} }
// check if the import map has this specifier // check if the import map has this specifier
@ -452,20 +467,36 @@ impl<'a> TsResponseImportMapper<'a> {
&self, &self,
specifier: &str, specifier: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
) -> Option<String> { ) -> Option<String> {
if let Ok(specifier) = referrer.join(specifier) { let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
if let Some(specifier) = self.check_specifier(&specifier, referrer) { let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
return Some(specifier); SUPPORTED_EXTENSIONS
} .iter()
} .map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))),
let specifier = specifier.strip_suffix(".js").unwrap_or(specifier); );
for ext in SUPPORTED_EXTENSIONS { for specifier in specifiers {
let specifier_with_ext = format!("{specifier}{ext}"); if let Some(specifier) = self
if self .resolver
.documents .as_cli_resolver(Some(&self.file_referrer))
.contains_import(&specifier_with_ext, referrer) .resolve(
&specifier,
referrer,
deno_graph::Position::zeroed(),
resolution_mode,
NodeResolutionKind::Types,
)
.ok()
.and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
.filter(|s| self.documents.exists(s, Some(&self.file_referrer)))
{ {
return Some(specifier_with_ext); if let Some(specifier) = self
.check_specifier(&specifier, referrer)
.or_else(|| relative_specifier(referrer, &specifier))
.filter(|s| !s.contains("/node_modules/"))
{
return Some(specifier);
}
} }
} }
None None
@ -475,18 +506,17 @@ impl<'a> TsResponseImportMapper<'a> {
&self, &self,
specifier_text: &str, specifier_text: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
) -> bool { ) -> bool {
self self
.resolver .resolver
.as_graph_resolver(Some(&self.file_referrer)) .as_cli_resolver(Some(&self.file_referrer))
.resolve( .resolve(
specifier_text, specifier_text,
&deno_graph::Range { referrer,
specifier: referrer.clone(), deno_graph::Position::zeroed(),
start: deno_graph::Position::zeroed(), resolution_mode,
end: deno_graph::Position::zeroed(), NodeResolutionKind::Types,
},
deno_graph::source::ResolutionMode::Types,
) )
.is_ok() .is_ok()
} }
@ -554,9 +584,11 @@ fn try_reverse_map_package_json_exports(
/// like an import and rewrite the import specifier to include the extension /// like an import and rewrite the import specifier to include the extension
pub fn fix_ts_import_changes( pub fn fix_ts_import_changes(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
changes: &[tsc::FileTextChanges], changes: &[tsc::FileTextChanges],
import_mapper: &TsResponseImportMapper, language_server: &language_server::Inner,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> { ) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
let mut r = Vec::new(); let mut r = Vec::new();
for change in changes { for change in changes {
let mut text_changes = Vec::new(); let mut text_changes = Vec::new();
@ -569,8 +601,8 @@ pub fn fix_ts_import_changes(
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) { if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) {
let specifier = let specifier =
captures.iter().skip(1).find_map(|s| s).unwrap().as_str(); captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
if let Some(new_specifier) = if let Some(new_specifier) = import_mapper
import_mapper.check_unresolved_specifier(specifier, referrer) .check_unresolved_specifier(specifier, referrer, resolution_mode)
{ {
line.replace(specifier, &new_specifier) line.replace(specifier, &new_specifier)
} else { } else {
@ -600,8 +632,9 @@ pub fn fix_ts_import_changes(
/// resolution by Deno (includes the extension). /// resolution by Deno (includes the extension).
fn fix_ts_import_action<'a>( fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
action: &'a tsc::CodeFixAction, action: &'a tsc::CodeFixAction,
import_mapper: &TsResponseImportMapper, language_server: &language_server::Inner,
) -> Option<Cow<'a, tsc::CodeFixAction>> { ) -> Option<Cow<'a, tsc::CodeFixAction>> {
if !matches!( if !matches!(
action.fix_name.as_str(), action.fix_name.as_str(),
@ -617,9 +650,12 @@ fn fix_ts_import_action<'a>(
let Some(specifier) = specifier else { let Some(specifier) = specifier else {
return Some(Cow::Borrowed(action)); return Some(Cow::Borrowed(action));
}; };
if let Some(new_specifier) = let import_mapper = language_server.get_ts_response_import_mapper(referrer);
import_mapper.check_unresolved_specifier(specifier, referrer) if let Some(new_specifier) = import_mapper.check_unresolved_specifier(
{ specifier,
referrer,
resolution_mode,
) {
let description = action.description.replace(specifier, &new_specifier); let description = action.description.replace(specifier, &new_specifier);
let changes = action let changes = action
.changes .changes
@ -649,7 +685,8 @@ fn fix_ts_import_action<'a>(
fix_id: None, fix_id: None,
fix_all_description: None, fix_all_description: None,
})) }))
} else if !import_mapper.is_valid_import(specifier, referrer) { } else if !import_mapper.is_valid_import(specifier, referrer, resolution_mode)
{
None None
} else { } else {
Some(Cow::Borrowed(action)) Some(Cow::Borrowed(action))
@ -714,8 +751,14 @@ pub fn ts_changes_to_edit(
) -> Result<Option<lsp::WorkspaceEdit>, AnyError> { ) -> Result<Option<lsp::WorkspaceEdit>, AnyError> {
let mut text_document_edits = Vec::new(); let mut text_document_edits = Vec::new();
for change in changes { for change in changes {
let text_document_edit = change.to_text_document_edit(language_server)?; let edit = match change.to_text_document_edit(language_server) {
text_document_edits.push(text_document_edit); Ok(e) => e,
Err(err) => {
lsp_warn!("Couldn't covert text document edit: {:#}", err);
continue;
}
};
text_document_edits.push(edit);
} }
Ok(Some(lsp::WorkspaceEdit { Ok(Some(lsp::WorkspaceEdit {
changes: None, changes: None,
@ -724,7 +767,7 @@ pub fn ts_changes_to_edit(
})) }))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CodeActionData { pub struct CodeActionData {
pub specifier: ModuleSpecifier, pub specifier: ModuleSpecifier,
@ -977,6 +1020,7 @@ impl CodeActionCollection {
pub fn add_ts_fix_action( pub fn add_ts_fix_action(
&mut self, &mut self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
resolution_mode: ResolutionMode,
action: &tsc::CodeFixAction, action: &tsc::CodeFixAction,
diagnostic: &lsp::Diagnostic, diagnostic: &lsp::Diagnostic,
language_server: &language_server::Inner, language_server: &language_server::Inner,
@ -994,11 +1038,9 @@ impl CodeActionCollection {
"The action returned from TypeScript is unsupported.", "The action returned from TypeScript is unsupported.",
)); ));
} }
let Some(action) = fix_ts_import_action( let Some(action) =
specifier, fix_ts_import_action(specifier, resolution_mode, action, language_server)
action, else {
&language_server.get_ts_response_import_mapper(specifier),
) else {
return Ok(()); return Ok(());
}; };
let edit = ts_changes_to_edit(&action.changes, language_server)?; let edit = ts_changes_to_edit(&action.changes, language_server)?;
@ -1047,10 +1089,12 @@ impl CodeActionCollection {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic, diagnostic: &lsp::Diagnostic,
) { ) {
let data = Some(json!({ let data = action.fix_id.as_ref().map(|fix_id| {
"specifier": specifier, json!(CodeActionData {
"fixId": action.fix_id, specifier: specifier.clone(),
})); fix_id: fix_id.clone(),
})
});
let title = if let Some(description) = &action.fix_all_description { let title = if let Some(description) = &action.fix_all_description {
description.clone() description.clone()
} else { } else {
@ -1190,23 +1234,20 @@ impl CodeActionCollection {
let text_info = parsed_source.text_info_lazy(); let text_info = parsed_source.text_info_lazy();
let specifier_range = SourceRange::new( let specifier_range = SourceRange::new(
text_info.loc_to_source_pos(LineAndColumnIndex { text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.start.line, line_index: import.specifier_range.range.start.line,
column_index: import.specifier_range.start.character, column_index: import.specifier_range.range.start.character,
}), }),
text_info.loc_to_source_pos(LineAndColumnIndex { text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.end.line, line_index: import.specifier_range.range.end.line,
column_index: import.specifier_range.end.character, column_index: import.specifier_range.range.end.character,
}), }),
); );
match parsed_source.program_ref() { parsed_source
deno_ast::swc::ast::Program::Module(module) => module .program_ref()
.body .body()
.iter() .find(|i| i.range().contains(&specifier_range))
.find(|i| i.range().contains(&specifier_range)) .map(|i| text_info.line_and_column_index(i.range().start))
.map(|i| text_info.line_and_column_index(i.range().start)),
deno_ast::swc::ast::Program::Script(_) => None,
}
} }
async fn deno_types_for_npm_action( async fn deno_types_for_npm_action(
@ -1233,13 +1274,16 @@ impl CodeActionCollection {
if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") { if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") {
return None; return None;
} }
if !i.specifier_range.includes(&position) { if !i.specifier_range.includes(position) {
return None; return None;
} }
import_start_from_specifier(document, i) import_start_from_specifier(document, i)
})?; })?;
let referrer = document.specifier(); let referrer = document.specifier();
let referrer_kind = language_server
.is_cjs_resolver
.get_doc_resolution_mode(document);
let file_referrer = document.file_referrer(); let file_referrer = document.file_referrer();
let config_data = language_server let config_data = language_server
.config .config
@ -1262,10 +1306,11 @@ impl CodeActionCollection {
if !config_data.byonm { if !config_data.byonm {
return None; return None;
} }
if !language_server if !language_server.resolver.is_bare_package_json_dep(
.resolver &dep_key,
.is_bare_package_json_dep(&dep_key, referrer) referrer,
{ referrer_kind,
) {
return None; return None;
} }
NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()? NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()?
@ -1284,7 +1329,7 @@ impl CodeActionCollection {
} }
if language_server if language_server
.resolver .resolver
.npm_to_file_url(&npm_ref, document.specifier(), file_referrer) .npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer)
.is_some() .is_some()
{ {
// The package import has types. // The package import has types.

View file

@ -421,7 +421,7 @@ pub fn collect_test(
) -> Result<Vec<lsp::CodeLens>, AnyError> { ) -> Result<Vec<lsp::CodeLens>, AnyError> {
let mut collector = let mut collector =
DenoTestCollector::new(specifier.clone(), parsed_source.clone()); DenoTestCollector::new(specifier.clone(), parsed_source.clone());
parsed_source.module().visit_with(&mut collector); parsed_source.program().visit_with(&mut collector);
Ok(collector.take()) Ok(collector.take())
} }
@ -581,7 +581,7 @@ mod tests {
.unwrap(); .unwrap();
let mut collector = let mut collector =
DenoTestCollector::new(specifier, parsed_module.clone()); DenoTestCollector::new(specifier, parsed_module.clone());
parsed_module.module().visit_with(&mut collector); parsed_module.program().visit_with(&mut collector);
assert_eq!( assert_eq!(
collector.take(), collector.take(),
vec![ vec![

View file

@ -9,15 +9,15 @@ use super::jsr::CliJsrSearchApi;
use super::lsp_custom; use super::lsp_custom;
use super::npm::CliNpmSearchApi; use super::npm::CliNpmSearchApi;
use super::registries::ModuleRegistry; use super::registries::ModuleRegistry;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::search::PackageSearchApi; use super::search::PackageSearchApi;
use super::tsc; use super::tsc;
use crate::graph_util::to_node_resolution_mode;
use crate::jsr::JsrFetchResolver; use crate::jsr::JsrFetchResolver;
use crate::util::path::is_importable_ext; use crate::util::path::is_importable_ext;
use crate::util::path::relative_specifier; use crate::util::path::relative_specifier;
use deno_graph::source::ResolutionMode;
use deno_graph::Range;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES; use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_ast::LineAndColumnIndex; use deno_ast::LineAndColumnIndex;
@ -35,6 +35,8 @@ use deno_semver::package::PackageNv;
use import_map::ImportMap; use import_map::ImportMap;
use indexmap::IndexSet; use indexmap::IndexSet;
use lsp_types::CompletionList; use lsp_types::CompletionList;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;
@ -111,7 +113,7 @@ async fn check_auto_config_registry(
/// which we want to ignore when replacing text. /// which we want to ignore when replacing text.
fn to_narrow_lsp_range( fn to_narrow_lsp_range(
text_info: &SourceTextInfo, text_info: &SourceTextInfo,
range: &deno_graph::Range, range: deno_graph::PositionRange,
) -> lsp::Range { ) -> lsp::Range {
let end_byte_index = text_info let end_byte_index = text_info
.loc_to_source_pos(LineAndColumnIndex { .loc_to_source_pos(LineAndColumnIndex {
@ -159,23 +161,26 @@ pub async fn get_import_completions(
jsr_search_api: &CliJsrSearchApi, jsr_search_api: &CliJsrSearchApi,
npm_search_api: &CliNpmSearchApi, npm_search_api: &CliNpmSearchApi,
documents: &Documents, documents: &Documents,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver, resolver: &LspResolver,
maybe_import_map: Option<&ImportMap>, maybe_import_map: Option<&ImportMap>,
) -> Option<lsp::CompletionResponse> { ) -> Option<lsp::CompletionResponse> {
let document = documents.get(specifier)?; let document = documents.get(specifier)?;
let file_referrer = document.file_referrer(); let file_referrer = document.file_referrer();
let (text, _, range) = document.get_maybe_dependency(position)?; let (text, _, graph_range) = document.get_maybe_dependency(position)?;
let range = to_narrow_lsp_range(document.text_info(), &range); let resolution_mode = graph_range
.resolution_mode
.map(to_node_resolution_mode)
.unwrap_or_else(|| is_cjs_resolver.get_doc_resolution_mode(&document));
let range = to_narrow_lsp_range(document.text_info(), graph_range.range);
let resolved = resolver let resolved = resolver
.as_graph_resolver(file_referrer) .as_cli_resolver(file_referrer)
.resolve( .resolve(
&text, &text,
&Range { specifier,
specifier: specifier.clone(), deno_graph::Position::zeroed(),
start: deno_graph::Position::zeroed(), resolution_mode,
end: deno_graph::Position::zeroed(), NodeResolutionKind::Execution,
},
ResolutionMode::Execution,
) )
.ok(); .ok();
if let Some(completion_list) = get_jsr_completions( if let Some(completion_list) = get_jsr_completions(
@ -201,7 +206,7 @@ pub async fn get_import_completions(
// completions for import map specifiers // completions for import map specifiers
Some(lsp::CompletionResponse::List(completion_list)) Some(lsp::CompletionResponse::List(completion_list))
} else if let Some(completion_list) = } else if let Some(completion_list) =
get_local_completions(specifier, &text, &range, resolver) get_local_completions(specifier, resolution_mode, &text, &range, resolver)
{ {
// completions for local relative modules // completions for local relative modules
Some(lsp::CompletionResponse::List(completion_list)) Some(lsp::CompletionResponse::List(completion_list))
@ -355,25 +360,24 @@ fn get_import_map_completions(
/// Return local completions that are relative to the base specifier. /// Return local completions that are relative to the base specifier.
fn get_local_completions( fn get_local_completions(
base: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
text: &str, text: &str,
range: &lsp::Range, range: &lsp::Range,
resolver: &LspResolver, resolver: &LspResolver,
) -> Option<CompletionList> { ) -> Option<CompletionList> {
if base.scheme() != "file" { if referrer.scheme() != "file" {
return None; return None;
} }
let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1]; let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
let resolved_parent = resolver let resolved_parent = resolver
.as_graph_resolver(Some(base)) .as_cli_resolver(Some(referrer))
.resolve( .resolve(
parent, parent,
&Range { referrer,
specifier: base.clone(), deno_graph::Position::zeroed(),
start: deno_graph::Position::zeroed(), resolution_mode,
end: deno_graph::Position::zeroed(), NodeResolutionKind::Execution,
},
ResolutionMode::Execution,
) )
.ok()?; .ok()?;
let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?; let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
@ -385,7 +389,7 @@ fn get_local_completions(
let de = de.ok()?; let de = de.ok()?;
let label = de.path().file_name()?.to_string_lossy().to_string(); let label = de.path().file_name()?.to_string_lossy().to_string();
let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?; let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?;
if entry_specifier == *base { if entry_specifier == *referrer {
return None; return None;
} }
let full_text = format!("{parent}{label}"); let full_text = format!("{parent}{label}");
@ -824,7 +828,6 @@ mod tests {
use crate::lsp::documents::LanguageId; use crate::lsp::documents::LanguageId;
use crate::lsp::search::tests::TestPackageSearchApi; use crate::lsp::search::tests::TestPackageSearchApi;
use deno_core::resolve_url; use deno_core::resolve_url;
use deno_graph::Range;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use std::collections::HashMap; use std::collections::HashMap;
use test_util::TempDir; use test_util::TempDir;
@ -905,6 +908,7 @@ mod tests {
ModuleSpecifier::from_file_path(file_c).expect("could not create"); ModuleSpecifier::from_file_path(file_c).expect("could not create");
let actual = get_local_completions( let actual = get_local_completions(
&specifier, &specifier,
ResolutionMode::Import,
"./", "./",
&lsp::Range { &lsp::Range {
start: lsp::Position { start: lsp::Position {
@ -1600,8 +1604,7 @@ mod tests {
let text_info = SourceTextInfo::from_string(r#""te""#.to_string()); let text_info = SourceTextInfo::from_string(r#""te""#.to_string());
let range = to_narrow_lsp_range( let range = to_narrow_lsp_range(
&text_info, &text_info,
&Range { deno_graph::PositionRange {
specifier: ModuleSpecifier::parse("https://deno.land").unwrap(),
start: deno_graph::Position { start: deno_graph::Position {
line: 0, line: 0,
character: 0, character: 0,
@ -1624,8 +1627,7 @@ mod tests {
let text_info = SourceTextInfo::from_string(r#""te"#.to_string()); let text_info = SourceTextInfo::from_string(r#""te"#.to_string());
let range = to_narrow_lsp_range( let range = to_narrow_lsp_range(
&text_info, &text_info,
&Range { deno_graph::PositionRange {
specifier: ModuleSpecifier::parse("https://deno.land").unwrap(),
start: deno_graph::Position { start: deno_graph::Position {
line: 0, line: 0,
character: 0, character: 0,

View file

@ -4,6 +4,7 @@ use deno_ast::MediaType;
use deno_config::deno_json::DenoJsonCache; use deno_config::deno_json::DenoJsonCache;
use deno_config::deno_json::FmtConfig; use deno_config::deno_json::FmtConfig;
use deno_config::deno_json::FmtOptionsConfig; use deno_config::deno_json::FmtOptionsConfig;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::deno_json::LintConfig; use deno_config::deno_json::LintConfig;
use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::NodeModulesDirMode;
use deno_config::deno_json::TestConfig; use deno_config::deno_json::TestConfig;
@ -41,6 +42,7 @@ use deno_runtime::deno_node::PackageJson;
use indexmap::IndexSet; use indexmap::IndexSet;
use lsp_types::ClientCapabilities; use lsp_types::ClientCapabilities;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Deref; use std::ops::Deref;
use std::ops::DerefMut; use std::ops::DerefMut;
@ -984,7 +986,7 @@ impl Config {
| MediaType::Tsx => Some(&workspace_settings.typescript), | MediaType::Tsx => Some(&workspace_settings.typescript),
MediaType::Json MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => None, | MediaType::Unknown => None,
} }
@ -1190,6 +1192,7 @@ pub struct ConfigData {
pub resolver: Arc<WorkspaceResolver>, pub resolver: Arc<WorkspaceResolver>,
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>, pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub import_map_from_settings: Option<ModuleSpecifier>, pub import_map_from_settings: Option<ModuleSpecifier>,
pub unstable: BTreeSet<String>,
watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>, watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>,
} }
@ -1587,9 +1590,16 @@ impl ConfigData {
.join("\n") .join("\n")
); );
} }
let unstable = member_dir
.workspace
.unstable_features()
.iter()
.chain(settings.unstable.as_deref())
.cloned()
.collect::<BTreeSet<_>>();
let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS") let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS")
.is_ok() .is_ok()
|| member_dir.workspace.has_unstable("sloppy-imports"); || unstable.contains("sloppy-imports");
let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { let sloppy_imports_resolver = unstable_sloppy_imports.then(|| {
Arc::new(CliSloppyImportsResolver::new( Arc::new(CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new_without_stat_cache(Arc::new( SloppyImportsCachedFs::new_without_stat_cache(Arc::new(
@ -1630,6 +1640,7 @@ impl ConfigData {
lockfile, lockfile,
npmrc, npmrc,
import_map_from_settings, import_map_from_settings,
unstable,
watched_files, watched_files,
} }
} }
@ -1644,6 +1655,17 @@ impl ConfigData {
self.member_dir.maybe_pkg_json() self.member_dir.maybe_pkg_json()
} }
pub fn maybe_jsx_import_source_config(
&self,
) -> Option<JsxImportSourceConfig> {
self
.member_dir
.workspace
.to_maybe_jsx_import_source_config()
.ok()
.flatten()
}
pub fn scope_contains_specifier(&self, specifier: &ModuleSpecifier) -> bool { pub fn scope_contains_specifier(&self, specifier: &ModuleSpecifier) -> bool {
specifier.as_str().starts_with(self.scope.as_str()) specifier.as_str().starts_with(self.scope.as_str())
|| self || self

View file

@ -45,7 +45,7 @@ use deno_graph::Resolution;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use deno_graph::SpecifierError; use deno_graph::SpecifierError;
use deno_resolver::sloppy_imports::SloppyImportsResolution; use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode; use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node; use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime; use deno_runtime::tokio_util::create_basic_runtime;
@ -1266,7 +1266,7 @@ impl DenoDiagnostic {
Self::NoLocal(specifier) => { Self::NoLocal(specifier) => {
let maybe_sloppy_resolution = CliSloppyImportsResolver::new( let maybe_sloppy_resolution = CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs)) SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs))
).resolve(specifier, SloppyImportsResolutionMode::Execution); ).resolve(specifier, SloppyImportsResolutionKind::Execution);
let data = maybe_sloppy_resolution.as_ref().map(|res| { let data = maybe_sloppy_resolution.as_ref().map(|res| {
json!({ json!({
"specifier": specifier, "specifier": specifier,
@ -1531,7 +1531,7 @@ fn diagnose_dependency(
&& !dependency.imports.iter().any(|i| { && !dependency.imports.iter().any(|i| {
dependency dependency
.maybe_type .maybe_type
.includes(&i.specifier_range.start) .includes(i.specifier_range.range.start)
.is_some() .is_some()
}); });
@ -1707,6 +1707,7 @@ mod tests {
documents: Arc::new(documents), documents: Arc::new(documents),
assets: Default::default(), assets: Default::default(),
config: Arc::new(config), config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver, resolver,
}, },
) )

View file

@ -3,7 +3,10 @@
use super::cache::calculate_fs_version; use super::cache::calculate_fs_version;
use super::cache::LspCache; use super::cache::LspCache;
use super::config::Config; use super::config::Config;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::resolver::ScopeDepInfo;
use super::resolver::SingleReferrerGraphResolver;
use super::testing::TestCollector; use super::testing::TestCollector;
use super::testing::TestModule; use super::testing::TestModule;
use super::text::LineIndex; use super::text::LineIndex;
@ -24,7 +27,6 @@ use deno_core::futures::future::Shared;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_path_util::url_to_file_path; use deno_path_util::url_to_file_path;
use deno_runtime::deno_node; use deno_runtime::deno_node;
@ -33,9 +35,10 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use indexmap::IndexMap; use indexmap::IndexMap;
use indexmap::IndexSet; use indexmap::IndexSet;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::fs; use std::fs;
@ -272,7 +275,7 @@ fn get_maybe_test_module_fut(
parsed_source.specifier().clone(), parsed_source.specifier().clone(),
parsed_source.text_info_lazy().clone(), parsed_source.text_info_lazy().clone(),
); );
parsed_source.module().visit_with(&mut collector); parsed_source.program().visit_with(&mut collector);
Arc::new(collector.take()) Arc::new(collector.take())
}) })
.map(Result::ok) .map(Result::ok)
@ -293,6 +296,8 @@ pub struct Document {
/// Contains the last-known-good set of dependencies from parsing the module. /// Contains the last-known-good set of dependencies from parsing the module.
config: Arc<Config>, config: Arc<Config>,
dependencies: Arc<IndexMap<String, deno_graph::Dependency>>, dependencies: Arc<IndexMap<String, deno_graph::Dependency>>,
/// If this is maybe a CJS script and maybe not an ES module.
is_script: Option<bool>,
// TODO(nayeemrmn): This is unused, use it for scope attribution for remote // TODO(nayeemrmn): This is unused, use it for scope attribution for remote
// modules. // modules.
file_referrer: Option<ModuleSpecifier>, file_referrer: Option<ModuleSpecifier>,
@ -323,6 +328,7 @@ impl Document {
maybe_lsp_version: Option<i32>, maybe_lsp_version: Option<i32>,
maybe_language_id: Option<LanguageId>, maybe_language_id: Option<LanguageId>,
maybe_headers: Option<HashMap<String, String>>, maybe_headers: Option<HashMap<String, String>>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>, resolver: Arc<LspResolver>,
config: Arc<Config>, config: Arc<Config>,
cache: &Arc<LspCache>, cache: &Arc<LspCache>,
@ -332,12 +338,8 @@ impl Document {
.filter(|s| cache.is_valid_file_referrer(s)) .filter(|s| cache.is_valid_file_referrer(s))
.cloned() .cloned()
.or(file_referrer); .or(file_referrer);
let media_type = resolve_media_type( let media_type =
&specifier, resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id);
maybe_headers.as_ref(),
maybe_language_id,
&resolver,
);
let (maybe_parsed_source, maybe_module) = let (maybe_parsed_source, maybe_module) =
if media_type_is_diagnosable(media_type) { if media_type_is_diagnosable(media_type) {
parse_and_analyze_module( parse_and_analyze_module(
@ -346,6 +348,7 @@ impl Document {
maybe_headers.as_ref(), maybe_headers.as_ref(),
media_type, media_type,
file_referrer.as_ref(), file_referrer.as_ref(),
is_cjs_resolver,
&resolver, &resolver,
) )
} else { } else {
@ -371,6 +374,7 @@ impl Document {
file_referrer.as_ref(), file_referrer.as_ref(),
), ),
file_referrer, file_referrer,
is_script: maybe_module.as_ref().map(|m| m.is_script),
maybe_types_dependency, maybe_types_dependency,
line_index, line_index,
maybe_language_id, maybe_language_id,
@ -392,6 +396,7 @@ impl Document {
fn with_new_config( fn with_new_config(
&self, &self,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>, resolver: Arc<LspResolver>,
config: Arc<Config>, config: Arc<Config>,
) -> Arc<Self> { ) -> Arc<Self> {
@ -399,11 +404,11 @@ impl Document {
&self.specifier, &self.specifier,
self.maybe_headers.as_ref(), self.maybe_headers.as_ref(),
self.maybe_language_id, self.maybe_language_id,
&resolver,
); );
let dependencies; let dependencies;
let maybe_types_dependency; let maybe_types_dependency;
let maybe_parsed_source; let maybe_parsed_source;
let is_script;
let maybe_test_module_fut; let maybe_test_module_fut;
if media_type != self.media_type { if media_type != self.media_type {
let parsed_source_result = let parsed_source_result =
@ -413,6 +418,7 @@ impl Document {
&parsed_source_result, &parsed_source_result,
self.maybe_headers.as_ref(), self.maybe_headers.as_ref(),
self.file_referrer.as_ref(), self.file_referrer.as_ref(),
is_cjs_resolver,
&resolver, &resolver,
) )
.ok(); .ok();
@ -420,6 +426,7 @@ impl Document {
.as_ref() .as_ref()
.map(|m| Arc::new(m.dependencies.clone())) .map(|m| Arc::new(m.dependencies.clone()))
.unwrap_or_default(); .unwrap_or_default();
is_script = maybe_module.as_ref().map(|m| m.is_script);
maybe_types_dependency = maybe_module maybe_types_dependency = maybe_module
.as_ref() .as_ref()
.and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))); .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?)));
@ -427,10 +434,19 @@ impl Document {
maybe_test_module_fut = maybe_test_module_fut =
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config); get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config);
} else { } else {
let graph_resolver = let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref());
resolver.as_graph_resolver(self.file_referrer.as_ref());
let npm_resolver = let npm_resolver =
resolver.create_graph_npm_resolver(self.file_referrer.as_ref()); resolver.create_graph_npm_resolver(self.file_referrer.as_ref());
let config_data = resolver.as_config_data(self.file_referrer.as_ref());
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let resolver = SingleReferrerGraphResolver {
valid_referrer: &self.specifier,
module_resolution_mode: is_cjs_resolver
.get_lsp_resolution_mode(&self.specifier, self.is_script),
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
dependencies = Arc::new( dependencies = Arc::new(
self self
.dependencies .dependencies
@ -441,7 +457,7 @@ impl Document {
d.with_new_resolver( d.with_new_resolver(
s, s,
&CliJsrUrlProvider, &CliJsrUrlProvider,
Some(graph_resolver), Some(&resolver),
Some(&npm_resolver), Some(&npm_resolver),
), ),
) )
@ -451,10 +467,11 @@ impl Document {
maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| { maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| {
Arc::new(d.with_new_resolver( Arc::new(d.with_new_resolver(
&CliJsrUrlProvider, &CliJsrUrlProvider,
Some(graph_resolver), Some(&resolver),
Some(&npm_resolver), Some(&npm_resolver),
)) ))
}); });
is_script = self.is_script;
maybe_parsed_source = self.maybe_parsed_source().cloned(); maybe_parsed_source = self.maybe_parsed_source().cloned();
maybe_test_module_fut = self maybe_test_module_fut = self
.maybe_test_module_fut .maybe_test_module_fut
@ -466,6 +483,7 @@ impl Document {
// updated properties // updated properties
dependencies, dependencies,
file_referrer: self.file_referrer.clone(), file_referrer: self.file_referrer.clone(),
is_script,
maybe_types_dependency, maybe_types_dependency,
maybe_navigation_tree: Mutex::new(None), maybe_navigation_tree: Mutex::new(None),
// maintain - this should all be copies/clones // maintain - this should all be copies/clones
@ -490,6 +508,7 @@ impl Document {
fn with_change( fn with_change(
&self, &self,
is_cjs_resolver: &LspIsCjsResolver,
version: i32, version: i32,
changes: Vec<lsp::TextDocumentContentChangeEvent>, changes: Vec<lsp::TextDocumentContentChangeEvent>,
) -> Result<Arc<Self>, AnyError> { ) -> Result<Arc<Self>, AnyError> {
@ -523,6 +542,7 @@ impl Document {
self.maybe_headers.as_ref(), self.maybe_headers.as_ref(),
media_type, media_type,
self.file_referrer.as_ref(), self.file_referrer.as_ref(),
is_cjs_resolver,
self.resolver.as_ref(), self.resolver.as_ref(),
) )
} else { } else {
@ -546,6 +566,7 @@ impl Document {
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config); get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config);
Ok(Arc::new(Self { Ok(Arc::new(Self {
config: self.config.clone(), config: self.config.clone(),
is_script: maybe_module.as_ref().map(|m| m.is_script),
specifier: self.specifier.clone(), specifier: self.specifier.clone(),
file_referrer: self.file_referrer.clone(), file_referrer: self.file_referrer.clone(),
maybe_fs_version: self.maybe_fs_version.clone(), maybe_fs_version: self.maybe_fs_version.clone(),
@ -580,6 +601,7 @@ impl Document {
), ),
maybe_language_id: self.maybe_language_id, maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(), dependencies: self.dependencies.clone(),
is_script: self.is_script,
maybe_types_dependency: self.maybe_types_dependency.clone(), maybe_types_dependency: self.maybe_types_dependency.clone(),
text: self.text.clone(), text: self.text.clone(),
text_info_cell: once_cell::sync::OnceCell::new(), text_info_cell: once_cell::sync::OnceCell::new(),
@ -607,6 +629,7 @@ impl Document {
), ),
maybe_language_id: self.maybe_language_id, maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(), dependencies: self.dependencies.clone(),
is_script: self.is_script,
maybe_types_dependency: self.maybe_types_dependency.clone(), maybe_types_dependency: self.maybe_types_dependency.clone(),
text: self.text.clone(), text: self.text.clone(),
text_info_cell: once_cell::sync::OnceCell::new(), text_info_cell: once_cell::sync::OnceCell::new(),
@ -655,6 +678,13 @@ impl Document {
}) })
} }
/// If this is maybe a CJS script and maybe not an ES module.
///
/// Use `LspIsCjsResolver` to determine for sure.
pub fn is_script(&self) -> Option<bool> {
self.is_script
}
pub fn line_index(&self) -> Arc<LineIndex> { pub fn line_index(&self) -> Arc<LineIndex> {
self.line_index.clone() self.line_index.clone()
} }
@ -738,7 +768,7 @@ impl Document {
}; };
self.dependencies().iter().find_map(|(s, dep)| { self.dependencies().iter().find_map(|(s, dep)| {
dep dep
.includes(&position) .includes(position)
.map(|r| (s.clone(), dep.clone(), r.clone())) .map(|r| (s.clone(), dep.clone(), r.clone()))
}) })
} }
@ -764,14 +794,7 @@ fn resolve_media_type(
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_headers: Option<&HashMap<String, String>>, maybe_headers: Option<&HashMap<String, String>>,
maybe_language_id: Option<LanguageId>, maybe_language_id: Option<LanguageId>,
resolver: &LspResolver,
) -> MediaType { ) -> MediaType {
if resolver.in_node_modules(specifier) {
if let Some(media_type) = resolver.node_media_type(specifier) {
return media_type;
}
}
if let Some(language_id) = maybe_language_id { if let Some(language_id) = maybe_language_id {
return MediaType::from_specifier_and_content_type( return MediaType::from_specifier_and_content_type(
specifier, specifier,
@ -786,15 +809,15 @@ fn resolve_media_type(
MediaType::from_specifier(specifier) MediaType::from_specifier(specifier)
} }
pub fn to_lsp_range(range: &deno_graph::Range) -> lsp::Range { pub fn to_lsp_range(referrer: &deno_graph::Range) -> lsp::Range {
lsp::Range { lsp::Range {
start: lsp::Position { start: lsp::Position {
line: range.start.line as u32, line: referrer.range.start.line as u32,
character: range.start.character as u32, character: referrer.range.start.character as u32,
}, },
end: lsp::Position { end: lsp::Position {
line: range.end.line as u32, line: referrer.range.end.line as u32,
character: range.end.character as u32, character: referrer.range.end.character as u32,
}, },
} }
} }
@ -809,6 +832,7 @@ impl FileSystemDocuments {
pub fn get( pub fn get(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>, resolver: &Arc<LspResolver>,
config: &Arc<Config>, config: &Arc<Config>,
cache: &Arc<LspCache>, cache: &Arc<LspCache>,
@ -832,7 +856,14 @@ impl FileSystemDocuments {
}; };
if dirty { if dirty {
// attempt to update the file on the file system // attempt to update the file on the file system
self.refresh_document(specifier, resolver, config, cache, file_referrer) self.refresh_document(
specifier,
is_cjs_resolver,
resolver,
config,
cache,
file_referrer,
)
} else { } else {
old_doc old_doc
} }
@ -843,6 +874,7 @@ impl FileSystemDocuments {
fn refresh_document( fn refresh_document(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>, resolver: &Arc<LspResolver>,
config: &Arc<Config>, config: &Arc<Config>,
cache: &Arc<LspCache>, cache: &Arc<LspCache>,
@ -851,14 +883,20 @@ impl FileSystemDocuments {
let doc = if specifier.scheme() == "file" { let doc = if specifier.scheme() == "file" {
let path = url_to_file_path(specifier).ok()?; let path = url_to_file_path(specifier).ok()?;
let bytes = fs::read(path).ok()?; let bytes = fs::read(path).ok()?;
let content = let content = bytes_to_content(
deno_graph::source::decode_owned_source(specifier, bytes, None).ok()?; specifier,
MediaType::from_specifier(specifier),
bytes,
None,
)
.ok()?;
Document::new( Document::new(
specifier.clone(), specifier.clone(),
content.into(), content.into(),
None, None,
None, None,
None, None,
is_cjs_resolver,
resolver.clone(), resolver.clone(),
config.clone(), config.clone(),
cache, cache,
@ -875,6 +913,7 @@ impl FileSystemDocuments {
None, None,
None, None,
None, None,
is_cjs_resolver,
resolver.clone(), resolver.clone(),
config.clone(), config.clone(),
cache, cache,
@ -889,19 +928,25 @@ impl FileSystemDocuments {
specifier, specifier,
Some(&cached_file.metadata.headers), Some(&cached_file.metadata.headers),
); );
let content = deno_graph::source::decode_owned_source( let media_type = resolve_media_type(
specifier, specifier,
Some(&cached_file.metadata.headers),
None,
);
let content = bytes_to_content(
specifier,
media_type,
cached_file.content, cached_file.content,
maybe_charset, maybe_charset,
) )
.ok()?; .ok()?;
let maybe_headers = Some(cached_file.metadata.headers);
Document::new( Document::new(
specifier.clone(), specifier.clone(),
content.into(), content.into(),
None, None,
None, None,
maybe_headers, Some(cached_file.metadata.headers),
is_cjs_resolver,
resolver.clone(), resolver.clone(),
config.clone(), config.clone(),
cache, cache,
@ -942,6 +987,11 @@ pub struct Documents {
/// The DENO_DIR that the documents looks for non-file based modules. /// The DENO_DIR that the documents looks for non-file based modules.
cache: Arc<LspCache>, cache: Arc<LspCache>,
config: Arc<Config>, config: Arc<Config>,
/// Resolver for detecting if a document is CJS or ESM.
is_cjs_resolver: Arc<LspIsCjsResolver>,
/// A resolver that takes into account currently loaded import map and JSX
/// settings.
resolver: Arc<LspResolver>,
/// A flag that indicates that stated data is potentially invalid and needs to /// A flag that indicates that stated data is potentially invalid and needs to
/// be recalculated before being considered valid. /// be recalculated before being considered valid.
dirty: bool, dirty: bool,
@ -949,15 +999,7 @@ pub struct Documents {
open_docs: HashMap<ModuleSpecifier, Arc<Document>>, open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
/// Documents stored on the file system. /// Documents stored on the file system.
file_system_docs: Arc<FileSystemDocuments>, file_system_docs: Arc<FileSystemDocuments>,
/// A resolver that takes into account currently loaded import map and JSX dep_info_by_scope: Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>>,
/// settings.
resolver: Arc<LspResolver>,
/// The npm package requirements found in npm specifiers.
npm_reqs_by_scope:
Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
/// Config scopes that contain a node: specifier such that a @types/node
/// package should be injected.
scopes_with_node_specifier: Arc<HashSet<Option<ModuleSpecifier>>>,
} }
impl Documents { impl Documents {
@ -982,6 +1024,7 @@ impl Documents {
// the cache for remote modules here in order to get the // the cache for remote modules here in order to get the
// x-typescript-types? // x-typescript-types?
None, None,
&self.is_cjs_resolver,
self.resolver.clone(), self.resolver.clone(),
self.config.clone(), self.config.clone(),
&self.cache, &self.cache,
@ -1016,7 +1059,7 @@ impl Documents {
)) ))
})?; })?;
self.dirty = true; self.dirty = true;
let doc = doc.with_change(version, changes)?; let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?;
self.open_docs.insert(doc.specifier().clone(), doc.clone()); self.open_docs.insert(doc.specifier().clone(), doc.clone());
Ok(doc) Ok(doc)
} }
@ -1071,34 +1114,6 @@ impl Documents {
self.cache.is_valid_file_referrer(specifier) self.cache.is_valid_file_referrer(specifier)
} }
/// Return `true` if the provided specifier can be resolved to a document,
/// otherwise `false`.
pub fn contains_import(
&self,
specifier: &str,
referrer: &ModuleSpecifier,
) -> bool {
let file_referrer = self.get_file_referrer(referrer);
let maybe_specifier = self
.resolver
.as_graph_resolver(file_referrer.as_deref())
.resolve(
specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
ResolutionMode::Types,
)
.ok();
if let Some(import_specifier) = maybe_specifier {
self.exists(&import_specifier, file_referrer.as_deref())
} else {
false
}
}
pub fn resolve_document_specifier( pub fn resolve_document_specifier(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -1147,17 +1162,20 @@ impl Documents {
false false
} }
pub fn npm_reqs_by_scope( pub fn dep_info_by_scope(
&mut self, &mut self,
) -> Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>> { ) -> Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>> {
self.calculate_npm_reqs_if_dirty(); self.calculate_dep_info_if_dirty();
self.npm_reqs_by_scope.clone() self.dep_info_by_scope.clone()
} }
pub fn scopes_with_node_specifier( pub fn scopes_with_node_specifier(&self) -> HashSet<Option<ModuleSpecifier>> {
&self, self
) -> &Arc<HashSet<Option<ModuleSpecifier>>> { .dep_info_by_scope
&self.scopes_with_node_specifier .iter()
.filter(|(_, i)| i.has_node_specifier)
.map(|(s, _)| s.clone())
.collect::<HashSet<_>>()
} }
/// Return a document for the specifier. /// Return a document for the specifier.
@ -1173,6 +1191,7 @@ impl Documents {
if let Some(old_doc) = old_doc { if let Some(old_doc) = old_doc {
self.file_system_docs.get( self.file_system_docs.get(
specifier, specifier,
&self.is_cjs_resolver,
&self.resolver, &self.resolver,
&self.config, &self.config,
&self.cache, &self.cache,
@ -1197,6 +1216,7 @@ impl Documents {
} else { } else {
self.file_system_docs.get( self.file_system_docs.get(
&specifier, &specifier,
&self.is_cjs_resolver,
&self.resolver, &self.resolver,
&self.config, &self.config,
&self.cache, &self.cache,
@ -1251,18 +1271,23 @@ impl Documents {
/// tsc when type checking. /// tsc when type checking.
pub fn resolve( pub fn resolve(
&self, &self,
raw_specifiers: &[String], // (is_cjs: bool, raw_specifier: String)
raw_specifiers: &[(bool, String)],
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Vec<Option<(ModuleSpecifier, MediaType)>> { ) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
let document = self.get(referrer); let referrer_doc = self.get(referrer);
let file_referrer = document let file_referrer = referrer_doc
.as_ref() .as_ref()
.and_then(|d| d.file_referrer()) .and_then(|d| d.file_referrer())
.or(file_referrer); .or(file_referrer);
let dependencies = document.as_ref().map(|d| d.dependencies()); let dependencies = referrer_doc.as_ref().map(|d| d.dependencies());
let mut results = Vec::new(); let mut results = Vec::new();
for raw_specifier in raw_specifiers { for (is_cjs, raw_specifier) in raw_specifiers {
let resolution_mode = match is_cjs {
true => ResolutionMode::Require,
false => ResolutionMode::Import,
};
if raw_specifier.starts_with("asset:") { if raw_specifier.starts_with("asset:") {
if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) { if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) {
let media_type = MediaType::from_specifier(&specifier); let media_type = MediaType::from_specifier(&specifier);
@ -1277,31 +1302,32 @@ impl Documents {
results.push(self.resolve_dependency( results.push(self.resolve_dependency(
specifier, specifier,
referrer, referrer,
resolution_mode,
file_referrer, file_referrer,
)); ));
} else if let Some(specifier) = dep.maybe_code.maybe_specifier() { } else if let Some(specifier) = dep.maybe_code.maybe_specifier() {
results.push(self.resolve_dependency( results.push(self.resolve_dependency(
specifier, specifier,
referrer, referrer,
resolution_mode,
file_referrer, file_referrer,
)); ));
} else { } else {
results.push(None); results.push(None);
} }
} else if let Ok(specifier) = } else if let Ok(specifier) =
self.resolver.as_graph_resolver(file_referrer).resolve( self.resolver.as_cli_resolver(file_referrer).resolve(
raw_specifier, raw_specifier,
&deno_graph::Range { referrer,
specifier: referrer.clone(), deno_graph::Position::zeroed(),
start: deno_graph::Position::zeroed(), resolution_mode,
end: deno_graph::Position::zeroed(), NodeResolutionKind::Types,
},
ResolutionMode::Types,
) )
{ {
results.push(self.resolve_dependency( results.push(self.resolve_dependency(
&specifier, &specifier,
referrer, referrer,
resolution_mode,
file_referrer, file_referrer,
)); ));
} else { } else {
@ -1320,7 +1346,11 @@ impl Documents {
) { ) {
self.config = Arc::new(config.clone()); self.config = Arc::new(config.clone());
self.cache = Arc::new(cache.clone()); self.cache = Arc::new(cache.clone());
self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(cache));
self.resolver = resolver.clone(); self.resolver = resolver.clone();
node_resolver::PackageJsonThreadLocalCache::clear();
{ {
let fs_docs = &self.file_system_docs; let fs_docs = &self.file_system_docs;
// Clean up non-existent documents. // Clean up non-existent documents.
@ -1340,14 +1370,21 @@ impl Documents {
if !config.specifier_enabled(doc.specifier()) { if !config.specifier_enabled(doc.specifier()) {
continue; continue;
} }
*doc = doc.with_new_config(self.resolver.clone(), self.config.clone()); *doc = doc.with_new_config(
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
} }
for mut doc in self.file_system_docs.docs.iter_mut() { for mut doc in self.file_system_docs.docs.iter_mut() {
if !config.specifier_enabled(doc.specifier()) { if !config.specifier_enabled(doc.specifier()) {
continue; continue;
} }
*doc.value_mut() = *doc.value_mut() = doc.with_new_config(
doc.with_new_config(self.resolver.clone(), self.config.clone()); &self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
} }
self.open_docs = open_docs; self.open_docs = open_docs;
let mut preload_count = 0; let mut preload_count = 0;
@ -1364,6 +1401,7 @@ impl Documents {
{ {
fs_docs.refresh_document( fs_docs.refresh_document(
specifier, specifier,
&self.is_cjs_resolver,
&self.resolver, &self.resolver,
&self.config, &self.config,
&self.cache, &self.cache,
@ -1379,34 +1417,46 @@ impl Documents {
/// Iterate through the documents, building a map where the key is a unique /// Iterate through the documents, building a map where the key is a unique
/// document and the value is a set of specifiers that depend on that /// document and the value is a set of specifiers that depend on that
/// document. /// document.
fn calculate_npm_reqs_if_dirty(&mut self) { fn calculate_dep_info_if_dirty(&mut self) {
let mut npm_reqs_by_scope: BTreeMap<_, BTreeSet<_>> = Default::default(); let mut dep_info_by_scope: BTreeMap<_, ScopeDepInfo> = Default::default();
let mut scopes_with_specifier = HashSet::new();
let is_fs_docs_dirty = self.file_system_docs.set_dirty(false); let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
if !is_fs_docs_dirty && !self.dirty { if !is_fs_docs_dirty && !self.dirty {
return; return;
} }
let mut visit_doc = |doc: &Arc<Document>| { let mut visit_doc = |doc: &Arc<Document>| {
let scope = doc.scope(); let scope = doc.scope();
let reqs = npm_reqs_by_scope.entry(scope.cloned()).or_default(); let dep_info = dep_info_by_scope.entry(scope.cloned()).or_default();
for dependency in doc.dependencies().values() { for dependency in doc.dependencies().values() {
if let Some(dep) = dependency.get_code() { let code_specifier = dependency.get_code();
let type_specifier = dependency.get_type();
if let Some(dep) = code_specifier {
if dep.scheme() == "node" { if dep.scheme() == "node" {
scopes_with_specifier.insert(scope.cloned()); dep_info.has_node_specifier = true;
} }
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
reqs.insert(reference.into_inner().req); dep_info.npm_reqs.insert(reference.into_inner().req);
} }
} }
if let Some(dep) = dependency.get_type() { if let Some(dep) = type_specifier {
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
reqs.insert(reference.into_inner().req); dep_info.npm_reqs.insert(reference.into_inner().req);
}
}
if dependency.maybe_deno_types_specifier.is_some() {
if let (Some(code_specifier), Some(type_specifier)) =
(code_specifier, type_specifier)
{
if MediaType::from_specifier(type_specifier).is_declaration() {
dep_info
.deno_types_to_code_resolutions
.insert(type_specifier.clone(), code_specifier.clone());
}
} }
} }
} }
if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() { if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
reqs.insert(reference.into_inner().req); dep_info.npm_reqs.insert(reference.into_inner().req);
} }
} }
}; };
@ -1417,14 +1467,46 @@ impl Documents {
visit_doc(doc); visit_doc(doc);
} }
// fill the reqs from the lockfile
for (scope, config_data) in self.config.tree.data_by_scope().as_ref() { for (scope, config_data) in self.config.tree.data_by_scope().as_ref() {
let dep_info = dep_info_by_scope.entry(Some(scope.clone())).or_default();
(|| {
let config_file = config_data.maybe_deno_json()?;
let jsx_config =
config_file.to_maybe_jsx_import_source_config().ok()??;
let type_specifier = jsx_config.default_types_specifier.as_ref()?;
let code_specifier = jsx_config.default_specifier.as_ref()?;
let cli_resolver = self.resolver.as_cli_resolver(Some(scope));
let type_specifier = cli_resolver
.resolve(
type_specifier,
&jsx_config.base_url,
deno_graph::Position::zeroed(),
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
ResolutionMode::Import,
NodeResolutionKind::Types,
)
.ok()?;
let code_specifier = cli_resolver
.resolve(
code_specifier,
&jsx_config.base_url,
deno_graph::Position::zeroed(),
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.ok()?;
dep_info
.deno_types_to_code_resolutions
.insert(type_specifier, code_specifier);
Some(())
})();
// fill the reqs from the lockfile
if let Some(lockfile) = config_data.lockfile.as_ref() { if let Some(lockfile) = config_data.lockfile.as_ref() {
let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
let lockfile = lockfile.lock(); let lockfile = lockfile.lock();
for dep_req in lockfile.content.packages.specifiers.keys() { for dep_req in lockfile.content.packages.specifiers.keys() {
if dep_req.kind == deno_semver::package::PackageKind::Npm { if dep_req.kind == deno_semver::package::PackageKind::Npm {
reqs.insert(dep_req.req.clone()); dep_info.npm_reqs.insert(dep_req.req.clone());
} }
} }
} }
@ -1433,15 +1515,22 @@ impl Documents {
// Ensure a @types/node package exists when any module uses a node: specifier. // Ensure a @types/node package exists when any module uses a node: specifier.
// Unlike on the command line, here we just add @types/node to the npm package // Unlike on the command line, here we just add @types/node to the npm package
// requirements since this won't end up in the lockfile. // requirements since this won't end up in the lockfile.
for scope in &scopes_with_specifier { for dep_info in dep_info_by_scope.values_mut() {
let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default(); if dep_info.has_node_specifier
if !reqs.iter().any(|r| r.name == "@types/node") { && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node")
reqs.insert(PackageReq::from_str("@types/node").unwrap()); {
dep_info
.npm_reqs
.insert(PackageReq::from_str("@types/node").unwrap());
} }
} }
self.npm_reqs_by_scope = Arc::new(npm_reqs_by_scope); self.dep_info_by_scope = Arc::new(
self.scopes_with_node_specifier = Arc::new(scopes_with_specifier); dep_info_by_scope
.into_iter()
.map(|(s, i)| (s, Arc::new(i)))
.collect(),
);
self.dirty = false; self.dirty = false;
} }
@ -1449,6 +1538,7 @@ impl Documents {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> { ) -> Option<(ModuleSpecifier, MediaType)> {
if let Some(module_name) = specifier.as_str().strip_prefix("node:") { if let Some(module_name) = specifier.as_str().strip_prefix("node:") {
@ -1462,10 +1552,12 @@ impl Documents {
let mut specifier = specifier.clone(); let mut specifier = specifier.clone();
let mut media_type = None; let mut media_type = None;
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) { if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) {
let (s, mt) = let (s, mt) = self.resolver.npm_to_file_url(
self &npm_ref,
.resolver referrer,
.npm_to_file_url(&npm_ref, referrer, file_referrer)?; resolution_mode,
file_referrer,
)?;
specifier = s; specifier = s;
media_type = Some(mt); media_type = Some(mt);
} }
@ -1475,7 +1567,8 @@ impl Documents {
return Some((specifier, media_type)); return Some((specifier, media_type));
}; };
if let Some(types) = doc.maybe_types_dependency().maybe_specifier() { if let Some(types) = doc.maybe_types_dependency().maybe_specifier() {
self.resolve_dependency(types, &specifier, file_referrer) let specifier_kind = self.is_cjs_resolver.get_doc_resolution_mode(&doc);
self.resolve_dependency(types, &specifier, specifier_kind, file_referrer)
} else { } else {
Some((doc.specifier().clone(), doc.media_type())) Some((doc.specifier().clone(), doc.media_type()))
} }
@ -1543,6 +1636,7 @@ fn parse_and_analyze_module(
maybe_headers: Option<&HashMap<String, String>>, maybe_headers: Option<&HashMap<String, String>>,
media_type: MediaType, media_type: MediaType,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver, resolver: &LspResolver,
) -> (Option<ParsedSourceResult>, Option<ModuleResult>) { ) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
let parsed_source_result = parse_source(specifier.clone(), text, media_type); let parsed_source_result = parse_source(specifier.clone(), text, media_type);
@ -1551,6 +1645,7 @@ fn parse_and_analyze_module(
&parsed_source_result, &parsed_source_result,
maybe_headers, maybe_headers,
file_referrer, file_referrer,
is_cjs_resolver,
resolver, resolver,
); );
(Some(parsed_source_result), Some(module_result)) (Some(parsed_source_result), Some(module_result))
@ -1561,7 +1656,7 @@ fn parse_source(
text: Arc<str>, text: Arc<str>,
media_type: MediaType, media_type: MediaType,
) -> ParsedSourceResult { ) -> ParsedSourceResult {
deno_ast::parse_module(deno_ast::ParseParams { deno_ast::parse_program(deno_ast::ParseParams {
specifier, specifier,
text, text,
media_type, media_type,
@ -1576,11 +1671,26 @@ fn analyze_module(
parsed_source_result: &ParsedSourceResult, parsed_source_result: &ParsedSourceResult,
maybe_headers: Option<&HashMap<String, String>>, maybe_headers: Option<&HashMap<String, String>>,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver, resolver: &LspResolver,
) -> ModuleResult { ) -> ModuleResult {
match parsed_source_result { match parsed_source_result {
Ok(parsed_source) => { Ok(parsed_source) => {
let npm_resolver = resolver.create_graph_npm_resolver(file_referrer); let npm_resolver = resolver.create_graph_npm_resolver(file_referrer);
let cli_resolver = resolver.as_cli_resolver(file_referrer);
let config_data = resolver.as_config_data(file_referrer);
let valid_referrer = specifier.clone();
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let resolver = SingleReferrerGraphResolver {
valid_referrer: &valid_referrer,
module_resolution_mode: is_cjs_resolver.get_lsp_resolution_mode(
&specifier,
Some(parsed_source.compute_is_script()),
),
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
Ok(deno_graph::parse_module_from_ast( Ok(deno_graph::parse_module_from_ast(
deno_graph::ParseModuleFromAstOptions { deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::TypesOnly, graph_kind: deno_graph::GraphKind::TypesOnly,
@ -1591,7 +1701,7 @@ fn analyze_module(
// dynamic imports like import(`./dir/${something}`) in the LSP // dynamic imports like import(`./dir/${something}`) in the LSP
file_system: &deno_graph::source::NullFileSystem, file_system: &deno_graph::source::NullFileSystem,
jsr_url_provider: &CliJsrUrlProvider, jsr_url_provider: &CliJsrUrlProvider,
maybe_resolver: Some(resolver.as_graph_resolver(file_referrer)), maybe_resolver: Some(&resolver),
maybe_npm_resolver: Some(&npm_resolver), maybe_npm_resolver: Some(&npm_resolver),
}, },
)) ))
@ -1602,6 +1712,24 @@ fn analyze_module(
} }
} }
fn bytes_to_content(
specifier: &ModuleSpecifier,
media_type: MediaType,
bytes: Vec<u8>,
maybe_charset: Option<&str>,
) -> Result<String, AnyError> {
if media_type == MediaType::Wasm {
// we use the dts representation for Wasm modules
Ok(deno_graph::source::wasm::wasm_module_to_dts(&bytes)?)
} else {
Ok(deno_graph::source::decode_owned_source(
specifier,
bytes,
maybe_charset,
)?)
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -22,6 +22,8 @@ use deno_semver::jsr::JsrPackageReqReference;
use indexmap::Equivalent; use indexmap::Equivalent;
use indexmap::IndexSet; use indexmap::IndexSet;
use log::error; use log::error;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use serde::Deserialize; use serde::Deserialize;
use serde_json::from_value; use serde_json::from_value;
use std::collections::BTreeMap; use std::collections::BTreeMap;
@ -77,6 +79,7 @@ use super::parent_process_checker;
use super::performance::Performance; use super::performance::Performance;
use super::refactor; use super::refactor;
use super::registries::ModuleRegistry; use super::registries::ModuleRegistry;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver; use super::resolver::LspResolver;
use super::testing; use super::testing;
use super::text; use super::text;
@ -144,6 +147,7 @@ pub struct StateSnapshot {
pub project_version: usize, pub project_version: usize,
pub assets: AssetsSnapshot, pub assets: AssetsSnapshot,
pub config: Arc<Config>, pub config: Arc<Config>,
pub is_cjs_resolver: Arc<LspIsCjsResolver>,
pub documents: Arc<Documents>, pub documents: Arc<Documents>,
pub resolver: Arc<LspResolver>, pub resolver: Arc<LspResolver>,
} }
@ -203,6 +207,7 @@ pub struct Inner {
pub documents: Documents, pub documents: Documents,
http_client_provider: Arc<HttpClientProvider>, http_client_provider: Arc<HttpClientProvider>,
initial_cwd: PathBuf, initial_cwd: PathBuf,
pub is_cjs_resolver: Arc<LspIsCjsResolver>,
jsr_search_api: CliJsrSearchApi, jsr_search_api: CliJsrSearchApi,
/// Handles module registries, which allow discovery of modules /// Handles module registries, which allow discovery of modules
module_registry: ModuleRegistry, module_registry: ModuleRegistry,
@ -480,6 +485,7 @@ impl Inner {
let initial_cwd = std::env::current_dir().unwrap_or_else(|_| { let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
panic!("Could not resolve current working directory") panic!("Could not resolve current working directory")
}); });
let is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&cache));
Self { Self {
assets, assets,
@ -491,6 +497,7 @@ impl Inner {
documents, documents,
http_client_provider, http_client_provider,
initial_cwd: initial_cwd.clone(), initial_cwd: initial_cwd.clone(),
is_cjs_resolver,
jsr_search_api, jsr_search_api,
project_version: 0, project_version: 0,
task_queue: Default::default(), task_queue: Default::default(),
@ -601,6 +608,7 @@ impl Inner {
project_version: self.project_version, project_version: self.project_version,
assets: self.assets.snapshot(), assets: self.assets.snapshot(),
config: Arc::new(self.config.clone()), config: Arc::new(self.config.clone()),
is_cjs_resolver: self.is_cjs_resolver.clone(),
documents: Arc::new(self.documents.clone()), documents: Arc::new(self.documents.clone()),
resolver: self.resolver.snapshot(), resolver: self.resolver.snapshot(),
}) })
@ -622,6 +630,7 @@ impl Inner {
} }
}); });
self.cache = LspCache::new(global_cache_url); self.cache = LspCache::new(global_cache_url);
self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&self.cache));
let deno_dir = self.cache.deno_dir(); let deno_dir = self.cache.deno_dir();
let workspace_settings = self.config.workspace_settings(); let workspace_settings = self.config.workspace_settings();
let maybe_root_path = self let maybe_root_path = self
@ -863,7 +872,10 @@ impl Inner {
// We ignore these directories by default because there is a // We ignore these directories by default because there is a
// high likelihood they aren't relevant. Someone can opt-into // high likelihood they aren't relevant. Someone can opt-into
// them by specifying one of them as an enabled path. // them by specifying one of them as an enabled path.
if matches!(dir_name.as_str(), "vendor" | "node_modules" | ".git") { if matches!(
dir_name.as_str(),
"vendor" | "coverage" | "node_modules" | ".git"
) {
continue; continue;
} }
// ignore cargo target directories for anyone using Deno with Rust // ignore cargo target directories for anyone using Deno with Rust
@ -904,7 +916,7 @@ impl Inner {
| MediaType::Tsx => {} | MediaType::Tsx => {}
MediaType::Wasm MediaType::Wasm
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::Unknown => { | MediaType::Unknown => {
if path.extension().and_then(|s| s.to_str()) != Some("jsonc") { if path.extension().and_then(|s| s.to_str()) != Some("jsonc") {
continue; continue;
@ -979,15 +991,13 @@ impl Inner {
spawn(async move { spawn(async move {
let specifier = { let specifier = {
let inner = ls.inner.read().await; let inner = ls.inner.read().await;
let resolver = inner.resolver.as_graph_resolver(Some(&referrer)); let resolver = inner.resolver.as_cli_resolver(Some(&referrer));
let Ok(specifier) = resolver.resolve( let Ok(specifier) = resolver.resolve(
&specifier, &specifier,
&deno_graph::Range { &referrer,
specifier: referrer.clone(), deno_graph::Position::zeroed(),
start: deno_graph::Position::zeroed(), ResolutionMode::Import,
end: deno_graph::Position::zeroed(), NodeResolutionKind::Types,
},
deno_graph::source::ResolutionMode::Types,
) else { ) else {
return; return;
}; };
@ -1024,7 +1034,7 @@ impl Inner {
// refresh the npm specifiers because it might have discovered // refresh the npm specifiers because it might have discovered
// a @types/node package and now's a good time to do that anyway // a @types/node package and now's a good time to do that anyway
self.refresh_npm_specifiers().await; self.refresh_dep_info().await;
self.project_changed([], true); self.project_changed([], true);
} }
@ -1070,7 +1080,7 @@ impl Inner {
); );
if document.is_diagnosable() { if document.is_diagnosable() {
self.project_changed([(document.specifier(), ChangeKind::Opened)], false); self.project_changed([(document.specifier(), ChangeKind::Opened)], false);
self.refresh_npm_specifiers().await; self.refresh_dep_info().await;
self.diagnostics_server.invalidate(&[specifier]); self.diagnostics_server.invalidate(&[specifier]);
self.send_diagnostics_update(); self.send_diagnostics_update();
self.send_testing_update(); self.send_testing_update();
@ -1091,8 +1101,8 @@ impl Inner {
Ok(document) => { Ok(document) => {
if document.is_diagnosable() { if document.is_diagnosable() {
let old_scopes_with_node_specifier = let old_scopes_with_node_specifier =
self.documents.scopes_with_node_specifier().clone(); self.documents.scopes_with_node_specifier();
self.refresh_npm_specifiers().await; self.refresh_dep_info().await;
let mut config_changed = false; let mut config_changed = false;
if !self if !self
.documents .documents
@ -1143,13 +1153,15 @@ impl Inner {
})); }));
} }
async fn refresh_npm_specifiers(&mut self) { async fn refresh_dep_info(&mut self) {
let package_reqs = self.documents.npm_reqs_by_scope(); let dep_info_by_scope = self.documents.dep_info_by_scope();
let resolver = self.resolver.clone(); let resolver = self.resolver.clone();
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
spawn(async move { resolver.set_npm_reqs(&package_reqs).await }) spawn(
.await async move { resolver.set_dep_info_by_scope(&dep_info_by_scope).await },
.ok(); )
.await
.ok();
} }
async fn did_close(&mut self, params: DidCloseTextDocumentParams) { async fn did_close(&mut self, params: DidCloseTextDocumentParams) {
@ -1168,7 +1180,7 @@ impl Inner {
.uri_to_specifier(&params.text_document.uri, LspUrlKind::File); .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
self.diagnostics_state.clear(&specifier); self.diagnostics_state.clear(&specifier);
if self.is_diagnosable(&specifier) { if self.is_diagnosable(&specifier) {
self.refresh_npm_specifiers().await; self.refresh_dep_info().await;
self.diagnostics_server.invalidate(&[specifier.clone()]); self.diagnostics_server.invalidate(&[specifier.clone()]);
self.send_diagnostics_update(); self.send_diagnostics_update();
self.send_testing_update(); self.send_testing_update();
@ -1382,16 +1394,17 @@ impl Inner {
.fmt_config_for_specifier(&specifier) .fmt_config_for_specifier(&specifier)
.options .options
.clone(); .clone();
fmt_options.use_tabs = Some(!params.options.insert_spaces); let config_data = self.config.tree.data_for_specifier(&specifier);
fmt_options.indent_width = Some(params.options.tab_size as u8); if !config_data.is_some_and(|d| d.maybe_deno_json().is_some()) {
let maybe_workspace = self fmt_options.use_tabs = Some(!params.options.insert_spaces);
.config fmt_options.indent_width = Some(params.options.tab_size as u8);
.tree }
.data_for_specifier(&specifier)
.map(|d| &d.member_dir.workspace);
let unstable_options = UnstableFmtOptions { let unstable_options = UnstableFmtOptions {
component: maybe_workspace component: config_data
.map(|w| w.has_unstable("fmt-component")) .map(|d| d.unstable.contains("fmt-component"))
.unwrap_or(false),
sql: config_data
.map(|d| d.unstable.contains("fmt-sql"))
.unwrap_or(false), .unwrap_or(false),
}; };
let document = document.clone(); let document = document.clone();
@ -1623,6 +1636,10 @@ impl Inner {
let file_diagnostics = self let file_diagnostics = self
.diagnostics_server .diagnostics_server
.get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version()); .get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version());
let specifier_kind = asset_or_doc
.document()
.map(|d| self.is_cjs_resolver.get_doc_resolution_mode(d))
.unwrap_or(ResolutionMode::Import);
let mut includes_no_cache = false; let mut includes_no_cache = false;
for diagnostic in &fixable_diagnostics { for diagnostic in &fixable_diagnostics {
match diagnostic.source.as_deref() { match diagnostic.source.as_deref() {
@ -1661,7 +1678,13 @@ impl Inner {
.await; .await;
for action in actions { for action in actions {
code_actions code_actions
.add_ts_fix_action(&specifier, &action, diagnostic, self) .add_ts_fix_action(
&specifier,
specifier_kind,
&action,
diagnostic,
self,
)
.map_err(|err| { .map_err(|err| {
error!("Unable to convert fix: {:#}", err); error!("Unable to convert fix: {:#}", err);
LspError::internal_error() LspError::internal_error()
@ -1807,10 +1830,9 @@ impl Inner {
error!("Unable to decode code action data: {:#}", err); error!("Unable to decode code action data: {:#}", err);
LspError::invalid_params("The CodeAction's data is invalid.") LspError::invalid_params("The CodeAction's data is invalid.")
})?; })?;
let scope = self let maybe_asset_or_doc =
.get_asset_or_document(&code_action_data.specifier) self.get_asset_or_document(&code_action_data.specifier).ok();
.ok() let scope = maybe_asset_or_doc.as_ref().and_then(|d| d.scope().cloned());
.and_then(|d| d.scope().cloned());
let combined_code_actions = self let combined_code_actions = self
.ts_server .ts_server
.get_combined_code_fix( .get_combined_code_fix(
@ -1837,8 +1859,13 @@ impl Inner {
let changes = if code_action_data.fix_id == "fixMissingImport" { let changes = if code_action_data.fix_id == "fixMissingImport" {
fix_ts_import_changes( fix_ts_import_changes(
&code_action_data.specifier, &code_action_data.specifier,
maybe_asset_or_doc
.as_ref()
.and_then(|d| d.document())
.map(|d| self.is_cjs_resolver.get_doc_resolution_mode(d))
.unwrap_or(ResolutionMode::Import),
&combined_code_actions.changes, &combined_code_actions.changes,
&self.get_ts_response_import_mapper(&code_action_data.specifier), self,
) )
.map_err(|err| { .map_err(|err| {
error!("Unable to remap changes: {:#}", err); error!("Unable to remap changes: {:#}", err);
@ -1890,8 +1917,12 @@ impl Inner {
if kind_suffix == ".rewrite.function.returnType" { if kind_suffix == ".rewrite.function.returnType" {
refactor_edit_info.edits = fix_ts_import_changes( refactor_edit_info.edits = fix_ts_import_changes(
&action_data.specifier, &action_data.specifier,
asset_or_doc
.document()
.map(|d| self.is_cjs_resolver.get_doc_resolution_mode(d))
.unwrap_or(ResolutionMode::Import),
&refactor_edit_info.edits, &refactor_edit_info.edits,
&self.get_ts_response_import_mapper(&action_data.specifier), self,
) )
.map_err(|err| { .map_err(|err| {
error!("Unable to remap changes: {:#}", err); error!("Unable to remap changes: {:#}", err);
@ -1922,7 +1953,8 @@ impl Inner {
// todo(dsherret): this should probably just take the resolver itself // todo(dsherret): this should probably just take the resolver itself
// as the import map is an implementation detail // as the import map is an implementation detail
.and_then(|d| d.resolver.maybe_import_map()), .and_then(|d| d.resolver.maybe_import_map()),
self.resolver.as_ref(), &self.resolver,
&self.ts_server.specifier_map,
file_referrer, file_referrer,
) )
} }
@ -2238,6 +2270,7 @@ impl Inner {
&self.jsr_search_api, &self.jsr_search_api,
&self.npm_search_api, &self.npm_search_api,
&self.documents, &self.documents,
&self.is_cjs_resolver,
self.resolver.as_ref(), self.resolver.as_ref(),
self self
.config .config
@ -2285,7 +2318,11 @@ impl Inner {
.into(), .into(),
scope.cloned(), scope.cloned(),
) )
.await; .await
.unwrap_or_else(|err| {
error!("Unable to get completion info from TypeScript: {:#}", err);
None
});
if let Some(completions) = maybe_completion_info { if let Some(completions) = maybe_completion_info {
response = Some( response = Some(
@ -3568,15 +3605,16 @@ impl Inner {
if byonm { if byonm {
roots.retain(|s| s.scheme() != "npm"); roots.retain(|s| s.scheme() != "npm");
} else if let Some(npm_reqs) = self } else if let Some(dep_info) = self
.documents .documents
.npm_reqs_by_scope() .dep_info_by_scope()
.get(&config_data.map(|d| d.scope.as_ref().clone())) .get(&config_data.map(|d| d.scope.as_ref().clone()))
{ {
// always include the npm packages since resolution of one npm package // always include the npm packages since resolution of one npm package
// might affect the resolution of other npm packages // might affect the resolution of other npm packages
roots.extend( roots.extend(
npm_reqs dep_info
.npm_reqs
.iter() .iter()
.map(|req| ModuleSpecifier::parse(&format!("npm:{}", req)).unwrap()), .map(|req| ModuleSpecifier::parse(&format!("npm:{}", req)).unwrap()),
); );
@ -3597,9 +3635,8 @@ impl Inner {
deno_json_cache: None, deno_json_cache: None,
pkg_json_cache: None, pkg_json_cache: None,
workspace_cache: None, workspace_cache: None,
config_parse_options: deno_config::deno_json::ConfigParseOptions { config_parse_options:
include_task_comments: false, deno_config::deno_json::ConfigParseOptions::default(),
},
additional_config_file_names: &[], additional_config_file_names: &[],
discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"), discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"),
maybe_vendor_override: if force_global_cache { maybe_vendor_override: if force_global_cache {
@ -3654,7 +3691,7 @@ impl Inner {
async fn post_cache(&mut self) { async fn post_cache(&mut self) {
self.resolver.did_cache(); self.resolver.did_cache();
self.refresh_npm_specifiers().await; self.refresh_dep_info().await;
self.diagnostics_server.invalidate_all(); self.diagnostics_server.invalidate_all();
self.project_changed([], true); self.project_changed([], true);
self.ts_server.cleanup_semantic_cache(self.snapshot()).await; self.ts_server.cleanup_semantic_cache(self.snapshot()).await;
@ -3742,14 +3779,11 @@ impl Inner {
fn task_definitions(&self) -> LspResult<Vec<TaskDefinition>> { fn task_definitions(&self) -> LspResult<Vec<TaskDefinition>> {
let mut result = vec![]; let mut result = vec![];
for config_file in self.config.tree.config_files() { for config_file in self.config.tree.config_files() {
if let Some(tasks) = json!(&config_file.json.tasks).as_object() { if let Some(tasks) = config_file.to_tasks_config().ok().flatten() {
for (name, value) in tasks { for (name, def) in tasks {
let Some(command) = value.as_str() else {
continue;
};
result.push(TaskDefinition { result.push(TaskDefinition {
name: name.clone(), name: name.clone(),
command: command.to_string(), command: def.command.clone(),
source_uri: url_to_uri(&config_file.specifier) source_uri: url_to_uri(&config_file.specifier)
.map_err(|_| LspError::internal_error())?, .map_err(|_| LspError::internal_error())?,
}); });
@ -3948,7 +3982,9 @@ mod tests {
fn test_walk_workspace() { fn test_walk_workspace() {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
temp_dir.create_dir_all("root1/vendor/"); temp_dir.create_dir_all("root1/vendor/");
temp_dir.create_dir_all("root1/coverage/");
temp_dir.write("root1/vendor/mod.ts", ""); // no, vendor temp_dir.write("root1/vendor/mod.ts", ""); // no, vendor
temp_dir.write("root1/coverage/mod.ts", ""); // no, coverage
temp_dir.create_dir_all("root1/node_modules/"); temp_dir.create_dir_all("root1/node_modules/");
temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules

View file

@ -14,8 +14,6 @@ pub const LATEST_DIAGNOSTIC_BATCH_INDEX: &str =
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct TaskDefinition { pub struct TaskDefinition {
pub name: String, pub name: String,
// TODO(nayeemrmn): Rename this to `command` in vscode_deno.
#[serde(rename = "detail")]
pub command: String, pub command: String,
pub source_uri: lsp::Uri, pub source_uri: lsp::Uri,
} }

View file

@ -56,9 +56,6 @@ pub async fn start() -> Result<(), AnyError> {
LanguageServer::performance_request, LanguageServer::performance_request,
) )
.custom_method(lsp_custom::TASK_REQUEST, LanguageServer::task_definitions) .custom_method(lsp_custom::TASK_REQUEST, LanguageServer::task_definitions)
// TODO(nayeemrmn): Rename this to `deno/taskDefinitions` in vscode_deno and
// remove this alias.
.custom_method("deno/task", LanguageServer::task_definitions)
.custom_method(testing::TEST_RUN_REQUEST, LanguageServer::test_run_request) .custom_method(testing::TEST_RUN_REQUEST, LanguageServer::test_run_request)
.custom_method( .custom_method(
testing::TEST_RUN_CANCEL_REQUEST, testing::TEST_RUN_CANCEL_REQUEST,

View file

@ -11,7 +11,7 @@ pub fn start(parent_process_id: u32) {
std::thread::sleep(Duration::from_secs(10)); std::thread::sleep(Duration::from_secs(10));
if !is_process_active(parent_process_id) { if !is_process_active(parent_process_id) {
std::process::exit(1); deno_runtime::exit(1);
} }
}); });
} }

View file

@ -263,7 +263,7 @@ impl ReplLanguageServer {
} }
fn get_document_uri(&self) -> Uri { fn get_document_uri(&self) -> Uri {
uri_parse_unencoded(self.cwd_uri.join("$deno$repl.ts").unwrap().as_str()) uri_parse_unencoded(self.cwd_uri.join("$deno$repl.mts").unwrap().as_str())
.unwrap() .unwrap()
} }
} }

View file

@ -2,27 +2,35 @@
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_cache_dir::npm::NpmCacheDir;
use deno_cache_dir::HttpCache; use deno_cache_dir::HttpCache;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url; use deno_core::url::Url;
use deno_graph::source::Resolver;
use deno_graph::GraphImport; use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier; use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_to_file_path; use deno_path_util::url_to_file_path;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use indexmap::IndexMap; use indexmap::IndexMap;
use node_resolver::errors::ClosestPkgJsonError; use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolution; use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionKind;
use node_resolver::NpmResolver; use node_resolver::ResolutionMode;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet; use std::collections::BTreeSet;
@ -31,11 +39,16 @@ use std::collections::HashSet;
use std::sync::Arc; use std::sync::Arc;
use super::cache::LspCache; use super::cache::LspCache;
use super::documents::Document;
use super::jsr::JsrCacheResolver; use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc; use crate::args::create_default_npmrc;
use crate::args::CacheSetting; use crate::args::CacheSetting;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::factory::Deferred;
use crate::graph_util::to_node_resolution_kind;
use crate::graph_util::to_node_resolution_mode;
use crate::graph_util::CliJsrUrlProvider; use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config; use crate::lsp::config::Config;
@ -43,40 +56,56 @@ use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp; use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliByonmNpmResolverCreateOptions; use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::ManagedCliNpmResolver; use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore; use crate::resolver::CliDenoResolver;
use crate::resolver::CliDenoResolverFs; use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver; use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliResolver;
use crate::resolver::CliNodeResolver; use crate::resolver::CliResolverOptions;
use crate::resolver::IsCjsResolver;
use crate::resolver::WorkerCliNpmGraphResolver; use crate::resolver::WorkerCliNpmGraphResolver;
use crate::tsc::into_specifier_and_media_type;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct LspScopeResolver { struct LspScopeResolver {
graph_resolver: Arc<CliGraphResolver>, resolver: Arc<CliResolver>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
jsr_resolver: Option<Arc<JsrCacheResolver>>, jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>, npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>, node_resolver: Option<Arc<NodeResolver>>,
npm_pkg_req_resolver: Option<Arc<CliNpmReqResolver>>,
pkg_json_resolver: Arc<PackageJsonResolver>,
redirect_resolver: Option<Arc<RedirectResolver>>, redirect_resolver: Option<Arc<RedirectResolver>>,
graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>, graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
dep_info: Arc<Mutex<Arc<ScopeDepInfo>>>,
package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
config_data: Option<Arc<ConfigData>>, config_data: Option<Arc<ConfigData>>,
} }
impl Default for LspScopeResolver { impl Default for LspScopeResolver {
fn default() -> Self { fn default() -> Self {
let factory = ResolverFactory::new(None);
Self { Self {
graph_resolver: create_graph_resolver(None, None, None), resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
jsr_resolver: None, jsr_resolver: None,
npm_resolver: None, npm_resolver: None,
node_resolver: None, node_resolver: None,
npm_pkg_req_resolver: None,
pkg_json_resolver: factory.pkg_json_resolver().clone(),
redirect_resolver: None, redirect_resolver: None,
graph_imports: Default::default(), graph_imports: Default::default(),
dep_info: Default::default(),
package_json_deps_by_resolution: Default::default(),
config_data: None, config_data: None,
} }
} }
@ -88,22 +117,16 @@ impl LspScopeResolver {
cache: &LspCache, cache: &LspCache,
http_client_provider: Option<&Arc<HttpClientProvider>>, http_client_provider: Option<&Arc<HttpClientProvider>>,
) -> Self { ) -> Self {
let mut npm_resolver = None; let mut factory = ResolverFactory::new(config_data);
let mut node_resolver = None; if let Some(http_client_provider) = http_client_provider {
if let Some(http_client) = http_client_provider { factory.init_npm_resolver(http_client_provider, cache).await;
npm_resolver = create_npm_resolver(
config_data.map(|d| d.as_ref()),
cache,
http_client,
)
.await;
node_resolver = create_node_resolver(npm_resolver.as_ref());
} }
let graph_resolver = create_graph_resolver( let in_npm_pkg_checker = factory.in_npm_pkg_checker().clone();
config_data.map(|d| d.as_ref()), let npm_resolver = factory.npm_resolver().cloned();
npm_resolver.as_ref(), let node_resolver = factory.node_resolver().cloned();
node_resolver.as_ref(), let npm_pkg_req_resolver = factory.npm_pkg_req_resolver().cloned();
); let cli_resolver = factory.cli_resolver().clone();
let pkg_json_resolver = factory.pkg_json_resolver().clone();
let jsr_resolver = Some(Arc::new(JsrCacheResolver::new( let jsr_resolver = Some(Arc::new(JsrCacheResolver::new(
cache.for_specifier(config_data.map(|d| d.scope.as_ref())), cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.map(|d| d.as_ref()), config_data.map(|d| d.as_ref()),
@ -112,7 +135,9 @@ impl LspScopeResolver {
cache.for_specifier(config_data.map(|d| d.scope.as_ref())), cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()), config_data.and_then(|d| d.lockfile.clone()),
))); )));
let npm_graph_resolver = graph_resolver.create_graph_npm_resolver(); let npm_graph_resolver = cli_resolver.create_graph_npm_resolver();
let maybe_jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let graph_imports = config_data let graph_imports = config_data
.and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok()) .and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok())
.map(|imports| { .map(|imports| {
@ -120,11 +145,18 @@ impl LspScopeResolver {
imports imports
.into_iter() .into_iter()
.map(|(referrer, imports)| { .map(|(referrer, imports)| {
let resolver = SingleReferrerGraphResolver {
valid_referrer: &referrer,
module_resolution_mode: ResolutionMode::Import,
cli_resolver: &cli_resolver,
jsx_import_source_config: maybe_jsx_import_source_config
.as_ref(),
};
let graph_import = GraphImport::new( let graph_import = GraphImport::new(
&referrer, &referrer,
imports, imports,
&CliJsrUrlProvider, &CliJsrUrlProvider,
Some(graph_resolver.as_ref()), Some(&resolver),
Some(&npm_graph_resolver), Some(&npm_graph_resolver),
); );
(referrer, graph_import) (referrer, graph_import)
@ -133,33 +165,81 @@ impl LspScopeResolver {
) )
}) })
.unwrap_or_default(); .unwrap_or_default();
let package_json_deps_by_resolution = (|| {
let npm_pkg_req_resolver = npm_pkg_req_resolver.as_ref()?;
let package_json = config_data?.maybe_pkg_json()?;
let referrer = package_json.specifier();
let dependencies = package_json.dependencies.as_ref()?;
let result = dependencies
.iter()
.flat_map(|(name, _)| {
let req_ref =
NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?;
let specifier = into_specifier_and_media_type(Some(
npm_pkg_req_resolver
.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
ResolutionMode::Import,
NodeResolutionKind::Types,
)
.or_else(|_| {
npm_pkg_req_resolver.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
})
.ok()?,
))
.0;
Some((specifier, name.clone()))
})
.collect();
Some(result)
})();
let package_json_deps_by_resolution =
Arc::new(package_json_deps_by_resolution.unwrap_or_default());
Self { Self {
graph_resolver, resolver: cli_resolver,
in_npm_pkg_checker,
jsr_resolver, jsr_resolver,
npm_pkg_req_resolver,
npm_resolver, npm_resolver,
node_resolver, node_resolver,
pkg_json_resolver,
redirect_resolver, redirect_resolver,
graph_imports, graph_imports,
dep_info: Default::default(),
package_json_deps_by_resolution,
config_data: config_data.cloned(), config_data: config_data.cloned(),
} }
} }
fn snapshot(&self) -> Arc<Self> { fn snapshot(&self) -> Arc<Self> {
let mut factory = ResolverFactory::new(self.config_data.as_ref());
let npm_resolver = let npm_resolver =
self.npm_resolver.as_ref().map(|r| r.clone_snapshotted()); self.npm_resolver.as_ref().map(|r| r.clone_snapshotted());
let node_resolver = create_node_resolver(npm_resolver.as_ref()); if let Some(npm_resolver) = &npm_resolver {
let graph_resolver = create_graph_resolver( factory.set_npm_resolver(npm_resolver.clone());
self.config_data.as_deref(), }
npm_resolver.as_ref(),
node_resolver.as_ref(),
);
Arc::new(Self { Arc::new(Self {
graph_resolver, resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
jsr_resolver: self.jsr_resolver.clone(), jsr_resolver: self.jsr_resolver.clone(),
npm_resolver, npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
node_resolver, npm_resolver: factory.npm_resolver().cloned(),
node_resolver: factory.node_resolver().cloned(),
redirect_resolver: self.redirect_resolver.clone(), redirect_resolver: self.redirect_resolver.clone(),
pkg_json_resolver: factory.pkg_json_resolver().clone(),
graph_imports: self.graph_imports.clone(), graph_imports: self.graph_imports.clone(),
dep_info: self.dep_info.clone(),
package_json_deps_by_resolution: self
.package_json_deps_by_resolution
.clone(),
config_data: self.config_data.clone(), config_data: self.config_data.clone(),
}) })
} }
@ -223,19 +303,24 @@ impl LspResolver {
} }
} }
pub async fn set_npm_reqs( pub async fn set_dep_info_by_scope(
&self, &self,
reqs: &BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>, dep_info_by_scope: &Arc<
BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>,
>,
) { ) {
for (scope, resolver) in [(None, &self.unscoped)] for (scope, resolver) in [(None, &self.unscoped)]
.into_iter() .into_iter()
.chain(self.by_scope.iter().map(|(s, r)| (Some(s), r))) .chain(self.by_scope.iter().map(|(s, r)| (Some(s), r)))
{ {
let dep_info = dep_info_by_scope.get(&scope.cloned());
if let Some(dep_info) = dep_info {
*resolver.dep_info.lock() = dep_info.clone();
}
if let Some(npm_resolver) = resolver.npm_resolver.as_ref() { if let Some(npm_resolver) = resolver.npm_resolver.as_ref() {
if let Some(npm_resolver) = npm_resolver.as_managed() { if let Some(npm_resolver) = npm_resolver.as_managed() {
let reqs = reqs let reqs = dep_info
.get(&scope.cloned()) .map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>())
.map(|reqs| reqs.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default(); .unwrap_or_default();
if let Err(err) = npm_resolver.set_package_reqs(&reqs).await { if let Err(err) = npm_resolver.set_package_reqs(&reqs).await {
lsp_warn!("Could not set npm package requirements: {:#}", err); lsp_warn!("Could not set npm package requirements: {:#}", err);
@ -245,12 +330,12 @@ impl LspResolver {
} }
} }
pub fn as_graph_resolver( pub fn as_cli_resolver(
&self, &self,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> &dyn Resolver { ) -> &CliResolver {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.as_ref() resolver.resolver.as_ref()
} }
pub fn create_graph_npm_resolver( pub fn create_graph_npm_resolver(
@ -258,7 +343,23 @@ impl LspResolver {
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> WorkerCliNpmGraphResolver { ) -> WorkerCliNpmGraphResolver {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.create_graph_npm_resolver() resolver.resolver.create_graph_npm_resolver()
}
pub fn as_config_data(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&Arc<ConfigData>> {
let resolver = self.get_scope_resolver(file_referrer);
resolver.config_data.as_ref()
}
pub fn in_npm_pkg_checker(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> &Arc<dyn InNpmPackageChecker> {
let resolver = self.get_scope_resolver(file_referrer);
&resolver.in_npm_pkg_checker
} }
pub fn maybe_managed_npm_resolver( pub fn maybe_managed_npm_resolver(
@ -324,17 +425,48 @@ impl LspResolver {
&self, &self,
req_ref: &NpmPackageReqReference, req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> { ) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?; let npm_pkg_req_resolver = resolver.npm_pkg_req_resolver.as_ref()?;
Some(NodeResolution::into_specifier_and_media_type(Some( Some(into_specifier_and_media_type(Some(
node_resolver npm_pkg_req_resolver
.resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) .resolve_req_reference(
req_ref,
referrer,
resolution_mode,
NodeResolutionKind::Types,
)
.ok()?, .ok()?,
))) )))
} }
pub fn file_url_to_package_json_dep(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<String> {
let resolver = self.get_scope_resolver(file_referrer);
resolver
.package_json_deps_by_resolution
.get(specifier)
.cloned()
}
pub fn deno_types_to_code_resolution(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<ModuleSpecifier> {
let resolver = self.get_scope_resolver(file_referrer);
let dep_info = resolver.dep_info.lock().clone();
dep_info
.deno_types_to_code_resolutions
.get(specifier)
.cloned()
}
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool { pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool { fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
// consider any /node_modules/ directory as being in the node_modules // consider any /node_modules/ directory as being in the node_modules
@ -346,14 +478,10 @@ impl LspResolver {
.contains("/node_modules/") .contains("/node_modules/")
} }
let global_npm_resolver = self if let Some(node_resolver) =
.get_scope_resolver(Some(specifier)) &self.get_scope_resolver(Some(specifier)).node_resolver
.npm_resolver {
.as_ref() if node_resolver.in_npm_package(specifier) {
.and_then(|npm_resolver| npm_resolver.as_managed())
.filter(|r| r.root_node_modules_path().is_none());
if let Some(npm_resolver) = &global_npm_resolver {
if npm_resolver.in_npm_package(specifier) {
return true; return true;
} }
} }
@ -361,32 +489,23 @@ impl LspResolver {
has_node_modules_dir(specifier) has_node_modules_dir(specifier)
} }
pub fn node_media_type(
&self,
specifier: &ModuleSpecifier,
) -> Option<MediaType> {
let resolver = self.get_scope_resolver(Some(specifier));
let node_resolver = resolver.node_resolver.as_ref()?;
let resolution = node_resolver
.url_to_node_resolution(specifier.clone())
.ok()?;
Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1)
}
pub fn is_bare_package_json_dep( pub fn is_bare_package_json_dep(
&self, &self,
specifier_text: &str, specifier_text: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
) -> bool { ) -> bool {
let resolver = self.get_scope_resolver(Some(referrer)); let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else { let Some(npm_pkg_req_resolver) = resolver.npm_pkg_req_resolver.as_ref()
else {
return false; return false;
}; };
node_resolver npm_pkg_req_resolver
.resolve_if_for_npm_pkg( .resolve_if_for_npm_pkg(
specifier_text, specifier_text,
referrer, referrer,
NodeResolutionMode::Types, resolution_mode,
NodeResolutionKind::Types,
) )
.ok() .ok()
.flatten() .flatten()
@ -398,10 +517,9 @@ impl LspResolver {
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> { ) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
let resolver = self.get_scope_resolver(Some(referrer)); let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else { resolver
return Ok(None); .pkg_json_resolver
}; .get_closest_package_json(referrer)
node_resolver.get_closest_package_json(referrer)
} }
pub fn resolve_redirects( pub fn resolve_redirects(
@ -453,113 +571,213 @@ impl LspResolver {
} }
} }
async fn create_npm_resolver( #[derive(Debug, Default, Clone)]
config_data: Option<&ConfigData>, pub struct ScopeDepInfo {
cache: &LspCache, pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>,
http_client_provider: &Arc<HttpClientProvider>, pub npm_reqs: BTreeSet<PackageReq>,
) -> Option<Arc<dyn CliNpmResolver>> { pub has_node_specifier: bool,
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false); }
let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { #[derive(Default)]
fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), struct ResolverFactoryServices {
root_node_modules_dir: config_data.and_then(|config_data| { cli_resolver: Deferred<Arc<CliResolver>>,
config_data.node_modules_dir.clone().or_else(|| { in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
url_to_file_path(&config_data.scope) node_resolver: Deferred<Option<Arc<NodeResolver>>>,
.ok() npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
.map(|p| p.join("node_modules/")) npm_resolver: Option<Arc<dyn CliNpmResolver>>,
}) }
}),
}) struct ResolverFactory<'a> {
} else { config_data: Option<&'a Arc<ConfigData>>,
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { fs: Arc<dyn deno_fs::FileSystem>,
http_client_provider: http_client_provider.clone(), pkg_json_resolver: Arc<PackageJsonResolver>,
snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) { services: ResolverFactoryServices,
Some(lockfile) => { }
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(), impl<'a> ResolverFactory<'a> {
) pub fn new(config_data: Option<&'a Arc<ConfigData>>) -> Self {
} let fs = Arc::new(deno_fs::RealFs);
None => CliNpmResolverManagedSnapshotOption::Specified(None), let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
}, deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
// Don't provide the lockfile. We don't want these resolvers ));
// updating it. Only the cache request should update the lockfile. Self {
maybe_lockfile: None, config_data,
fs: Arc::new(deno_fs::RealFs), fs,
npm_global_cache_dir: cache.deno_dir().npm_folder_path(), pkg_json_resolver,
// Use an "only" cache setting in order to make the services: Default::default(),
// user do an explicit "cache" command and prevent }
// the cache from being filled with lots of packages while }
// the user is typing.
cache_setting: CacheSetting::Only, async fn init_npm_resolver(
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly), &mut self,
maybe_node_modules_path: config_data http_client_provider: &Arc<HttpClientProvider>,
.and_then(|d| d.node_modules_dir.clone()), cache: &LspCache,
// only used for top level install, so we can ignore this ) {
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), let enable_byonm = self.config_data.map(|d| d.byonm).unwrap_or(false);
npmrc: config_data let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)),
pkg_json_resolver: self.pkg_json_resolver.clone(),
root_node_modules_dir: self.config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| {
url_to_file_path(&config_data.scope)
.ok()
.map(|p| p.join("node_modules/"))
})
}),
})
} else {
let npmrc = self
.config_data
.and_then(|d| d.npmrc.clone()) .and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc), .unwrap_or_else(create_default_npmrc);
npm_system_info: NpmSystemInfo::default(), let npm_cache_dir = Arc::new(NpmCacheDir::new(
lifecycle_scripts: Default::default(), &DenoCacheEnvFsAdapter(self.fs.as_ref()),
cache.deno_dir().npm_folder_path(),
npmrc.get_all_known_registries_urls(),
));
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions {
http_client_provider: http_client_provider.clone(),
snapshot: match self.config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
},
// Don't provide the lockfile. We don't want these resolvers
// updating it. Only the cache request should update the lockfile.
maybe_lockfile: None,
fs: Arc::new(deno_fs::RealFs),
npm_cache_dir,
// Use an "only" cache setting in order to make the
// user do an explicit "cache" command and prevent
// the cache from being filled with lots of packages while
// the user is typing.
cache_setting: CacheSetting::Only,
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly),
maybe_node_modules_path: self
.config_data
.and_then(|d| d.node_modules_dir.clone()),
// only used for top level install, so we can ignore this
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()),
npmrc,
npm_system_info: NpmSystemInfo::default(),
lifecycle_scripts: Default::default(),
})
};
self.set_npm_resolver(create_cli_npm_resolver_for_lsp(options).await);
}
pub fn set_npm_resolver(&mut self, npm_resolver: Arc<dyn CliNpmResolver>) {
self.services.npm_resolver = Some(npm_resolver);
}
pub fn npm_resolver(&self) -> Option<&Arc<dyn CliNpmResolver>> {
self.services.npm_resolver.as_ref()
}
pub fn cli_resolver(&self) -> &Arc<CliResolver> {
self.services.cli_resolver.get_or_init(|| {
let npm_req_resolver = self.npm_pkg_req_resolver().cloned();
let deno_resolver = Arc::new(CliDenoResolver::new(DenoResolverOptions {
in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
node_and_req_resolver: match (self.node_resolver(), npm_req_resolver) {
(Some(node_resolver), Some(npm_req_resolver)) => {
Some(NodeAndNpmReqResolver {
node_resolver: node_resolver.clone(),
npm_req_resolver,
})
}
_ => None,
},
sloppy_imports_resolver: self
.config_data
.and_then(|d| d.sloppy_imports_resolver.clone()),
workspace_resolver: self
.config_data
.map(|d| d.resolver.clone())
.unwrap_or_else(|| {
Arc::new(WorkspaceResolver::new_raw(
// this is fine because this is only used before initialization
Arc::new(ModuleSpecifier::parse("file:///").unwrap()),
None,
Vec::new(),
Vec::new(),
PackageJsonDepResolution::Disabled,
))
}),
is_byonm: self.config_data.map(|d| d.byonm).unwrap_or(false),
maybe_vendor_dir: self.config_data.and_then(|d| d.vendor_dir.as_ref()),
}));
Arc::new(CliResolver::new(CliResolverOptions {
deno_resolver,
npm_resolver: self.npm_resolver().cloned(),
bare_node_builtins_enabled: self
.config_data
.is_some_and(|d| d.unstable.contains("bare-node-builtins")),
}))
}) })
}; }
Some(create_cli_npm_resolver_for_lsp(options).await)
}
fn create_node_resolver( pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
npm_resolver: Option<&Arc<dyn CliNpmResolver>>, &self.pkg_json_resolver
) -> Option<Arc<CliNodeResolver>> { }
use once_cell::sync::Lazy;
// it's not ideal to share this across all scopes and to pub fn in_npm_pkg_checker(&self) -> &Arc<dyn InNpmPackageChecker> {
// never clear it, but it's fine for the time being self.services.in_npm_pkg_checker.get_or_init(|| {
static CJS_RESOLUTIONS: Lazy<Arc<CjsResolutionStore>> = crate::npm::create_in_npm_pkg_checker(
Lazy::new(Default::default); match self.services.npm_resolver.as_ref().map(|r| r.as_inner()) {
Some(crate::npm::InnerCliNpmResolverRef::Byonm(_)) | None => {
CreateInNpmPkgCheckerOptions::Byonm
}
Some(crate::npm::InnerCliNpmResolverRef::Managed(m)) => {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.maybe_node_modules_path(),
},
)
}
},
)
})
}
let npm_resolver = npm_resolver?; pub fn node_resolver(&self) -> Option<&Arc<NodeResolver>> {
let fs = Arc::new(deno_fs::RealFs); self
let node_resolver_inner = Arc::new(NodeResolver::new( .services
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), .node_resolver
npm_resolver.clone().into_npm_resolver(), .get_or_init(|| {
)); let npm_resolver = self.services.npm_resolver.as_ref()?;
Some(Arc::new(CliNodeResolver::new( Some(Arc::new(NodeResolver::new(
CJS_RESOLUTIONS.clone(), deno_runtime::deno_node::DenoFsNodeResolverEnv::new(self.fs.clone()),
fs, self.in_npm_pkg_checker().clone(),
node_resolver_inner, npm_resolver.clone().into_npm_pkg_folder_resolver(),
npm_resolver.clone(), self.pkg_json_resolver.clone(),
))) )))
} })
.as_ref()
}
fn create_graph_resolver( pub fn npm_pkg_req_resolver(&self) -> Option<&Arc<CliNpmReqResolver>> {
config_data: Option<&ConfigData>, self
npm_resolver: Option<&Arc<dyn CliNpmResolver>>, .services
node_resolver: Option<&Arc<CliNodeResolver>>, .npm_pkg_req_resolver
) -> Arc<CliGraphResolver> { .get_or_init(|| {
let workspace = config_data.map(|d| &d.member_dir.workspace); let node_resolver = self.node_resolver()?;
Arc::new(CliGraphResolver::new(CliGraphResolverOptions { let npm_resolver = self.npm_resolver()?;
node_resolver: node_resolver.cloned(), Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
npm_resolver: npm_resolver.cloned(), byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else( fs: CliDenoResolverFs(self.fs.clone()),
|| { in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
Arc::new(WorkspaceResolver::new_raw( node_resolver: node_resolver.clone(),
// this is fine because this is only used before initialization npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
Arc::new(ModuleSpecifier::parse("file:///").unwrap()), })))
None, })
Vec::new(), .as_ref()
Vec::new(), }
PackageJsonDepResolution::Disabled,
))
},
),
maybe_jsx_import_source_config: workspace.and_then(|workspace| {
workspace.to_maybe_jsx_import_source_config().ok().flatten()
}),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: workspace
.is_some_and(|workspace| workspace.has_unstable("bare-node-builtins")),
sloppy_imports_resolver: config_data
.and_then(|d| d.sloppy_imports_resolver.clone()),
}))
} }
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
@ -586,6 +804,134 @@ impl std::fmt::Debug for RedirectResolver {
} }
} }
#[derive(Debug)]
pub struct LspIsCjsResolver {
inner: IsCjsResolver,
}
impl Default for LspIsCjsResolver {
fn default() -> Self {
LspIsCjsResolver::new(&Default::default())
}
}
impl LspIsCjsResolver {
pub fn new(cache: &LspCache) -> Self {
#[derive(Debug)]
struct LspInNpmPackageChecker {
global_cache_dir: ModuleSpecifier,
}
impl LspInNpmPackageChecker {
pub fn new(cache: &LspCache) -> Self {
let npm_folder_path = cache.deno_dir().npm_folder_path();
Self {
global_cache_dir: url_from_directory_path(
&canonicalize_path_maybe_not_exists(&npm_folder_path)
.unwrap_or(npm_folder_path),
)
.unwrap_or_else(|_| {
ModuleSpecifier::parse("file:///invalid/").unwrap()
}),
}
}
}
impl InNpmPackageChecker for LspInNpmPackageChecker {
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.scheme() != "file" {
return false;
}
if specifier
.as_str()
.starts_with(self.global_cache_dir.as_str())
{
return true;
}
specifier.as_str().contains("/node_modules/")
}
}
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
LspIsCjsResolver {
inner: IsCjsResolver::new(
Arc::new(LspInNpmPackageChecker::new(cache)),
pkg_json_resolver,
crate::resolver::IsCjsResolverOptions {
detect_cjs: true,
is_node_main: false,
},
),
}
}
pub fn get_doc_resolution_mode(&self, document: &Document) -> ResolutionMode {
self.get_lsp_resolution_mode(document.specifier(), document.is_script())
}
pub fn get_lsp_resolution_mode(
&self,
specifier: &ModuleSpecifier,
is_script: Option<bool>,
) -> ResolutionMode {
self.inner.get_lsp_resolution_mode(specifier, is_script)
}
}
#[derive(Debug)]
pub struct SingleReferrerGraphResolver<'a> {
pub valid_referrer: &'a ModuleSpecifier,
pub module_resolution_mode: ResolutionMode,
pub cli_resolver: &'a CliResolver,
pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>,
}
impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> {
fn default_jsx_import_source(&self) -> Option<String> {
self
.jsx_import_source_config
.and_then(|c| c.default_specifier.clone())
}
fn default_jsx_import_source_types(&self) -> Option<String> {
self
.jsx_import_source_config
.and_then(|c| c.default_types_specifier.clone())
}
fn jsx_import_source_module(&self) -> &str {
self
.jsx_import_source_config
.map(|c| c.module.as_str())
.unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
}
fn resolve(
&self,
specifier_text: &str,
referrer_range: &Range,
resolution_kind: deno_graph::source::ResolutionKind,
) -> Result<ModuleSpecifier, deno_graph::source::ResolveError> {
// this resolver assumes it will only be used with a single referrer
// with the provided referrer kind
debug_assert_eq!(referrer_range.specifier, *self.valid_referrer);
self.cli_resolver.resolve(
specifier_text,
&referrer_range.specifier,
referrer_range.range.start,
referrer_range
.resolution_mode
.map(to_node_resolution_mode)
.unwrap_or(self.module_resolution_mode),
to_node_resolution_kind(resolution_kind),
)
}
}
impl RedirectResolver { impl RedirectResolver {
fn new( fn new(
cache: Arc<dyn HttpCache>, cache: Arc<dyn HttpCache>,

View file

@ -650,7 +650,7 @@ pub mod tests {
.unwrap(); .unwrap();
let text_info = parsed_module.text_info_lazy().clone(); let text_info = parsed_module.text_info_lazy().clone();
let mut collector = TestCollector::new(specifier, text_info); let mut collector = TestCollector::new(specifier, text_info);
parsed_module.module().visit_with(&mut collector); parsed_module.program().visit_with(&mut collector);
collector.take() collector.take()
} }

View file

@ -34,6 +34,7 @@ use crate::util::path::relative_specifier;
use crate::util::path::to_percent_decoded_str; use crate::util::path::to_percent_decoded_str;
use crate::util::result::InfallibleResultExt; use crate::util::result::InfallibleResultExt;
use crate::util::v8::convert; use crate::util::v8::convert;
use crate::worker::create_isolate_create_params;
use deno_core::convert::Smi; use deno_core::convert::Smi;
use deno_core::convert::ToV8; use deno_core::convert::ToV8;
use deno_core::error::StdAnyError; use deno_core::error::StdAnyError;
@ -69,6 +70,7 @@ use indexmap::IndexMap;
use indexmap::IndexSet; use indexmap::IndexSet;
use lazy_regex::lazy_regex; use lazy_regex::lazy_regex;
use log::error; use log::error;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Captures; use regex::Captures;
use regex::Regex; use regex::Regex;
@ -236,7 +238,7 @@ pub struct TsServer {
performance: Arc<Performance>, performance: Arc<Performance>,
sender: mpsc::UnboundedSender<Request>, sender: mpsc::UnboundedSender<Request>,
receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>, receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>,
specifier_map: Arc<TscSpecifierMap>, pub specifier_map: Arc<TscSpecifierMap>,
inspector_server: Mutex<Option<Arc<InspectorServer>>>, inspector_server: Mutex<Option<Arc<InspectorServer>>>,
pending_change: Mutex<Option<PendingChange>>, pending_change: Mutex<Option<PendingChange>>,
} }
@ -882,20 +884,22 @@ impl TsServer {
options: GetCompletionsAtPositionOptions, options: GetCompletionsAtPositionOptions,
format_code_settings: FormatCodeSettings, format_code_settings: FormatCodeSettings,
scope: Option<ModuleSpecifier>, scope: Option<ModuleSpecifier>,
) -> Option<CompletionInfo> { ) -> Result<Option<CompletionInfo>, AnyError> {
let req = TscRequest::GetCompletionsAtPosition(Box::new(( let req = TscRequest::GetCompletionsAtPosition(Box::new((
self.specifier_map.denormalize(&specifier), self.specifier_map.denormalize(&specifier),
position, position,
options, options,
format_code_settings, format_code_settings,
))); )));
match self.request(snapshot, req, scope).await { self
Ok(maybe_info) => maybe_info, .request::<Option<CompletionInfo>>(snapshot, req, scope)
Err(err) => { .await
log::error!("Unable to get completion info from TypeScript: {:#}", err); .map(|mut info| {
None if let Some(info) = &mut info {
} info.normalize(&self.specifier_map);
} }
info
})
} }
pub async fn get_completion_details( pub async fn get_completion_details(
@ -3413,9 +3417,18 @@ fn parse_code_actions(
additional_text_edits.extend(change.text_changes.iter().map(|tc| { additional_text_edits.extend(change.text_changes.iter().map(|tc| {
let mut text_edit = tc.as_text_edit(asset_or_doc.line_index()); let mut text_edit = tc.as_text_edit(asset_or_doc.line_index());
if let Some(specifier_rewrite) = &data.specifier_rewrite { if let Some(specifier_rewrite) = &data.specifier_rewrite {
text_edit.new_text = text_edit text_edit.new_text = text_edit.new_text.replace(
.new_text &specifier_rewrite.old_specifier,
.replace(&specifier_rewrite.0, &specifier_rewrite.1); &specifier_rewrite.new_specifier,
);
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
text_edit.new_text = format!(
"// @deno-types=\"{}\"\n{}",
deno_types_specifier, &text_edit.new_text
);
}
} }
text_edit text_edit
})); }));
@ -3574,17 +3587,23 @@ impl CompletionEntryDetails {
let mut text_edit = original_item.text_edit.clone(); let mut text_edit = original_item.text_edit.clone();
if let Some(specifier_rewrite) = &data.specifier_rewrite { if let Some(specifier_rewrite) = &data.specifier_rewrite {
if let Some(text_edit) = &mut text_edit { if let Some(text_edit) = &mut text_edit {
match text_edit { let new_text = match text_edit {
lsp::CompletionTextEdit::Edit(text_edit) => { lsp::CompletionTextEdit::Edit(text_edit) => &mut text_edit.new_text,
text_edit.new_text = text_edit
.new_text
.replace(&specifier_rewrite.0, &specifier_rewrite.1);
}
lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => { lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => {
insert_replace_edit.new_text = insert_replace_edit &mut insert_replace_edit.new_text
.new_text
.replace(&specifier_rewrite.0, &specifier_rewrite.1);
} }
};
*new_text = new_text.replace(
&specifier_rewrite.old_specifier,
&specifier_rewrite.new_specifier,
);
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
*new_text = format!(
"// @deno-types=\"{}\"\n{}",
deno_types_specifier, new_text
);
} }
} }
} }
@ -3642,6 +3661,12 @@ pub struct CompletionInfo {
} }
impl CompletionInfo { impl CompletionInfo {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
for entry in &mut self.entries {
entry.normalize(specifier_map);
}
}
pub fn as_completion_response( pub fn as_completion_response(
&self, &self,
line_index: Arc<LineIndex>, line_index: Arc<LineIndex>,
@ -3683,6 +3708,13 @@ impl CompletionInfo {
} }
} }
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CompletionSpecifierRewrite {
old_specifier: String,
new_specifier: String,
new_deno_types_specifier: Option<String>,
}
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CompletionItemData { pub struct CompletionItemData {
@ -3695,7 +3727,7 @@ pub struct CompletionItemData {
/// be rewritten by replacing the first string with the second. Intended for /// be rewritten by replacing the first string with the second. Intended for
/// auto-import specifiers to be reverse-import-mapped. /// auto-import specifiers to be reverse-import-mapped.
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub specifier_rewrite: Option<(String, String)>, pub specifier_rewrite: Option<CompletionSpecifierRewrite>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Value>, pub data: Option<Value>,
pub use_code_snippet: bool, pub use_code_snippet: bool,
@ -3703,11 +3735,17 @@ pub struct CompletionItemData {
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct CompletionEntryDataImport { struct CompletionEntryDataAutoImport {
module_specifier: String, module_specifier: String,
file_name: String, file_name: String,
} }
#[derive(Debug)]
pub struct CompletionNormalizedAutoImportData {
raw: CompletionEntryDataAutoImport,
normalized: ModuleSpecifier,
}
#[derive(Debug, Default, Deserialize, Serialize)] #[derive(Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CompletionEntry { pub struct CompletionEntry {
@ -3740,9 +3778,28 @@ pub struct CompletionEntry {
is_import_statement_completion: Option<bool>, is_import_statement_completion: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
data: Option<Value>, data: Option<Value>,
/// This is not from tsc, we add it for convenience during normalization.
/// Represents `self.data.file_name`, but normalized.
#[serde(skip)]
auto_import_data: Option<CompletionNormalizedAutoImportData>,
} }
impl CompletionEntry { impl CompletionEntry {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
let Some(data) = &self.data else {
return;
};
let Ok(raw) =
serde_json::from_value::<CompletionEntryDataAutoImport>(data.clone())
else {
return;
};
if let Ok(normalized) = specifier_map.normalize(&raw.file_name) {
self.auto_import_data =
Some(CompletionNormalizedAutoImportData { raw, normalized });
}
}
fn get_commit_characters( fn get_commit_characters(
&self, &self,
info: &CompletionInfo, info: &CompletionInfo,
@ -3891,25 +3948,44 @@ impl CompletionEntry {
if let Some(source) = &self.source { if let Some(source) = &self.source {
let mut display_source = source.clone(); let mut display_source = source.clone();
if let Some(data) = &self.data { if let Some(import_data) = &self.auto_import_data {
if let Ok(import_data) = let import_mapper =
serde_json::from_value::<CompletionEntryDataImport>(data.clone()) language_server.get_ts_response_import_mapper(specifier);
if let Some(mut new_specifier) = import_mapper
.check_specifier(&import_data.normalized, specifier)
.or_else(|| relative_specifier(specifier, &import_data.normalized))
{ {
if let Ok(import_specifier) = resolve_url(&import_data.file_name) { if new_specifier.contains("/node_modules/") {
if let Some(new_module_specifier) = language_server return None;
.get_ts_response_import_mapper(specifier)
.check_specifier(&import_specifier, specifier)
.or_else(|| relative_specifier(specifier, &import_specifier))
{
display_source.clone_from(&new_module_specifier);
if new_module_specifier != import_data.module_specifier {
specifier_rewrite =
Some((import_data.module_specifier, new_module_specifier));
}
} else if source.starts_with(jsr_url().as_str()) {
return None;
}
} }
let mut new_deno_types_specifier = None;
if let Some(code_specifier) = language_server
.resolver
.deno_types_to_code_resolution(
&import_data.normalized,
Some(specifier),
)
.and_then(|s| {
import_mapper
.check_specifier(&s, specifier)
.or_else(|| relative_specifier(specifier, &s))
})
{
new_deno_types_specifier =
Some(std::mem::replace(&mut new_specifier, code_specifier));
}
display_source.clone_from(&new_specifier);
if new_specifier != import_data.raw.module_specifier
|| new_deno_types_specifier.is_some()
{
specifier_rewrite = Some(CompletionSpecifierRewrite {
old_specifier: import_data.raw.module_specifier.clone(),
new_specifier,
new_deno_types_specifier,
});
}
} else if source.starts_with(jsr_url().as_str()) {
return None;
} }
} }
// We want relative or bare (import-mapped or otherwise) specifiers to // We want relative or bare (import-mapped or otherwise) specifiers to
@ -4212,6 +4288,11 @@ impl TscSpecifierMap {
return specifier.to_string(); return specifier.to_string();
} }
let mut specifier = original.to_string(); let mut specifier = original.to_string();
if !specifier.contains("/node_modules/@types/node/") {
// The ts server doesn't give completions from files in
// `node_modules/.deno/`. We work around it like this.
specifier = specifier.replace("/node_modules/", "/$node_modules/");
}
let media_type = MediaType::from_specifier(original); let media_type = MediaType::from_specifier(original);
// If the URL-inferred media type doesn't correspond to tsc's path-inferred // If the URL-inferred media type doesn't correspond to tsc's path-inferred
// media type, force it to be the same by appending an extension. // media type, force it to be the same by appending an extension.
@ -4329,7 +4410,7 @@ fn op_is_cancelled(state: &mut OpState) -> bool {
fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool { fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool {
let state = state.borrow::<State>(); let state = state.borrow::<State>();
let mark = state.performance.mark("tsc.op.op_is_node_file"); let mark = state.performance.mark("tsc.op.op_is_node_file");
let r = match ModuleSpecifier::parse(&path) { let r = match state.specifier_map.normalize(path) {
Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier), Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier),
Err(_) => false, Err(_) => false,
}; };
@ -4366,15 +4447,19 @@ fn op_load<'s>(
data: doc.text(), data: doc.text(),
script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), script_kind: crate::tsc::as_ts_script_kind(doc.media_type()),
version: state.script_version(&specifier), version: state.script_version(&specifier),
is_cjs: matches!( is_cjs: doc
doc.media_type(), .document()
MediaType::Cjs | MediaType::Cts | MediaType::Dcts .map(|d| {
), state
.state_snapshot
.is_cjs_resolver
.get_doc_resolution_mode(d)
})
.unwrap_or(ResolutionMode::Import)
== ResolutionMode::Require,
}) })
}; };
let serialized = serde_v8::to_v8(scope, maybe_load_response)?; let serialized = serde_v8::to_v8(scope, maybe_load_response)?;
state.performance.measure(mark); state.performance.measure(mark);
Ok(serialized) Ok(serialized)
} }
@ -4399,17 +4484,9 @@ fn op_release(
fn op_resolve( fn op_resolve(
state: &mut OpState, state: &mut OpState,
#[string] base: String, #[string] base: String,
is_base_cjs: bool, #[serde] specifiers: Vec<(bool, String)>,
#[serde] specifiers: Vec<String>,
) -> Result<Vec<Option<(String, String)>>, AnyError> { ) -> Result<Vec<Option<(String, String)>>, AnyError> {
op_resolve_inner( op_resolve_inner(state, ResolveArgs { base, specifiers })
state,
ResolveArgs {
base,
is_base_cjs,
specifiers,
},
)
} }
struct TscRequestArray { struct TscRequestArray {
@ -4598,7 +4675,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
for doc in &docs { for doc in &docs {
let specifier = doc.specifier(); let specifier = doc.specifier();
let is_open = doc.is_open(); let is_open = doc.is_open();
if is_open || specifier.scheme() == "file" { if is_open
|| (specifier.scheme() == "file"
&& !state.state_snapshot.resolver.in_node_modules(specifier))
{
let script_names = doc let script_names = doc
.scope() .scope()
.and_then(|s| result.by_scope.get_mut(s)) .and_then(|s| result.by_scope.get_mut(s))
@ -4609,6 +4689,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
let (types, _) = documents.resolve_dependency( let (types, _) = documents.resolve_dependency(
types, types,
specifier, specifier,
state
.state_snapshot
.is_cjs_resolver
.get_doc_resolution_mode(doc),
doc.file_referrer(), doc.file_referrer(),
)?; )?;
let types_doc = documents.get_or_load(&types, doc.file_referrer())?; let types_doc = documents.get_or_load(&types, doc.file_referrer())?;
@ -4712,6 +4796,7 @@ fn run_tsc_thread(
specifier_map, specifier_map,
request_rx, request_rx,
)], )],
create_params: create_isolate_create_params(),
startup_snapshot: Some(tsc::compiler_snapshot()), startup_snapshot: Some(tsc::compiler_snapshot()),
inspector: has_inspector_server, inspector: has_inspector_server,
..Default::default() ..Default::default()
@ -5491,6 +5576,7 @@ mod tests {
documents: Arc::new(documents), documents: Arc::new(documents),
assets: Default::default(), assets: Default::default(),
config: Arc::new(config), config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver, resolver,
}); });
let performance = Arc::new(Performance::default()); let performance = Arc::new(Performance::default());
@ -5516,7 +5602,7 @@ mod tests {
let (_tx, rx) = mpsc::unbounded_channel(); let (_tx, rx) = mpsc::unbounded_channel();
let state = let state =
State::new(state_snapshot, Default::default(), Default::default(), rx); State::new(state_snapshot, Default::default(), Default::default(), rx);
let mut op_state = OpState::new(None); let mut op_state = OpState::new(None, None);
op_state.put(state); op_state.put(state);
op_state op_state
} }
@ -6024,6 +6110,7 @@ mod tests {
Some(temp_dir.url()), Some(temp_dir.url()),
) )
.await .await
.unwrap()
.unwrap(); .unwrap();
assert_eq!(info.entries.len(), 22); assert_eq!(info.entries.len(), 22);
let details = ts_server let details = ts_server
@ -6183,6 +6270,7 @@ mod tests {
Some(temp_dir.url()), Some(temp_dir.url()),
) )
.await .await
.unwrap()
.unwrap(); .unwrap();
let entry = info let entry = info
.entries .entries
@ -6339,8 +6427,7 @@ mod tests {
&mut state, &mut state,
ResolveArgs { ResolveArgs {
base: temp_dir.url().join("a.ts").unwrap().to_string(), base: temp_dir.url().join("a.ts").unwrap().to_string(),
is_base_cjs: false, specifiers: vec![(false, "./b.ts".to_string())],
specifiers: vec!["./b.ts".to_string()],
}, },
) )
.unwrap(); .unwrap();

View file

@ -37,6 +37,7 @@ use crate::util::v8::init_v8_flags;
use args::TaskFlags; use args::TaskFlags;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS; pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
@ -50,7 +51,6 @@ use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_terminal::colors; use deno_terminal::colors;
use factory::CliFactory; use factory::CliFactory;
use npm::ResolvePkgFolderFromDenoReqError;
use standalone::MODULE_NOT_FOUND; use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME; use standalone::UNSUPPORTED_SCHEME;
use std::env; use std::env;
@ -135,7 +135,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::compile::compile(flags, compile_flags).await tools::compile::compile(flags, compile_flags).await
}), }),
DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async { DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async {
tools::coverage::cover_files(flags, coverage_flags).await tools::coverage::cover_files(flags, coverage_flags)
}), }),
DenoSubcommand::Fmt(fmt_flags) => { DenoSubcommand::Fmt(fmt_flags) => {
spawn_subcommand( spawn_subcommand(
@ -144,9 +144,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
} }
DenoSubcommand::Init(init_flags) => { DenoSubcommand::Init(init_flags) => {
spawn_subcommand(async { spawn_subcommand(async {
// make compiler happy since init_project is sync tools::init::init_project(init_flags).await
tokio::task::yield_now().await;
tools::init::init_project(init_flags)
}) })
} }
DenoSubcommand::Info(info_flags) => { DenoSubcommand::Info(info_flags) => {
@ -188,6 +186,11 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::lint::lint(flags, lint_flags).await tools::lint::lint(flags, lint_flags).await
} }
}), }),
DenoSubcommand::Outdated(update_flags) => {
spawn_subcommand(async move {
tools::registry::outdated(flags, update_flags).await
})
}
DenoSubcommand::Repl(repl_flags) => { DenoSubcommand::Repl(repl_flags) => {
spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await }) spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await })
} }
@ -238,6 +241,9 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
cwd: None, cwd: None,
task: Some(run_flags.script.clone()), task: Some(run_flags.script.clone()),
is_run: true, is_run: true,
recursive: false,
filter: None,
eval: false,
}; };
new_flags.subcommand = DenoSubcommand::Task(task_flags.clone()); new_flags.subcommand = DenoSubcommand::Task(task_flags.clone());
let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone()).await; let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone()).await;
@ -350,18 +356,17 @@ fn setup_panic_hook() {
eprintln!("Args: {:?}", env::args().collect::<Vec<_>>()); eprintln!("Args: {:?}", env::args().collect::<Vec<_>>());
eprintln!(); eprintln!();
orig_hook(panic_info); orig_hook(panic_info);
std::process::exit(1); deno_runtime::exit(1);
})); }));
} }
#[allow(clippy::print_stderr)]
fn exit_with_message(message: &str, code: i32) -> ! { fn exit_with_message(message: &str, code: i32) -> ! {
eprintln!( log::error!(
"{}: {}", "{}: {}",
colors::red_bold("error"), colors::red_bold("error"),
message.trim_start_matches("error: ") message.trim_start_matches("error: ")
); );
std::process::exit(code); deno_runtime::exit(code);
} }
fn exit_for_error(error: AnyError) -> ! { fn exit_for_error(error: AnyError) -> ! {
@ -380,13 +385,12 @@ fn exit_for_error(error: AnyError) -> ! {
exit_with_message(&error_string, error_code); exit_with_message(&error_string, error_code);
} }
#[allow(clippy::print_stderr)]
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
eprintln!( log::error!(
"Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.", "Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.",
feature feature
); );
std::process::exit(70); deno_runtime::exit(70);
} }
pub fn main() { pub fn main() {
@ -419,7 +423,7 @@ pub fn main() {
drop(profiler); drop(profiler);
match result { match result {
Ok(exit_code) => std::process::exit(exit_code), Ok(exit_code) => deno_runtime::exit(exit_code),
Err(err) => exit_for_error(err), Err(err) => exit_for_error(err),
} }
} }
@ -433,12 +437,21 @@ fn resolve_flags_and_init(
if err.kind() == clap::error::ErrorKind::DisplayVersion => if err.kind() == clap::error::ErrorKind::DisplayVersion =>
{ {
// Ignore results to avoid BrokenPipe errors. // Ignore results to avoid BrokenPipe errors.
util::logger::init(None);
let _ = err.print(); let _ = err.print();
std::process::exit(0); deno_runtime::exit(0);
}
Err(err) => {
util::logger::init(None);
exit_for_error(AnyError::from(err))
} }
Err(err) => exit_for_error(AnyError::from(err)),
}; };
if let Some(otel_config) = flags.otel_config() {
deno_telemetry::init(otel_config)?;
}
util::logger::init(flags.log_level);
// TODO(bartlomieju): remove in Deno v2.5 and hard error then. // TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled { if flags.unstable_config.legacy_flag_enabled {
log::warn!( log::warn!(
@ -467,7 +480,6 @@ fn resolve_flags_and_init(
deno_core::JsRuntime::init_platform( deno_core::JsRuntime::init_platform(
None, /* import assertions enabled */ false, None, /* import assertions enabled */ false,
); );
util::logger::init(flags.log_level);
Ok(flags) Ok(flags)
} }

View file

@ -40,23 +40,21 @@ use std::env::current_exe;
use crate::args::Flags; use crate::args::Flags;
#[allow(clippy::print_stderr)]
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
eprintln!( log::error!(
"Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.", "Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.",
feature feature
); );
std::process::exit(70); deno_runtime::exit(70);
} }
#[allow(clippy::print_stderr)]
fn exit_with_message(message: &str, code: i32) -> ! { fn exit_with_message(message: &str, code: i32) -> ! {
eprintln!( log::error!(
"{}: {}", "{}: {}",
colors::red_bold("error"), colors::red_bold("error"),
message.trim_start_matches("error: ") message.trim_start_matches("error: ")
); );
std::process::exit(code); deno_runtime::exit(code);
} }
fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T { fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T {
@ -89,13 +87,19 @@ fn main() {
let future = async move { let future = async move {
match standalone { match standalone {
Ok(Some(data)) => { Ok(Some(data)) => {
if let Some(otel_config) = data.metadata.otel_config.clone() {
deno_telemetry::init(otel_config)?;
}
util::logger::init(data.metadata.log_level); util::logger::init(data.metadata.log_level);
load_env_vars(&data.metadata.env_vars_from_env_file); load_env_vars(&data.metadata.env_vars_from_env_file);
let exit_code = standalone::run(data).await?; let exit_code = standalone::run(data).await?;
std::process::exit(exit_code); deno_runtime::exit(exit_code);
} }
Ok(None) => Ok(()), Ok(None) => Ok(()),
Err(err) => Err(err), Err(err) => {
util::logger::init(None);
Err(err)
}
} }
}; };

View file

@ -2,6 +2,7 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::RefCell; use std::cell::RefCell;
use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
@ -23,19 +24,23 @@ use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions; use crate::graph_util::CreateGraphOptions;
use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphBuilder;
use crate::node; use crate::node;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver; use crate::resolver::CjsTracker;
use crate::resolver::CliNodeResolver; use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::ModuleCodeStringSource; use crate::resolver::ModuleCodeStringSource;
use crate::resolver::NotSupportedKindInNpmError;
use crate::resolver::NpmModuleLoader; use crate::resolver::NpmModuleLoader;
use crate::tools::check; use crate::tools::check;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::code_without_source_map;
use crate::util::text_encoding::source_map_from_code; use crate::util::text_encoding::source_map_from_code;
use crate::worker::ModuleLoaderAndSourceMapGetter; use crate::worker::CreateModuleLoaderResult;
use crate::worker::ModuleLoaderFactory; use crate::worker::ModuleLoaderFactory;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
@ -52,21 +57,25 @@ use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_core::ModuleType; use deno_core::ModuleType;
use deno_core::RequestedModuleType; use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo; use deno_core::SourceCodeCacheInfo;
use deno_graph::source::ResolutionMode;
use deno_graph::source::Resolver;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_graph::JsModule; use deno_graph::JsModule;
use deno_graph::JsonModule; use deno_graph::JsonModule;
use deno_graph::Module; use deno_graph::Module;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_graph::WasmModule;
use deno_runtime::code_cache; use deno_runtime::code_cache;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::create_host_defined_options; use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeResolutionMode; use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
pub struct ModuleLoadPreparer { pub struct ModuleLoadPreparer {
options: Arc<CliOptions>, options: Arc<CliOptions>,
@ -199,15 +208,20 @@ struct SharedCliModuleLoaderState {
initial_cwd: PathBuf, initial_cwd: PathBuf,
is_inspecting: bool, is_inspecting: bool,
is_repl: bool, is_repl: bool,
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<CodeCache>>, code_cache: Option<Arc<CodeCache>>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
main_module_graph_container: Arc<MainModuleGraphContainer>, main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
node_resolver: Arc<CliNodeResolver>, node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<NodeResolver>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader, npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>, resolver: Arc<CliResolver>,
} }
pub struct CliModuleLoaderFactory { pub struct CliModuleLoaderFactory {
@ -218,15 +232,20 @@ impl CliModuleLoaderFactory {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
options: &CliOptions, options: &CliOptions,
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<CodeCache>>, code_cache: Option<Arc<CodeCache>>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
main_module_graph_container: Arc<MainModuleGraphContainer>, main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
node_resolver: Arc<CliNodeResolver>, node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<NodeResolver>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader, npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>, resolver: Arc<CliResolver>,
) -> Self { ) -> Self {
Self { Self {
shared: Arc::new(SharedCliModuleLoaderState { shared: Arc::new(SharedCliModuleLoaderState {
@ -239,11 +258,16 @@ impl CliModuleLoaderFactory {
options.sub_command(), options.sub_command(),
DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_) DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_)
), ),
cjs_tracker,
code_cache, code_cache,
emitter, emitter,
fs,
in_npm_pkg_checker,
main_module_graph_container, main_module_graph_container,
module_load_preparer, module_load_preparer,
node_code_translator,
node_resolver, node_resolver,
npm_req_resolver,
npm_resolver, npm_resolver,
npm_module_loader, npm_module_loader,
parsed_source_cache, parsed_source_cache,
@ -259,19 +283,30 @@ impl CliModuleLoaderFactory {
is_worker: bool, is_worker: bool,
parent_permissions: PermissionsContainer, parent_permissions: PermissionsContainer,
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { let module_loader =
lib, Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
is_worker, lib,
parent_permissions, is_worker,
permissions, parent_permissions,
graph_container, permissions,
graph_container: graph_container.clone(),
node_code_translator: self.shared.node_code_translator.clone(),
emitter: self.shared.emitter.clone(),
parsed_source_cache: self.shared.parsed_source_cache.clone(),
shared: self.shared.clone(),
})));
let node_require_loader = Rc::new(CliNodeRequireLoader {
cjs_tracker: self.shared.cjs_tracker.clone(),
emitter: self.shared.emitter.clone(), emitter: self.shared.emitter.clone(),
parsed_source_cache: self.shared.parsed_source_cache.clone(), fs: self.shared.fs.clone(),
shared: self.shared.clone(), graph_container,
}))); in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(),
ModuleLoaderAndSourceMapGetter { npm_resolver: self.shared.npm_resolver.clone(),
module_loader: loader, });
CreateModuleLoaderResult {
module_loader,
node_require_loader,
} }
} }
} }
@ -280,7 +315,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
fn create_for_main( fn create_for_main(
&self, &self,
root_permissions: PermissionsContainer, root_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
self.create_with_lib( self.create_with_lib(
(*self.shared.main_module_graph_container).clone(), (*self.shared.main_module_graph_container).clone(),
self.shared.lib_window, self.shared.lib_window,
@ -294,7 +329,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
&self, &self,
parent_permissions: PermissionsContainer, parent_permissions: PermissionsContainer,
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
self.create_with_lib( self.create_with_lib(
// create a fresh module graph for the worker // create a fresh module graph for the worker
WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new( WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new(
@ -318,6 +353,7 @@ struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
permissions: PermissionsContainer, permissions: PermissionsContainer,
shared: Arc<SharedCliModuleLoaderState>, shared: Arc<SharedCliModuleLoaderState>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
node_code_translator: Arc<CliNodeCodeTranslator>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
graph_container: TGraphContainer, graph_container: TGraphContainer,
} }
@ -331,25 +367,10 @@ impl<TGraphContainer: ModuleGraphContainer>
maybe_referrer: Option<&ModuleSpecifier>, maybe_referrer: Option<&ModuleSpecifier>,
requested_module_type: RequestedModuleType, requested_module_type: RequestedModuleType,
) -> Result<ModuleSource, AnyError> { ) -> Result<ModuleSource, AnyError> {
let code_source = match self.load_prepared_module(specifier).await? { let code_source = self.load_code_source(specifier, maybe_referrer).await?;
Some(code_source) => code_source, let code = if self.shared.is_inspecting
None => { || code_source.media_type == MediaType::Wasm
if self.shared.npm_module_loader.if_in_npm_package(specifier) { {
self
.shared
.npm_module_loader
.load(specifier, maybe_referrer)
.await?
} else {
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
return Err(anyhow!(msg));
}
}
};
let code = if self.shared.is_inspecting {
// we need the code with the source map in order for // we need the code with the source map in order for
// it to work with --inspect or --inspect-brk // it to work with --inspect or --inspect-brk
code_source.code code_source.code
@ -359,6 +380,7 @@ impl<TGraphContainer: ModuleGraphContainer>
}; };
let module_type = match code_source.media_type { let module_type = match code_source.media_type {
MediaType::Json => ModuleType::Json, MediaType::Json => ModuleType::Json,
MediaType::Wasm => ModuleType::Wasm,
_ => ModuleType::JavaScript, _ => ModuleType::JavaScript,
}; };
@ -402,6 +424,29 @@ impl<TGraphContainer: ModuleGraphContainer>
)) ))
} }
async fn load_code_source(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, AnyError> {
if let Some(code_source) = self.load_prepared_module(specifier).await? {
return Ok(code_source);
}
if self.shared.in_npm_pkg_checker.in_npm_package(specifier) {
return self
.shared
.npm_module_loader
.load(specifier, maybe_referrer)
.await;
}
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
Err(anyhow!(msg))
}
fn resolve_referrer( fn resolve_referrer(
&self, &self,
referrer: &str, referrer: &str,
@ -409,7 +454,7 @@ impl<TGraphContainer: ModuleGraphContainer>
let referrer = if referrer.is_empty() && self.shared.is_repl { let referrer = if referrer.is_empty() && self.shared.is_repl {
// FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL
// and `Deno.core.evalContext` API. Ideally we should always have a referrer filled // and `Deno.core.evalContext` API. Ideally we should always have a referrer filled
"./$deno$repl.ts" "./$deno$repl.mts"
} else { } else {
referrer referrer
}; };
@ -432,16 +477,6 @@ impl<TGraphContainer: ModuleGraphContainer>
raw_specifier: &str, raw_specifier: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
) -> Result<ModuleSpecifier, AnyError> { ) -> Result<ModuleSpecifier, AnyError> {
if self.shared.node_resolver.in_npm_package(referrer) {
return Ok(
self
.shared
.node_resolver
.resolve(raw_specifier, referrer, NodeResolutionMode::Execution)?
.into_url(),
);
}
let graph = self.graph_container.graph(); let graph = self.graph_container.graph();
let resolution = match graph.get(referrer) { let resolution = match graph.get(referrer) {
Some(Module::Js(module)) => module Some(Module::Js(module)) => module
@ -462,12 +497,11 @@ impl<TGraphContainer: ModuleGraphContainer>
} }
Resolution::None => Cow::Owned(self.shared.resolver.resolve( Resolution::None => Cow::Owned(self.shared.resolver.resolve(
raw_specifier, raw_specifier,
&deno_graph::Range { referrer,
specifier: referrer.clone(), deno_graph::Position::zeroed(),
start: deno_graph::Position::zeroed(), // if we're here, that means it's resolving a dynamic import
end: deno_graph::Position::zeroed(), ResolutionMode::Import,
}, NodeResolutionKind::Execution,
ResolutionMode::Execution,
)?), )?),
}; };
@ -476,13 +510,14 @@ impl<TGraphContainer: ModuleGraphContainer>
{ {
return self return self
.shared .shared
.node_resolver .npm_req_resolver
.resolve_req_reference( .resolve_req_reference(
&reference, &reference,
referrer, referrer,
NodeResolutionMode::Execution, ResolutionMode::Import,
NodeResolutionKind::Execution,
) )
.map(|res| res.into_url()); .map_err(AnyError::from);
} }
} }
@ -497,22 +532,26 @@ impl<TGraphContainer: ModuleGraphContainer>
self self
.shared .shared
.node_resolver .node_resolver
.resolve_package_sub_path_from_deno_module( .resolve_package_subpath_from_deno_module(
&package_folder, &package_folder,
module.nv_reference.sub_path(), module.nv_reference.sub_path(),
Some(referrer), Some(referrer),
NodeResolutionMode::Execution, ResolutionMode::Import,
NodeResolutionKind::Execution,
) )
.with_context(|| { .with_context(|| {
format!("Could not resolve '{}'.", module.nv_reference) format!("Could not resolve '{}'.", module.nv_reference)
})? })?
.into_url()
} }
Some(Module::Node(module)) => module.specifier.clone(), Some(Module::Node(module)) => module.specifier.clone(),
Some(Module::Js(module)) => module.specifier.clone(), Some(Module::Js(module)) => module.specifier.clone(),
Some(Module::Json(module)) => module.specifier.clone(), Some(Module::Json(module)) => module.specifier.clone(),
Some(Module::Wasm(module)) => module.specifier.clone(),
Some(Module::External(module)) => { Some(Module::External(module)) => {
node::resolve_specifier_into_node_modules(&module.specifier) node::resolve_specifier_into_node_modules(
&module.specifier,
self.shared.fs.as_ref(),
)
} }
None => specifier.into_owned(), None => specifier.into_owned(),
}; };
@ -534,7 +573,7 @@ impl<TGraphContainer: ModuleGraphContainer>
}) => { }) => {
let transpile_result = self let transpile_result = self
.emitter .emitter
.emit_parsed_source(specifier, media_type, source) .emit_parsed_source(specifier, media_type, ModuleKind::Esm, source)
.await?; .await?;
// at this point, we no longer need the parsed source in memory, so free it // at this point, we no longer need the parsed source in memory, so free it
@ -547,11 +586,19 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type, media_type,
})) }))
} }
Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type,
source,
}) => self
.load_maybe_cjs(specifier, media_type, source)
.await
.map(Some),
None => Ok(None), None => Ok(None),
} }
} }
fn load_prepared_module_sync( fn load_prepared_module_for_source_map_sync(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Result<Option<ModuleCodeStringSource>, AnyError> { ) -> Result<Option<ModuleCodeStringSource>, AnyError> {
@ -564,9 +611,12 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type, media_type,
source, source,
}) => { }) => {
let transpile_result = self let transpile_result = self.emitter.emit_parsed_source_sync(
.emitter specifier,
.emit_parsed_source_sync(specifier, media_type, source)?; media_type,
ModuleKind::Esm,
source,
)?;
// at this point, we no longer need the parsed source in memory, so free it // at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
@ -578,6 +628,14 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type, media_type,
})) }))
} }
Some(CodeOrDeferredEmit::Cjs { .. }) => {
self.parsed_source_cache.free(specifier);
// todo(dsherret): to make this work, we should probably just
// rely on the CJS export cache. At the moment this is hard because
// cjs export analysis is only async
Ok(None)
}
None => Ok(None), None => Ok(None),
} }
} }
@ -607,20 +665,37 @@ impl<TGraphContainer: ModuleGraphContainer>
source, source,
media_type, media_type,
specifier, specifier,
is_script,
.. ..
})) => { })) => {
if self.shared.cjs_tracker.is_cjs_with_known_is_script(
specifier,
*media_type,
*is_script,
)? {
return Ok(Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type: *media_type,
source,
}));
}
let code: ModuleCodeString = match media_type { let code: ModuleCodeString = match media_type {
MediaType::JavaScript MediaType::JavaScript
| MediaType::Unknown | MediaType::Unknown
| MediaType::Cjs
| MediaType::Mjs | MediaType::Mjs
| MediaType::Json => source.clone().into(), | MediaType::Json => source.clone().into(),
MediaType::Dts | MediaType::Dcts | MediaType::Dmts => { MediaType::Dts | MediaType::Dcts | MediaType::Dmts => {
Default::default() Default::default()
} }
MediaType::Cjs | MediaType::Cts => {
return Ok(Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type: *media_type,
source,
}));
}
MediaType::TypeScript MediaType::TypeScript
| MediaType::Mts | MediaType::Mts
| MediaType::Cts
| MediaType::Jsx | MediaType::Jsx
| MediaType::Tsx => { | MediaType::Tsx => {
return Ok(Some(CodeOrDeferredEmit::DeferredEmit { return Ok(Some(CodeOrDeferredEmit::DeferredEmit {
@ -629,7 +704,7 @@ impl<TGraphContainer: ModuleGraphContainer>
source, source,
})); }));
} }
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { MediaType::Css | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {media_type} for {specifier}") panic!("Unexpected media type {media_type} for {specifier}")
} }
}; };
@ -643,6 +718,13 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type: *media_type, media_type: *media_type,
}))) })))
} }
Some(deno_graph::Module::Wasm(WasmModule {
source, specifier, ..
})) => Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(source.clone().into()),
found_url: specifier.clone(),
media_type: MediaType::Wasm,
}))),
Some( Some(
deno_graph::Module::External(_) deno_graph::Module::External(_)
| deno_graph::Module::Node(_) | deno_graph::Module::Node(_)
@ -651,6 +733,48 @@ impl<TGraphContainer: ModuleGraphContainer>
| None => Ok(None), | None => Ok(None),
} }
} }
async fn load_maybe_cjs(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
original_source: &Arc<str>,
) -> Result<ModuleCodeStringSource, AnyError> {
let js_source = if media_type.is_emittable() {
Cow::Owned(
self
.emitter
.emit_parsed_source(
specifier,
media_type,
ModuleKind::Cjs,
original_source,
)
.await?,
)
} else {
Cow::Borrowed(original_source.as_ref())
};
let text = self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(js_source))
.await?;
// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);
Ok(ModuleCodeStringSource {
code: match text {
// perf: if the text is borrowed, that means it didn't make any changes
// to the original source, so we can just provide that instead of cloning
// the borrowed text
Cow::Borrowed(_) => {
ModuleSourceCode::String(original_source.clone().into())
}
Cow::Owned(text) => ModuleSourceCode::String(text.into()),
},
found_url: specifier.clone(),
media_type,
})
}
} }
enum CodeOrDeferredEmit<'a> { enum CodeOrDeferredEmit<'a> {
@ -660,6 +784,11 @@ enum CodeOrDeferredEmit<'a> {
media_type: MediaType, media_type: MediaType,
source: &'a Arc<str>, source: &'a Arc<str>,
}, },
Cjs {
specifier: &'a ModuleSpecifier,
media_type: MediaType,
source: &'a Arc<str>,
},
} }
// todo(dsherret): this double Rc boxing is not ideal // todo(dsherret): this double Rc boxing is not ideal
@ -674,7 +803,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
&self, &self,
specifier: &str, specifier: &str,
referrer: &str, referrer: &str,
_kind: ResolutionKind, _kind: deno_core::ResolutionKind,
) -> Result<ModuleSpecifier, AnyError> { ) -> Result<ModuleSpecifier, AnyError> {
fn ensure_not_jsr_non_jsr_remote_import( fn ensure_not_jsr_non_jsr_remote_import(
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -701,7 +830,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
name: &str, name: &str,
) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> { ) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> {
let name = deno_core::ModuleSpecifier::parse(name).ok()?; let name = deno_core::ModuleSpecifier::parse(name).ok()?;
if self.0.shared.node_resolver.in_npm_package(&name) { if self.0.shared.in_npm_pkg_checker.in_npm_package(&name) {
Some(create_host_defined_options(scope)) Some(create_host_defined_options(scope))
} else { } else {
None None
@ -738,7 +867,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
_maybe_referrer: Option<String>, _maybe_referrer: Option<String>,
is_dynamic: bool, is_dynamic: bool,
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> { ) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
if self.0.shared.node_resolver.in_npm_package(specifier) { if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) {
return Box::pin(deno_core::futures::future::ready(Ok(()))); return Box::pin(deno_core::futures::future::ready(Ok(())));
} }
@ -821,7 +950,10 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
"wasm" | "file" | "http" | "https" | "data" | "blob" => (), "wasm" | "file" | "http" | "https" | "data" | "blob" => (),
_ => return None, _ => return None,
} }
let source = self.0.load_prepared_module_sync(&specifier).ok()??; let source = self
.0
.load_prepared_module_for_source_map_sync(&specifier)
.ok()??;
source_map_from_code(source.code.as_bytes()) source_map_from_code(source.code.as_bytes())
} }
@ -900,3 +1032,68 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit {
drop(self.permit); // explicit drop for clarity drop(self.permit); // explicit drop for clarity
} }
} }
#[derive(Debug)]
struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
cjs_tracker: Arc<CjsTracker>,
emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
graph_container: TGraphContainer,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
}
impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
for CliNodeRequireLoader<TGraphContainer>
{
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
path: &'a Path,
) -> Result<std::borrow::Cow<'a, Path>, AnyError> {
if let Ok(url) = deno_path_util::url_from_file_path(path) {
// allow reading if it's in the module graph
if self.graph_container.graph().get(&url).is_some() {
return Ok(std::borrow::Cow::Borrowed(path));
}
}
self.npm_resolver.ensure_read_permission(permissions, path)
}
fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError> {
// todo(dsherret): use the preloaded module from the graph if available?
let media_type = MediaType::from_path(path);
let text = self.fs.read_text_file_lossy_sync(path, None)?;
if media_type.is_emittable() {
let specifier = deno_path_util::url_from_file_path(path)?;
if self.in_npm_pkg_checker.in_npm_package(&specifier) {
return Err(
NotSupportedKindInNpmError {
media_type,
specifier,
}
.into(),
);
}
self.emitter.emit_parsed_source_sync(
&specifier,
media_type,
// this is probably not super accurate due to require esm, but probably ok.
// If we find this causes a lot of churn in the emit cache then we should
// investigate how we can make this better
ModuleKind::Cjs,
&text.into(),
)
} else {
Ok(text)
}
}
fn is_maybe_cjs(
&self,
specifier: &ModuleSpecifier,
) -> Result<bool, ClosestPkgJsonError> {
let media_type = MediaType::from_specifier(specifier);
self.cjs_tracker.is_maybe_cjs(specifier, media_type)
}
}

View file

@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::MediaType; use deno_ast::MediaType;
@ -18,8 +19,7 @@ use serde::Serialize;
use crate::cache::CacheDBHash; use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache; use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use crate::resolver::CliNodeResolver; use crate::resolver::CjsTracker;
use crate::util::fs::canonicalize_path_maybe_not_exists;
pub type CliNodeCodeTranslator = pub type CliNodeCodeTranslator =
NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>; NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>;
@ -32,15 +32,11 @@ pub type CliNodeCodeTranslator =
/// because the node_modules folder might not exist at that time. /// because the node_modules folder might not exist at that time.
pub fn resolve_specifier_into_node_modules( pub fn resolve_specifier_into_node_modules(
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
fs: &dyn deno_fs::FileSystem,
) -> ModuleSpecifier { ) -> ModuleSpecifier {
specifier node_resolver::resolve_specifier_into_node_modules(specifier, &|path| {
.to_file_path() fs.realpath_sync(path).map_err(|err| err.into_io_error())
.ok() })
// this path might not exist at the time the graph is being created
// because the node_modules folder might not yet exist
.and_then(|path| canonicalize_path_maybe_not_exists(&path).ok())
.and_then(|path| ModuleSpecifier::from_file_path(path).ok())
.unwrap_or_else(|| specifier.clone())
} }
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@ -56,22 +52,22 @@ pub enum CliCjsAnalysis {
pub struct CliCjsCodeAnalyzer { pub struct CliCjsCodeAnalyzer {
cache: NodeAnalysisCache, cache: NodeAnalysisCache,
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc, fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>, parsed_source_cache: Option<Arc<ParsedSourceCache>>,
} }
impl CliCjsCodeAnalyzer { impl CliCjsCodeAnalyzer {
pub fn new( pub fn new(
cache: NodeAnalysisCache, cache: NodeAnalysisCache,
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc, fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>, parsed_source_cache: Option<Arc<ParsedSourceCache>>,
) -> Self { ) -> Self {
Self { Self {
cache, cache,
cjs_tracker,
fs, fs,
node_resolver,
parsed_source_cache, parsed_source_cache,
} }
} }
@ -88,7 +84,7 @@ impl CliCjsCodeAnalyzer {
return Ok(analysis); return Ok(analysis);
} }
let mut media_type = MediaType::from_specifier(specifier); let media_type = MediaType::from_specifier(specifier);
if media_type == MediaType::Json { if media_type == MediaType::Json {
return Ok(CliCjsAnalysis::Cjs { return Ok(CliCjsAnalysis::Cjs {
exports: vec![], exports: vec![],
@ -96,62 +92,51 @@ impl CliCjsCodeAnalyzer {
}); });
} }
if media_type == MediaType::JavaScript { let cjs_tracker = self.cjs_tracker.clone();
if let Some(package_json) = let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?;
self.node_resolver.get_closest_package_json(specifier)? let analysis = if is_maybe_cjs {
{ let maybe_parsed_source = self
match package_json.typ.as_str() { .parsed_source_cache
"commonjs" => { .as_ref()
media_type = MediaType::Cjs; .and_then(|c| c.remove_parsed_source(specifier));
}
"module" => {
media_type = MediaType::Mjs;
}
_ => {}
}
}
}
let maybe_parsed_source = self deno_core::unsync::spawn_blocking({
.parsed_source_cache let specifier = specifier.clone();
.as_ref() let source: Arc<str> = source.into();
.and_then(|c| c.remove_parsed_source(specifier)); move || -> Result<_, AnyError> {
let parsed_source =
let analysis = deno_core::unsync::spawn_blocking({ maybe_parsed_source.map(Ok).unwrap_or_else(|| {
let specifier = specifier.clone(); deno_ast::parse_program(deno_ast::ParseParams {
let source: Arc<str> = source.into(); specifier,
move || -> Result<_, deno_ast::ParseDiagnostic> { text: source,
let parsed_source = media_type,
maybe_parsed_source.map(Ok).unwrap_or_else(|| { capture_tokens: true,
deno_ast::parse_program(deno_ast::ParseParams { scope_analysis: false,
specifier, maybe_syntax: None,
text: source, })
media_type, })?;
capture_tokens: true, let is_script = parsed_source.compute_is_script();
scope_analysis: false, let is_cjs = cjs_tracker.is_cjs_with_known_is_script(
maybe_syntax: None, parsed_source.specifier(),
media_type,
is_script,
)?;
if is_cjs {
let analysis = parsed_source.analyze_cjs();
Ok(CliCjsAnalysis::Cjs {
exports: analysis.exports,
reexports: analysis.reexports,
}) })
})?; } else {
if parsed_source.is_script() { Ok(CliCjsAnalysis::Esm)
let analysis = parsed_source.analyze_cjs(); }
Ok(CliCjsAnalysis::Cjs {
exports: analysis.exports,
reexports: analysis.reexports,
})
} else if media_type == MediaType::Cjs {
// FIXME: `deno_ast` should internally handle MediaType::Cjs implying that
// the result must never be Esm
Ok(CliCjsAnalysis::Cjs {
exports: vec![],
reexports: vec![],
})
} else {
Ok(CliCjsAnalysis::Esm)
} }
} })
}) .await
.await .unwrap()?
.unwrap()?; } else {
CliCjsAnalysis::Esm
};
self self
.cache .cache
@ -163,11 +148,11 @@ impl CliCjsCodeAnalyzer {
#[async_trait::async_trait(?Send)] #[async_trait::async_trait(?Send)]
impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
async fn analyze_cjs( async fn analyze_cjs<'a>(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
source: Option<String>, source: Option<Cow<'a, str>>,
) -> Result<ExtNodeCjsAnalysis, AnyError> { ) -> Result<ExtNodeCjsAnalysis<'a>, AnyError> {
let source = match source { let source = match source {
Some(source) => source, Some(source) => source,
None => { None => {
@ -175,7 +160,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
if let Ok(source_from_file) = if let Ok(source_from_file) =
self.fs.read_text_file_lossy_async(path, None).await self.fs.read_text_file_lossy_async(path, None).await
{ {
source_from_file Cow::Owned(source_from_file)
} else { } else {
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports { return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![], exports: vec![],

View file

@ -2,19 +2,17 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url;
use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions; use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::ops::process::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageReq; use node_resolver::NpmPackageFolderResolver;
use node_resolver::NpmResolver;
use crate::args::NpmProcessState; use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind; use crate::args::NpmProcessStateKind;
@ -22,33 +20,16 @@ use crate::resolver::CliDenoResolverFs;
use super::CliNpmResolver; use super::CliNpmResolver;
use super::InnerCliNpmResolverRef; use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError;
pub type CliByonmNpmResolverCreateOptions = pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliDenoResolverFs>; ByonmNpmResolverCreateOptions<CliDenoResolverFs, DenoFsNodeResolverEnv>;
pub type CliByonmNpmResolver = ByonmNpmResolver<CliDenoResolverFs>; pub type CliByonmNpmResolver =
ByonmNpmResolver<CliDenoResolverFs, DenoFsNodeResolverEnv>;
// todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple. // todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple.
#[derive(Debug)] #[derive(Debug)]
struct CliByonmWrapper(Arc<CliByonmNpmResolver>); struct CliByonmWrapper(Arc<CliByonmNpmResolver>);
impl NodeRequireResolver for CliByonmWrapper {
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
if !path
.components()
.any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
{
permissions.check_read_path(path)
} else {
Ok(Cow::Borrowed(path))
}
}
}
impl NpmProcessStateProvider for CliByonmWrapper { impl NpmProcessStateProvider for CliByonmWrapper {
fn get_npm_process_state(&self) -> String { fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState { serde_json::to_string(&NpmProcessState {
@ -63,12 +44,14 @@ impl NpmProcessStateProvider for CliByonmWrapper {
} }
impl CliNpmResolver for CliByonmNpmResolver { impl CliNpmResolver for CliByonmNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> { fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self self
} }
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> { fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> {
Arc::new(CliByonmWrapper(self)) self
} }
fn into_process_state_provider( fn into_process_state_provider(
@ -77,6 +60,10 @@ impl CliNpmResolver for CliByonmNpmResolver {
Arc::new(CliByonmWrapper(self)) Arc::new(CliByonmWrapper(self))
} }
fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> {
Some(self)
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> { fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
Arc::new(self.clone()) Arc::new(self.clone())
} }
@ -89,15 +76,19 @@ impl CliNpmResolver for CliByonmNpmResolver {
self.root_node_modules_dir() self.root_node_modules_dir()
} }
fn resolve_pkg_folder_from_deno_module_req( fn ensure_read_permission<'a>(
&self, &self,
req: &PackageReq, permissions: &mut dyn NodePermissions,
referrer: &Url, path: &'a Path,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> { ) -> Result<Cow<'a, Path>, AnyError> {
ByonmNpmResolver::resolve_pkg_folder_from_deno_module_req( if !path
self, req, referrer, .components()
) .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
.map_err(ResolvePkgFolderFromDenoReqError::Byonm) {
permissions.check_read_path(path).map_err(Into::into)
} else {
Ok(Cow::Borrowed(path))
}
} }
fn check_state_hash(&self) -> Option<u64> { fn check_state_hash(&self) -> Option<u64> {

View file

@ -3,6 +3,7 @@
use base64::prelude::BASE64_STANDARD; use base64::prelude::BASE64_STANDARD;
use base64::Engine; use base64::Engine;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_npm::npm_rc::RegistryConfig; use deno_npm::npm_rc::RegistryConfig;
use http::header; use http::header;
@ -36,17 +37,21 @@ pub fn maybe_auth_header_for_npm_registry(
} }
if username.is_some() && password.is_some() { if username.is_some() && password.is_some() {
// The npm client does some double encoding when generating the
// bearer token value, see
// https://github.com/npm/cli/blob/780afc50e3a345feb1871a28e33fa48235bc3bd5/workspaces/config/lib/index.js#L846-L851
let pw_base64 = BASE64_STANDARD
.decode(password.unwrap())
.with_context(|| "The password in npmrc is an invalid base64 string")?;
let bearer = BASE64_STANDARD.encode(format!(
"{}:{}",
username.unwrap(),
String::from_utf8_lossy(&pw_base64)
));
return Ok(Some(( return Ok(Some((
header::AUTHORIZATION, header::AUTHORIZATION,
header::HeaderValue::from_str(&format!( header::HeaderValue::from_str(&format!("Basic {}", bearer)).unwrap(),
"Basic {}",
BASE64_STANDARD.encode(format!(
"{}:{}",
username.unwrap(),
password.unwrap()
))
))
.unwrap(),
))); )));
} }

View file

@ -36,7 +36,7 @@ pub use tarball::TarballCache;
/// Stores a single copy of npm packages in a cache. /// Stores a single copy of npm packages in a cache.
#[derive(Debug)] #[derive(Debug)]
pub struct NpmCache { pub struct NpmCache {
cache_dir: NpmCacheDir, cache_dir: Arc<NpmCacheDir>,
cache_setting: CacheSetting, cache_setting: CacheSetting,
npmrc: Arc<ResolvedNpmRc>, npmrc: Arc<ResolvedNpmRc>,
/// ensures a package is only downloaded once per run /// ensures a package is only downloaded once per run
@ -45,7 +45,7 @@ pub struct NpmCache {
impl NpmCache { impl NpmCache {
pub fn new( pub fn new(
cache_dir: NpmCacheDir, cache_dir: Arc<NpmCacheDir>,
cache_setting: CacheSetting, cache_setting: CacheSetting,
npmrc: Arc<ResolvedNpmRc>, npmrc: Arc<ResolvedNpmRc>,
) -> Self { ) -> Self {
@ -61,6 +61,10 @@ impl NpmCache {
&self.cache_setting &self.cache_setting
} }
pub fn root_dir_path(&self) -> &Path {
self.cache_dir.root_dir()
}
pub fn root_dir_url(&self) -> &Url { pub fn root_dir_url(&self) -> &Url {
self.cache_dir.root_dir_url() self.cache_dir.root_dir_url()
} }
@ -152,10 +156,6 @@ impl NpmCache {
self.cache_dir.package_name_folder(name, registry_url) self.cache_dir.package_name_folder(name, registry_url)
} }
pub fn root_folder(&self) -> PathBuf {
self.cache_dir.root_dir().to_owned()
}
pub fn resolve_package_folder_id_from_specifier( pub fn resolve_package_folder_id_from_specifier(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,

View file

@ -12,6 +12,7 @@ use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url;
use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi; use deno_npm::registry::NpmRegistryApi;
@ -21,16 +22,17 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage; use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::colors; use deno_runtime::colors;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::ops::process::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::NpmResolver; use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
use resolution::AddPkgReqsResult; use resolution::AddPkgReqsResult;
use crate::args::CliLockfile; use crate::args::CliLockfile;
@ -38,7 +40,7 @@ use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::args::NpmProcessState; use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind; use crate::args::NpmProcessStateKind;
use crate::cache::DenoCacheEnvFsAdapter; use crate::args::PackageJsonDepValueParseWithLocationError;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
@ -65,12 +67,12 @@ pub enum CliNpmResolverManagedSnapshotOption {
Specified(Option<ValidSerializedNpmResolutionSnapshot>), Specified(Option<ValidSerializedNpmResolutionSnapshot>),
} }
pub struct CliNpmResolverManagedCreateOptions { pub struct CliManagedNpmResolverCreateOptions {
pub snapshot: CliNpmResolverManagedSnapshotOption, pub snapshot: CliNpmResolverManagedSnapshotOption,
pub maybe_lockfile: Option<Arc<CliLockfile>>, pub maybe_lockfile: Option<Arc<CliLockfile>>,
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>, pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>, pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
pub npm_global_cache_dir: PathBuf, pub npm_cache_dir: Arc<NpmCacheDir>,
pub cache_setting: crate::args::CacheSetting, pub cache_setting: crate::args::CacheSetting,
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
pub maybe_node_modules_path: Option<PathBuf>, pub maybe_node_modules_path: Option<PathBuf>,
@ -81,7 +83,7 @@ pub struct CliNpmResolverManagedCreateOptions {
} }
pub async fn create_managed_npm_resolver_for_lsp( pub async fn create_managed_npm_resolver_for_lsp(
options: CliNpmResolverManagedCreateOptions, options: CliManagedNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> { ) -> Arc<dyn CliNpmResolver> {
let npm_cache = create_cache(&options); let npm_cache = create_cache(&options);
let npm_api = create_api(&options, npm_cache.clone()); let npm_api = create_api(&options, npm_cache.clone());
@ -114,7 +116,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
} }
pub async fn create_managed_npm_resolver( pub async fn create_managed_npm_resolver(
options: CliNpmResolverManagedCreateOptions, options: CliManagedNpmResolverCreateOptions,
) -> Result<Arc<dyn CliNpmResolver>, AnyError> { ) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
let npm_cache = create_cache(&options); let npm_cache = create_cache(&options);
let npm_api = create_api(&options, npm_cache.clone()); let npm_api = create_api(&options, npm_cache.clone());
@ -188,20 +190,16 @@ fn create_inner(
)) ))
} }
fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc<NpmCache> { fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc<NpmCache> {
Arc::new(NpmCache::new( Arc::new(NpmCache::new(
NpmCacheDir::new( options.npm_cache_dir.clone(),
&DenoCacheEnvFsAdapter(options.fs.as_ref()),
options.npm_global_cache_dir.clone(),
options.npmrc.get_all_known_registries_urls(),
),
options.cache_setting.clone(), options.cache_setting.clone(),
options.npmrc.clone(), options.npmrc.clone(),
)) ))
} }
fn create_api( fn create_api(
options: &CliNpmResolverManagedCreateOptions, options: &CliManagedNpmResolverCreateOptions,
npm_cache: Arc<NpmCache>, npm_cache: Arc<NpmCache>,
) -> Arc<CliNpmRegistryApi> { ) -> Arc<CliNpmRegistryApi> {
Arc::new(CliNpmRegistryApi::new( Arc::new(CliNpmRegistryApi::new(
@ -258,6 +256,35 @@ async fn snapshot_from_lockfile(
Ok(snapshot) Ok(snapshot)
} }
#[derive(Debug)]
struct ManagedInNpmPackageChecker {
root_dir: Url,
}
impl InNpmPackageChecker for ManagedInNpmPackageChecker {
fn in_npm_package(&self, specifier: &Url) -> bool {
specifier.as_ref().starts_with(self.root_dir.as_str())
}
}
pub struct CliManagedInNpmPkgCheckerCreateOptions<'a> {
pub root_cache_dir_url: &'a Url,
pub maybe_node_modules_path: Option<&'a Path>,
}
pub fn create_managed_in_npm_pkg_checker(
options: CliManagedInNpmPkgCheckerCreateOptions,
) -> Arc<dyn InNpmPackageChecker> {
let root_dir = match options.maybe_node_modules_path {
Some(node_modules_folder) => {
deno_path_util::url_from_directory_path(node_modules_folder).unwrap()
}
None => options.root_cache_dir_url.clone(),
};
debug_assert!(root_dir.as_str().ends_with('/'));
Arc::new(ManagedInNpmPackageChecker { root_dir })
}
/// An npm resolver where the resolution is managed by Deno rather than /// An npm resolver where the resolution is managed by Deno rather than
/// the user bringing their own node_modules (BYONM) on the file system. /// the user bringing their own node_modules (BYONM) on the file system.
pub struct ManagedCliNpmResolver { pub struct ManagedCliNpmResolver {
@ -473,26 +500,31 @@ impl ManagedCliNpmResolver {
self.resolve_pkg_folder_from_pkg_id(&pkg_id) self.resolve_pkg_folder_from_pkg_id(&pkg_id)
} }
fn resolve_pkg_id_from_pkg_req( pub fn resolve_pkg_id_from_pkg_req(
&self, &self,
req: &PackageReq, req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> { ) -> Result<NpmPackageId, PackageReqNotFoundError> {
self.resolution.resolve_pkg_id_from_pkg_req(req) self.resolution.resolve_pkg_id_from_pkg_req(req)
} }
pub fn ensure_no_pkg_json_dep_errors(&self) -> Result<(), AnyError> { pub fn ensure_no_pkg_json_dep_errors(
&self,
) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> {
for err in self.npm_install_deps_provider.pkg_json_dep_errors() { for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
match err { match &err.source {
deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => { deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => {
return Err( return Err(Box::new(err.clone()));
AnyError::from(err.clone())
.context("Failed to install from package.json"),
);
} }
deno_package_json::PackageJsonDepValueParseError::Unsupported { deno_package_json::PackageJsonDepValueParseError::Unsupported {
.. ..
} => { } => {
log::warn!("{} {} in package.json", colors::yellow("Warning"), err) // only warn for this one
log::warn!(
"{} {}\n at {}",
colors::yellow("Warning"),
err.source,
err.location,
)
} }
} }
} }
@ -549,8 +581,16 @@ impl ManagedCliNpmResolver {
.map_err(|err| err.into()) .map_err(|err| err.into())
} }
pub fn global_cache_root_folder(&self) -> PathBuf { pub fn maybe_node_modules_path(&self) -> Option<&Path> {
self.npm_cache.root_folder() self.fs_resolver.node_modules_path()
}
pub fn global_cache_root_path(&self) -> &Path {
self.npm_cache.root_dir_path()
}
pub fn global_cache_root_url(&self) -> &Url {
self.npm_cache.root_dir_url()
} }
} }
@ -566,7 +606,7 @@ fn npm_process_state(
.unwrap() .unwrap()
} }
impl NpmResolver for ManagedCliNpmResolver { impl NpmPackageFolderResolver for ManagedCliNpmResolver {
fn resolve_package_folder_from_package( fn resolve_package_folder_from_package(
&self, &self,
name: &str, name: &str,
@ -585,22 +625,6 @@ impl NpmResolver for ManagedCliNpmResolver {
log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
Ok(path) Ok(path)
} }
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
let root_dir_url = self.fs_resolver.root_dir_url();
debug_assert!(root_dir_url.as_str().ends_with('/'));
specifier.as_ref().starts_with(root_dir_url.as_str())
}
}
impl NodeRequireResolver for ManagedCliNpmResolver {
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
self.fs_resolver.ensure_read_permission(permissions, path)
}
} }
impl NpmProcessStateProvider for ManagedCliNpmResolver { impl NpmProcessStateProvider for ManagedCliNpmResolver {
@ -612,12 +636,29 @@ impl NpmProcessStateProvider for ManagedCliNpmResolver {
} }
} }
impl CliNpmReqResolver for ManagedCliNpmResolver {
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
_referrer: &ModuleSpecifier,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
let pkg_id = self
.resolve_pkg_id_from_pkg_req(req)
.map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?;
self
.resolve_pkg_folder_from_pkg_id(&pkg_id)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
}
}
impl CliNpmResolver for ManagedCliNpmResolver { impl CliNpmResolver for ManagedCliNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> { fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self self
} }
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> { fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> {
self self
} }
@ -668,17 +709,12 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.fs_resolver.node_modules_path() self.fs_resolver.node_modules_path()
} }
fn resolve_pkg_folder_from_deno_module_req( fn ensure_read_permission<'a>(
&self, &self,
req: &PackageReq, permissions: &mut dyn NodePermissions,
_referrer: &ModuleSpecifier, path: &'a Path,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> { ) -> Result<Cow<'a, Path>, AnyError> {
let pkg_id = self self.fs_resolver.ensure_read_permission(permissions, path)
.resolve_pkg_id_from_pkg_req(req)
.map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?;
self
.resolve_pkg_folder_from_pkg_id(&pkg_id)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
} }
fn check_state_hash(&self) -> Option<u64> { fn check_state_hash(&self) -> Option<u64> {

View file

@ -17,7 +17,6 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures; use deno_core::futures;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage; use deno_npm::NpmResolutionPackage;
@ -30,9 +29,6 @@ use crate::npm::managed::cache::TarballCache;
/// Part of the resolution that interacts with the file system. /// Part of the resolution that interacts with the file system.
#[async_trait(?Send)] #[async_trait(?Send)]
pub trait NpmPackageFsResolver: Send + Sync { pub trait NpmPackageFsResolver: Send + Sync {
/// Specifier for the root directory.
fn root_dir_url(&self) -> &Url;
/// The local node_modules folder if it is applicable to the implementation. /// The local node_modules folder if it is applicable to the implementation.
fn node_modules_path(&self) -> Option<&Path>; fn node_modules_path(&self) -> Option<&Path>;
@ -137,7 +133,7 @@ impl RegistryReadPermissionChecker {
} }
} }
permissions.check_read_path(path) permissions.check_read_path(path).map_err(Into::into)
} }
} }

View file

@ -18,6 +18,7 @@ pub struct BinEntries<'a> {
seen_names: HashMap<&'a str, &'a NpmPackageId>, seen_names: HashMap<&'a str, &'a NpmPackageId>,
/// The bin entries /// The bin entries
entries: Vec<(&'a NpmResolutionPackage, PathBuf)>, entries: Vec<(&'a NpmResolutionPackage, PathBuf)>,
sorted: bool,
} }
/// Returns the name of the default binary for the given package. /// Returns the name of the default binary for the given package.
@ -31,6 +32,20 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
.map_or(package.id.nv.name.as_str(), |(_, name)| name) .map_or(package.id.nv.name.as_str(), |(_, name)| name)
} }
pub fn warn_missing_entrypoint(
bin_name: &str,
package_path: &Path,
entrypoint: &Path,
) {
log::warn!(
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"),
bin_name,
package_path.display(),
entrypoint.display()
);
}
impl<'a> BinEntries<'a> { impl<'a> BinEntries<'a> {
pub fn new() -> Self { pub fn new() -> Self {
Self::default() Self::default()
@ -42,6 +57,7 @@ impl<'a> BinEntries<'a> {
package: &'a NpmResolutionPackage, package: &'a NpmResolutionPackage,
package_path: PathBuf, package_path: PathBuf,
) { ) {
self.sorted = false;
// check for a new collision, if we haven't already // check for a new collision, if we haven't already
// found one // found one
match package.bin.as_ref().unwrap() { match package.bin.as_ref().unwrap() {
@ -79,16 +95,21 @@ impl<'a> BinEntries<'a> {
&str, // bin name &str, // bin name
&str, // bin script &str, // bin script
) -> Result<(), AnyError>, ) -> Result<(), AnyError>,
mut filter: impl FnMut(&NpmResolutionPackage) -> bool,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
if !self.collisions.is_empty() { if !self.collisions.is_empty() && !self.sorted {
// walking the dependency tree to find out the depth of each package // walking the dependency tree to find out the depth of each package
// is sort of expensive, so we only do it if there's a collision // is sort of expensive, so we only do it if there's a collision
sort_by_depth(snapshot, &mut self.entries, &mut self.collisions); sort_by_depth(snapshot, &mut self.entries, &mut self.collisions);
self.sorted = true;
} }
let mut seen = HashSet::new(); let mut seen = HashSet::new();
for (package, package_path) in &self.entries { for (package, package_path) in &self.entries {
if !filter(package) {
continue;
}
if let Some(bin_entries) = &package.bin { if let Some(bin_entries) = &package.bin {
match bin_entries { match bin_entries {
deno_npm::registry::NpmPackageVersionBinEntry::String(script) => { deno_npm::registry::NpmPackageVersionBinEntry::String(script) => {
@ -118,8 +139,8 @@ impl<'a> BinEntries<'a> {
} }
/// Collect the bin entries into a vec of (name, script path) /// Collect the bin entries into a vec of (name, script path)
pub fn into_bin_files( pub fn collect_bin_files(
mut self, &mut self,
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
) -> Vec<(String, PathBuf)> { ) -> Vec<(String, PathBuf)> {
let mut bins = Vec::new(); let mut bins = Vec::new();
@ -131,17 +152,18 @@ impl<'a> BinEntries<'a> {
bins.push((name.to_string(), package_path.join(script))); bins.push((name.to_string(), package_path.join(script)));
Ok(()) Ok(())
}, },
|_| true,
) )
.unwrap(); .unwrap();
bins bins
} }
/// Finish setting up the bin entries, writing the necessary files fn set_up_entries_filtered(
/// to disk.
pub fn finish(
mut self, mut self,
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path, bin_node_modules_dir_path: &Path,
filter: impl FnMut(&NpmResolutionPackage) -> bool,
mut handler: impl FnMut(&EntrySetupOutcome<'_>),
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() { if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
std::fs::create_dir_all(bin_node_modules_dir_path).with_context( std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
@ -160,18 +182,54 @@ impl<'a> BinEntries<'a> {
Ok(()) Ok(())
}, },
|package, package_path, name, script| { |package, package_path, name, script| {
set_up_bin_entry( let outcome = set_up_bin_entry(
package, package,
name, name,
script, script,
package_path, package_path,
bin_node_modules_dir_path, bin_node_modules_dir_path,
) )?;
handler(&outcome);
Ok(())
}, },
filter,
)?; )?;
Ok(()) Ok(())
} }
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub fn finish(
self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
handler: impl FnMut(&EntrySetupOutcome<'_>),
) -> Result<(), AnyError> {
self.set_up_entries_filtered(
snapshot,
bin_node_modules_dir_path,
|_| true,
handler,
)
}
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub fn finish_only(
self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
handler: impl FnMut(&EntrySetupOutcome<'_>),
only: &HashSet<&NpmPackageId>,
) -> Result<(), AnyError> {
self.set_up_entries_filtered(
snapshot,
bin_node_modules_dir_path,
|package| only.contains(&package.id),
handler,
)
}
} }
// walk the dependency tree to find out the depth of each package // walk the dependency tree to find out the depth of each package
@ -233,16 +291,17 @@ fn sort_by_depth(
}); });
} }
pub fn set_up_bin_entry( pub fn set_up_bin_entry<'a>(
package: &NpmResolutionPackage, package: &'a NpmResolutionPackage,
bin_name: &str, bin_name: &'a str,
#[allow(unused_variables)] bin_script: &str, #[allow(unused_variables)] bin_script: &str,
#[allow(unused_variables)] package_path: &Path, #[allow(unused_variables)] package_path: &'a Path,
bin_node_modules_dir_path: &Path, bin_node_modules_dir_path: &Path,
) -> Result<(), AnyError> { ) -> Result<EntrySetupOutcome<'a>, AnyError> {
#[cfg(windows)] #[cfg(windows)]
{ {
set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?; set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?;
Ok(EntrySetupOutcome::Success)
} }
#[cfg(unix)] #[cfg(unix)]
{ {
@ -252,9 +311,8 @@ pub fn set_up_bin_entry(
bin_script, bin_script,
package_path, package_path,
bin_node_modules_dir_path, bin_node_modules_dir_path,
)?; )
} }
Ok(())
} }
#[cfg(windows)] #[cfg(windows)]
@ -301,14 +359,39 @@ fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
Ok(true) Ok(true)
} }
pub enum EntrySetupOutcome<'a> {
#[cfg_attr(windows, allow(dead_code))]
MissingEntrypoint {
bin_name: &'a str,
package_path: &'a Path,
entrypoint: PathBuf,
package: &'a NpmResolutionPackage,
},
Success,
}
impl<'a> EntrySetupOutcome<'a> {
pub fn warn_if_failed(&self) {
match self {
EntrySetupOutcome::MissingEntrypoint {
bin_name,
package_path,
entrypoint,
..
} => warn_missing_entrypoint(bin_name, package_path, entrypoint),
EntrySetupOutcome::Success => {}
}
}
}
#[cfg(unix)] #[cfg(unix)]
fn symlink_bin_entry( fn symlink_bin_entry<'a>(
_package: &NpmResolutionPackage, package: &'a NpmResolutionPackage,
bin_name: &str, bin_name: &'a str,
bin_script: &str, bin_script: &str,
package_path: &Path, package_path: &'a Path,
bin_node_modules_dir_path: &Path, bin_node_modules_dir_path: &Path,
) -> Result<(), AnyError> { ) -> Result<EntrySetupOutcome<'a>, AnyError> {
use std::io; use std::io;
use std::os::unix::fs::symlink; use std::os::unix::fs::symlink;
let link = bin_node_modules_dir_path.join(bin_name); let link = bin_node_modules_dir_path.join(bin_name);
@ -318,14 +401,12 @@ fn symlink_bin_entry(
format!("Can't set up '{}' bin at {}", bin_name, original.display()) format!("Can't set up '{}' bin at {}", bin_name, original.display())
})?; })?;
if !found { if !found {
log::warn!( return Ok(EntrySetupOutcome::MissingEntrypoint {
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"),
bin_name, bin_name,
package_path.display(), package_path,
original.display() entrypoint: original,
); package,
return Ok(()); });
} }
let original_relative = let original_relative =
@ -348,7 +429,7 @@ fn symlink_bin_entry(
original_relative.display() original_relative.display()
) )
})?; })?;
return Ok(()); return Ok(EntrySetupOutcome::Success);
} }
return Err(err).with_context(|| { return Err(err).with_context(|| {
format!( format!(
@ -359,5 +440,5 @@ fn symlink_bin_entry(
}); });
} }
Ok(()) Ok(EntrySetupOutcome::Success)
} }

View file

@ -10,6 +10,7 @@ use deno_runtime::deno_io::FromRawIoHandle;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::Version; use deno_semver::Version;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashSet;
use std::rc::Rc; use std::rc::Rc;
use std::path::Path; use std::path::Path;
@ -61,7 +62,7 @@ impl<'a> LifecycleScripts<'a> {
} }
} }
fn has_lifecycle_scripts( pub fn has_lifecycle_scripts(
package: &NpmResolutionPackage, package: &NpmResolutionPackage,
package_path: &Path, package_path: &Path,
) -> bool { ) -> bool {
@ -83,7 +84,7 @@ fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
} }
impl<'a> LifecycleScripts<'a> { impl<'a> LifecycleScripts<'a> {
fn can_run_scripts(&self, package_nv: &PackageNv) -> bool { pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
if !self.strategy.can_run_scripts() { if !self.strategy.can_run_scripts() {
return false; return false;
} }
@ -98,6 +99,9 @@ impl<'a> LifecycleScripts<'a> {
PackagesAllowedScripts::None => false, PackagesAllowedScripts::None => false,
} }
} }
pub fn has_run_scripts(&self, package: &NpmResolutionPackage) -> bool {
self.strategy.has_run(package)
}
/// Register a package for running lifecycle scripts, if applicable. /// Register a package for running lifecycle scripts, if applicable.
/// ///
/// `package_path` is the path containing the package's code (its root dir). /// `package_path` is the path containing the package's code (its root dir).
@ -110,12 +114,12 @@ impl<'a> LifecycleScripts<'a> {
) { ) {
if has_lifecycle_scripts(package, &package_path) { if has_lifecycle_scripts(package, &package_path) {
if self.can_run_scripts(&package.id.nv) { if self.can_run_scripts(&package.id.nv) {
if !self.strategy.has_run(package) { if !self.has_run_scripts(package) {
self self
.packages_with_scripts .packages_with_scripts
.push((package, package_path.into_owned())); .push((package, package_path.into_owned()));
} }
} else if !self.strategy.has_run(package) } else if !self.has_run_scripts(package)
&& (self.config.explicit_install || !self.strategy.has_warned(package)) && (self.config.explicit_install || !self.strategy.has_warned(package))
{ {
// Skip adding `esbuild` as it is known that it can work properly without lifecycle script // Skip adding `esbuild` as it is known that it can work properly without lifecycle script
@ -149,25 +153,41 @@ impl<'a> LifecycleScripts<'a> {
self, self,
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage], packages: &[NpmResolutionPackage],
root_node_modules_dir_path: Option<&Path>, root_node_modules_dir_path: &Path,
progress_bar: &ProgressBar, progress_bar: &ProgressBar,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
self.warn_not_run_scripts()?; self.warn_not_run_scripts()?;
let get_package_path = let get_package_path =
|p: &NpmResolutionPackage| self.strategy.package_path(p); |p: &NpmResolutionPackage| self.strategy.package_path(p);
let mut failed_packages = Vec::new(); let mut failed_packages = Vec::new();
let mut bin_entries = BinEntries::new();
if !self.packages_with_scripts.is_empty() { if !self.packages_with_scripts.is_empty() {
let package_ids = self
.packages_with_scripts
.iter()
.map(|(p, _)| &p.id)
.collect::<HashSet<_>>();
// get custom commands for each bin available in the node_modules dir (essentially // get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`) // the scripts that are in `node_modules/.bin`)
let base = let base = resolve_baseline_custom_commands(
resolve_baseline_custom_commands(snapshot, packages, get_package_path)?; &mut bin_entries,
snapshot,
packages,
get_package_path,
)?;
let init_cwd = &self.config.initial_cwd; let init_cwd = &self.config.initial_cwd;
let process_state = crate::npm::managed::npm_process_state( let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(), snapshot.as_valid_serialized(),
root_node_modules_dir_path, Some(root_node_modules_dir_path),
); );
let mut env_vars = crate::task_runner::real_env_vars(); let mut env_vars = crate::task_runner::real_env_vars();
// so the subprocess can detect that it is running as part of a lifecycle script,
// and avoid trying to set up node_modules again
env_vars.insert(
LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR.to_string(),
"1".to_string(),
);
// we want to pass the current state of npm resolution down to the deno subprocess // we want to pass the current state of npm resolution down to the deno subprocess
// (that may be running as part of the script). we do this with an inherited temp file // (that may be running as part of the script). we do this with an inherited temp file
// //
@ -221,7 +241,7 @@ impl<'a> LifecycleScripts<'a> {
custom_commands: custom_commands.clone(), custom_commands: custom_commands.clone(),
init_cwd, init_cwd,
argv: &[], argv: &[],
root_node_modules_dir: root_node_modules_dir_path, root_node_modules_dir: Some(root_node_modules_dir_path),
stdio: Some(crate::task_runner::TaskIo { stdio: Some(crate::task_runner::TaskIo {
stderr: TaskStdio::piped(), stderr: TaskStdio::piped(),
stdout: TaskStdio::piped(), stdout: TaskStdio::piped(),
@ -262,6 +282,17 @@ impl<'a> LifecycleScripts<'a> {
} }
self.strategy.did_run_scripts(package)?; self.strategy.did_run_scripts(package)?;
} }
// re-set up bin entries for the packages which we've run scripts for.
// lifecycle scripts can create files that are linked to by bin entries,
// and the only reliable way to handle this is to re-link bin entries
// (this is what PNPM does as well)
bin_entries.finish_only(
snapshot,
&root_node_modules_dir_path.join(".bin"),
|outcome| outcome.warn_if_failed(),
&package_ids,
)?;
} }
if failed_packages.is_empty() { if failed_packages.is_empty() {
Ok(()) Ok(())
@ -278,12 +309,20 @@ impl<'a> LifecycleScripts<'a> {
} }
} }
const LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR: &str =
"DENO_INTERNAL_IS_LIFECYCLE_SCRIPT";
pub fn is_running_lifecycle_script() -> bool {
std::env::var(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR).is_ok()
}
// take in all (non copy) packages from snapshot, // take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create // and resolve the set of available binaries to create
// custom commands available to the task runner // custom commands available to the task runner
fn resolve_baseline_custom_commands( fn resolve_baseline_custom_commands<'a>(
snapshot: &NpmResolutionSnapshot, bin_entries: &mut BinEntries<'a>,
packages: &[NpmResolutionPackage], snapshot: &'a NpmResolutionSnapshot,
packages: &'a [NpmResolutionPackage],
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> { ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new(); let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
@ -306,6 +345,7 @@ fn resolve_baseline_custom_commands(
// doing it for packages that are set up already. // doing it for packages that are set up already.
// realistically, scripts won't be run very often so it probably isn't too big of an issue. // realistically, scripts won't be run very often so it probably isn't too big of an issue.
resolve_custom_commands_from_packages( resolve_custom_commands_from_packages(
bin_entries,
custom_commands, custom_commands,
snapshot, snapshot,
packages, packages,
@ -320,12 +360,12 @@ fn resolve_custom_commands_from_packages<
'a, 'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>, P: IntoIterator<Item = &'a NpmResolutionPackage>,
>( >(
bin_entries: &mut BinEntries<'a>,
mut commands: crate::task_runner::TaskCustomCommands, mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot, snapshot: &'a NpmResolutionSnapshot,
packages: P, packages: P,
get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf, get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> { ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = BinEntries::new();
for package in packages { for package in packages {
let package_path = get_package_path(package); let package_path = get_package_path(package);
@ -333,7 +373,7 @@ fn resolve_custom_commands_from_packages<
bin_entries.add(package, package_path); bin_entries.add(package, package_path);
} }
} }
let bins = bin_entries.into_bin_files(snapshot); let bins: Vec<(String, PathBuf)> = bin_entries.collect_bin_files(snapshot);
for (bin_name, script_path) in bins { for (bin_name, script_path) in bins {
commands.insert( commands.insert(
bin_name.clone(), bin_name.clone(),
@ -356,7 +396,9 @@ fn resolve_custom_commands_from_deps(
snapshot: &NpmResolutionSnapshot, snapshot: &NpmResolutionSnapshot,
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> { ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = BinEntries::new();
resolve_custom_commands_from_packages( resolve_custom_commands_from_packages(
&mut bin_entries,
baseline, baseline,
snapshot, snapshot,
package package

View file

@ -11,7 +11,6 @@ use crate::colors;
use async_trait::async_trait; use async_trait::async_trait;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage; use deno_npm::NpmResolutionPackage;
@ -56,7 +55,7 @@ impl GlobalNpmPackageResolver {
Self { Self {
registry_read_permission_checker: RegistryReadPermissionChecker::new( registry_read_permission_checker: RegistryReadPermissionChecker::new(
fs, fs,
cache.root_folder(), cache.root_dir_path().to_path_buf(),
), ),
cache, cache,
tarball_cache, tarball_cache,
@ -69,10 +68,6 @@ impl GlobalNpmPackageResolver {
#[async_trait(?Send)] #[async_trait(?Send)]
impl NpmPackageFsResolver for GlobalNpmPackageResolver { impl NpmPackageFsResolver for GlobalNpmPackageResolver {
fn root_dir_url(&self) -> &Url {
self.cache.root_dir_url()
}
fn node_modules_path(&self) -> Option<&Path> { fn node_modules_path(&self) -> Option<&Path> {
None None
} }

View file

@ -55,6 +55,7 @@ use crate::util::progress_bar::ProgressMessagePrompt;
use super::super::cache::NpmCache; use super::super::cache::NpmCache;
use super::super::cache::TarballCache; use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution; use super::super::resolution::NpmResolution;
use super::common::bin_entries;
use super::common::NpmPackageFsResolver; use super::common::NpmPackageFsResolver;
use super::common::RegistryReadPermissionChecker; use super::common::RegistryReadPermissionChecker;
@ -155,10 +156,6 @@ impl LocalNpmPackageResolver {
#[async_trait(?Send)] #[async_trait(?Send)]
impl NpmPackageFsResolver for LocalNpmPackageResolver { impl NpmPackageFsResolver for LocalNpmPackageResolver {
fn root_dir_url(&self) -> &Url {
&self.root_node_modules_url
}
fn node_modules_path(&self) -> Option<&Path> { fn node_modules_path(&self) -> Option<&Path> {
Some(self.root_node_modules_path.as_ref()) Some(self.root_node_modules_path.as_ref())
} }
@ -301,6 +298,12 @@ async fn sync_resolution_with_fs(
return Ok(()); // don't create the directory return Ok(()); // don't create the directory
} }
// don't set up node_modules (and more importantly try to acquire the file lock)
// if we're running as part of a lifecycle script
if super::common::lifecycle_scripts::is_running_lifecycle_script() {
return Ok(());
}
let deno_local_registry_dir = root_node_modules_dir_path.join(".deno"); let deno_local_registry_dir = root_node_modules_dir_path.join(".deno");
let deno_node_modules_dir = deno_local_registry_dir.join("node_modules"); let deno_node_modules_dir = deno_local_registry_dir.join("node_modules");
fs::create_dir_all(&deno_node_modules_dir).with_context(|| { fs::create_dir_all(&deno_node_modules_dir).with_context(|| {
@ -333,8 +336,7 @@ async fn sync_resolution_with_fs(
let mut cache_futures = FuturesUnordered::new(); let mut cache_futures = FuturesUnordered::new();
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> = let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
HashMap::with_capacity(package_partitions.packages.len()); HashMap::with_capacity(package_partitions.packages.len());
let bin_entries = let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new()));
let mut lifecycle_scripts = let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new( super::common::lifecycle_scripts::LifecycleScripts::new(
lifecycle_scripts, lifecycle_scripts,
@ -662,7 +664,28 @@ async fn sync_resolution_with_fs(
// 7. Set up `node_modules/.bin` entries for packages that need it. // 7. Set up `node_modules/.bin` entries for packages that need it.
{ {
let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut()); let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut());
bin_entries.finish(snapshot, &bin_node_modules_dir_path)?; bin_entries.finish(
snapshot,
&bin_node_modules_dir_path,
|setup_outcome| {
match setup_outcome {
bin_entries::EntrySetupOutcome::MissingEntrypoint {
package,
package_path,
..
} if super::common::lifecycle_scripts::has_lifecycle_scripts(
package,
package_path,
) && lifecycle_scripts.can_run_scripts(&package.id.nv)
&& !lifecycle_scripts.has_run_scripts(package) =>
{
// ignore, it might get fixed when the lifecycle scripts run.
// if not, we'll warn then
}
outcome => outcome.warn_if_failed(),
}
},
)?;
} }
// 8. Create symlinks for the workspace packages // 8. Create symlinks for the workspace packages
@ -712,7 +735,7 @@ async fn sync_resolution_with_fs(
.finish( .finish(
snapshot, snapshot,
&package_partitions.packages, &package_partitions.packages,
Some(root_node_modules_dir_path), root_node_modules_dir_path,
progress_bar, progress_bar,
) )
.await?; .await?;
@ -1039,12 +1062,18 @@ fn junction_or_symlink_dir(
if symlink_err.kind() == std::io::ErrorKind::PermissionDenied => if symlink_err.kind() == std::io::ErrorKind::PermissionDenied =>
{ {
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
junction::create(old_path, new_path).map_err(Into::into) junction::create(old_path, new_path)
.context("Failed creating junction in node_modules folder")
}
Err(symlink_err) => {
log::warn!(
"{} Unexpected error symlinking node_modules: {symlink_err}",
colors::yellow("Warning")
);
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
junction::create(old_path, new_path)
.context("Failed creating junction in node_modules folder")
} }
Err(symlink_err) => Err(
AnyError::from(symlink_err)
.context("Failed creating symlink in node_modules folder"),
),
} }
} }

View file

@ -4,45 +4,40 @@ mod byonm;
mod common; mod common;
mod managed; mod managed;
use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use common::maybe_auth_header_for_npm_registry; use common::maybe_auth_header_for_npm_registry;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmInNpmPackageChecker;
use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::deno_node::NodeRequireResolver; use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use managed::cache::registry_info::get_package_url; use managed::cache::registry_info::get_package_url;
use node_resolver::NpmResolver; use managed::create_managed_in_npm_pkg_checker;
use thiserror::Error; use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
pub use self::byonm::CliByonmNpmResolver; pub use self::byonm::CliByonmNpmResolver;
pub use self::byonm::CliByonmNpmResolverCreateOptions; pub use self::byonm::CliByonmNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedCreateOptions; pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver; pub use self::managed::ManagedCliNpmResolver;
#[derive(Debug, Error)]
pub enum ResolvePkgFolderFromDenoReqError {
#[error(transparent)]
Managed(deno_core::error::AnyError),
#[error(transparent)]
Byonm(#[from] ByonmResolvePkgFolderFromDenoReqError),
}
pub enum CliNpmResolverCreateOptions { pub enum CliNpmResolverCreateOptions {
Managed(CliNpmResolverManagedCreateOptions), Managed(CliManagedNpmResolverCreateOptions),
Byonm(CliByonmNpmResolverCreateOptions), Byonm(CliByonmNpmResolverCreateOptions),
} }
@ -68,18 +63,39 @@ pub async fn create_cli_npm_resolver(
} }
} }
pub enum CreateInNpmPkgCheckerOptions<'a> {
Managed(CliManagedInNpmPkgCheckerCreateOptions<'a>),
Byonm,
}
pub fn create_in_npm_pkg_checker(
options: CreateInNpmPkgCheckerOptions,
) -> Arc<dyn InNpmPackageChecker> {
match options {
CreateInNpmPkgCheckerOptions::Managed(options) => {
create_managed_in_npm_pkg_checker(options)
}
CreateInNpmPkgCheckerOptions::Byonm => Arc::new(ByonmInNpmPackageChecker),
}
}
pub enum InnerCliNpmResolverRef<'a> { pub enum InnerCliNpmResolverRef<'a> {
Managed(&'a ManagedCliNpmResolver), Managed(&'a ManagedCliNpmResolver),
#[allow(dead_code)] #[allow(dead_code)]
Byonm(&'a CliByonmNpmResolver), Byonm(&'a CliByonmNpmResolver),
} }
pub trait CliNpmResolver: NpmResolver { pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver>; fn into_npm_pkg_folder_resolver(
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver>; self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver>;
fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver>;
fn into_process_state_provider( fn into_process_state_provider(
self: Arc<Self>, self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider>; ) -> Arc<dyn NpmProcessStateProvider>;
fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> {
None
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>; fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>;
@ -101,11 +117,11 @@ pub trait CliNpmResolver: NpmResolver {
fn root_node_modules_path(&self) -> Option<&Path>; fn root_node_modules_path(&self) -> Option<&Path>;
fn resolve_pkg_folder_from_deno_module_req( fn ensure_read_permission<'a>(
&self, &self,
req: &PackageReq, permissions: &mut dyn NodePermissions,
referrer: &ModuleSpecifier, path: &'a Path,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>; ) -> Result<Cow<'a, Path>, AnyError>;
/// Returns a hash returning the state of the npm resolver /// Returns a hash returning the state of the npm resolver
/// or `None` if the state currently can't be determined. /// or `None` if the state currently can't be determined.
@ -189,3 +205,15 @@ impl NpmFetchResolver {
info info
} }
} }
pub const NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent";
pub fn get_npm_config_user_agent() -> String {
format!(
"deno/{} npm/? deno/{} {} {}",
env!("CARGO_PKG_VERSION"),
env!("CARGO_PKG_VERSION"),
std::env::consts::OS,
std::env::consts::ARCH
)
}

View file

@ -2,7 +2,6 @@
use std::sync::atomic::AtomicUsize; use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering; use std::sync::atomic::Ordering;
use std::time;
use deno_core::error::generic_error; use deno_core::error::generic_error;
use deno_core::error::type_error; use deno_core::error::type_error;
@ -13,6 +12,7 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState; use deno_core::OpState;
use deno_runtime::deno_permissions::ChildPermissionsArg; use deno_runtime::deno_permissions::ChildPermissionsArg;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::StartTime;
use tokio::sync::mpsc::UnboundedSender; use tokio::sync::mpsc::UnboundedSender;
use uuid::Uuid; use uuid::Uuid;
@ -51,12 +51,12 @@ fn op_bench_get_origin(state: &mut OpState) -> String {
#[derive(Clone)] #[derive(Clone)]
struct PermissionsHolder(Uuid, PermissionsContainer); struct PermissionsHolder(Uuid, PermissionsContainer);
#[op2] #[op2(stack_trace)]
#[serde] #[serde]
pub fn op_pledge_test_permissions( pub fn op_pledge_test_permissions(
state: &mut OpState, state: &mut OpState,
#[serde] args: ChildPermissionsArg, #[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> { ) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
let token = Uuid::new_v4(); let token = Uuid::new_v4();
let parent_permissions = state.borrow_mut::<PermissionsContainer>(); let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = parent_permissions.create_child_permissions(args)?; let worker_permissions = parent_permissions.create_child_permissions(args)?;
@ -147,8 +147,8 @@ fn op_dispatch_bench_event(state: &mut OpState, #[serde] event: BenchEvent) {
#[op2(fast)] #[op2(fast)]
#[number] #[number]
fn op_bench_now(state: &mut OpState) -> Result<u64, AnyError> { fn op_bench_now(state: &mut OpState) -> Result<u64, std::num::TryFromIntError> {
let ns = state.borrow::<time::Instant>().elapsed().as_nanos(); let ns = state.borrow::<StartTime>().elapsed().as_nanos();
let ns_u64 = u64::try_from(ns)?; let ns_u64 = u64::try_from(ns)?;
Ok(ns_u64) Ok(ns_u64)
} }

View file

@ -46,7 +46,7 @@ pub fn op_jupyter_input(
state: &mut OpState, state: &mut OpState,
#[string] prompt: String, #[string] prompt: String,
is_password: bool, is_password: bool,
) -> Result<Option<String>, AnyError> { ) -> Option<String> {
let (last_execution_request, stdin_connection_proxy) = { let (last_execution_request, stdin_connection_proxy) = {
( (
state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(), state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(),
@ -58,11 +58,11 @@ pub fn op_jupyter_input(
if let Some(last_request) = maybe_last_request { if let Some(last_request) = maybe_last_request {
let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content
else { else {
return Ok(None); return None;
}; };
if !msg.allow_stdin { if !msg.allow_stdin {
return Ok(None); return None;
} }
let content = InputRequest { let content = InputRequest {
@ -73,7 +73,7 @@ pub fn op_jupyter_input(
let msg = JupyterMessage::new(content, Some(&last_request)); let msg = JupyterMessage::new(content, Some(&last_request));
let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else { let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else {
return Ok(None); return None;
}; };
// Need to spawn a separate thread here, because `blocking_recv()` can't // Need to spawn a separate thread here, because `blocking_recv()` can't
@ -82,17 +82,25 @@ pub fn op_jupyter_input(
stdin_connection_proxy.lock().rx.blocking_recv() stdin_connection_proxy.lock().rx.blocking_recv()
}); });
let Ok(Some(response)) = join_handle.join() else { let Ok(Some(response)) = join_handle.join() else {
return Ok(None); return None;
}; };
let JupyterMessageContent::InputReply(msg) = response.content else { let JupyterMessageContent::InputReply(msg) = response.content else {
return Ok(None); return None;
}; };
return Ok(Some(msg.value)); return Some(msg.value);
} }
Ok(None) None
}
#[derive(Debug, thiserror::Error)]
pub enum JupyterBroadcastError {
#[error(transparent)]
SerdeJson(serde_json::Error),
#[error(transparent)]
ZeroMq(AnyError),
} }
#[op2(async)] #[op2(async)]
@ -102,7 +110,7 @@ pub async fn op_jupyter_broadcast(
#[serde] content: serde_json::Value, #[serde] content: serde_json::Value,
#[serde] metadata: serde_json::Value, #[serde] metadata: serde_json::Value,
#[serde] buffers: Vec<deno_core::JsBuffer>, #[serde] buffers: Vec<deno_core::JsBuffer>,
) -> Result<(), AnyError> { ) -> Result<(), JupyterBroadcastError> {
let (iopub_connection, last_execution_request) = { let (iopub_connection, last_execution_request) = {
let s = state.borrow(); let s = state.borrow();
@ -125,36 +133,35 @@ pub async fn op_jupyter_broadcast(
content, content,
err err
); );
err JupyterBroadcastError::SerdeJson(err)
})?; })?;
let jupyter_message = JupyterMessage::new(content, Some(&last_request)) let jupyter_message = JupyterMessage::new(content, Some(&last_request))
.with_metadata(metadata) .with_metadata(metadata)
.with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect()); .with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect());
iopub_connection.lock().send(jupyter_message).await?; iopub_connection
.lock()
.send(jupyter_message)
.await
.map_err(JupyterBroadcastError::ZeroMq)?;
} }
Ok(()) Ok(())
} }
#[op2(fast)] #[op2(fast)]
pub fn op_print( pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) {
state: &mut OpState,
#[string] msg: &str,
is_err: bool,
) -> Result<(), AnyError> {
let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>(); let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>();
if is_err { if is_err {
if let Err(err) = sender.send(StreamContent::stderr(msg)) { if let Err(err) = sender.send(StreamContent::stderr(msg)) {
log::error!("Failed to send stderr message: {}", err); log::error!("Failed to send stderr message: {}", err);
} }
return Ok(()); return;
} }
if let Err(err) = sender.send(StreamContent::stdout(msg)) { if let Err(err) = sender.send(StreamContent::stdout(msg)) {
log::error!("Failed to send stdout message: {}", err); log::error!("Failed to send stdout message: {}", err);
} }
Ok(())
} }

View file

@ -46,12 +46,12 @@ deno_core::extension!(deno_test,
#[derive(Clone)] #[derive(Clone)]
struct PermissionsHolder(Uuid, PermissionsContainer); struct PermissionsHolder(Uuid, PermissionsContainer);
#[op2] #[op2(stack_trace)]
#[serde] #[serde]
pub fn op_pledge_test_permissions( pub fn op_pledge_test_permissions(
state: &mut OpState, state: &mut OpState,
#[serde] args: ChildPermissionsArg, #[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> { ) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
let token = Uuid::new_v4(); let token = Uuid::new_v4();
let parent_permissions = state.borrow_mut::<PermissionsContainer>(); let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = parent_permissions.create_child_permissions(args)?; let worker_permissions = parent_permissions.create_child_permissions(args)?;
@ -150,7 +150,7 @@ fn op_register_test_step(
#[smi] parent_id: usize, #[smi] parent_id: usize,
#[smi] root_id: usize, #[smi] root_id: usize,
#[string] root_name: String, #[string] root_name: String,
) -> Result<usize, AnyError> { ) -> usize {
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
let origin = state.borrow::<ModuleSpecifier>().to_string(); let origin = state.borrow::<ModuleSpecifier>().to_string();
let description = TestStepDescription { let description = TestStepDescription {
@ -169,7 +169,7 @@ fn op_register_test_step(
}; };
let sender = state.borrow_mut::<TestEventSender>(); let sender = state.borrow_mut::<TestEventSender>();
sender.send(TestEvent::StepRegister(description)).ok(); sender.send(TestEvent::StepRegister(description)).ok();
Ok(id) id
} }
#[op2(fast)] #[op2(fast)]

View file

@ -4,58 +4,55 @@ use async_trait::async_trait;
use dashmap::DashMap; use dashmap::DashMap;
use dashmap::DashSet; use dashmap::DashSet;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError; use deno_config::workspace::MappedResolutionError;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_core::ModuleSourceCode; use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError; use deno_graph::source::ResolveError;
use deno_graph::source::Resolver;
use deno_graph::source::UnknownBuiltInNodeModuleError; use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE;
use deno_graph::NpmLoadError; use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult; use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError; use deno_npm::resolution::NpmResolutionError;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_resolver::sloppy_imports::SloppyImportsResolver; use deno_resolver::sloppy_imports::SloppyImportsResolver;
use deno_runtime::colors; use deno_runtime::colors;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::is_builtin_node_module; use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use node_resolver::errors::ClosestPkgJsonError; use node_resolver::NodeResolutionKind;
use node_resolver::errors::NodeResolveError; use node_resolver::ResolutionMode;
use node_resolver::errors::NodeResolveErrorKind; use std::borrow::Cow;
use node_resolver::errors::PackageFolderResolveErrorKind;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::PackageResolveErrorKind;
use node_resolver::errors::UrlToNodeResolutionError;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode;
use node_resolver::PackageJson;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use thiserror::Error;
use crate::args::JsxImportSourceConfig;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef; use crate::npm::InnerCliNpmResolverRef;
use crate::util::path::specifier_has_extension;
use crate::util::sync::AtomicFlag; use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_owned; use crate::util::text_encoding::from_utf8_lossy_owned;
pub type CjsTracker = deno_resolver::cjs::CjsTracker<DenoFsNodeResolverEnv>;
pub type IsCjsResolver =
deno_resolver::cjs::IsCjsResolver<DenoFsNodeResolverEnv>;
pub type IsCjsResolverOptions = deno_resolver::cjs::IsCjsResolverOptions;
pub type CliSloppyImportsResolver =
SloppyImportsResolver<SloppyImportsCachedFs>;
pub type CliDenoResolver = deno_resolver::DenoResolver<
CliDenoResolverFs,
DenoFsNodeResolverEnv,
SloppyImportsCachedFs,
>;
pub type CliNpmReqResolver =
deno_resolver::npm::NpmReqResolver<CliDenoResolverFs, DenoFsNodeResolverEnv>;
pub struct ModuleCodeStringSource { pub struct ModuleCodeStringSource {
pub code: ModuleSourceCode, pub code: ModuleSourceCode,
pub found_url: ModuleSpecifier, pub found_url: ModuleSpecifier,
@ -77,6 +74,10 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
self.0.realpath_sync(path).map_err(|e| e.into_io_error()) self.0.realpath_sync(path).map_err(|e| e.into_io_error())
} }
fn exists_sync(&self, path: &Path) -> bool {
self.0.exists_sync(path)
}
fn is_dir_sync(&self, path: &Path) -> bool { fn is_dir_sync(&self, path: &Path) -> bool {
self.0.is_dir_sync(path) self.0.is_dir_sync(path)
} }
@ -102,275 +103,34 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
} }
} }
#[derive(Debug)] #[derive(Debug, Error)]
pub struct CliNodeResolver { #[error("{media_type} files are not supported in npm packages: {specifier}")]
cjs_resolutions: Arc<CjsResolutionStore>, pub struct NotSupportedKindInNpmError {
fs: Arc<dyn deno_fs::FileSystem>, pub media_type: MediaType,
node_resolver: Arc<NodeResolver>, pub specifier: Url,
npm_resolver: Arc<dyn CliNpmResolver>,
} }
impl CliNodeResolver { // todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone)
pub fn new(
cjs_resolutions: Arc<CjsResolutionStore>,
fs: Arc<dyn deno_fs::FileSystem>,
node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
) -> Self {
Self {
cjs_resolutions,
fs,
node_resolver,
npm_resolver,
}
}
pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
self.npm_resolver.in_npm_package(specifier)
}
pub fn get_closest_package_json(
&self,
referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
self.node_resolver.get_closest_package_json(referrer)
}
pub fn resolve_if_for_npm_pkg(
&self,
specifier: &str,
referrer: &ModuleSpecifier,
mode: NodeResolutionMode,
) -> Result<Option<NodeResolution>, AnyError> {
let resolution_result = self.resolve(specifier, referrer, mode);
match resolution_result {
Ok(res) => Ok(Some(res)),
Err(err) => {
let err = err.into_kind();
match err {
NodeResolveErrorKind::RelativeJoin(_)
| NodeResolveErrorKind::PackageImportsResolve(_)
| NodeResolveErrorKind::UnsupportedEsmUrlScheme(_)
| NodeResolveErrorKind::DataUrlReferrer(_)
| NodeResolveErrorKind::TypesNotFound(_)
| NodeResolveErrorKind::FinalizeResolution(_)
| NodeResolveErrorKind::UrlToNodeResolution(_) => Err(err.into()),
NodeResolveErrorKind::PackageResolve(err) => {
let err = err.into_kind();
match err {
PackageResolveErrorKind::ClosestPkgJson(_)
| PackageResolveErrorKind::InvalidModuleSpecifier(_)
| PackageResolveErrorKind::ExportsResolve(_)
| PackageResolveErrorKind::SubpathResolve(_) => Err(err.into()),
PackageResolveErrorKind::PackageFolderResolve(err) => {
match err.as_kind() {
PackageFolderResolveErrorKind::Io(
PackageFolderResolveIoError { package_name, .. },
)
| PackageFolderResolveErrorKind::PackageNotFound(
PackageNotFoundError { package_name, .. },
) => {
if self.in_npm_package(referrer) {
return Err(err.into());
}
if let Some(byonm_npm_resolver) =
self.npm_resolver.as_byonm()
{
if byonm_npm_resolver
.find_ancestor_package_json_with_dep(
package_name,
referrer,
)
.is_some()
{
return Err(anyhow!(
concat!(
"Could not resolve \"{}\", but found it in a package.json. ",
"Deno expects the node_modules/ directory to be up to date. ",
"Did you forget to run `deno install`?"
),
specifier
));
}
}
Ok(None)
}
PackageFolderResolveErrorKind::ReferrerNotFound(_) => {
if self.in_npm_package(referrer) {
return Err(err.into());
}
Ok(None)
}
}
}
}
}
}
}
}
}
/// Resolves `specifier` relative to `referrer` using Node.js module
/// resolution, recording the result if it turned out to be CommonJS.
///
/// The referrer's module kind (CJS vs. ESM) is derived from what the CJS
/// resolution store has previously learned about it.
///
/// # Errors
///
/// Returns a [`NodeResolveError`] when the underlying node resolver fails.
pub fn resolve(
  &self,
  specifier: &str,
  referrer: &ModuleSpecifier,
  mode: NodeResolutionMode,
) -> Result<NodeResolution, NodeResolveError> {
  let referrer_kind = match self.cjs_resolutions.is_known_cjs(referrer) {
    true => NodeModuleKind::Cjs,
    false => NodeModuleKind::Esm,
  };
  let resolution =
    self
      .node_resolver
      .resolve(specifier, referrer, referrer_kind, mode)?;
  Ok(self.handle_node_resolution(resolution))
}
/// Resolves an `npm:` package requirement reference (e.g. `npm:chalk@5/foo`)
/// by splitting it into its requirement and optional sub path and delegating
/// to [`Self::resolve_req_with_sub_path`].
pub fn resolve_req_reference(
  &self,
  req_ref: &NpmPackageReqReference,
  referrer: &ModuleSpecifier,
  mode: NodeResolutionMode,
) -> Result<NodeResolution, AnyError> {
  let req = req_ref.req();
  let sub_path = req_ref.sub_path();
  self.resolve_req_with_sub_path(req, sub_path, referrer, mode)
}
/// Resolves a package requirement plus optional sub path to a module within
/// the package's folder on disk.
///
/// When resolution fails under byonm (bring-your-own-node_modules) and the
/// package folder has no `package.json`, the error is replaced with a
/// friendlier hint that `node_modules/` is likely out of date.
pub fn resolve_req_with_sub_path(
  &self,
  req: &PackageReq,
  sub_path: Option<&str>,
  referrer: &ModuleSpecifier,
  mode: NodeResolutionMode,
) -> Result<NodeResolution, AnyError> {
  let package_folder = self
    .npm_resolver
    .resolve_pkg_folder_from_deno_module_req(req, referrer)?;
  self
    .resolve_package_sub_path_from_deno_module(
      &package_folder,
      sub_path,
      Some(referrer),
      mode,
    )
    .or_else(|err| {
      // under byonm, a missing package.json most likely means the user's
      // node_modules directory is stale, so surface a friendlier error
      if self.npm_resolver.as_byonm().is_some() {
        let package_json_path = package_folder.join("package.json");
        if !self.fs.exists_sync(&package_json_path) {
          return Err(anyhow!(
            "Could not find '{}'. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?",
            package_json_path.display(),
          ));
        }
      }
      Err(err)
    })
}
/// Resolves a sub path (or the default export when `sub_path` is `None`)
/// within an already-located package folder, recording the result if it
/// turned out to be CommonJS.
pub fn resolve_package_sub_path_from_deno_module(
  &self,
  package_folder: &Path,
  sub_path: Option<&str>,
  maybe_referrer: Option<&ModuleSpecifier>,
  mode: NodeResolutionMode,
) -> Result<NodeResolution, AnyError> {
  let resolution =
    self.node_resolver.resolve_package_subpath_from_deno_module(
      package_folder,
      sub_path,
      maybe_referrer,
      mode,
    )?;
  Ok(self.handle_node_resolution(resolution))
}
/// If the specifier points into a `node_modules` directory, canonicalizes it
/// and — when it lands inside an npm package — converts it to a node
/// resolution (recording CJS-ness along the way), returning the resulting
/// URL. Returns `Ok(None)` for anything else.
pub fn handle_if_in_node_modules(
  &self,
  specifier: &ModuleSpecifier,
) -> Result<Option<ModuleSpecifier>, AnyError> {
  // bail out early when canonicalizing is definitely unnecessary
  if specifier.scheme() != "file"
    || !specifier.path().contains("/node_modules/")
  {
    return Ok(None);
  }
  // Specifiers in the node_modules directory are canonicalized, so
  // canonicalize then check if it's in the node_modules directory.
  // If so, check if we need to store this specifier as being a CJS
  // resolution.
  let canonicalized =
    crate::node::resolve_specifier_into_node_modules(specifier);
  if !self.in_npm_package(&canonicalized) {
    return Ok(None);
  }
  let resolution =
    self.node_resolver.url_to_node_resolution(canonicalized)?;
  let resolution = self.handle_node_resolution(resolution);
  Ok(Some(resolution.into_url()))
}
/// Converts a module specifier into a [`NodeResolution`] by delegating
/// directly to the underlying node resolver.
///
/// Note: unlike the other resolution methods on this type, this does NOT
/// record CJS-ness of the result.
pub fn url_to_node_resolution(
  &self,
  specifier: ModuleSpecifier,
) -> Result<NodeResolution, UrlToNodeResolutionError> {
  let node_resolver = &self.node_resolver;
  node_resolver.url_to_node_resolution(specifier)
}
/// Passes a resolution through unchanged, while remembering any CommonJS
/// result in the CJS resolution store so later resolutions treat the module
/// as a CJS referrer.
fn handle_node_resolution(
  &self,
  resolution: NodeResolution,
) -> NodeResolution {
  if let NodeResolution::CommonJs(url) = &resolution {
    // remember that this was a common js resolution
    self.mark_cjs_resolution(url.clone());
  }
  resolution
}
/// Records that `specifier` resolved as a CommonJS module by inserting it
/// into the shared CJS resolution store.
pub fn mark_cjs_resolution(&self, specifier: ModuleSpecifier) {
  self.cjs_resolutions.insert(specifier);
}
}
// todo(dsherret): move to module_loader.rs
#[derive(Clone)] #[derive(Clone)]
pub struct NpmModuleLoader { pub struct NpmModuleLoader {
cjs_resolutions: Arc<CjsResolutionStore>, cjs_tracker: Arc<CjsTracker>,
node_code_translator: Arc<CliNodeCodeTranslator>,
fs: Arc<dyn deno_fs::FileSystem>, fs: Arc<dyn deno_fs::FileSystem>,
node_resolver: Arc<CliNodeResolver>, node_code_translator: Arc<CliNodeCodeTranslator>,
} }
impl NpmModuleLoader { impl NpmModuleLoader {
pub fn new( pub fn new(
cjs_resolutions: Arc<CjsResolutionStore>, cjs_tracker: Arc<CjsTracker>,
node_code_translator: Arc<CliNodeCodeTranslator>,
fs: Arc<dyn deno_fs::FileSystem>, fs: Arc<dyn deno_fs::FileSystem>,
node_resolver: Arc<CliNodeResolver>, node_code_translator: Arc<CliNodeCodeTranslator>,
) -> Self { ) -> Self {
Self { Self {
cjs_resolutions, cjs_tracker,
node_code_translator, node_code_translator,
fs, fs,
node_resolver,
} }
} }
pub fn if_in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
self.node_resolver.in_npm_package(specifier)
|| self.cjs_resolutions.is_known_cjs(specifier)
}
pub async fn load( pub async fn load(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -413,20 +173,30 @@ impl NpmModuleLoader {
} }
})?; })?;
let code = if self.cjs_resolutions.is_known_cjs(specifier) { let media_type = MediaType::from_specifier(specifier);
if media_type.is_emittable() {
return Err(AnyError::from(NotSupportedKindInNpmError {
media_type,
specifier: specifier.clone(),
}));
}
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals // translate cjs to esm if it's cjs and inject node globals
let code = from_utf8_lossy_owned(code); let code = from_utf8_lossy_owned(code);
ModuleSourceCode::String( ModuleSourceCode::String(
self self
.node_code_translator .node_code_translator
.translate_cjs_to_esm(specifier, Some(code)) .translate_cjs_to_esm(specifier, Some(Cow::Owned(code)))
.await? .await?
.into_owned()
.into(), .into(),
) )
} else { } else {
// esm and json code is untouched // esm and json code is untouched
ModuleSourceCode::Bytes(code.into_boxed_slice().into()) ModuleSourceCode::Bytes(code.into_boxed_slice().into())
}; };
Ok(ModuleCodeStringSource { Ok(ModuleCodeStringSource {
code, code,
found_url: specifier.clone(), found_url: specifier.clone(),
@ -435,85 +205,36 @@ impl NpmModuleLoader {
} }
} }
/// Keeps track of what module specifiers were resolved as CJS. pub struct CliResolverOptions {
#[derive(Debug, Default)] pub deno_resolver: Arc<CliDenoResolver>,
pub struct CjsResolutionStore(DashSet<ModuleSpecifier>); pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
pub bare_node_builtins_enabled: bool,
impl CjsResolutionStore {
pub fn is_known_cjs(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.scheme() != "file" {
return false;
}
specifier_has_extension(specifier, "cjs") || self.0.contains(specifier)
}
pub fn insert(&self, specifier: ModuleSpecifier) {
self.0.insert(specifier);
}
} }
pub type CliSloppyImportsResolver =
SloppyImportsResolver<SloppyImportsCachedFs>;
/// A resolver that takes care of resolution, taking into account loaded /// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings. /// import map, JSX settings.
#[derive(Debug)] #[derive(Debug)]
pub struct CliGraphResolver { pub struct CliResolver {
node_resolver: Option<Arc<CliNodeResolver>>, deno_resolver: Arc<CliDenoResolver>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>, npm_resolver: Option<Arc<dyn CliNpmResolver>>,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Arc<WorkspaceResolver>,
maybe_default_jsx_import_source: Option<String>,
maybe_default_jsx_import_source_types: Option<String>,
maybe_jsx_import_source_module: Option<String>,
maybe_vendor_specifier: Option<ModuleSpecifier>,
found_package_json_dep_flag: AtomicFlag, found_package_json_dep_flag: AtomicFlag,
bare_node_builtins_enabled: bool, bare_node_builtins_enabled: bool,
warned_pkgs: DashSet<PackageReq>, warned_pkgs: DashSet<PackageReq>,
} }
pub struct CliGraphResolverOptions<'a> { impl CliResolver {
pub node_resolver: Option<Arc<CliNodeResolver>>, pub fn new(options: CliResolverOptions) -> Self {
pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub workspace_resolver: Arc<WorkspaceResolver>,
pub bare_node_builtins_enabled: bool,
pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
pub maybe_vendor_dir: Option<&'a PathBuf>,
}
impl CliGraphResolver {
pub fn new(options: CliGraphResolverOptions) -> Self {
Self { Self {
node_resolver: options.node_resolver, deno_resolver: options.deno_resolver,
npm_resolver: options.npm_resolver, npm_resolver: options.npm_resolver,
sloppy_imports_resolver: options.sloppy_imports_resolver,
workspace_resolver: options.workspace_resolver,
maybe_default_jsx_import_source: options
.maybe_jsx_import_source_config
.as_ref()
.and_then(|c| c.default_specifier.clone()),
maybe_default_jsx_import_source_types: options
.maybe_jsx_import_source_config
.as_ref()
.and_then(|c| c.default_types_specifier.clone()),
maybe_jsx_import_source_module: options
.maybe_jsx_import_source_config
.map(|c| c.module),
maybe_vendor_specifier: options
.maybe_vendor_dir
.and_then(|v| ModuleSpecifier::from_directory_path(v).ok()),
found_package_json_dep_flag: Default::default(), found_package_json_dep_flag: Default::default(),
bare_node_builtins_enabled: options.bare_node_builtins_enabled, bare_node_builtins_enabled: options.bare_node_builtins_enabled,
warned_pkgs: Default::default(), warned_pkgs: Default::default(),
} }
} }
pub fn as_graph_resolver(&self) -> &dyn Resolver { // todo(dsherret): move this off CliResolver as CliResolver is acting
self // like a factory by doing this (it's beyond its responsibility)
}
pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver { pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver {
WorkerCliNpmGraphResolver { WorkerCliNpmGraphResolver {
npm_resolver: self.npm_resolver.as_ref(), npm_resolver: self.npm_resolver.as_ref(),
@ -521,249 +242,55 @@ impl CliGraphResolver {
bare_node_builtins_enabled: self.bare_node_builtins_enabled, bare_node_builtins_enabled: self.bare_node_builtins_enabled,
} }
} }
}
impl Resolver for CliGraphResolver { pub fn resolve(
fn default_jsx_import_source(&self) -> Option<String> {
self.maybe_default_jsx_import_source.clone()
}
fn default_jsx_import_source_types(&self) -> Option<String> {
self.maybe_default_jsx_import_source_types.clone()
}
fn jsx_import_source_module(&self) -> &str {
self
.maybe_jsx_import_source_module
.as_deref()
.unwrap_or(DEFAULT_JSX_IMPORT_SOURCE_MODULE)
}
fn resolve(
&self, &self,
raw_specifier: &str, raw_specifier: &str,
referrer_range: &deno_graph::Range, referrer: &ModuleSpecifier,
mode: ResolutionMode, referrer_range_start: deno_graph::Position,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<ModuleSpecifier, ResolveError> { ) -> Result<ModuleSpecifier, ResolveError> {
fn to_node_mode(mode: ResolutionMode) -> NodeResolutionMode { let resolution = self
match mode { .deno_resolver
ResolutionMode::Execution => NodeResolutionMode::Execution, .resolve(raw_specifier, referrer, resolution_mode, resolution_kind)
ResolutionMode::Types => NodeResolutionMode::Types, .map_err(|err| match err.into_kind() {
} deno_resolver::DenoResolveErrorKind::MappedResolution(
mapped_resolution_error,
) => match mapped_resolution_error {
MappedResolutionError::Specifier(e) => ResolveError::Specifier(e),
// deno_graph checks specifically for an ImportMapError
MappedResolutionError::ImportMap(e) => ResolveError::Other(e.into()),
err => ResolveError::Other(err.into()),
},
err => ResolveError::Other(err.into()),
})?;
if resolution.found_package_json_dep {
// mark that we need to do an "npm install" later
self.found_package_json_dep_flag.raise();
} }
let referrer = &referrer_range.specifier; if let Some(diagnostic) = resolution.maybe_diagnostic {
match &*diagnostic {
// Use node resolution if we're in an npm package MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion {
if let Some(node_resolver) = self.node_resolver.as_ref() { reference,
if referrer.scheme() == "file" && node_resolver.in_npm_package(referrer) {
return node_resolver
.resolve(raw_specifier, referrer, to_node_mode(mode))
.map(|res| res.into_url())
.map_err(|e| ResolveError::Other(e.into()));
}
}
// Attempt to resolve with the workspace resolver
let result: Result<_, ResolveError> = self
.workspace_resolver
.resolve(raw_specifier, referrer)
.map_err(|err| match err {
MappedResolutionError::Specifier(err) => ResolveError::Specifier(err),
MappedResolutionError::ImportMap(err) => {
ResolveError::Other(err.into())
}
MappedResolutionError::Workspace(err) => {
ResolveError::Other(err.into())
}
});
let result = match result {
Ok(resolution) => match resolution {
MappedResolution::Normal {
specifier,
maybe_diagnostic,
}
| MappedResolution::ImportMap {
specifier,
maybe_diagnostic,
} => {
if let Some(diagnostic) = maybe_diagnostic {
match &*diagnostic {
MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion { reference, .. } => {
if self.warned_pkgs.insert(reference.req().clone()) {
log::warn!("{} {}\n at {}", colors::yellow("Warning"), diagnostic, referrer_range);
}
}
}
}
// do sloppy imports resolution if enabled
if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
Ok(
sloppy_imports_resolver
.resolve(
&specifier,
match mode {
ResolutionMode::Execution => {
SloppyImportsResolutionMode::Execution
}
ResolutionMode::Types => SloppyImportsResolutionMode::Types,
},
)
.map(|s| s.into_specifier())
.unwrap_or(specifier),
)
} else {
Ok(specifier)
}
}
MappedResolution::WorkspaceJsrPackage { specifier, .. } => {
Ok(specifier)
}
MappedResolution::WorkspaceNpmPackage {
target_pkg_json: pkg_json,
sub_path,
..
} => self
.node_resolver
.as_ref()
.unwrap()
.resolve_package_sub_path_from_deno_module(
pkg_json.dir_path(),
sub_path.as_deref(),
Some(referrer),
to_node_mode(mode),
)
.map_err(ResolveError::Other)
.map(|res| res.into_url()),
MappedResolution::PackageJson {
dep_result,
alias,
sub_path,
.. ..
} => { } => {
// found a specifier in the package.json, so mark that if self.warned_pkgs.insert(reference.req().clone()) {
// we need to do an "npm install" later log::warn!(
self.found_package_json_dep_flag.raise(); "{} {}\n at {}:{}",
colors::yellow("Warning"),
dep_result diagnostic,
.as_ref() referrer,
.map_err(|e| ResolveError::Other(e.clone().into())) referrer_range_start,
.and_then(|dep| match dep {
PackageJsonDepValue::Req(req) => {
ModuleSpecifier::parse(&format!(
"npm:{}{}",
req,
sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
))
.map_err(|e| ResolveError::Other(e.into()))
}
PackageJsonDepValue::Workspace(version_req) => self
.workspace_resolver
.resolve_workspace_pkg_json_folder_for_pkg_json_dep(
alias,
version_req,
)
.map_err(|e| ResolveError::Other(e.into()))
.and_then(|pkg_folder| {
Ok(
self
.node_resolver
.as_ref()
.unwrap()
.resolve_package_sub_path_from_deno_module(
pkg_folder,
sub_path.as_deref(),
Some(referrer),
to_node_mode(mode),
)?
.into_url(),
)
}),
})
}
},
Err(err) => Err(err),
};
// When the user is vendoring, don't allow them to import directly from the vendor/ directory
// as it might cause them confusion or duplicate dependencies. Additionally, this folder has
// special treatment in the language server so it will definitely cause issues/confusion there
// if they do this.
if let Some(vendor_specifier) = &self.maybe_vendor_specifier {
if let Ok(specifier) = &result {
if specifier.as_str().starts_with(vendor_specifier.as_str()) {
return Err(ResolveError::Other(anyhow!("Importing from the vendor directory is not permitted. Use a remote specifier instead or disable vendoring.")));
}
}
}
let Some(node_resolver) = &self.node_resolver else {
return result;
};
let is_byonm = self
.npm_resolver
.as_ref()
.is_some_and(|r| r.as_byonm().is_some());
match result {
Ok(specifier) => {
if let Ok(npm_req_ref) =
NpmPackageReqReference::from_specifier(&specifier)
{
// check if the npm specifier resolves to a workspace member
if let Some(pkg_folder) = self
.workspace_resolver
.resolve_workspace_pkg_json_folder_for_npm_specifier(
npm_req_ref.req(),
)
{
return Ok(
node_resolver
.resolve_package_sub_path_from_deno_module(
pkg_folder,
npm_req_ref.sub_path(),
Some(referrer),
to_node_mode(mode),
)?
.into_url(),
); );
} }
// do npm resolution for byonm
if is_byonm {
return node_resolver
.resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode))
.map(|res| res.into_url())
.map_err(|err| err.into());
}
} }
Ok(match node_resolver.handle_if_in_node_modules(&specifier)? {
Some(specifier) => specifier,
None => specifier,
})
}
Err(err) => {
// If byonm, check if the bare specifier resolves to an npm package
if is_byonm && referrer.scheme() == "file" {
let maybe_resolution = node_resolver
.resolve_if_for_npm_pkg(raw_specifier, referrer, to_node_mode(mode))
.map_err(ResolveError::Other)?;
if let Some(res) = maybe_resolution {
match res {
NodeResolution::Esm(url) | NodeResolution::CommonJs(url) => {
return Ok(url)
}
NodeResolution::BuiltIn(_) => {
// don't resolve bare specifiers for built-in modules via node resolution
}
}
}
}
Err(err)
} }
} }
Ok(resolution.url)
} }
} }
@ -797,13 +324,10 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
module_name: &str, module_name: &str,
range: &deno_graph::Range, range: &deno_graph::Range,
) { ) {
let deno_graph::Range { let start = range.range.start;
start, specifier, .. let specifier = &range.specifier;
} = range;
let line = start.line + 1;
let column = start.character + 1;
if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS { if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
log::warn!("{} Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{line}:{column}. If you want to use a built-in Node module, add a \"node:\" prefix.", colors::yellow("Warning")) log::warn!("{} Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{start}. If you want to use a built-in Node module, add a \"node:\" prefix.", colors::yellow("Warning"))
} }
} }

View file

@ -431,8 +431,34 @@
"type": "object", "type": "object",
"patternProperties": { "patternProperties": {
"^[A-Za-z][A-Za-z0-9_\\-:]*$": { "^[A-Za-z][A-Za-z0-9_\\-:]*$": {
"type": "string", "oneOf": [
"description": "Command to execute for this task name." {
"type": "string",
"description": "Command to execute for this task name."
},
{
"type": "object",
"description": "A definition of a task to execute",
"properties": {
"description": {
"type": "string",
"description": "Description of a task that will be shown when running `deno task` without a task name"
},
"command": {
"type": "string",
"required": true,
"description": "The task to execute"
},
"dependencies": {
"type": "array",
"items": {
"type": "string"
},
"description": "Tasks that should be executed before this task"
}
}
}
]
} }
}, },
"additionalProperties": false "additionalProperties": false
@ -528,13 +554,14 @@
"bare-node-builtins", "bare-node-builtins",
"byonm", "byonm",
"cron", "cron",
"detect-cjs",
"ffi", "ffi",
"fs", "fs",
"fmt-component", "fmt-component",
"fmt-sql",
"http", "http",
"kv", "kv",
"net", "net",
"node-globals",
"sloppy-imports", "sloppy-imports",
"temporal", "temporal",
"unsafe-proto", "unsafe-proto",

View file

@ -4,6 +4,7 @@ use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::VecDeque; use std::collections::VecDeque;
use std::env;
use std::env::current_exe; use std::env::current_exe;
use std::ffi::OsString; use std::ffi::OsString;
use std::fs; use std::fs;
@ -15,12 +16,14 @@ use std::io::Seek;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::io::Write; use std::io::Write;
use std::ops::Range; use std::ops::Range;
use std::path::Component;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage; use deno_config::workspace::ResolverWorkspaceJsrPackage;
@ -50,6 +53,7 @@ use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use deno_semver::Version; use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError; use deno_semver::VersionReqSpecifierParseError;
use deno_telemetry::OtelConfig;
use indexmap::IndexMap; use indexmap::IndexMap;
use log::Level; use log::Level;
use serde::Deserialize; use serde::Deserialize;
@ -62,11 +66,13 @@ use crate::args::NpmInstallDepsProvider;
use crate::args::PermissionFlags; use crate::args::PermissionFlags;
use crate::args::UnstableConfig; use crate::args::UnstableConfig;
use crate::cache::DenoDir; use crate::cache::DenoDir;
use crate::cache::FastInsecureHasher;
use crate::emit::Emitter; use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef; use crate::npm::InnerCliNpmResolverRef;
use crate::resolver::CjsTracker;
use crate::shared::ReleaseChannel; use crate::shared::ReleaseChannel;
use crate::standalone::virtual_fs::VfsEntry; use crate::standalone::virtual_fs::VfsEntry;
use crate::util::archive; use crate::util::archive;
@ -83,6 +89,7 @@ use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder; use super::serialization::RemoteModulesStoreBuilder;
use super::virtual_fs::FileBackedVfs; use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsFileSubDataKind;
use super::virtual_fs::VfsRoot; use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory; use super::virtual_fs::VirtualDirectory;
@ -171,6 +178,7 @@ pub struct SerializedWorkspaceResolver {
pub struct Metadata { pub struct Metadata {
pub argv: Vec<String>, pub argv: Vec<String>,
pub seed: Option<u64>, pub seed: Option<u64>,
pub code_cache_key: Option<u64>,
pub permissions: PermissionFlags, pub permissions: PermissionFlags,
pub location: Option<Url>, pub location: Option<Url>,
pub v8_flags: Vec<String>, pub v8_flags: Vec<String>,
@ -183,6 +191,7 @@ pub struct Metadata {
pub entrypoint_key: String, pub entrypoint_key: String,
pub node_modules: Option<NodeModules>, pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig, pub unstable_config: UnstableConfig,
pub otel_config: Option<OtelConfig>, // None means disabled.
} }
fn write_binary_bytes( fn write_binary_bytes(
@ -195,7 +204,8 @@ fn write_binary_bytes(
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let data_section_bytes = let data_section_bytes =
serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)?; serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)
.context("Serializing binary data section.")?;
let target = compile_flags.resolve_target(); let target = compile_flags.resolve_target();
if target.contains("linux") { if target.contains("linux") {
@ -257,6 +267,10 @@ impl StandaloneModules {
} }
} }
pub fn has_file(&self, path: &Path) -> bool {
self.vfs.file_entry(path).is_ok()
}
pub fn read<'a>( pub fn read<'a>(
&'a self, &'a self,
specifier: &'a ModuleSpecifier, specifier: &'a ModuleSpecifier,
@ -264,7 +278,9 @@ impl StandaloneModules {
if specifier.scheme() == "file" { if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?; let path = deno_path_util::url_to_file_path(specifier)?;
let bytes = match self.vfs.file_entry(&path) { let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry)?, Ok(entry) => self
.vfs
.read_file_all(entry, VfsFileSubDataKind::ModuleGraph)?,
Err(err) if err.kind() == ErrorKind::NotFound => { Err(err) if err.kind() == ErrorKind::NotFound => {
let bytes = match RealFs.read_file_sync(&path, None) { let bytes = match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes, Ok(bytes) => bytes,
@ -353,6 +369,8 @@ pub fn extract_standalone(
} }
pub struct DenoCompileBinaryWriter<'a> { pub struct DenoCompileBinaryWriter<'a> {
cjs_tracker: &'a CjsTracker,
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir, deno_dir: &'a DenoDir,
emitter: &'a Emitter, emitter: &'a Emitter,
file_fetcher: &'a FileFetcher, file_fetcher: &'a FileFetcher,
@ -365,6 +383,8 @@ pub struct DenoCompileBinaryWriter<'a> {
impl<'a> DenoCompileBinaryWriter<'a> { impl<'a> DenoCompileBinaryWriter<'a> {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
cjs_tracker: &'a CjsTracker,
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir, deno_dir: &'a DenoDir,
emitter: &'a Emitter, emitter: &'a Emitter,
file_fetcher: &'a FileFetcher, file_fetcher: &'a FileFetcher,
@ -374,6 +394,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
npm_system_info: NpmSystemInfo, npm_system_info: NpmSystemInfo,
) -> Self { ) -> Self {
Self { Self {
cjs_tracker,
cli_options,
deno_dir, deno_dir,
emitter, emitter,
file_fetcher, file_fetcher,
@ -390,8 +412,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
graph: &ModuleGraph, graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>, root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier, entrypoint: &ModuleSpecifier,
include_files: &[ModuleSpecifier],
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
cli_options: &CliOptions,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
// Select base binary based on target // Select base binary based on target
let mut original_binary = self.get_base_binary(compile_flags).await?; let mut original_binary = self.get_base_binary(compile_flags).await?;
@ -404,7 +426,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
target, target,
) )
} }
set_windows_binary_to_gui(&mut original_binary)?; set_windows_binary_to_gui(&mut original_binary)
.context("Setting windows binary to GUI.")?;
} }
if compile_flags.icon.is_some() { if compile_flags.icon.is_some() {
let target = compile_flags.resolve_target(); let target = compile_flags.resolve_target();
@ -422,7 +445,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
graph, graph,
root_dir_url, root_dir_url,
entrypoint, entrypoint,
cli_options, include_files,
compile_flags, compile_flags,
) )
.await .await
@ -436,7 +459,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
// //
// Phase 2 of the 'min sized' deno compile RFC talks // Phase 2 of the 'min sized' deno compile RFC talks
// about adding this as a flag. // about adding this as a flag.
if let Some(path) = std::env::var_os("DENORT_BIN") { if let Some(path) = get_dev_binary_path() {
return std::fs::read(&path).with_context(|| { return std::fs::read(&path).with_context(|| {
format!("Could not find denort at '{}'", path.to_string_lossy()) format!("Could not find denort at '{}'", path.to_string_lossy())
}); });
@ -465,10 +488,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
if !binary_path.exists() { if !binary_path.exists() {
self self
.download_base_binary(&download_directory, &binary_path_suffix) .download_base_binary(&download_directory, &binary_path_suffix)
.await?; .await
.context("Setting up base binary.")?;
} }
let archive_data = std::fs::read(binary_path)?; let read_file = |path: &Path| -> Result<Vec<u8>, AnyError> {
std::fs::read(path).with_context(|| format!("Reading {}", path.display()))
};
let archive_data = read_file(&binary_path)?;
let temp_dir = tempfile::TempDir::new()?; let temp_dir = tempfile::TempDir::new()?;
let base_binary_path = archive::unpack_into_dir(archive::UnpackArgs { let base_binary_path = archive::unpack_into_dir(archive::UnpackArgs {
exe_name: "denort", exe_name: "denort",
@ -477,7 +504,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
is_windows: target.contains("windows"), is_windows: target.contains("windows"),
dest_path: temp_dir.path(), dest_path: temp_dir.path(),
})?; })?;
let base_binary = std::fs::read(base_binary_path)?; let base_binary = read_file(&base_binary_path)?;
drop(temp_dir); // delete the temp dir drop(temp_dir); // delete the temp dir
Ok(base_binary) Ok(base_binary)
} }
@ -505,15 +532,19 @@ impl<'a> DenoCompileBinaryWriter<'a> {
let bytes = match maybe_bytes { let bytes = match maybe_bytes {
Some(bytes) => bytes, Some(bytes) => bytes,
None => { None => {
log::info!("Download could not be found, aborting"); bail!("Download could not be found, aborting");
std::process::exit(1)
} }
}; };
std::fs::create_dir_all(output_directory)?; let create_dir_all = |dir: &Path| {
std::fs::create_dir_all(dir)
.with_context(|| format!("Creating {}", dir.display()))
};
create_dir_all(output_directory)?;
let output_path = output_directory.join(binary_path_suffix); let output_path = output_directory.join(binary_path_suffix);
std::fs::create_dir_all(output_path.parent().unwrap())?; create_dir_all(output_path.parent().unwrap())?;
tokio::fs::write(output_path, bytes).await?; std::fs::write(&output_path, bytes)
.with_context(|| format!("Writing {}", output_path.display()))?;
Ok(()) Ok(())
} }
@ -527,91 +558,118 @@ impl<'a> DenoCompileBinaryWriter<'a> {
graph: &ModuleGraph, graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>, root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier, entrypoint: &ModuleSpecifier,
cli_options: &CliOptions, include_files: &[ModuleSpecifier],
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let ca_data = match cli_options.ca_data() { let ca_data = match self.cli_options.ca_data() {
Some(CaData::File(ca_file)) => Some( Some(CaData::File(ca_file)) => Some(
std::fs::read(ca_file) std::fs::read(ca_file).with_context(|| format!("Reading {ca_file}"))?,
.with_context(|| format!("Reading: {ca_file}"))?,
), ),
Some(CaData::Bytes(bytes)) => Some(bytes.clone()), Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
None => None, None => None,
}; };
let root_path = root_dir_url.inner().to_file_path().unwrap(); let root_path = root_dir_url.inner().to_file_path().unwrap();
let (maybe_npm_vfs, node_modules, npm_snapshot) = match self let (maybe_npm_vfs, node_modules, npm_snapshot) =
.npm_resolver match self.npm_resolver.as_inner() {
.as_inner() InnerCliNpmResolverRef::Managed(managed) => {
{ let snapshot =
InnerCliNpmResolverRef::Managed(managed) => { managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
let snapshot = if !snapshot.as_serialized().packages.is_empty() {
managed.serialized_valid_snapshot_for_system(&self.npm_system_info); let npm_vfs_builder = self
if !snapshot.as_serialized().packages.is_empty() { .build_npm_vfs(&root_path)
let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?; .context("Building npm vfs.")?;
(
Some(npm_vfs_builder),
Some(NodeModules::Managed {
node_modules_dir: self
.npm_resolver
.root_node_modules_path()
.map(|path| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(path).unwrap(),
)
.into_owned()
}),
}),
Some(snapshot),
)
} else {
(None, None, None)
}
}
InnerCliNpmResolverRef::Byonm(resolver) => {
let npm_vfs_builder = self.build_npm_vfs(&root_path)?;
( (
Some(npm_vfs_builder), Some(npm_vfs_builder),
Some(NodeModules::Managed { Some(NodeModules::Byonm {
node_modules_dir: self.npm_resolver.root_node_modules_path().map( root_node_modules_dir: resolver.root_node_modules_path().map(
|path| { |node_modules_dir| {
root_dir_url root_dir_url
.specifier_key( .specifier_key(
&ModuleSpecifier::from_directory_path(path).unwrap(), &ModuleSpecifier::from_directory_path(node_modules_dir)
.unwrap(),
) )
.into_owned() .into_owned()
}, },
), ),
}), }),
Some(snapshot), None,
) )
} else {
(None, None, None)
} }
} };
InnerCliNpmResolverRef::Byonm(resolver) => {
let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
(
Some(npm_vfs_builder),
Some(NodeModules::Byonm {
root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(node_modules_dir)
.unwrap(),
)
.into_owned()
},
),
}),
None,
)
}
};
let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs { let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs {
npm_vfs npm_vfs
} else { } else {
VfsBuilder::new(root_path.clone())? VfsBuilder::new(root_path.clone())?
}; };
for include_file in include_files {
let path = deno_path_util::url_to_file_path(include_file)?;
if path.is_dir() {
// TODO(#26941): we should analyze if any of these are
// modules in order to include their dependencies
vfs
.add_dir_recursive(&path)
.with_context(|| format!("Including {}", path.display()))?;
} else {
vfs
.add_file_at_path(&path)
.with_context(|| format!("Including {}", path.display()))?;
}
}
let mut remote_modules_store = RemoteModulesStoreBuilder::default(); let mut remote_modules_store = RemoteModulesStoreBuilder::default();
let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() {
Some(FastInsecureHasher::new_deno_versioned())
} else {
None
};
for module in graph.modules() { for module in graph.modules() {
if module.specifier().scheme() == "data" { if module.specifier().scheme() == "data" {
continue; // don't store data urls as an entry as they're in the code continue; // don't store data urls as an entry as they're in the code
} }
if let Some(hasher) = &mut code_cache_key_hasher {
if let Some(source) = module.source() {
hasher.write(module.specifier().as_str().as_bytes());
hasher.write(source.as_bytes());
}
}
let (maybe_source, media_type) = match module { let (maybe_source, media_type) = match module {
deno_graph::Module::Js(m) => { deno_graph::Module::Js(m) => {
// todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable() let source = if m.media_type.is_emittable() {
let is_emittable = matches!( let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
m.media_type, &m.specifier,
MediaType::TypeScript m.media_type,
| MediaType::Mts m.is_script,
| MediaType::Cts )?;
| MediaType::Jsx let module_kind = ModuleKind::from_is_cjs(is_cjs);
| MediaType::Tsx
);
let source = if is_emittable {
let source = self let source = self
.emitter .emitter
.emit_parsed_source(&m.specifier, m.media_type, &m.source) .emit_parsed_source(
&m.specifier,
m.media_type,
module_kind,
&m.source,
)
.await?; .await?;
source.into_bytes() source.into_bytes()
} else { } else {
@ -622,6 +680,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
deno_graph::Module::Json(m) => { deno_graph::Module::Json(m) => {
(Some(m.source.as_bytes().to_vec()), m.media_type) (Some(m.source.as_bytes().to_vec()), m.media_type)
} }
deno_graph::Module::Wasm(m) => {
(Some(m.source.to_vec()), MediaType::Wasm)
}
deno_graph::Module::Npm(_) deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_) | deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => (None, MediaType::Unknown), | deno_graph::Module::External(_) => (None, MediaType::Unknown),
@ -635,6 +696,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
Some(source) => source, Some(source) => source,
None => RealFs.read_file_sync(&file_path, None)?, None => RealFs.read_file_sync(&file_path, None)?,
}, },
VfsFileSubDataKind::ModuleGraph,
) )
.with_context(|| { .with_context(|| {
format!("Failed adding '{}'", file_path.display()) format!("Failed adding '{}'", file_path.display())
@ -645,25 +707,33 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} }
remote_modules_store.add_redirects(&graph.redirects); remote_modules_store.add_redirects(&graph.redirects);
let env_vars_from_env_file = match cli_options.env_file_name() { let env_vars_from_env_file = match self.cli_options.env_file_name() {
Some(env_filename) => { Some(env_filenames) => {
log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename); let mut aggregated_env_vars = IndexMap::new();
get_file_env_vars(env_filename.to_string())? for env_filename in env_filenames.iter().rev() {
log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename);
let env_vars = get_file_env_vars(env_filename.to_string())?;
aggregated_env_vars.extend(env_vars);
}
aggregated_env_vars
} }
None => Default::default(), None => Default::default(),
}; };
let metadata = Metadata { let metadata = Metadata {
argv: compile_flags.args.clone(), argv: compile_flags.args.clone(),
seed: cli_options.seed(), seed: self.cli_options.seed(),
location: cli_options.location_flag().clone(), code_cache_key: code_cache_key_hasher.map(|h| h.finish()),
permissions: cli_options.permission_flags().clone(), location: self.cli_options.location_flag().clone(),
v8_flags: cli_options.v8_flags().clone(), permissions: self.cli_options.permission_flags().clone(),
unsafely_ignore_certificate_errors: cli_options v8_flags: self.cli_options.v8_flags().clone(),
unsafely_ignore_certificate_errors: self
.cli_options
.unsafely_ignore_certificate_errors() .unsafely_ignore_certificate_errors()
.clone(), .clone(),
log_level: cli_options.log_level(), log_level: self.cli_options.log_level(),
ca_stores: cli_options.ca_stores().clone(), ca_stores: self.cli_options.ca_stores().clone(),
ca_data, ca_data,
env_vars_from_env_file, env_vars_from_env_file,
entrypoint_key: root_dir_url.specifier_key(entrypoint).into_owned(), entrypoint_key: root_dir_url.specifier_key(entrypoint).into_owned(),
@ -706,11 +776,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
node_modules, node_modules,
unstable_config: UnstableConfig { unstable_config: UnstableConfig {
legacy_flag_enabled: false, legacy_flag_enabled: false,
bare_node_builtins: cli_options.unstable_bare_node_builtins(), bare_node_builtins: self.cli_options.unstable_bare_node_builtins(),
detect_cjs: cli_options.unstable_detect_cjs(), sloppy_imports: self.cli_options.unstable_sloppy_imports(),
sloppy_imports: cli_options.unstable_sloppy_imports(), features: self.cli_options.unstable_features(),
features: cli_options.unstable_features(),
}, },
otel_config: self.cli_options.otel_config(),
}; };
write_binary_bytes( write_binary_bytes(
@ -722,13 +792,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
vfs, vfs,
compile_flags, compile_flags,
) )
.context("Writing binary bytes")
} }
fn build_npm_vfs( fn build_npm_vfs(&self, root_path: &Path) -> Result<VfsBuilder, AnyError> {
&self,
root_path: &Path,
cli_options: &CliOptions,
) -> Result<VfsBuilder, AnyError> {
fn maybe_warn_different_system(system_info: &NpmSystemInfo) { fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
if system_info != &NpmSystemInfo::default() { if system_info != &NpmSystemInfo::default() {
log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning")); log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
@ -745,8 +812,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} else { } else {
// DO NOT include the user's registry url as it may contain credentials, // DO NOT include the user's registry url as it may contain credentials,
// but also don't make this dependent on the registry url // but also don't make this dependent on the registry url
let global_cache_root_path = npm_resolver.global_cache_root_folder(); let global_cache_root_path = npm_resolver.global_cache_root_path();
let mut builder = VfsBuilder::new(global_cache_root_path)?; let mut builder =
VfsBuilder::new(global_cache_root_path.to_path_buf())?;
let mut packages = let mut packages =
npm_resolver.all_system_packages(&self.npm_system_info); npm_resolver.all_system_packages(&self.npm_system_info);
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
@ -804,13 +872,18 @@ impl<'a> DenoCompileBinaryWriter<'a> {
InnerCliNpmResolverRef::Byonm(_) => { InnerCliNpmResolverRef::Byonm(_) => {
maybe_warn_different_system(&self.npm_system_info); maybe_warn_different_system(&self.npm_system_info);
let mut builder = VfsBuilder::new(root_path.to_path_buf())?; let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
for pkg_json in cli_options.workspace().package_jsons() { for pkg_json in self.cli_options.workspace().package_jsons() {
builder.add_file_at_path(&pkg_json.path)?; builder.add_file_at_path(&pkg_json.path)?;
} }
// traverse and add all the node_modules directories in the workspace // traverse and add all the node_modules directories in the workspace
let mut pending_dirs = VecDeque::new(); let mut pending_dirs = VecDeque::new();
pending_dirs.push_back( pending_dirs.push_back(
cli_options.workspace().root_dir().to_file_path().unwrap(), self
.cli_options
.workspace()
.root_dir()
.to_file_path()
.unwrap(),
); );
while let Some(pending_dir) = pending_dirs.pop_front() { while let Some(pending_dir) = pending_dirs.pop_front() {
let mut entries = fs::read_dir(&pending_dir) let mut entries = fs::read_dir(&pending_dir)
@ -837,6 +910,31 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} }
} }
/// Given the path to a `deno` executable, returns the path of a sibling
/// `denort` (or `denort.exe` on Windows) binary, but only when that
/// sibling actually exists on disk.
fn get_denort_path(deno_exe: PathBuf) -> Option<OsString> {
  let sibling_name = if cfg!(windows) {
    "denort.exe"
  } else {
    "denort"
  };
  let mut candidate = deno_exe;
  candidate.set_file_name(sibling_name);
  if candidate.exists() {
    Some(candidate.into_os_string())
  } else {
    None
  }
}
fn get_dev_binary_path() -> Option<OsString> {
env::var_os("DENORT_BIN").or_else(|| {
env::current_exe().ok().and_then(|exec_path| {
if exec_path
.components()
.any(|component| component == Component::Normal("target".as_ref()))
{
get_denort_path(exec_path)
} else {
None
}
})
})
}
/// This function returns the environment variables specified /// This function returns the environment variables specified
/// in the passed environment file. /// in the passed environment file.
fn get_file_env_vars( fn get_file_env_vars(

View file

@ -0,0 +1,523 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::BufReader;
use std::io::BufWriter;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::sync::AtomicFlag;
use deno_runtime::code_cache::CodeCache;
use deno_runtime::code_cache::CodeCacheType;
use crate::cache::FastInsecureHasher;
use crate::util::path::get_atomic_file_path;
use crate::worker::CliCodeCache;
/// How the code cache behaves for this process: either we are populating
/// a fresh cache file (first run) or consuming entries from an existing
/// one (subsequent run).
enum CodeCacheStrategy {
  FirstRun(FirstRunCodeCacheStrategy),
  SubsequentRun(SubsequentRunCodeCacheStrategy),
}
/// A single cached compiled-code blob together with the hash of the
/// source it was generated from (used to detect stale entries).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DenoCompileCodeCacheEntry {
  // hash of the module source this entry was produced from
  pub source_hash: u64,
  // the raw cached code bytes
  pub data: Vec<u8>,
}
/// Code cache used by `deno compile` binaries: writes a cache file on the
/// first run and serves entries from it on subsequent runs.
pub struct DenoCompileCodeCache {
  strategy: CodeCacheStrategy,
}
impl DenoCompileCodeCache {
  /// Creates a code cache backed by `file_path`.
  ///
  /// If a cache file keyed by `cache_key` can be deserialized from
  /// `file_path`, the cache starts in "subsequent run" mode and serves
  /// entries from it; otherwise (missing, corrupt, or a different cache
  /// key) it starts in "first run" mode and collects entries in order to
  /// write a fresh file.
  pub fn new(file_path: PathBuf, cache_key: u64) -> Self {
    let strategy = match deserialize(&file_path, cache_key) {
      Ok(data) => {
        log::debug!(
          "Loaded {} code cache entries from {}",
          data.len(),
          file_path.display()
        );
        CodeCacheStrategy::SubsequentRun(SubsequentRunCodeCacheStrategy {
          is_finished: AtomicFlag::lowered(),
          data: Mutex::new(data),
        })
      }
      Err(err) => {
        // a deserialization failure is not fatal; it just means we
        // start over and regenerate the cache file on this run
        log::debug!(
          "Failed to deserialize code cache from {}: {:#}",
          file_path.display(),
          err
        );
        CodeCacheStrategy::FirstRun(FirstRunCodeCacheStrategy {
          cache_key,
          file_path,
          is_finished: AtomicFlag::lowered(),
          data: Mutex::new(FirstRunCodeCacheData {
            cache: HashMap::new(),
            add_count: 0,
          }),
        })
      }
    };
    Self { strategy }
  }
}
impl CodeCache for DenoCompileCodeCache {
  /// Looks up a cached entry for `specifier`.
  ///
  /// On a first run this always misses, but each miss bumps `add_count`
  /// so the cache knows how many `set_sync` calls to expect before it
  /// flushes to disk. On a subsequent run, the entry is removed from the
  /// in-memory map and returned (if its source hash still matches).
  fn get_sync(
    &self,
    specifier: &ModuleSpecifier,
    code_cache_type: CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        if !strategy.is_finished.is_raised() {
          // we keep track of how many times the cache is requested
          // then serialize the cache when we get that number of
          // "set" calls
          strategy.data.lock().add_count += 1;
        }
        None
      }
      CodeCacheStrategy::SubsequentRun(strategy) => {
        // once drained, stop serving entries entirely
        if strategy.is_finished.is_raised() {
          return None;
        }
        strategy.take_from_cache(specifier, code_cache_type, source_hash)
      }
    }
  }

  /// Stores a freshly generated entry.
  ///
  /// Only meaningful on a first run: the entry is added to the in-memory
  /// map, and once the number of pending `set` calls (tracked via
  /// `add_count`) reaches zero the whole map is written to disk and the
  /// cache is marked finished. Subsequent runs ignore stores.
  fn set_sync(
    &self,
    specifier: ModuleSpecifier,
    code_cache_type: CodeCacheType,
    source_hash: u64,
    bytes: &[u8],
  ) {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        if strategy.is_finished.is_raised() {
          return;
        }
        // compute what (if anything) to serialize while holding the
        // lock, but do the file write after releasing it
        let data_to_serialize = {
          let mut data = strategy.data.lock();
          data.cache.insert(
            (specifier.to_string(), code_cache_type),
            DenoCompileCodeCacheEntry {
              source_hash,
              data: bytes.to_vec(),
            },
          );
          if data.add_count != 0 {
            data.add_count -= 1;
          }
          if data.add_count == 0 {
            // don't allow using the cache anymore
            strategy.is_finished.raise();
            if data.cache.is_empty() {
              None
            } else {
              Some(std::mem::take(&mut data.cache))
            }
          } else {
            None
          }
        };
        if let Some(cache_data) = &data_to_serialize {
          strategy.write_cache_data(cache_data);
        }
      }
      CodeCacheStrategy::SubsequentRun(_) => {
        // do nothing
      }
    }
  }
}
impl CliCodeCache for DenoCompileCodeCache {
  /// The cache is usable until its strategy has finished — either fully
  /// drained (subsequent run) or flushed to disk (first run).
  fn enabled(&self) -> bool {
    let is_finished = match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => &strategy.is_finished,
      CodeCacheStrategy::SubsequentRun(strategy) => &strategy.is_finished,
    };
    !is_finished.is_raised()
  }

  fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache> {
    self
  }
}
// cache entries are keyed by (module specifier, code cache type)
type CodeCacheKey = (String, CodeCacheType);
/// Entries collected in memory during a first run.
struct FirstRunCodeCacheData {
  cache: HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
  // number of pending `set` calls still expected; incremented on each
  // `get` miss and decremented on each `set` — the cache is written to
  // disk when this reaches zero
  add_count: usize,
}
/// First-run state: accumulates entries and eventually writes them to
/// `file_path`.
struct FirstRunCodeCacheStrategy {
  // persisted in the file header; used to invalidate the file on later runs
  cache_key: u64,
  file_path: PathBuf,
  is_finished: AtomicFlag,
  data: Mutex<FirstRunCodeCacheData>,
}
impl FirstRunCodeCacheStrategy {
  /// Serializes `cache_data` to a temporary file and then renames it into
  /// place, so readers never observe a partially written cache file.
  /// Failures are logged at debug level and otherwise ignored — the code
  /// cache is best-effort.
  fn write_cache_data(
    &self,
    cache_data: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
  ) {
    let entry_count = cache_data.len();
    let temp_file = get_atomic_file_path(&self.file_path);
    if let Err(err) = serialize(&temp_file, self.cache_key, cache_data) {
      // clean up the partial temp file
      let _ = std::fs::remove_file(&temp_file);
      log::debug!("Failed to serialize code cache: {}", err);
      return;
    }
    match std::fs::rename(&temp_file, &self.file_path) {
      Ok(()) => {
        log::debug!("Serialized {} code cache entries", entry_count);
      }
      Err(err) => {
        log::debug!("Failed to rename code cache: {}", err);
        let _ = std::fs::remove_file(&temp_file);
      }
    }
  }
}
/// Subsequent-run state: entries loaded from disk that are handed out
/// (and removed) as modules request them.
struct SubsequentRunCodeCacheStrategy {
  is_finished: AtomicFlag,
  data: Mutex<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>>,
}
impl SubsequentRunCodeCacheStrategy {
  /// Removes and returns the cached bytes for `specifier`, but only when
  /// the stored source hash matches the current one. Raises the finished
  /// flag once the map has been fully drained.
  fn take_from_cache(
    &self,
    specifier: &ModuleSpecifier,
    code_cache_type: CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
    // todo(dsherret): how to avoid the clone here?
    let key = (specifier.to_string(), code_cache_type);
    let mut cache = self.data.lock();
    let entry = cache.remove(&key)?;
    // stale entry: the module's source changed since it was cached
    if entry.source_hash != source_hash {
      return None;
    }
    if cache.is_empty() {
      self.is_finished.raise();
    }
    Some(entry.data)
  }
}
/// Writes the cache map to `file_path` (truncating any existing file).
///
/// File format:
/// - <header>
///   - <u64: cache key>
///   - <u32: number of entries>
/// - <[entry length]> - u64 * number of entries
/// - <[entry]>
///   - <[u8]: entry data>
///   - <u8: code cache type>
///   - <String: specifier>
///   - <u32: specifier length>
///   - <u64: source hash>
///   - <u64: entry data hash>
fn serialize(
  file_path: &Path,
  cache_key: u64,
  cache: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
) -> Result<(), AnyError> {
  let cache_file = std::fs::OpenOptions::new()
    .create(true)
    .truncate(true)
    .write(true)
    .open(file_path)?;
  let mut writer = BufWriter::new(cache_file);
  serialize_with_writer(&mut writer, cache_key, cache)
}
/// Writes the cache in the format documented on `serialize`: the header,
/// then the byte length of every entry, then the entries themselves.
///
/// Note: the two loops rely on an unmodified `HashMap` yielding the same
/// iteration order on consecutive iterations, so the i-th length matches
/// the i-th entry.
fn serialize_with_writer<T: Write>(
  writer: &mut BufWriter<T>,
  cache_key: u64,
  cache: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
) -> Result<(), AnyError> {
  // header
  writer.write_all(&cache_key.to_le_bytes())?;
  writer.write_all(&(cache.len() as u32).to_le_bytes())?;
  // lengths of each entry
  for ((specifier, _), entry) in cache {
    // data + specifier bytes + type (1) + specifier-length field (4)
    // + source hash (8) + entry data hash (8)
    let len: u64 =
      entry.data.len() as u64 + specifier.len() as u64 + 1 + 4 + 8 + 8;
    writer.write_all(&len.to_le_bytes())?;
  }
  // entries
  for ((specifier, code_cache_type), entry) in cache {
    writer.write_all(&entry.data)?;
    writer.write_all(&[match code_cache_type {
      CodeCacheType::EsModule => 0,
      CodeCacheType::Script => 1,
    }])?;
    writer.write_all(specifier.as_bytes())?;
    writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
    writer.write_all(&entry.source_hash.to_le_bytes())?;
    // trailing hash of the entry data lets deserialization detect corruption
    let hash: u64 = FastInsecureHasher::new_without_deno_version()
      .write(&entry.data)
      .finish();
    writer.write_all(&hash.to_le_bytes())?;
  }
  writer.flush()?;
  Ok(())
}
/// Reads a previously-serialized code cache from `file_path`, failing if
/// the file is missing, corrupt, or was written for a different cache key.
fn deserialize(
  file_path: &Path,
  expected_cache_key: u64,
) -> Result<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, AnyError> {
  let cache_file = std::fs::File::open(file_path)?;
  deserialize_with_reader(&mut BufReader::new(cache_file), expected_cache_key)
}
/// Parses the file format documented on `serialize`, validating the cache
/// key, each entry's trailing data hash, and all internal offsets so a
/// corrupt or truncated file produces an error rather than bad data.
///
/// Entries are parsed back-to-front: only the trailing fields have fixed
/// widths, so offsets are computed from the end of each entry buffer.
fn deserialize_with_reader<T: Read>(
  reader: &mut BufReader<T>,
  expected_cache_key: u64,
) -> Result<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, AnyError> {
  // it's very important to use this below so that a corrupt cache file
  // doesn't cause a memory allocation error
  fn new_vec_sized<T: Clone>(
    capacity: usize,
    default_value: T,
  ) -> Result<Vec<T>, AnyError> {
    let mut vec = Vec::new();
    vec.try_reserve(capacity)?;
    vec.resize(capacity, default_value);
    Ok(vec)
  }
  // checked subtraction so corrupt offsets error instead of panicking
  fn try_subtract(a: usize, b: usize) -> Result<usize, AnyError> {
    if a < b {
      bail!("Integer underflow");
    }
    Ok(a - b)
  }
  // header: u64 cache key + u32 entry count
  let mut header_bytes = vec![0; 8 + 4];
  reader.read_exact(&mut header_bytes)?;
  let actual_cache_key = u64::from_le_bytes(header_bytes[..8].try_into()?);
  if actual_cache_key != expected_cache_key {
    // cache bust
    bail!("Cache key mismatch");
  }
  let len = u32::from_le_bytes(header_bytes[8..].try_into()?) as usize;
  // read the lengths for each entry found in the file
  let entry_len_bytes_capacity = len * 8;
  let mut entry_len_bytes = new_vec_sized(entry_len_bytes_capacity, 0)?;
  reader.read_exact(&mut entry_len_bytes)?;
  let mut lengths = Vec::new();
  lengths.try_reserve(len)?;
  for i in 0..len {
    let pos = i * 8;
    lengths.push(
      u64::from_le_bytes(entry_len_bytes[pos..pos + 8].try_into()?) as usize,
    );
  }
  let mut map = HashMap::new();
  map.try_reserve(len)?;
  for len in lengths {
    let mut buffer = new_vec_sized(len, 0)?;
    reader.read_exact(&mut buffer)?;
    // walk backwards from the end of the entry:
    // [data][u8 type][specifier][u32 spec len][u64 source hash][u64 data hash]
    let entry_data_hash_start_pos = try_subtract(buffer.len(), 8)?;
    let expected_entry_data_hash =
      u64::from_le_bytes(buffer[entry_data_hash_start_pos..].try_into()?);
    let source_hash_start_pos = try_subtract(entry_data_hash_start_pos, 8)?;
    let source_hash = u64::from_le_bytes(
      buffer[source_hash_start_pos..entry_data_hash_start_pos].try_into()?,
    );
    let specifier_end_pos = try_subtract(source_hash_start_pos, 4)?;
    let specifier_len = u32::from_le_bytes(
      buffer[specifier_end_pos..source_hash_start_pos].try_into()?,
    ) as usize;
    let specifier_start_pos = try_subtract(specifier_end_pos, specifier_len)?;
    let specifier = String::from_utf8(
      buffer[specifier_start_pos..specifier_end_pos].to_vec(),
    )?;
    let code_cache_type_pos = try_subtract(specifier_start_pos, 1)?;
    let code_cache_type = match buffer[code_cache_type_pos] {
      0 => CodeCacheType::EsModule,
      1 => CodeCacheType::Script,
      _ => bail!("Invalid code cache type"),
    };
    // everything before the type byte is the entry data itself
    buffer.truncate(code_cache_type_pos);
    let actual_entry_data_hash: u64 =
      FastInsecureHasher::new_without_deno_version()
        .write(&buffer)
        .finish();
    if expected_entry_data_hash != actual_entry_data_hash {
      bail!("Hash mismatch.")
    }
    map.insert(
      (specifier, code_cache_type),
      DenoCompileCodeCacheEntry {
        source_hash,
        data: buffer,
      },
    );
  }
  Ok(map)
}
#[cfg(test)]
mod test {
  use test_util::TempDir;

  use super::*;
  use std::fs::File;

  // round-trips a map with multiple specifiers and both code cache types
  #[test]
  fn serialize_deserialize() {
    let cache_key = 123456;
    let cache = {
      let mut cache = HashMap::new();
      cache.insert(
        ("specifier1".to_string(), CodeCacheType::EsModule),
        DenoCompileCodeCacheEntry {
          source_hash: 1,
          data: vec![1, 2, 3],
        },
      );
      cache.insert(
        ("specifier2".to_string(), CodeCacheType::EsModule),
        DenoCompileCodeCacheEntry {
          source_hash: 2,
          data: vec![4, 5, 6],
        },
      );
      cache.insert(
        ("specifier2".to_string(), CodeCacheType::Script),
        DenoCompileCodeCacheEntry {
          source_hash: 2,
          data: vec![6, 5, 1],
        },
      );
      cache
    };
    let mut buffer = Vec::new();
    serialize_with_writer(&mut BufWriter::new(&mut buffer), cache_key, &cache)
      .unwrap();
    let deserialized =
      deserialize_with_reader(&mut BufReader::new(&buffer[..]), cache_key)
        .unwrap();
    assert_eq!(cache, deserialized);
  }

  // an empty map must round-trip too
  #[test]
  fn serialize_deserialize_empty() {
    let cache_key = 1234;
    let cache = HashMap::new();
    let mut buffer = Vec::new();
    serialize_with_writer(&mut BufWriter::new(&mut buffer), cache_key, &cache)
      .unwrap();
    let deserialized =
      deserialize_with_reader(&mut BufReader::new(&buffer[..]), cache_key)
        .unwrap();
    assert_eq!(cache, deserialized);
  }

  // garbage input should error on the cache key check, not panic
  #[test]
  fn serialize_deserialize_corrupt() {
    let buffer = "corrupttestingtestingtesting".as_bytes().to_vec();
    let err = deserialize_with_reader(&mut BufReader::new(&buffer[..]), 1234)
      .unwrap_err();
    assert_eq!(err.to_string(), "Cache key mismatch");
  }

  // end-to-end first-run / subsequent-run lifecycle, including cache busting
  #[test]
  fn code_cache() {
    let temp_dir = TempDir::new();
    let file_path = temp_dir.path().join("cache.bin").to_path_buf();
    let url1 = ModuleSpecifier::parse("https://deno.land/example1.js").unwrap();
    let url2 = ModuleSpecifier::parse("https://deno.land/example2.js").unwrap();
    // first run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234);
      // both gets miss, which tells the cache to expect two sets
      assert!(code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .is_none());
      assert!(code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 1)
        .is_none());
      assert!(code_cache.enabled());
      code_cache.set_sync(url1.clone(), CodeCacheType::EsModule, 0, &[1, 2, 3]);
      assert!(code_cache.enabled());
      assert!(!file_path.exists());
      code_cache.set_sync(url2.clone(), CodeCacheType::EsModule, 1, &[2, 1, 3]);
      assert!(file_path.exists()); // now the new code cache exists
      assert!(!code_cache.enabled()); // no longer enabled
    }
    // second run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234);
      assert!(code_cache.enabled());
      let result1 = code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .unwrap();
      assert!(code_cache.enabled());
      let result2 = code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 1)
        .unwrap();
      assert!(!code_cache.enabled()); // no longer enabled
      assert_eq!(result1, vec![1, 2, 3]);
      assert_eq!(result2, vec![2, 1, 3]);
    }
    // new cache key first run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 54321);
      assert!(code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .is_none());
      assert!(code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 1)
        .is_none());
      code_cache.set_sync(url1.clone(), CodeCacheType::EsModule, 0, &[2, 2, 3]);
      code_cache.set_sync(url2.clone(), CodeCacheType::EsModule, 1, &[3, 2, 3]);
    }
    // new cache key second run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 54321);
      let result1 = code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .unwrap();
      assert_eq!(result1, vec![2, 2, 3]);
      assert!(code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 5) // different hash will cause none
        .is_none());
    }
  }
}

View file

@ -17,6 +17,7 @@ use deno_runtime::deno_io::fs::FsResult;
use deno_runtime::deno_io::fs::FsStat; use deno_runtime::deno_io::fs::FsStat;
use super::virtual_fs::FileBackedVfs; use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsFileSubDataKind;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>); pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
@ -36,7 +37,8 @@ impl DenoCompileFileSystem {
fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
let old_file = self.0.file_entry(oldpath)?; let old_file = self.0.file_entry(oldpath)?;
let old_file_bytes = self.0.read_file_all(old_file)?; let old_file_bytes =
self.0.read_file_all(old_file, VfsFileSubDataKind::Raw)?;
RealFs.write_file_sync( RealFs.write_file_sync(
newpath, newpath,
OpenOptions { OpenOptions {

View file

@ -7,6 +7,7 @@
use binary::StandaloneData; use binary::StandaloneData;
use binary::StandaloneModules; use binary::StandaloneModules;
use code_cache::DenoCompileCodeCache;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolution;
@ -17,8 +18,10 @@ use deno_core::anyhow::Context;
use deno_core::error::generic_error; use deno_core::error::generic_error;
use deno_core::error::type_error; use deno_core::error::type_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::v8_set_flags; use deno_core::v8_set_flags;
use deno_core::FastString;
use deno_core::FeatureChecker; use deno_core::FeatureChecker;
use deno_core::ModuleLoader; use deno_core::ModuleLoader;
use deno_core::ModuleSourceCode; use deno_core::ModuleSourceCode;
@ -26,11 +29,15 @@ use deno_core::ModuleSpecifier;
use deno_core::ModuleType; use deno_core::ModuleType;
use deno_core::RequestedModuleType; use deno_core::RequestedModuleType;
use deno_core::ResolutionKind; use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo;
use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::create_host_defined_options; use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::rustls::RootCertStore;
@ -42,10 +49,15 @@ use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use import_map::parse_from_json; use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator; use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::NodeResolutionMode; use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use serialization::DenoCompileModuleSource;
use std::borrow::Cow; use std::borrow::Cow;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use virtual_fs::FileBackedVfs;
use virtual_fs::VfsFileSubDataKind;
use crate::args::create_default_npmrc; use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store; use crate::args::get_root_cert_store;
@ -57,28 +69,37 @@ use crate::args::StorageKeyResolver;
use crate::cache::Caches; use crate::cache::Caches;
use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::DenoDirProvider; use crate::cache::DenoDirProvider;
use crate::cache::FastInsecureHasher;
use crate::cache::NodeAnalysisCache; use crate::cache::NodeAnalysisCache;
use crate::cache::RealDenoCacheEnv; use crate::cache::RealDenoCacheEnv;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::node::CliCjsCodeAnalyzer; use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::npm::create_cli_npm_resolver; use crate::npm::create_cli_npm_resolver;
use crate::npm::create_in_npm_pkg_checker;
use crate::npm::CliByonmNpmResolverCreateOptions; use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::resolver::CjsResolutionStore; use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CliDenoResolverFs; use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNodeResolver; use crate::resolver::CliNpmReqResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader; use crate::resolver::NpmModuleLoader;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressBarStyle;
use crate::util::v8::construct_v8_flags; use crate::util::v8::construct_v8_flags;
use crate::worker::CliCodeCache;
use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions; use crate::worker::CliMainWorkerOptions;
use crate::worker::ModuleLoaderAndSourceMapGetter; use crate::worker::CreateModuleLoaderResult;
use crate::worker::ModuleLoaderFactory; use crate::worker::ModuleLoaderFactory;
pub mod binary; pub mod binary;
mod code_cache;
mod file_system; mod file_system;
mod serialization; mod serialization;
mod virtual_fs; mod virtual_fs;
@ -91,10 +112,45 @@ use self::binary::Metadata;
use self::file_system::DenoCompileFileSystem; use self::file_system::DenoCompileFileSystem;
struct SharedModuleLoaderState { struct SharedModuleLoaderState {
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<dyn CliCodeCache>>,
fs: Arc<dyn deno_fs::FileSystem>,
modules: StandaloneModules, modules: StandaloneModules,
workspace_resolver: WorkspaceResolver, node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<NodeResolver>,
npm_module_loader: Arc<NpmModuleLoader>, npm_module_loader: Arc<NpmModuleLoader>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
vfs: Arc<FileBackedVfs>,
workspace_resolver: WorkspaceResolver,
}
impl SharedModuleLoaderState {
fn get_code_cache(
&self,
specifier: &ModuleSpecifier,
source: &[u8],
) -> Option<SourceCodeCacheInfo> {
let Some(code_cache) = &self.code_cache else {
return None;
};
if !code_cache.enabled() {
return None;
}
// deno version is already included in the root cache key
let hash = FastInsecureHasher::new_without_deno_version()
.write_hashable(source)
.finish();
let data = code_cache.get_sync(
specifier,
deno_runtime::code_cache::CodeCacheType::EsModule,
hash,
);
Some(SourceCodeCacheInfo {
hash,
data: data.map(Cow::Owned),
})
}
} }
#[derive(Clone)] #[derive(Clone)]
@ -102,6 +158,12 @@ struct EmbeddedModuleLoader {
shared: Arc<SharedModuleLoaderState>, shared: Arc<SharedModuleLoaderState>,
} }
impl std::fmt::Debug for EmbeddedModuleLoader {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("EmbeddedModuleLoader").finish()
}
}
pub const MODULE_NOT_FOUND: &str = "Module not found"; pub const MODULE_NOT_FOUND: &str = "Module not found";
pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme"; pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
@ -126,13 +188,27 @@ impl ModuleLoader for EmbeddedModuleLoader {
type_error(format!("Referrer uses invalid specifier: {}", err)) type_error(format!("Referrer uses invalid specifier: {}", err))
})? })?
}; };
let referrer_kind = if self
.shared
.cjs_tracker
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))?
{
ResolutionMode::Require
} else {
ResolutionMode::Import
};
if self.shared.node_resolver.in_npm_package(&referrer) { if self.shared.node_resolver.in_npm_package(&referrer) {
return Ok( return Ok(
self self
.shared .shared
.node_resolver .node_resolver
.resolve(raw_specifier, &referrer, NodeResolutionMode::Execution)? .resolve(
raw_specifier,
&referrer,
referrer_kind,
NodeResolutionKind::Execution,
)?
.into_url(), .into_url(),
); );
} }
@ -154,13 +230,13 @@ impl ModuleLoader for EmbeddedModuleLoader {
self self
.shared .shared
.node_resolver .node_resolver
.resolve_package_sub_path_from_deno_module( .resolve_package_subpath_from_deno_module(
pkg_json.dir_path(), pkg_json.dir_path(),
sub_path.as_deref(), sub_path.as_deref(),
Some(&referrer), Some(&referrer),
NodeResolutionMode::Execution, referrer_kind,
)? NodeResolutionKind::Execution,
.into_url(), )?,
), ),
Ok(MappedResolution::PackageJson { Ok(MappedResolution::PackageJson {
dep_result, dep_result,
@ -170,14 +246,15 @@ impl ModuleLoader for EmbeddedModuleLoader {
}) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? { }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? {
PackageJsonDepValue::Req(req) => self PackageJsonDepValue::Req(req) => self
.shared .shared
.node_resolver .npm_req_resolver
.resolve_req_with_sub_path( .resolve_req_with_sub_path(
req, req,
sub_path.as_deref(), sub_path.as_deref(),
&referrer, &referrer,
NodeResolutionMode::Execution, referrer_kind,
NodeResolutionKind::Execution,
) )
.map(|res| res.into_url()), .map_err(AnyError::from),
PackageJsonDepValue::Workspace(version_req) => { PackageJsonDepValue::Workspace(version_req) => {
let pkg_folder = self let pkg_folder = self
.shared .shared
@ -190,13 +267,13 @@ impl ModuleLoader for EmbeddedModuleLoader {
self self
.shared .shared
.node_resolver .node_resolver
.resolve_package_sub_path_from_deno_module( .resolve_package_subpath_from_deno_module(
pkg_folder, pkg_folder,
sub_path.as_deref(), sub_path.as_deref(),
Some(&referrer), Some(&referrer),
NodeResolutionMode::Execution, referrer_kind,
)? NodeResolutionKind::Execution,
.into_url(), )?,
) )
} }
}, },
@ -205,15 +282,12 @@ impl ModuleLoader for EmbeddedModuleLoader {
if let Ok(reference) = if let Ok(reference) =
NpmPackageReqReference::from_specifier(&specifier) NpmPackageReqReference::from_specifier(&specifier)
{ {
return self return Ok(self.shared.npm_req_resolver.resolve_req_reference(
.shared &reference,
.node_resolver &referrer,
.resolve_req_reference( referrer_kind,
&reference, NodeResolutionKind::Execution,
&referrer, )?);
NodeResolutionMode::Execution,
)
.map(|res| res.into_url());
} }
if specifier.scheme() == "jsr" { if specifier.scheme() == "jsr" {
@ -228,17 +302,18 @@ impl ModuleLoader for EmbeddedModuleLoader {
self self
.shared .shared
.node_resolver .node_resolver
.handle_if_in_node_modules(&specifier)? .handle_if_in_node_modules(&specifier)
.unwrap_or(specifier), .unwrap_or(specifier),
) )
} }
Err(err) Err(err)
if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" => if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
{ {
let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg( let maybe_res = self.shared.npm_req_resolver.resolve_if_for_npm_pkg(
raw_specifier, raw_specifier,
&referrer, &referrer,
NodeResolutionMode::Execution, referrer_kind,
NodeResolutionKind::Execution,
)?; )?;
if let Some(res) = maybe_res { if let Some(res) = maybe_res {
return Ok(res.into_url()); return Ok(res.into_url());
@ -292,14 +367,19 @@ impl ModuleLoader for EmbeddedModuleLoader {
} }
if self.shared.node_resolver.in_npm_package(original_specifier) { if self.shared.node_resolver.in_npm_package(original_specifier) {
let npm_module_loader = self.shared.npm_module_loader.clone(); let shared = self.shared.clone();
let original_specifier = original_specifier.clone(); let original_specifier = original_specifier.clone();
let maybe_referrer = maybe_referrer.cloned(); let maybe_referrer = maybe_referrer.cloned();
return deno_core::ModuleLoadResponse::Async( return deno_core::ModuleLoadResponse::Async(
async move { async move {
let code_source = npm_module_loader let code_source = shared
.npm_module_loader
.load(&original_specifier, maybe_referrer.as_ref()) .load(&original_specifier, maybe_referrer.as_ref())
.await?; .await?;
let code_cache_entry = shared.get_code_cache(
&code_source.found_url,
code_source.code.as_bytes(),
);
Ok(deno_core::ModuleSource::new_with_redirect( Ok(deno_core::ModuleSource::new_with_redirect(
match code_source.media_type { match code_source.media_type {
MediaType::Json => ModuleType::Json, MediaType::Json => ModuleType::Json,
@ -308,7 +388,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
code_source.code, code_source.code,
&original_specifier, &original_specifier,
&code_source.found_url, &code_source.found_url,
None, code_cache_entry,
)) ))
} }
.boxed_local(), .boxed_local(),
@ -317,17 +397,77 @@ impl ModuleLoader for EmbeddedModuleLoader {
match self.shared.modules.read(original_specifier) { match self.shared.modules.read(original_specifier) {
Ok(Some(module)) => { Ok(Some(module)) => {
let media_type = module.media_type;
let (module_specifier, module_type, module_source) = let (module_specifier, module_type, module_source) =
module.into_for_v8(); module.into_parts();
deno_core::ModuleLoadResponse::Sync(Ok( let is_maybe_cjs = match self
deno_core::ModuleSource::new_with_redirect( .shared
module_type, .cjs_tracker
module_source, .is_maybe_cjs(original_specifier, media_type)
original_specifier, {
module_specifier, Ok(is_maybe_cjs) => is_maybe_cjs,
None, Err(err) => {
), return deno_core::ModuleLoadResponse::Sync(Err(type_error(
)) format!("{:?}", err),
)));
}
};
if is_maybe_cjs {
let original_specifier = original_specifier.clone();
let module_specifier = module_specifier.clone();
let shared = self.shared.clone();
deno_core::ModuleLoadResponse::Async(
async move {
let source = match module_source {
DenoCompileModuleSource::String(string) => {
Cow::Borrowed(string)
}
DenoCompileModuleSource::Bytes(module_code_bytes) => {
match module_code_bytes {
Cow::Owned(bytes) => Cow::Owned(
crate::util::text_encoding::from_utf8_lossy_owned(bytes),
),
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
}
}
};
let source = shared
.node_code_translator
.translate_cjs_to_esm(&module_specifier, Some(source))
.await?;
let module_source = match source {
Cow::Owned(source) => ModuleSourceCode::String(source.into()),
Cow::Borrowed(source) => {
ModuleSourceCode::String(FastString::from_static(source))
}
};
let code_cache_entry = shared
.get_code_cache(&module_specifier, module_source.as_bytes());
Ok(deno_core::ModuleSource::new_with_redirect(
module_type,
module_source,
&original_specifier,
&module_specifier,
code_cache_entry,
))
}
.boxed_local(),
)
} else {
let module_source = module_source.into_for_v8();
let code_cache_entry = self
.shared
.get_code_cache(module_specifier, module_source.as_bytes());
deno_core::ModuleLoadResponse::Sync(Ok(
deno_core::ModuleSource::new_with_redirect(
module_type,
module_source,
original_specifier,
module_specifier,
code_cache_entry,
),
))
}
} }
Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error( Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{MODULE_NOT_FOUND}: {}", original_specifier), format!("{MODULE_NOT_FOUND}: {}", original_specifier),
@ -337,34 +477,93 @@ impl ModuleLoader for EmbeddedModuleLoader {
))), ))),
} }
} }
fn code_cache_ready(
&self,
specifier: ModuleSpecifier,
source_hash: u64,
code_cache_data: &[u8],
) -> LocalBoxFuture<'static, ()> {
if let Some(code_cache) = &self.shared.code_cache {
code_cache.set_sync(
specifier,
deno_runtime::code_cache::CodeCacheType::EsModule,
source_hash,
code_cache_data,
);
}
std::future::ready(()).boxed_local()
}
}
impl NodeRequireLoader for EmbeddedModuleLoader {
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
path: &'a std::path::Path,
) -> Result<Cow<'a, std::path::Path>, AnyError> {
if self.shared.modules.has_file(path) {
// allow reading if the file is in the snapshot
return Ok(Cow::Borrowed(path));
}
self
.shared
.npm_resolver
.ensure_read_permission(permissions, path)
}
fn load_text_file_lossy(
&self,
path: &std::path::Path,
) -> Result<String, AnyError> {
let file_entry = self.shared.vfs.file_entry(path)?;
let file_bytes = self
.shared
.vfs
.read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph)?;
Ok(String::from_utf8(file_bytes.into_owned())?)
}
fn is_maybe_cjs(
&self,
specifier: &ModuleSpecifier,
) -> Result<bool, ClosestPkgJsonError> {
let media_type = MediaType::from_specifier(specifier);
self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type)
}
} }
struct StandaloneModuleLoaderFactory { struct StandaloneModuleLoaderFactory {
shared: Arc<SharedModuleLoaderState>, shared: Arc<SharedModuleLoaderState>,
} }
impl StandaloneModuleLoaderFactory {
pub fn create_result(&self) -> CreateModuleLoaderResult {
let loader = Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
});
CreateModuleLoaderResult {
module_loader: loader.clone(),
node_require_loader: loader,
}
}
}
impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
fn create_for_main( fn create_for_main(
&self, &self,
_root_permissions: PermissionsContainer, _root_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
ModuleLoaderAndSourceMapGetter { self.create_result()
module_loader: Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
}),
}
} }
fn create_for_worker( fn create_for_worker(
&self, &self,
_parent_permissions: PermissionsContainer, _parent_permissions: PermissionsContainer,
_permissions: PermissionsContainer, _permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
ModuleLoaderAndSourceMapGetter { self.create_result()
module_loader: Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
}),
}
} }
} }
@ -410,106 +609,156 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
let npm_global_cache_dir = root_path.join(".deno_compile_node_modules"); let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
let cache_setting = CacheSetting::Only; let cache_setting = CacheSetting::Only;
let npm_resolver = match metadata.node_modules { let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules {
Some(binary::NodeModules::Managed { node_modules_dir }) => { Some(binary::NodeModules::Managed { node_modules_dir }) => {
// create an npmrc that uses the fake npm_registry_url to resolve packages
let npmrc = Arc::new(ResolvedNpmRc {
default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
registry_url: npm_registry_url.clone(),
config: Default::default(),
},
scopes: Default::default(),
registry_configs: Default::default(),
});
let npm_cache_dir = Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(fs.as_ref()),
npm_global_cache_dir,
npmrc.get_all_known_registries_urls(),
));
let snapshot = npm_snapshot.unwrap(); let snapshot = npm_snapshot.unwrap();
let maybe_node_modules_path = node_modules_dir let maybe_node_modules_path = node_modules_dir
.map(|node_modules_dir| root_path.join(node_modules_dir)); .map(|node_modules_dir| root_path.join(node_modules_dir));
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( let in_npm_pkg_checker =
CliNpmResolverManagedCreateOptions { create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed(
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some( CliManagedInNpmPkgCheckerCreateOptions {
snapshot, root_cache_dir_url: npm_cache_dir.root_dir_url(),
)), maybe_node_modules_path: maybe_node_modules_path.as_deref(),
maybe_lockfile: None, },
fs: fs.clone(), ));
http_client_provider: http_client_provider.clone(), let npm_resolver =
npm_global_cache_dir, create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
cache_setting, CliManagedNpmResolverCreateOptions {
text_only_progress_bar: progress_bar, snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
maybe_node_modules_path, snapshot,
npm_system_info: Default::default(), )),
npm_install_deps_provider: Arc::new( maybe_lockfile: None,
// this is only used for installing packages, which isn't necessary with deno compile fs: fs.clone(),
NpmInstallDepsProvider::empty(), http_client_provider: http_client_provider.clone(),
), npm_cache_dir,
// create an npmrc that uses the fake npm_registry_url to resolve packages cache_setting,
npmrc: Arc::new(ResolvedNpmRc { text_only_progress_bar: progress_bar,
default_config: deno_npm::npm_rc::RegistryConfigWithUrl { maybe_node_modules_path,
registry_url: npm_registry_url.clone(), npm_system_info: Default::default(),
config: Default::default(), npm_install_deps_provider: Arc::new(
}, // this is only used for installing packages, which isn't necessary with deno compile
scopes: Default::default(), NpmInstallDepsProvider::empty(),
registry_configs: Default::default(), ),
}), npmrc,
lifecycle_scripts: Default::default(), lifecycle_scripts: Default::default(),
}, },
)) ))
.await? .await?;
(in_npm_pkg_checker, npm_resolver)
} }
Some(binary::NodeModules::Byonm { Some(binary::NodeModules::Byonm {
root_node_modules_dir, root_node_modules_dir,
}) => { }) => {
let root_node_modules_dir = let root_node_modules_dir =
root_node_modules_dir.map(|p| vfs.root().join(p)); root_node_modules_dir.map(|p| vfs.root().join(p));
create_cli_npm_resolver(CliNpmResolverCreateOptions::Byonm( let in_npm_pkg_checker =
CliByonmNpmResolverCreateOptions { create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Byonm);
let npm_resolver = create_cli_npm_resolver(
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(fs.clone()), fs: CliDenoResolverFs(fs.clone()),
pkg_json_resolver: pkg_json_resolver.clone(),
root_node_modules_dir, root_node_modules_dir,
}, }),
)) )
.await? .await?;
(in_npm_pkg_checker, npm_resolver)
} }
None => { None => {
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( // Packages from different registries are already inlined in the binary,
CliNpmResolverManagedCreateOptions { // so no need to create actual `.npmrc` configuration.
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), let npmrc = create_default_npmrc();
maybe_lockfile: None, let npm_cache_dir = Arc::new(NpmCacheDir::new(
fs: fs.clone(), &DenoCacheEnvFsAdapter(fs.as_ref()),
http_client_provider: http_client_provider.clone(), npm_global_cache_dir,
npm_global_cache_dir, npmrc.get_all_known_registries_urls(),
cache_setting, ));
text_only_progress_bar: progress_bar, let in_npm_pkg_checker =
maybe_node_modules_path: None, create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed(
npm_system_info: Default::default(), CliManagedInNpmPkgCheckerCreateOptions {
npm_install_deps_provider: Arc::new( root_cache_dir_url: npm_cache_dir.root_dir_url(),
// this is only used for installing packages, which isn't necessary with deno compile maybe_node_modules_path: None,
NpmInstallDepsProvider::empty(), },
), ));
// Packages from different registries are already inlined in the binary, let npm_resolver =
// so no need to create actual `.npmrc` configuration. create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
npmrc: create_default_npmrc(), CliManagedNpmResolverCreateOptions {
lifecycle_scripts: Default::default(), snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
}, maybe_lockfile: None,
)) fs: fs.clone(),
.await? http_client_provider: http_client_provider.clone(),
npm_cache_dir,
cache_setting,
text_only_progress_bar: progress_bar,
maybe_node_modules_path: None,
npm_system_info: Default::default(),
npm_install_deps_provider: Arc::new(
// this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(),
),
npmrc: create_default_npmrc(),
lifecycle_scripts: Default::default(),
},
))
.await?;
(in_npm_pkg_checker, npm_resolver)
} }
}; };
let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
let node_resolver = Arc::new(NodeResolver::new( let node_resolver = Arc::new(NodeResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
npm_resolver.clone().into_npm_resolver(), in_npm_pkg_checker.clone(),
npm_resolver.clone().into_npm_pkg_folder_resolver(),
pkg_json_resolver.clone(),
));
let cjs_tracker = Arc::new(CjsTracker::new(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
IsCjsResolverOptions {
detect_cjs: !metadata.workspace_resolver.package_jsons.is_empty(),
is_node_main: false,
},
)); ));
let cjs_resolutions = Arc::new(CjsResolutionStore::default());
let cache_db = Caches::new(deno_dir_provider.clone()); let cache_db = Caches::new(deno_dir_provider.clone());
let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
let cli_node_resolver = Arc::new(CliNodeResolver::new( let npm_req_resolver =
cjs_resolutions.clone(), Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
fs.clone(), byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
node_resolver.clone(), fs: CliDenoResolverFs(fs.clone()),
npm_resolver.clone(), in_npm_pkg_checker: in_npm_pkg_checker.clone(),
)); node_resolver: node_resolver.clone(),
npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
}));
let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new( let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache, node_analysis_cache,
cjs_tracker.clone(),
fs.clone(), fs.clone(),
cli_node_resolver.clone(),
None, None,
); );
let node_code_translator = Arc::new(NodeCodeTranslator::new( let node_code_translator = Arc::new(NodeCodeTranslator::new(
cjs_esm_code_analyzer, cjs_esm_code_analyzer,
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
in_npm_pkg_checker,
node_resolver.clone(), node_resolver.clone(),
npm_resolver.clone().into_npm_resolver(), npm_resolver.clone().into_npm_pkg_folder_resolver(),
pkg_json_resolver.clone(),
)); ));
let workspace_resolver = { let workspace_resolver = {
let import_map = match metadata.workspace_resolver.import_map { let import_map = match metadata.workspace_resolver.import_map {
@ -560,17 +809,36 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
metadata.workspace_resolver.pkg_json_resolution, metadata.workspace_resolver.pkg_json_resolution,
) )
}; };
let code_cache = match metadata.code_cache_key {
Some(code_cache_key) => Some(Arc::new(DenoCompileCodeCache::new(
root_path.with_file_name(format!(
"{}.cache",
root_path.file_name().unwrap().to_string_lossy()
)),
code_cache_key,
)) as Arc<dyn CliCodeCache>),
None => {
log::debug!("Code cache disabled.");
None
}
};
let module_loader_factory = StandaloneModuleLoaderFactory { let module_loader_factory = StandaloneModuleLoaderFactory {
shared: Arc::new(SharedModuleLoaderState { shared: Arc::new(SharedModuleLoaderState {
cjs_tracker: cjs_tracker.clone(),
code_cache: code_cache.clone(),
fs: fs.clone(),
modules, modules,
workspace_resolver, node_code_translator: node_code_translator.clone(),
node_resolver: cli_node_resolver.clone(), node_resolver: node_resolver.clone(),
npm_module_loader: Arc::new(NpmModuleLoader::new( npm_module_loader: Arc::new(NpmModuleLoader::new(
cjs_resolutions.clone(), cjs_tracker.clone(),
node_code_translator,
fs.clone(), fs.clone(),
cli_node_resolver, node_code_translator,
)), )),
npm_resolver: npm_resolver.clone(),
npm_req_resolver,
vfs,
workspace_resolver,
}), }),
}; };
@ -609,9 +877,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
}); });
let worker_factory = CliMainWorkerFactory::new( let worker_factory = CliMainWorkerFactory::new(
Arc::new(BlobStore::default()), Arc::new(BlobStore::default()),
cjs_resolutions, code_cache,
// Code cache is not supported for standalone binary yet.
None,
feature_checker, feature_checker,
fs, fs,
None, None,
@ -620,6 +886,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
Box::new(module_loader_factory), Box::new(module_loader_factory),
node_resolver, node_resolver,
npm_resolver, npm_resolver,
pkg_json_resolver,
root_cert_store_provider, root_cert_store_provider,
permissions, permissions,
StorageKeyResolver::empty(), StorageKeyResolver::empty(),
@ -635,7 +902,6 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
inspect_wait: false, inspect_wait: false,
strace_ops: None, strace_ops: None,
is_inspecting: false, is_inspecting: false,
is_npm_main: main_module.scheme() == "npm",
skip_op_registration: true, skip_op_registration: true,
location: metadata.location, location: metadata.location,
argv0: NpmPackageReqReference::from_specifier(&main_module) argv0: NpmPackageReqReference::from_specifier(&main_module)
@ -652,8 +918,8 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
node_ipc: None, node_ipc: None,
serve_port: None, serve_port: None,
serve_host: None, serve_host: None,
unstable_detect_cjs: metadata.unstable_config.detect_cjs,
}, },
metadata.otel_config,
); );
// Initialize v8 once from the main thread. // Initialize v8 once from the main thread.

View file

@ -173,6 +173,7 @@ pub struct RemoteModulesStoreBuilder {
impl RemoteModulesStoreBuilder { impl RemoteModulesStoreBuilder {
pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) { pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
log::debug!("Adding '{}' ({})", specifier, media_type);
let specifier = specifier.to_string(); let specifier = specifier.to_string();
self.specifiers.push((specifier, self.data_byte_len)); self.specifiers.push((specifier, self.data_byte_len));
self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data
@ -182,6 +183,7 @@ impl RemoteModulesStoreBuilder {
pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) { pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
self.redirects.reserve(redirects.len()); self.redirects.reserve(redirects.len());
for (from, to) in redirects { for (from, to) in redirects {
log::debug!("Adding redirect '{}' -> '{}'", from, to);
let from = from.to_string(); let from = from.to_string();
let to = to.to_string(); let to = to.to_string();
self.redirects_len += (4 + from.len() + 4 + to.len()) as u64; self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
@ -212,14 +214,13 @@ impl RemoteModulesStoreBuilder {
} }
} }
pub struct DenoCompileModuleData<'a> { pub enum DenoCompileModuleSource {
pub specifier: &'a Url, String(&'static str),
pub media_type: MediaType, Bytes(Cow<'static, [u8]>),
pub data: Cow<'static, [u8]>,
} }
impl<'a> DenoCompileModuleData<'a> { impl DenoCompileModuleSource {
pub fn into_for_v8(self) -> (&'a Url, ModuleType, ModuleSourceCode) { pub fn into_for_v8(self) -> ModuleSourceCode {
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode { fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
ModuleSourceCode::Bytes(match data { ModuleSourceCode::Bytes(match data {
Cow::Borrowed(d) => d.into(), Cow::Borrowed(d) => d.into(),
@ -227,16 +228,31 @@ impl<'a> DenoCompileModuleData<'a> {
}) })
} }
fn into_string_unsafe(data: Cow<'static, [u8]>) -> ModuleSourceCode { match self {
// todo(https://github.com/denoland/deno_core/pull/943): store whether // todo(https://github.com/denoland/deno_core/pull/943): store whether
// the string is ascii or not ahead of time so we can avoid the is_ascii() // the string is ascii or not ahead of time so we can avoid the is_ascii()
// check in FastString::from_static // check in FastString::from_static
Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
Self::Bytes(b) => into_bytes(b),
}
}
}
pub struct DenoCompileModuleData<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
pub data: Cow<'static, [u8]>,
}
impl<'a> DenoCompileModuleData<'a> {
pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
match data { match data {
Cow::Borrowed(d) => ModuleSourceCode::String( Cow::Borrowed(d) => DenoCompileModuleSource::String(
// SAFETY: we know this is a valid utf8 string // SAFETY: we know this is a valid utf8 string
unsafe { FastString::from_static(std::str::from_utf8_unchecked(d)) }, unsafe { std::str::from_utf8_unchecked(d) },
), ),
Cow::Owned(d) => ModuleSourceCode::Bytes(d.into_boxed_slice().into()), Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
} }
} }
@ -255,11 +271,14 @@ impl<'a> DenoCompileModuleData<'a> {
(ModuleType::JavaScript, into_string_unsafe(self.data)) (ModuleType::JavaScript, into_string_unsafe(self.data))
} }
MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)), MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
MediaType::Wasm => (ModuleType::Wasm, into_bytes(self.data)), MediaType::Wasm => {
// just assume javascript if we made it here (ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data))
MediaType::TsBuildInfo | MediaType::SourceMap | MediaType::Unknown => {
(ModuleType::JavaScript, into_bytes(self.data))
} }
// just assume javascript if we made it here
MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
ModuleType::JavaScript,
DenoCompileModuleSource::Bytes(self.data),
),
}; };
(self.specifier, media_type, source) (self.specifier, media_type, source)
} }
@ -354,17 +373,17 @@ impl RemoteModulesStore {
pub fn read<'a>( pub fn read<'a>(
&'a self, &'a self,
specifier: &'a Url, original_specifier: &'a Url,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> { ) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
let mut count = 0; let mut count = 0;
let mut current = specifier; let mut specifier = original_specifier;
loop { loop {
if count > 10 { if count > 10 {
bail!("Too many redirects resolving '{}'", specifier); bail!("Too many redirects resolving '{}'", original_specifier);
} }
match self.specifiers.get(current) { match self.specifiers.get(specifier) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => { Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
current = to; specifier = to;
count += 1; count += 1;
} }
Some(RemoteModulesStoreSpecifierValue::Data(offset)) => { Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
@ -549,7 +568,7 @@ fn serialize_media_type(media_type: MediaType) -> u8 {
MediaType::Tsx => 10, MediaType::Tsx => 10,
MediaType::Json => 11, MediaType::Json => 11,
MediaType::Wasm => 12, MediaType::Wasm => 12,
MediaType::TsBuildInfo => 13, MediaType::Css => 13,
MediaType::SourceMap => 14, MediaType::SourceMap => 14,
MediaType::Unknown => 15, MediaType::Unknown => 15,
} }
@ -570,7 +589,7 @@ fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
10 => Ok(MediaType::Tsx), 10 => Ok(MediaType::Tsx),
11 => Ok(MediaType::Json), 11 => Ok(MediaType::Json),
12 => Ok(MediaType::Wasm), 12 => Ok(MediaType::Wasm),
13 => Ok(MediaType::TsBuildInfo), 13 => Ok(MediaType::Css),
14 => Ok(MediaType::SourceMap), 14 => Ok(MediaType::SourceMap),
15 => Ok(MediaType::Unknown), 15 => Ok(MediaType::Unknown),
_ => bail!("Unknown media type value: {}", value), _ => bail!("Unknown media type value: {}", value),

View file

@ -32,6 +32,15 @@ use thiserror::Error;
use crate::util; use crate::util;
use crate::util::fs::canonicalize_path; use crate::util::fs::canonicalize_path;
#[derive(Debug, Copy, Clone)]
pub enum VfsFileSubDataKind {
/// Raw bytes of the file.
Raw,
/// Bytes to use for module loading. For example, for TypeScript
/// files this will be the transpiled JavaScript source.
ModuleGraph,
}
#[derive(Error, Debug)] #[derive(Error, Debug)]
#[error( #[error(
"Failed to strip prefix '{}' from '{}'", root_path.display(), target.display() "Failed to strip prefix '{}' from '{}'", root_path.display(), target.display()
@ -51,7 +60,8 @@ pub struct VfsBuilder {
impl VfsBuilder { impl VfsBuilder {
pub fn new(root_path: PathBuf) -> Result<Self, AnyError> { pub fn new(root_path: PathBuf) -> Result<Self, AnyError> {
let root_path = canonicalize_path(&root_path)?; let root_path = canonicalize_path(&root_path)
.with_context(|| format!("Canonicalizing {}", root_path.display()))?;
log::debug!("Building vfs with root '{}'", root_path.display()); log::debug!("Building vfs with root '{}'", root_path.display());
Ok(Self { Ok(Self {
root_dir: VirtualDirectory { root_dir: VirtualDirectory {
@ -140,7 +150,11 @@ impl VfsBuilder {
// inline the symlink and make the target file // inline the symlink and make the target file
let file_bytes = std::fs::read(&target) let file_bytes = std::fs::read(&target)
.with_context(|| format!("Reading {}", path.display()))?; .with_context(|| format!("Reading {}", path.display()))?;
self.add_file_with_data_inner(&path, file_bytes)?; self.add_file_with_data_inner(
&path,
file_bytes,
VfsFileSubDataKind::Raw,
)?;
} else { } else {
log::warn!( log::warn!(
"{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.", "{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.",
@ -218,25 +232,27 @@ impl VfsBuilder {
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let file_bytes = std::fs::read(path) let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?; .with_context(|| format!("Reading {}", path.display()))?;
self.add_file_with_data_inner(path, file_bytes) self.add_file_with_data_inner(path, file_bytes, VfsFileSubDataKind::Raw)
} }
pub fn add_file_with_data( pub fn add_file_with_data(
&mut self, &mut self,
path: &Path, path: &Path,
data: Vec<u8>, data: Vec<u8>,
sub_data_kind: VfsFileSubDataKind,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let target_path = canonicalize_path(path)?; let target_path = canonicalize_path(path)?;
if target_path != path { if target_path != path {
self.add_symlink(path, &target_path)?; self.add_symlink(path, &target_path)?;
} }
self.add_file_with_data_inner(&target_path, data) self.add_file_with_data_inner(&target_path, data, sub_data_kind)
} }
fn add_file_with_data_inner( fn add_file_with_data_inner(
&mut self, &mut self,
path: &Path, path: &Path,
data: Vec<u8>, data: Vec<u8>,
sub_data_kind: VfsFileSubDataKind,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
log::debug!("Adding file '{}'", path.display()); log::debug!("Adding file '{}'", path.display());
let checksum = util::checksum::gen(&[&data]); let checksum = util::checksum::gen(&[&data]);
@ -252,8 +268,19 @@ impl VfsBuilder {
let name = path.file_name().unwrap().to_string_lossy(); let name = path.file_name().unwrap().to_string_lossy();
let data_len = data.len(); let data_len = data.len();
match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
Ok(_) => { Ok(index) => {
// already added, just ignore let entry = &mut dir.entries[index];
match entry {
VfsEntry::File(virtual_file) => match sub_data_kind {
VfsFileSubDataKind::Raw => {
virtual_file.offset = offset;
}
VfsFileSubDataKind::ModuleGraph => {
virtual_file.module_graph_offset = offset;
}
},
VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(),
}
} }
Err(insert_index) => { Err(insert_index) => {
dir.entries.insert( dir.entries.insert(
@ -261,6 +288,7 @@ impl VfsBuilder {
VfsEntry::File(VirtualFile { VfsEntry::File(VirtualFile {
name: name.to_string(), name: name.to_string(),
offset, offset,
module_graph_offset: offset,
len: data.len() as u64, len: data.len() as u64,
}), }),
); );
@ -301,7 +329,7 @@ impl VfsBuilder {
let dir = self.add_dir(path.parent().unwrap())?; let dir = self.add_dir(path.parent().unwrap())?;
let name = path.file_name().unwrap().to_string_lossy(); let name = path.file_name().unwrap().to_string_lossy();
match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
Ok(_) => unreachable!(), Ok(_) => Ok(()), // previously inserted
Err(insert_index) => { Err(insert_index) => {
dir.entries.insert( dir.entries.insert(
insert_index, insert_index,
@ -313,9 +341,9 @@ impl VfsBuilder {
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
}), }),
); );
Ok(())
} }
} }
Ok(())
} }
pub fn into_dir_and_files(self) -> (VirtualDirectory, Vec<Vec<u8>>) { pub fn into_dir_and_files(self) -> (VirtualDirectory, Vec<Vec<u8>>) {
@ -350,6 +378,7 @@ impl<'a> VfsEntryRef<'a> {
atime: None, atime: None,
birthtime: None, birthtime: None,
mtime: None, mtime: None,
ctime: None,
blksize: 0, blksize: 0,
size: 0, size: 0,
dev: 0, dev: 0,
@ -372,6 +401,7 @@ impl<'a> VfsEntryRef<'a> {
atime: None, atime: None,
birthtime: None, birthtime: None,
mtime: None, mtime: None,
ctime: None,
blksize: 0, blksize: 0,
size: file.len, size: file.len,
dev: 0, dev: 0,
@ -394,6 +424,7 @@ impl<'a> VfsEntryRef<'a> {
atime: None, atime: None,
birthtime: None, birthtime: None,
mtime: None, mtime: None,
ctime: None,
blksize: 0, blksize: 0,
size: 0, size: 0,
dev: 0, dev: 0,
@ -450,6 +481,12 @@ pub struct VirtualDirectory {
pub struct VirtualFile { pub struct VirtualFile {
pub name: String, pub name: String,
pub offset: u64, pub offset: u64,
/// Offset file to use for module loading when it differs from the
/// raw file. Often this will be the same offset as above for data
/// such as JavaScript files, but for TypeScript files the `offset`
/// will be the original raw bytes when included as an asset and this
/// offset will be to the transpiled JavaScript source.
pub module_graph_offset: u64,
pub len: u64, pub len: u64,
} }
@ -630,7 +667,7 @@ impl FileBackedVfsFile {
} }
fn read_to_buf(&self, buf: &mut [u8]) -> FsResult<usize> { fn read_to_buf(&self, buf: &mut [u8]) -> FsResult<usize> {
let pos = { let read_pos = {
let mut pos = self.pos.lock(); let mut pos = self.pos.lock();
let read_pos = *pos; let read_pos = *pos;
// advance the position due to the read // advance the position due to the read
@ -639,12 +676,12 @@ impl FileBackedVfsFile {
}; };
self self
.vfs .vfs
.read_file(&self.file, pos, buf) .read_file(&self.file, read_pos, buf)
.map_err(|err| err.into()) .map_err(|err| err.into())
} }
fn read_to_end(&self) -> FsResult<Vec<u8>> { fn read_to_end(&self) -> FsResult<Cow<'static, [u8]>> {
let pos = { let read_pos = {
let mut pos = self.pos.lock(); let mut pos = self.pos.lock();
let read_pos = *pos; let read_pos = *pos;
// todo(dsherret): should this always set it to the end of the file? // todo(dsherret): should this always set it to the end of the file?
@ -654,13 +691,21 @@ impl FileBackedVfsFile {
} }
read_pos read_pos
}; };
if pos > self.file.len { if read_pos > self.file.len {
return Ok(Vec::new()); return Ok(Cow::Borrowed(&[]));
}
if read_pos == 0 {
Ok(
self
.vfs
.read_file_all(&self.file, VfsFileSubDataKind::Raw)?,
)
} else {
let size = (self.file.len - read_pos) as usize;
let mut buf = vec![0; size];
self.vfs.read_file(&self.file, read_pos, &mut buf)?;
Ok(Cow::Owned(buf))
} }
let size = (self.file.len - pos) as usize;
let mut buf = vec![0; size];
self.vfs.read_file(&self.file, pos, &mut buf)?;
Ok(buf)
} }
} }
@ -699,11 +744,14 @@ impl deno_io::fs::File for FileBackedVfsFile {
} }
fn read_all_sync(self: Rc<Self>) -> FsResult<Vec<u8>> { fn read_all_sync(self: Rc<Self>) -> FsResult<Vec<u8>> {
self.read_to_end() self.read_to_end().map(|bytes| bytes.into_owned())
} }
async fn read_all_async(self: Rc<Self>) -> FsResult<Vec<u8>> { async fn read_all_async(self: Rc<Self>) -> FsResult<Vec<u8>> {
let inner = (*self).clone(); let inner = (*self).clone();
tokio::task::spawn_blocking(move || inner.read_to_end()).await? tokio::task::spawn_blocking(move || {
inner.read_to_end().map(|bytes| bytes.into_owned())
})
.await?
} }
fn chmod_sync(self: Rc<Self>, _pathmode: u32) -> FsResult<()> { fn chmod_sync(self: Rc<Self>, _pathmode: u32) -> FsResult<()> {
@ -874,8 +922,9 @@ impl FileBackedVfs {
pub fn read_file_all( pub fn read_file_all(
&self, &self,
file: &VirtualFile, file: &VirtualFile,
sub_data_kind: VfsFileSubDataKind,
) -> std::io::Result<Cow<'static, [u8]>> { ) -> std::io::Result<Cow<'static, [u8]>> {
let read_range = self.get_read_range(file, 0, file.len)?; let read_range = self.get_read_range(file, sub_data_kind, 0, file.len)?;
match &self.vfs_data { match &self.vfs_data {
Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])), Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])),
Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())), Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())),
@ -888,26 +937,37 @@ impl FileBackedVfs {
pos: u64, pos: u64,
buf: &mut [u8], buf: &mut [u8],
) -> std::io::Result<usize> { ) -> std::io::Result<usize> {
let read_range = self.get_read_range(file, pos, buf.len() as u64)?; let read_range = self.get_read_range(
buf.copy_from_slice(&self.vfs_data[read_range]); file,
Ok(buf.len()) VfsFileSubDataKind::Raw,
pos,
buf.len() as u64,
)?;
let read_len = read_range.len();
buf[..read_len].copy_from_slice(&self.vfs_data[read_range]);
Ok(read_len)
} }
fn get_read_range( fn get_read_range(
&self, &self,
file: &VirtualFile, file: &VirtualFile,
sub_data_kind: VfsFileSubDataKind,
pos: u64, pos: u64,
len: u64, len: u64,
) -> std::io::Result<Range<usize>> { ) -> std::io::Result<Range<usize>> {
let data = &self.vfs_data; if pos > file.len {
let start = self.fs_root.start_file_offset + file.offset + pos;
let end = start + len;
if end > data.len() as u64 {
return Err(std::io::Error::new( return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof, std::io::ErrorKind::UnexpectedEof,
"unexpected EOF", "unexpected EOF",
)); ));
} }
let offset = match sub_data_kind {
VfsFileSubDataKind::Raw => file.offset,
VfsFileSubDataKind::ModuleGraph => file.module_graph_offset,
};
let file_offset = self.fs_root.start_file_offset + offset;
let start = file_offset + pos;
let end = file_offset + std::cmp::min(pos + len, file.len);
Ok(start as usize..end as usize) Ok(start as usize..end as usize)
} }
@ -946,7 +1006,13 @@ mod test {
#[track_caller] #[track_caller]
fn read_file(vfs: &FileBackedVfs, path: &Path) -> String { fn read_file(vfs: &FileBackedVfs, path: &Path) -> String {
let file = vfs.file_entry(path).unwrap(); let file = vfs.file_entry(path).unwrap();
String::from_utf8(vfs.read_file_all(file).unwrap().into_owned()).unwrap() String::from_utf8(
vfs
.read_file_all(file, VfsFileSubDataKind::Raw)
.unwrap()
.into_owned(),
)
.unwrap()
} }
#[test] #[test]
@ -959,23 +1025,40 @@ mod test {
let src_path = src_path.to_path_buf(); let src_path = src_path.to_path_buf();
let mut builder = VfsBuilder::new(src_path.clone()).unwrap(); let mut builder = VfsBuilder::new(src_path.clone()).unwrap();
builder builder
.add_file_with_data_inner(&src_path.join("a.txt"), "data".into()) .add_file_with_data_inner(
&src_path.join("a.txt"),
"data".into(),
VfsFileSubDataKind::Raw,
)
.unwrap(); .unwrap();
builder builder
.add_file_with_data_inner(&src_path.join("b.txt"), "data".into()) .add_file_with_data_inner(
&src_path.join("b.txt"),
"data".into(),
VfsFileSubDataKind::Raw,
)
.unwrap(); .unwrap();
assert_eq!(builder.files.len(), 1); // because duplicate data assert_eq!(builder.files.len(), 1); // because duplicate data
builder builder
.add_file_with_data_inner(&src_path.join("c.txt"), "c".into()) .add_file_with_data_inner(
&src_path.join("c.txt"),
"c".into(),
VfsFileSubDataKind::Raw,
)
.unwrap(); .unwrap();
builder builder
.add_file_with_data_inner( .add_file_with_data_inner(
&src_path.join("sub_dir").join("d.txt"), &src_path.join("sub_dir").join("d.txt"),
"d".into(), "d".into(),
VfsFileSubDataKind::Raw,
) )
.unwrap(); .unwrap();
builder builder
.add_file_with_data_inner(&src_path.join("e.txt"), "e".into()) .add_file_with_data_inner(
&src_path.join("e.txt"),
"e".into(),
VfsFileSubDataKind::Raw,
)
.unwrap(); .unwrap();
builder builder
.add_symlink( .add_symlink(
@ -1146,6 +1229,7 @@ mod test {
.add_file_with_data_inner( .add_file_with_data_inner(
temp_path.join("a.txt").as_path(), temp_path.join("a.txt").as_path(),
"0123456789".to_string().into_bytes(), "0123456789".to_string().into_bytes(),
VfsFileSubDataKind::Raw,
) )
.unwrap(); .unwrap();
let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir);

View file

@ -155,6 +155,12 @@ fn prepare_env_vars(
initial_cwd.to_string_lossy().to_string(), initial_cwd.to_string_lossy().to_string(),
); );
} }
if !env_vars.contains_key(crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR) {
env_vars.insert(
crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR.into(),
crate::npm::get_npm_config_user_agent(),
);
}
if let Some(node_modules_dir) = node_modules_dir { if let Some(node_modules_dir) = node_modules_dir {
prepend_to_path( prepend_to_path(
&mut env_vars, &mut env_vars,
@ -204,7 +210,7 @@ impl ShellCommand for NpmCommand {
mut context: ShellCommandContext, mut context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> { ) -> LocalBoxFuture<'static, ExecuteResult> {
if context.args.first().map(|s| s.as_str()) == Some("run") if context.args.first().map(|s| s.as_str()) == Some("run")
&& context.args.len() > 2 && context.args.len() >= 2
// for now, don't run any npm scripts that have a flag because // for now, don't run any npm scripts that have a flag because
// we don't handle stuff like `--workspaces` properly // we don't handle stuff like `--workspaces` properly
&& !context.args.iter().any(|s| s.starts_with('-')) && !context.args.iter().any(|s| s.starts_with('-'))
@ -267,10 +273,12 @@ impl ShellCommand for NodeCommand {
) )
.execute(context); .execute(context);
} }
args.extend(["run", "-A"].into_iter().map(|s| s.to_string())); args.extend(["run", "-A"].into_iter().map(|s| s.to_string()));
args.extend(context.args.iter().cloned()); args.extend(context.args.iter().cloned());
let mut state = context.state; let mut state = context.state;
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1"); state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap()) ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap())
.execute(ShellCommandContext { .execute(ShellCommandContext {
@ -475,20 +483,32 @@ fn resolve_execution_path_from_npx_shim(
static SCRIPT_PATH_RE: Lazy<Regex> = static SCRIPT_PATH_RE: Lazy<Regex> =
lazy_regex::lazy_regex!(r#""\$basedir\/([^"]+)" "\$@""#); lazy_regex::lazy_regex!(r#""\$basedir\/([^"]+)" "\$@""#);
if text.starts_with("#!/usr/bin/env node") { let maybe_first_line = {
// launch this file itself because it's a JS file let index = text.find("\n")?;
Some(file_path) Some(&text[0..index])
} else { };
// Search for...
// > "$basedir/../next/dist/bin/next" "$@" if let Some(first_line) = maybe_first_line {
// ...which is what it will look like on Windows // NOTE(bartlomieju): this is not perfect, but handle two most common scenarios
SCRIPT_PATH_RE // where Node is run without any args. If there are args then we use `NodeCommand`
.captures(text) // struct.
.and_then(|c| c.get(1)) if first_line == "#!/usr/bin/env node"
.map(|relative_path| { || first_line == "#!/usr/bin/env -S node"
file_path.parent().unwrap().join(relative_path.as_str()) {
}) // launch this file itself because it's a JS file
return Some(file_path);
}
} }
// Search for...
// > "$basedir/../next/dist/bin/next" "$@"
// ...which is what it will look like on Windows
SCRIPT_PATH_RE
.captures(text)
.and_then(|c| c.get(1))
.map(|relative_path| {
file_path.parent().unwrap().join(relative_path.as_str())
})
} }
fn resolve_managed_npm_commands( fn resolve_managed_npm_commands(
@ -556,6 +576,16 @@ mod test {
let unix_shim = r#"#!/usr/bin/env node let unix_shim = r#"#!/usr/bin/env node
"use strict"; "use strict";
console.log('Hi!'); console.log('Hi!');
"#;
let path = PathBuf::from("/node_modules/.bin/example");
assert_eq!(
resolve_execution_path_from_npx_shim(path.clone(), unix_shim).unwrap(),
path
);
// example shim on unix
let unix_shim = r#"#!/usr/bin/env -S node
"use strict";
console.log('Hi!');
"#; "#;
let path = PathBuf::from("/node_modules/.bin/example"); let path = PathBuf::from("/node_modules/.bin/example");
assert_eq!( assert_eq!(

View file

@ -193,7 +193,7 @@ async fn bench_specifier_inner(
.await?; .await?;
// We execute the main module as a side module so that import.meta.main is not set. // We execute the main module as a side module so that import.meta.main is not set.
worker.execute_side_module_possibly_with_npm().await?; worker.execute_side_module().await?;
let mut worker = worker.into_main_worker(); let mut worker = worker.into_main_worker();
@ -486,6 +486,7 @@ pub async fn run_benchmarks_with_watch(
), ),
move |flags, watcher_communicator, changed_paths| { move |flags, watcher_communicator, changed_paths| {
let bench_flags = bench_flags.clone(); let bench_flags = bench_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move { Ok(async move {
let factory = CliFactory::from_flags_for_watcher( let factory = CliFactory::from_flags_for_watcher(
flags, flags,

View file

@ -32,6 +32,7 @@ use crate::graph_util::ModuleGraphBuilder;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::tsc; use crate::tsc;
use crate::tsc::Diagnostics; use crate::tsc::Diagnostics;
use crate::tsc::TypeCheckingCjsTracker;
use crate::util::extract; use crate::util::extract;
use crate::util::path::to_percent_decoded_str; use crate::util::path::to_percent_decoded_str;
@ -99,6 +100,7 @@ pub struct CheckOptions {
pub struct TypeChecker { pub struct TypeChecker {
caches: Arc<Caches>, caches: Arc<Caches>,
cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>, module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<NodeResolver>, node_resolver: Arc<NodeResolver>,
@ -108,6 +110,7 @@ pub struct TypeChecker {
impl TypeChecker { impl TypeChecker {
pub fn new( pub fn new(
caches: Arc<Caches>, caches: Arc<Caches>,
cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>, module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<NodeResolver>, node_resolver: Arc<NodeResolver>,
@ -115,6 +118,7 @@ impl TypeChecker {
) -> Self { ) -> Self {
Self { Self {
caches, caches,
cjs_tracker,
cli_options, cli_options,
module_graph_builder, module_graph_builder,
node_resolver, node_resolver,
@ -244,6 +248,7 @@ impl TypeChecker {
graph: graph.clone(), graph: graph.clone(),
hash_data, hash_data,
maybe_npm: Some(tsc::RequestNpmState { maybe_npm: Some(tsc::RequestNpmState {
cjs_tracker: self.cjs_tracker.clone(),
node_resolver: self.node_resolver.clone(), node_resolver: self.node_resolver.clone(),
npm_resolver: self.npm_resolver.clone(), npm_resolver: self.npm_resolver.clone(),
}), }),
@ -346,7 +351,7 @@ fn get_check_hash(
} }
} }
MediaType::Json MediaType::Json
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Wasm | MediaType::Wasm
| MediaType::Unknown => continue, | MediaType::Unknown => continue,
@ -375,6 +380,11 @@ fn get_check_hash(
hasher.write_str(module.specifier.as_str()); hasher.write_str(module.specifier.as_str());
hasher.write_str(&module.source); hasher.write_str(&module.source);
} }
Module::Wasm(module) => {
has_file_to_type_check = true;
hasher.write_str(module.specifier.as_str());
hasher.write_str(&module.source_dts);
}
Module::External(module) => { Module::External(module) => {
hasher.write_str(module.specifier.as_str()); hasher.write_str(module.specifier.as_str());
} }
@ -428,10 +438,11 @@ fn get_tsc_roots(
} }
MediaType::Json MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => None, | MediaType::Unknown => None,
}, },
Module::Wasm(module) => Some((module.specifier.clone(), MediaType::Dmts)),
Module::External(_) Module::External(_)
| Module::Node(_) | Module::Node(_)
| Module::Npm(_) | Module::Npm(_)
@ -536,7 +547,7 @@ fn has_ts_check(media_type: MediaType, file_text: &str) -> bool {
| MediaType::Tsx | MediaType::Tsx
| MediaType::Json | MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => false, | MediaType::Unknown => false,
} }

View file

@ -7,6 +7,7 @@ use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::standalone::binary::StandaloneRelativeFileBaseUrl; use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
use crate::standalone::is_standalone_binary; use crate::standalone::is_standalone_binary;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
@ -31,15 +32,12 @@ pub async fn compile(
let module_graph_creator = factory.module_graph_creator().await?; let module_graph_creator = factory.module_graph_creator().await?;
let binary_writer = factory.create_compile_binary_writer().await?; let binary_writer = factory.create_compile_binary_writer().await?;
let http_client = factory.http_client_provider(); let http_client = factory.http_client_provider();
let module_specifier = cli_options.resolve_main_module()?; let entrypoint = cli_options.resolve_main_module()?;
let module_roots = { let (module_roots, include_files) = get_module_roots_and_include_files(
let mut vec = Vec::with_capacity(compile_flags.include.len() + 1); entrypoint,
vec.push(module_specifier.clone()); &compile_flags,
for side_module in &compile_flags.include { cli_options.initial_cwd(),
vec.push(resolve_url_or_path(side_module, cli_options.initial_cwd())?); )?;
}
vec
};
// this is not supported, so show a warning about it, but don't error in order // this is not supported, so show a warning about it, but don't error in order
// to allow someone to still run `deno compile` when this is in a deno.json // to allow someone to still run `deno compile` when this is in a deno.json
@ -53,16 +51,6 @@ pub async fn compile(
); );
} }
if cli_options.unstable_detect_cjs() {
log::warn!(
concat!(
"{} --unstable-detect-cjs is not properly supported in deno compile. ",
"The compiled executable may encounter runtime errors.",
),
crate::colors::yellow("Warning"),
);
}
let output_path = resolve_compile_executable_output_path( let output_path = resolve_compile_executable_output_path(
http_client, http_client,
&compile_flags, &compile_flags,
@ -92,18 +80,22 @@ pub async fn compile(
check_warn_tsconfig(&ts_config_for_emit); check_warn_tsconfig(&ts_config_for_emit);
let root_dir_url = resolve_root_dir_from_specifiers( let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace().root_dir(), cli_options.workspace().root_dir(),
graph.specifiers().map(|(s, _)| s).chain( graph
cli_options .specifiers()
.node_modules_dir_path() .map(|(s, _)| s)
.and_then(|p| ModuleSpecifier::from_directory_path(p).ok()) .chain(
.iter(), cli_options
), .node_modules_dir_path()
.and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
.iter(),
)
.chain(include_files.iter()),
); );
log::debug!("Binary root dir: {}", root_dir_url); log::debug!("Binary root dir: {}", root_dir_url);
log::info!( log::info!(
"{} {} to {}", "{} {} to {}",
colors::green("Compile"), colors::green("Compile"),
module_specifier.to_string(), entrypoint,
output_path.display(), output_path.display(),
); );
validate_output_path(&output_path)?; validate_output_path(&output_path)?;
@ -128,9 +120,9 @@ pub async fn compile(
file, file,
&graph, &graph,
StandaloneRelativeFileBaseUrl::from(&root_dir_url), StandaloneRelativeFileBaseUrl::from(&root_dir_url),
module_specifier, entrypoint,
&include_files,
&compile_flags, &compile_flags,
cli_options,
) )
.await .await
.with_context(|| { .with_context(|| {
@ -222,6 +214,48 @@ fn validate_output_path(output_path: &Path) -> Result<(), AnyError> {
Ok(()) Ok(())
} }
fn get_module_roots_and_include_files(
entrypoint: &ModuleSpecifier,
compile_flags: &CompileFlags,
initial_cwd: &Path,
) -> Result<(Vec<ModuleSpecifier>, Vec<ModuleSpecifier>), AnyError> {
fn is_module_graph_module(url: &ModuleSpecifier) -> bool {
if url.scheme() != "file" {
return true;
}
let media_type = MediaType::from_specifier(url);
match media_type {
MediaType::JavaScript
| MediaType::Jsx
| MediaType::Mjs
| MediaType::Cjs
| MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Tsx
| MediaType::Json
| MediaType::Wasm => true,
MediaType::Css | MediaType::SourceMap | MediaType::Unknown => false,
}
}
let mut module_roots = Vec::with_capacity(compile_flags.include.len() + 1);
let mut include_files = Vec::with_capacity(compile_flags.include.len());
module_roots.push(entrypoint.clone());
for side_module in &compile_flags.include {
let url = resolve_url_or_path(side_module, initial_cwd)?;
if is_module_graph_module(&url) {
module_roots.push(url);
} else {
include_files.push(url);
}
}
Ok((module_roots, include_files))
}
async fn resolve_compile_executable_output_path( async fn resolve_compile_executable_output_path(
http_client_provider: &HttpClientProvider, http_client_provider: &HttpClientProvider,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,

View file

@ -6,12 +6,12 @@ use crate::args::FileFlags;
use crate::args::Flags; use crate::args::Flags;
use crate::cdp; use crate::cdp;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::npm::CliNpmResolver;
use crate::tools::fmt::format_json; use crate::tools::fmt::format_json;
use crate::tools::test::is_supported_test_path; use crate::tools::test::is_supported_test_path;
use crate::util::text_encoding::source_map_from_code; use crate::util::text_encoding::source_map_from_code;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::glob::FileCollector; use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns; use deno_config::glob::FilePatterns;
@ -25,6 +25,7 @@ use deno_core::serde_json;
use deno_core::sourcemap::SourceMap; use deno_core::sourcemap::SourceMap;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::LocalInspectorSession; use deno_core::LocalInspectorSession;
use node_resolver::InNpmPackageChecker;
use regex::Regex; use regex::Regex;
use std::fs; use std::fs;
use std::fs::File; use std::fs::File;
@ -327,6 +328,7 @@ fn generate_coverage_report(
coverage_report.found_lines = coverage_report.found_lines =
if let Some(source_map) = maybe_source_map.as_ref() { if let Some(source_map) = maybe_source_map.as_ref() {
let script_source_lines = script_source.lines().collect::<Vec<_>>();
let mut found_lines = line_counts let mut found_lines = line_counts
.iter() .iter()
.enumerate() .enumerate()
@ -334,7 +336,23 @@ fn generate_coverage_report(
// get all the mappings from this destination line to a different src line // get all the mappings from this destination line to a different src line
let mut results = source_map let mut results = source_map
.tokens() .tokens()
.filter(move |token| token.get_dst_line() as usize == index) .filter(|token| {
let dst_line = token.get_dst_line() as usize;
dst_line == index && {
let dst_col = token.get_dst_col() as usize;
let content = script_source_lines
.get(dst_line)
.and_then(|line| {
line.get(dst_col..std::cmp::min(dst_col + 2, line.len()))
})
.unwrap_or("");
!content.is_empty()
&& content != "/*"
&& content != "*/"
&& content != "//"
}
})
.map(move |token| (token.get_src_line() as usize, *count)) .map(move |token| (token.get_src_line() as usize, *count))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// only keep the results that point at different src lines // only keep the results that point at different src lines
@ -444,7 +462,7 @@ fn filter_coverages(
coverages: Vec<cdp::ScriptCoverage>, coverages: Vec<cdp::ScriptCoverage>,
include: Vec<String>, include: Vec<String>,
exclude: Vec<String>, exclude: Vec<String>,
npm_resolver: &dyn CliNpmResolver, in_npm_pkg_checker: &dyn InNpmPackageChecker,
) -> Vec<cdp::ScriptCoverage> { ) -> Vec<cdp::ScriptCoverage> {
let include: Vec<Regex> = let include: Vec<Regex> =
include.iter().map(|e| Regex::new(e).unwrap()).collect(); include.iter().map(|e| Regex::new(e).unwrap()).collect();
@ -462,13 +480,13 @@ fn filter_coverages(
.filter(|e| { .filter(|e| {
let is_internal = e.url.starts_with("ext:") let is_internal = e.url.starts_with("ext:")
|| e.url.ends_with("__anonymous__") || e.url.ends_with("__anonymous__")
|| e.url.ends_with("$deno$test.js") || e.url.ends_with("$deno$test.mjs")
|| e.url.ends_with(".snap") || e.url.ends_with(".snap")
|| is_supported_test_path(Path::new(e.url.as_str())) || is_supported_test_path(Path::new(e.url.as_str()))
|| doc_test_re.is_match(e.url.as_str()) || doc_test_re.is_match(e.url.as_str())
|| Url::parse(&e.url) || Url::parse(&e.url)
.ok() .ok()
.map(|url| npm_resolver.in_npm_package(&url)) .map(|url| in_npm_pkg_checker.in_npm_package(&url))
.unwrap_or(false); .unwrap_or(false);
let is_included = include.iter().any(|p| p.is_match(&e.url)); let is_included = include.iter().any(|p| p.is_match(&e.url));
@ -479,7 +497,7 @@ fn filter_coverages(
.collect::<Vec<cdp::ScriptCoverage>>() .collect::<Vec<cdp::ScriptCoverage>>()
} }
pub async fn cover_files( pub fn cover_files(
flags: Arc<Flags>, flags: Arc<Flags>,
coverage_flags: CoverageFlags, coverage_flags: CoverageFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
@ -489,9 +507,10 @@ pub async fn cover_files(
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let npm_resolver = factory.npm_resolver().await?; let in_npm_pkg_checker = factory.in_npm_pkg_checker()?;
let file_fetcher = factory.file_fetcher()?; let file_fetcher = factory.file_fetcher()?;
let emitter = factory.emitter()?; let emitter = factory.emitter()?;
let cjs_tracker = factory.cjs_tracker()?;
assert!(!coverage_flags.files.include.is_empty()); assert!(!coverage_flags.files.include.is_empty());
@ -511,7 +530,7 @@ pub async fn cover_files(
script_coverages, script_coverages,
coverage_flags.include, coverage_flags.include,
coverage_flags.exclude, coverage_flags.exclude,
npm_resolver.as_ref(), in_npm_pkg_checker.as_ref(),
); );
if script_coverages.is_empty() { if script_coverages.is_empty() {
return Err(generic_error("No covered files included in the report")); return Err(generic_error("No covered files included in the report"));
@ -568,6 +587,8 @@ pub async fn cover_files(
let transpiled_code = match file.media_type { let transpiled_code = match file.media_type {
MediaType::JavaScript MediaType::JavaScript
| MediaType::Unknown | MediaType::Unknown
| MediaType::Css
| MediaType::Wasm
| MediaType::Cjs | MediaType::Cjs
| MediaType::Mjs | MediaType::Mjs
| MediaType::Json => None, | MediaType::Json => None,
@ -577,7 +598,10 @@ pub async fn cover_files(
| MediaType::Mts | MediaType::Mts
| MediaType::Cts | MediaType::Cts
| MediaType::Tsx => { | MediaType::Tsx => {
Some(match emitter.maybe_cached_emit(&file.specifier, &file.source) { let module_kind = ModuleKind::from_is_cjs(
cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?,
);
Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source) {
Some(code) => code, Some(code) => code,
None => { None => {
return Err(anyhow!( return Err(anyhow!(
@ -588,7 +612,7 @@ pub async fn cover_files(
} }
}) })
} }
MediaType::Wasm | MediaType::TsBuildInfo | MediaType::SourceMap => { MediaType::SourceMap => {
unreachable!() unreachable!()
} }
}; };

View file

@ -21,10 +21,12 @@ use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_doc as doc; use deno_doc as doc;
use deno_doc::html::UrlResolveKind; use deno_doc::html::UrlResolveKind;
use deno_doc::html::UsageComposer;
use deno_doc::html::UsageComposerEntry;
use deno_graph::source::NullFileSystem; use deno_graph::source::NullFileSystem;
use deno_graph::EsParser;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_graph::ModuleAnalyzer; use deno_graph::ModuleAnalyzer;
use deno_graph::ModuleParser;
use deno_graph::ModuleSpecifier; use deno_graph::ModuleSpecifier;
use doc::html::ShortPath; use doc::html::ShortPath;
use doc::DocDiagnostic; use doc::DocDiagnostic;
@ -35,9 +37,12 @@ use std::sync::Arc;
const JSON_SCHEMA_VERSION: u8 = 1; const JSON_SCHEMA_VERSION: u8 = 1;
const PRISM_CSS: &str = include_str!("./doc/prism.css");
const PRISM_JS: &str = include_str!("./doc/prism.js");
async fn generate_doc_nodes_for_builtin_types( async fn generate_doc_nodes_for_builtin_types(
doc_flags: DocFlags, doc_flags: DocFlags,
parser: &dyn ModuleParser, parser: &dyn EsParser,
analyzer: &dyn ModuleAnalyzer, analyzer: &dyn ModuleAnalyzer,
) -> Result<IndexMap<ModuleSpecifier, Vec<doc::DocNode>>, AnyError> { ) -> Result<IndexMap<ModuleSpecifier, Vec<doc::DocNode>>, AnyError> {
let source_file_specifier = let source_file_specifier =
@ -96,7 +101,7 @@ pub async fn doc(
let module_info_cache = factory.module_info_cache()?; let module_info_cache = factory.module_info_cache()?;
let parsed_source_cache = factory.parsed_source_cache(); let parsed_source_cache = factory.parsed_source_cache();
let capturing_parser = parsed_source_cache.as_capturing_parser(); let capturing_parser = parsed_source_cache.as_capturing_parser();
let analyzer = module_info_cache.as_module_analyzer(parsed_source_cache); let analyzer = module_info_cache.as_module_analyzer();
let doc_nodes_by_url = match doc_flags.source_files { let doc_nodes_by_url = match doc_flags.source_files {
DocSourceFileFlag::Builtin => { DocSourceFileFlag::Builtin => {
@ -312,10 +317,6 @@ impl deno_doc::html::HrefResolver for DocResolver {
None None
} }
fn resolve_usage(&self, current_resolve: UrlResolveKind) -> Option<String> {
current_resolve.get_file().map(|file| file.path.to_string())
}
fn resolve_source(&self, location: &deno_doc::Location) -> Option<String> { fn resolve_source(&self, location: &deno_doc::Location) -> Option<String> {
Some(location.filename.to_string()) Some(location.filename.to_string())
} }
@ -350,105 +351,30 @@ impl deno_doc::html::HrefResolver for DocResolver {
} }
} }
struct DenoDocResolver(bool); struct DocComposer;
impl deno_doc::html::HrefResolver for DenoDocResolver { impl UsageComposer for DocComposer {
fn resolve_path( fn is_single_mode(&self) -> bool {
true
}
fn compose(
&self, &self,
current: UrlResolveKind, current_resolve: UrlResolveKind,
target: UrlResolveKind, usage_to_md: deno_doc::html::UsageToMd,
) -> String { ) -> IndexMap<UsageComposerEntry, String> {
let path = deno_doc::html::href_path_resolve(current, target);
if self.0 {
if let Some(path) = path
.strip_suffix("index.html")
.or_else(|| path.strip_suffix(".html"))
{
return path.to_owned();
}
}
path
}
fn resolve_global_symbol(&self, _symbol: &[String]) -> Option<String> {
None
}
fn resolve_import_href(
&self,
_symbol: &[String],
_src: &str,
) -> Option<String> {
None
}
fn resolve_usage(&self, _current_resolve: UrlResolveKind) -> Option<String> {
None
}
fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> {
None
}
fn resolve_external_jsdoc_module(
&self,
_module: &str,
_symbol: Option<&str>,
) -> Option<(String, String)> {
None
}
}
struct NodeDocResolver(bool);
impl deno_doc::html::HrefResolver for NodeDocResolver {
fn resolve_path(
&self,
current: UrlResolveKind,
target: UrlResolveKind,
) -> String {
let path = deno_doc::html::href_path_resolve(current, target);
if self.0 {
if let Some(path) = path
.strip_suffix("index.html")
.or_else(|| path.strip_suffix(".html"))
{
return path.to_owned();
}
}
path
}
fn resolve_global_symbol(&self, _symbol: &[String]) -> Option<String> {
None
}
fn resolve_import_href(
&self,
_symbol: &[String],
_src: &str,
) -> Option<String> {
None
}
fn resolve_usage(&self, current_resolve: UrlResolveKind) -> Option<String> {
current_resolve current_resolve
.get_file() .get_file()
.map(|file| format!("node:{}", file.path)) .map(|current_file| {
} IndexMap::from([(
UsageComposerEntry {
fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> { name: "".to_string(),
None icon: None,
} },
usage_to_md(current_file.path.as_str(), None),
fn resolve_external_jsdoc_module( )])
&self, })
_module: &str, .unwrap_or_default()
_symbol: Option<&str>,
) -> Option<(String, String)> {
None
} }
} }
@ -461,30 +387,10 @@ fn generate_docs_directory(
let cwd = std::env::current_dir().context("Failed to get CWD")?; let cwd = std::env::current_dir().context("Failed to get CWD")?;
let output_dir_resolved = cwd.join(&html_options.output); let output_dir_resolved = cwd.join(&html_options.output);
let internal_env = std::env::var("DENO_INTERNAL_HTML_DOCS").ok();
let href_resolver: Rc<dyn deno_doc::html::HrefResolver> = if internal_env
.as_ref()
.is_some_and(|internal_html_docs| internal_html_docs == "node")
{
Rc::new(NodeDocResolver(html_options.strip_trailing_html))
} else if internal_env
.as_ref()
.is_some_and(|internal_html_docs| internal_html_docs == "deno")
|| deno_ns.is_empty()
{
Rc::new(DenoDocResolver(html_options.strip_trailing_html))
} else {
Rc::new(DocResolver {
deno_ns,
strip_trailing_html: html_options.strip_trailing_html,
})
};
let category_docs = let category_docs =
if let Some(category_docs_path) = &html_options.category_docs_path { if let Some(category_docs_path) = &html_options.category_docs_path {
let content = std::fs::read(category_docs_path)?; let content = std::fs::read(category_docs_path)?;
Some(deno_core::serde_json::from_slice(&content)?) Some(serde_json::from_slice(&content)?)
} else { } else {
None None
}; };
@ -493,7 +399,7 @@ fn generate_docs_directory(
&html_options.symbol_redirect_map_path &html_options.symbol_redirect_map_path
{ {
let content = std::fs::read(symbol_redirect_map_path)?; let content = std::fs::read(symbol_redirect_map_path)?;
Some(deno_core::serde_json::from_slice(&content)?) Some(serde_json::from_slice(&content)?)
} else { } else {
None None
}; };
@ -502,7 +408,7 @@ fn generate_docs_directory(
&html_options.default_symbol_map_path &html_options.default_symbol_map_path
{ {
let content = std::fs::read(default_symbol_map_path)?; let content = std::fs::read(default_symbol_map_path)?;
Some(deno_core::serde_json::from_slice(&content)?) Some(serde_json::from_slice(&content)?)
} else { } else {
None None
}; };
@ -511,17 +417,33 @@ fn generate_docs_directory(
package_name: html_options.name.clone(), package_name: html_options.name.clone(),
main_entrypoint: None, main_entrypoint: None,
rewrite_map, rewrite_map,
href_resolver, href_resolver: Rc::new(DocResolver {
usage_composer: None, deno_ns,
strip_trailing_html: html_options.strip_trailing_html,
}),
usage_composer: Rc::new(DocComposer),
category_docs, category_docs,
disable_search: internal_env.is_some(), disable_search: false,
symbol_redirect_map, symbol_redirect_map,
default_symbol_map, default_symbol_map,
markdown_renderer: deno_doc::html::comrak::create_renderer(
None, None, None,
),
markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
head_inject: Some(Rc::new(|root| {
format!(
r#"<link href="{root}{}" rel="stylesheet" /><link href="{root}prism.css" rel="stylesheet" /><script src="{root}prism.js"></script>"#,
deno_doc::html::comrak::COMRAK_STYLESHEET_FILENAME
)
})),
}; };
let files = deno_doc::html::generate(options, doc_nodes_by_url) let mut files = deno_doc::html::generate(options, doc_nodes_by_url)
.context("Failed to generate HTML documentation")?; .context("Failed to generate HTML documentation")?;
files.insert("prism.js".to_string(), PRISM_JS.to_string());
files.insert("prism.css".to_string(), PRISM_CSS.to_string());
let path = &output_dir_resolved; let path = &output_dir_resolved;
let _ = std::fs::remove_dir_all(path); let _ = std::fs::remove_dir_all(path);
std::fs::create_dir(path) std::fs::create_dir(path)

3
cli/tools/doc/prism.css Normal file
View file

@ -0,0 +1,3 @@
/* PrismJS 1.29.0
https://prismjs.com/download.html#themes=prism&languages=markup+css+clike+javascript+bash+json+markdown+regex+rust+typescript */
code[class*=language-],pre[class*=language-]{color:#000;background:0 0;text-shadow:0 1px #fff;font-family:Consolas,Monaco,'Andale Mono','Ubuntu Mono',monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-moz-hyphens:none;-ms-hyphens:none;hyphens:none}code[class*=language-] ::-moz-selection,code[class*=language-]::-moz-selection,pre[class*=language-] ::-moz-selection,pre[class*=language-]::-moz-selection{text-shadow:none;background:#b3d4fc}code[class*=language-] ::selection,code[class*=language-]::selection,pre[class*=language-] ::selection,pre[class*=language-]::selection{text-shadow:none;background:#b3d4fc}@media print{code[class*=language-],pre[class*=language-]{text-shadow:none}}pre[class*=language-]{overflow:auto}:not(pre)>code[class*=language-],pre[class*=language-]{background:#f5f2f0}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#708090}.token.punctuation{color:#999}.token.namespace{opacity:.7}.token.boolean,.token.constant,.token.deleted,.token.number,.token.property,.token.symbol,.token.tag{color:#905}.token.attr-name,.token.builtin,.token.char,.token.inserted,.token.selector,.token.string{color:#690}.language-css .token.string,.style .token.string,.token.entity,.token.operator,.token.url{color:#9a6e3a;background:hsla(0,0%,100%,.5)}.token.atrule,.token.attr-value,.token.keyword{color:#07a}.token.class-name,.token.function{color:#dd4a68}.token.important,.token.regex,.token.variable{color:#e90}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}

15
cli/tools/doc/prism.js Normal file

File diff suppressed because one or more lines are too long

View file

@ -83,6 +83,7 @@ pub async fn format(
file_watcher::PrintConfig::new("Fmt", !watch_flags.no_clear_screen), file_watcher::PrintConfig::new("Fmt", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| { move |flags, watcher_communicator, changed_paths| {
let fmt_flags = fmt_flags.clone(); let fmt_flags = fmt_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move { Ok(async move {
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
@ -227,6 +228,7 @@ fn collect_fmt_files(
}) })
.ignore_git_folder() .ignore_git_folder()
.ignore_node_modules() .ignore_node_modules()
.use_gitignore()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
} }
@ -270,6 +272,7 @@ fn format_markdown(
| "njk" | "njk"
| "yml" | "yml"
| "yaml" | "yaml"
| "sql"
) { ) {
// It's important to tell dprint proper file extension, otherwise // It's important to tell dprint proper file extension, otherwise
// it might parse the file twice. // it might parse the file twice.
@ -299,6 +302,13 @@ fn format_markdown(
} }
} }
"yml" | "yaml" => format_yaml(text, fmt_options), "yml" | "yaml" => format_yaml(text, fmt_options),
"sql" => {
if unstable_options.sql {
format_sql(text, fmt_options)
} else {
Ok(None)
}
}
_ => { _ => {
let mut codeblock_config = let mut codeblock_config =
get_resolved_typescript_config(fmt_options); get_resolved_typescript_config(fmt_options);
@ -353,6 +363,21 @@ fn format_yaml(
file_text: &str, file_text: &str,
fmt_options: &FmtOptionsConfig, fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> { ) -> Result<Option<String>, AnyError> {
let ignore_file = file_text
.lines()
.take_while(|line| line.starts_with('#'))
.any(|line| {
line
.strip_prefix('#')
.unwrap()
.trim()
.starts_with("deno-fmt-ignore-file")
});
if ignore_file {
return Ok(None);
}
let formatted_str = let formatted_str =
pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options)) pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options))
.map_err(AnyError::from)?; .map_err(AnyError::from)?;
@ -486,7 +511,52 @@ pub fn format_html(
}) })
} }
/// Formats a single TS, TSX, JS, JSX, JSONC, JSON, MD, or IPYNB file. pub fn format_sql(
file_text: &str,
fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
let ignore_file = file_text
.lines()
.take_while(|line| line.starts_with("--"))
.any(|line| {
line
.strip_prefix("--")
.unwrap()
.trim()
.starts_with("deno-fmt-ignore-file")
});
if ignore_file {
return Ok(None);
}
let mut formatted_str = sqlformat::format(
file_text,
&sqlformat::QueryParams::None,
&sqlformat::FormatOptions {
ignore_case_convert: None,
indent: if fmt_options.use_tabs.unwrap_or_default() {
sqlformat::Indent::Tabs
} else {
sqlformat::Indent::Spaces(fmt_options.indent_width.unwrap_or(2))
},
// leave one blank line between queries.
lines_between_queries: 2,
uppercase: Some(true),
},
);
// Add single new line to the end of file.
formatted_str.push('\n');
Ok(if formatted_str == file_text {
None
} else {
Some(formatted_str)
})
}
/// Formats a single TS, TSX, JS, JSX, JSONC, JSON, MD, IPYNB or SQL file.
pub fn format_file( pub fn format_file(
file_path: &Path, file_path: &Path,
file_text: &str, file_text: &str,
@ -521,6 +591,13 @@ pub fn format_file(
format_file(file_path, &file_text, fmt_options, unstable_options, None) format_file(file_path, &file_text, fmt_options, unstable_options, None)
}, },
), ),
"sql" => {
if unstable_options.sql {
format_sql(file_text, fmt_options)
} else {
Ok(None)
}
}
_ => { _ => {
let config = get_resolved_typescript_config(fmt_options); let config = get_resolved_typescript_config(fmt_options);
dprint_plugin_typescript::format_text( dprint_plugin_typescript::format_text(
@ -775,28 +852,26 @@ fn format_ensure_stable(
return Ok(Some(current_text)); return Ok(Some(current_text));
} }
Err(err) => { Err(err) => {
panic!( bail!(
concat!( concat!(
"Formatting succeeded initially, but failed when ensuring a ", "Formatting succeeded initially, but failed when ensuring a ",
"stable format. This indicates a bug in the formatter where ", "stable format. This indicates a bug in the formatter where ",
"the text it produces is not syntactically correct. As a temporary ", "the text it produces is not syntactically correct. As a temporary ",
"workaround you can ignore this file ({}).\n\n{:#}" "workaround you can ignore this file.\n\n{:#}"
), ),
file_path.display(),
err, err,
) )
} }
} }
count += 1; count += 1;
if count == 5 { if count == 5 {
panic!( bail!(
concat!( concat!(
"Formatting not stable. Bailed after {} tries. This indicates a bug ", "Formatting not stable. Bailed after {} tries. This indicates a bug ",
"in the formatter where it formats the file ({}) differently each time. As a ", "in the formatter where it formats the file differently each time. As a ",
"temporary workaround you can ignore this file." "temporary workaround you can ignore this file."
), ),
count, count,
file_path.display(),
) )
} }
} }
@ -1017,7 +1092,7 @@ fn get_resolved_markup_fmt_config(
max_attrs_per_line: None, max_attrs_per_line: None,
prefer_attrs_single_line: false, prefer_attrs_single_line: false,
html_normal_self_closing: None, html_normal_self_closing: None,
html_void_self_closing: Some(true), html_void_self_closing: None,
component_self_closing: None, component_self_closing: None,
svg_self_closing: None, svg_self_closing: None,
mathml_self_closing: None, mathml_self_closing: None,
@ -1194,12 +1269,15 @@ fn is_supported_ext_fmt(path: &Path) -> bool {
| "yml" | "yml"
| "yaml" | "yaml"
| "ipynb" | "ipynb"
| "sql"
) )
}) })
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use test_util::assert_starts_with;
use super::*; use super::*;
#[test] #[test]
@ -1252,15 +1330,24 @@ mod test {
assert!(is_supported_ext_fmt(Path::new("foo.yaml"))); assert!(is_supported_ext_fmt(Path::new("foo.yaml")));
assert!(is_supported_ext_fmt(Path::new("foo.YaML"))); assert!(is_supported_ext_fmt(Path::new("foo.YaML")));
assert!(is_supported_ext_fmt(Path::new("foo.ipynb"))); assert!(is_supported_ext_fmt(Path::new("foo.ipynb")));
assert!(is_supported_ext_fmt(Path::new("foo.sql")));
assert!(is_supported_ext_fmt(Path::new("foo.Sql")));
assert!(is_supported_ext_fmt(Path::new("foo.sQl")));
assert!(is_supported_ext_fmt(Path::new("foo.sqL")));
assert!(is_supported_ext_fmt(Path::new("foo.SQL")));
} }
#[test] #[test]
#[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")]
fn test_format_ensure_stable_unstable_format() { fn test_format_ensure_stable_unstable_format() {
format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { let err =
Ok(Some(format!("1{file_text}"))) format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
}) Ok(Some(format!("1{file_text}")))
.unwrap(); })
.unwrap_err();
assert_starts_with!(
err.to_string(),
"Formatting not stable. Bailed after 5 tries."
);
} }
#[test] #[test]
@ -1274,16 +1361,20 @@ mod test {
} }
#[test] #[test]
#[should_panic(expected = "Formatting succeeded initially, but failed when")]
fn test_format_ensure_stable_error_second() { fn test_format_ensure_stable_error_second() {
format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { let err =
if file_text == "1" { format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
Ok(Some("11".to_string())) if file_text == "1" {
} else { Ok(Some("11".to_string()))
bail!("Error formatting.") } else {
} bail!("Error formatting.")
}) }
.unwrap(); })
.unwrap_err();
assert_starts_with!(
err.to_string(),
"Formatting succeeded initially, but failed when"
);
} }
#[test] #[test]

View file

@ -126,6 +126,7 @@ fn print_cache_info(
let registry_cache = dir.registries_folder_path(); let registry_cache = dir.registries_folder_path();
let mut origin_dir = dir.origin_data_folder_path(); let mut origin_dir = dir.origin_data_folder_path();
let deno_dir = dir.root_path_for_display().to_string(); let deno_dir = dir.root_path_for_display().to_string();
let web_cache_dir = crate::worker::get_cache_storage_dir();
if let Some(location) = &location { if let Some(location) = &location {
origin_dir = origin_dir =
@ -143,6 +144,7 @@ fn print_cache_info(
"typescriptCache": typescript_cache, "typescriptCache": typescript_cache,
"registryCache": registry_cache, "registryCache": registry_cache,
"originStorage": origin_dir, "originStorage": origin_dir,
"webCacheStorage": web_cache_dir,
}); });
if location.is_some() { if location.is_some() {
@ -177,6 +179,11 @@ fn print_cache_info(
colors::bold("Origin storage:"), colors::bold("Origin storage:"),
origin_dir.display() origin_dir.display()
); );
println!(
"{} {}",
colors::bold("Web cache storage:"),
web_cache_dir.display()
);
if location.is_some() { if location.is_some() {
println!( println!(
"{} {}", "{} {}",
@ -228,22 +235,31 @@ fn add_npm_packages_to_json(
.get_mut("dependencies") .get_mut("dependencies")
.and_then(|d| d.as_array_mut()); .and_then(|d| d.as_array_mut());
if let Some(dependencies) = dependencies { if let Some(dependencies) = dependencies {
for dep in dependencies.iter_mut() { for dep in dependencies.iter_mut().flat_map(|d| d.as_object_mut()) {
if let serde_json::Value::Object(dep) = dep { if let Some(specifier) = dep.get("specifier").and_then(|s| s.as_str())
let specifier = dep.get("specifier").and_then(|s| s.as_str()); {
if let Some(specifier) = specifier { if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) {
if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) { if let Ok(pkg) = snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
if let Ok(pkg) = {
snapshot.resolve_pkg_from_pkg_req(npm_ref.req()) dep.insert(
{ "npmPackage".to_string(),
dep.insert( pkg.id.as_serialized().into(),
"npmPackage".to_string(), );
pkg.id.as_serialized().into(),
);
}
} }
} }
} }
// don't show this in the output unless someone needs it
if let Some(code) =
dep.get_mut("code").and_then(|c| c.as_object_mut())
{
code.remove("resolutionMode");
}
if let Some(types) =
dep.get_mut("types").and_then(|c| c.as_object_mut())
{
types.remove("resolutionMode");
}
} }
} }
} }
@ -446,6 +462,7 @@ impl<'a> GraphDisplayContext<'a> {
let maybe_cache_info = match root { let maybe_cache_info = match root {
Module::Js(module) => module.maybe_cache_info.as_ref(), Module::Js(module) => module.maybe_cache_info.as_ref(),
Module::Json(module) => module.maybe_cache_info.as_ref(), Module::Json(module) => module.maybe_cache_info.as_ref(),
Module::Wasm(module) => module.maybe_cache_info.as_ref(),
Module::Node(_) | Module::Npm(_) | Module::External(_) => None, Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
}; };
if let Some(cache_info) = maybe_cache_info { if let Some(cache_info) = maybe_cache_info {
@ -468,6 +485,7 @@ impl<'a> GraphDisplayContext<'a> {
let size = match m { let size = match m {
Module::Js(module) => module.size(), Module::Js(module) => module.size(),
Module::Json(module) => module.size(), Module::Json(module) => module.size(),
Module::Wasm(module) => module.size(),
Module::Node(_) | Module::Npm(_) | Module::External(_) => 0, Module::Node(_) | Module::Npm(_) | Module::External(_) => 0,
}; };
size as f64 size as f64
@ -530,7 +548,7 @@ impl<'a> GraphDisplayContext<'a> {
fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode { fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode {
enum PackageOrSpecifier { enum PackageOrSpecifier {
Package(NpmResolutionPackage), Package(Box<NpmResolutionPackage>),
Specifier(ModuleSpecifier), Specifier(ModuleSpecifier),
} }
@ -538,7 +556,7 @@ impl<'a> GraphDisplayContext<'a> {
let package_or_specifier = match module.npm() { let package_or_specifier = match module.npm() {
Some(npm) => match self.npm_info.resolve_package(npm.nv_reference.nv()) { Some(npm) => match self.npm_info.resolve_package(npm.nv_reference.nv()) {
Some(package) => Package(package.clone()), Some(package) => Package(Box::new(package.clone())),
None => Specifier(module.specifier().clone()), // should never happen None => Specifier(module.specifier().clone()), // should never happen
}, },
None => Specifier(module.specifier().clone()), None => Specifier(module.specifier().clone()),
@ -567,6 +585,7 @@ impl<'a> GraphDisplayContext<'a> {
Specifier(_) => match module { Specifier(_) => match module {
Module::Js(module) => Some(module.size() as u64), Module::Js(module) => Some(module.size() as u64),
Module::Json(module) => Some(module.size() as u64), Module::Json(module) => Some(module.size() as u64),
Module::Wasm(module) => Some(module.size() as u64),
Module::Node(_) | Module::Npm(_) | Module::External(_) => None, Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
}, },
}; };
@ -580,8 +599,8 @@ impl<'a> GraphDisplayContext<'a> {
Package(package) => { Package(package) => {
tree_node.children.extend(self.build_npm_deps(package)); tree_node.children.extend(self.build_npm_deps(package));
} }
Specifier(_) => { Specifier(_) => match module {
if let Some(module) = module.js() { Module::Js(module) => {
if let Some(types_dep) = &module.maybe_types_dependency { if let Some(types_dep) = &module.maybe_types_dependency {
if let Some(child) = if let Some(child) =
self.build_resolved_info(&types_dep.dependency, true) self.build_resolved_info(&types_dep.dependency, true)
@ -593,7 +612,16 @@ impl<'a> GraphDisplayContext<'a> {
tree_node.children.extend(self.build_dep_info(dep)); tree_node.children.extend(self.build_dep_info(dep));
} }
} }
} Module::Wasm(module) => {
for dep in module.dependencies.values() {
tree_node.children.extend(self.build_dep_info(dep));
}
}
Module::Json(_)
| Module::Npm(_)
| Module::Node(_)
| Module::External(_) => {}
},
} }
} }
tree_node tree_node
@ -645,10 +673,12 @@ impl<'a> GraphDisplayContext<'a> {
let message = match err { let message = match err {
HttpsChecksumIntegrity(_) => "(checksum integrity error)", HttpsChecksumIntegrity(_) => "(checksum integrity error)",
Decode(_) => "(loading decode error)", Decode(_) => "(loading decode error)",
Loader(err) => match deno_core::error::get_custom_error_class(err) { Loader(err) => {
Some("NotCapable") => "(not capable, requires --allow-import)", match deno_runtime::errors::get_error_class_name(err) {
_ => "(loading error)", Some("NotCapable") => "(not capable, requires --allow-import)",
}, _ => "(loading error)",
}
}
Jsr(_) => "(loading error)", Jsr(_) => "(loading error)",
NodeUnknownBuiltinModule(_) => "(unknown node built-in error)", NodeUnknownBuiltinModule(_) => "(unknown node built-in error)",
Npm(_) => "(npm loading error)", Npm(_) => "(npm loading error)",
@ -656,7 +686,7 @@ impl<'a> GraphDisplayContext<'a> {
}; };
self.build_error_msg(specifier, message.as_ref()) self.build_error_msg(specifier, message.as_ref())
} }
ModuleError::ParseErr(_, _) => { ModuleError::ParseErr(_, _) | ModuleError::WasmParseErr(_, _) => {
self.build_error_msg(specifier, "(parsing error)") self.build_error_msg(specifier, "(parsing error)")
} }
ModuleError::UnsupportedImportAttributeType { .. } => { ModuleError::UnsupportedImportAttributeType { .. } => {

View file

@ -1,15 +1,29 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::InitFlags; use crate::args::InitFlags;
use crate::args::PackagesAllowedScripts;
use crate::args::PermissionFlags;
use crate::args::RunFlags;
use crate::colors; use crate::colors;
use color_print::cformat;
use color_print::cstr;
use deno_config::deno_json::NodeModulesDirMode;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_runtime::WorkerExecutionMode;
use log::info; use log::info;
use std::io::IsTerminal;
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::Path;
pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> { pub async fn init_project(init_flags: InitFlags) -> Result<i32, AnyError> {
if let Some(package) = &init_flags.package {
return init_npm(package, init_flags.package_args).await;
}
let cwd = let cwd =
std::env::current_dir().context("Can't read current working directory.")?; std::env::current_dir().context("Can't read current working directory.")?;
let dir = if let Some(dir) = &init_flags.dir { let dir = if let Some(dir) = &init_flags.dir {
@ -24,32 +38,29 @@ pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
create_file( create_file(
&dir, &dir,
"main.ts", "main.ts",
r#"import { type Route, route, serveDir } from "@std/http"; r#"import { serveDir } from "@std/http";
const routes: Route[] = [ const userPagePattern = new URLPattern({ pathname: "/users/:id" });
{ const staticPathPattern = new URLPattern({ pathname: "/static/*" });
pattern: new URLPattern({ pathname: "/" }),
handler: () => new Response("Home page"),
},
{
pattern: new URLPattern({ pathname: "/users/:id" }),
handler: (_req, _info, params) => new Response(params?.pathname.groups.id),
},
{
pattern: new URLPattern({ pathname: "/static/*" }),
handler: (req) => serveDir(req),
},
];
function defaultHandler(_req: Request) {
return new Response("Not found", { status: 404 });
}
const handler = route(routes, defaultHandler);
export default { export default {
fetch(req) { fetch(req) {
return handler(req); const url = new URL(req.url);
if (url.pathname === "/") {
return new Response("Home page");
}
const userPageMatch = userPagePattern.exec(url);
if (userPageMatch) {
return new Response(userPageMatch.pathname.groups.id);
}
if (staticPathPattern.test(url)) {
return serveDir(req);
}
return new Response("Not found", { status: 404 });
}, },
} satisfies Deno.ServeDefaultExport; } satisfies Deno.ServeDefaultExport;
"#, "#,
@ -238,7 +249,59 @@ Deno.test(function addTest() {
info!(" {}", colors::gray("# Run the tests")); info!(" {}", colors::gray("# Run the tests"));
info!(" deno test"); info!(" deno test");
} }
Ok(()) Ok(0)
}
async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
let script_name = format!("npm:create-{}", name);
fn print_manual_usage(script_name: &str, args: &[String]) -> i32 {
log::info!("{}", cformat!("You can initialize project manually by running <u>deno run {} {}</> and applying desired permissions.", script_name, args.join(" ")));
1
}
if std::io::stdin().is_terminal() {
log::info!(
cstr!("⚠️ Do you fully trust <y>{}</> package? Deno will invoke code from it with all permissions. Do you want to continue? <p(245)>[y/n]</>"),
script_name
);
loop {
let _ = std::io::stdout().write(b"> ")?;
std::io::stdout().flush()?;
let mut answer = String::new();
if std::io::stdin().read_line(&mut answer).is_ok() {
let answer = answer.trim().to_ascii_lowercase();
if answer != "y" {
return Ok(print_manual_usage(&script_name, &args));
} else {
break;
}
}
}
} else {
return Ok(print_manual_usage(&script_name, &args));
}
let new_flags = Flags {
permissions: PermissionFlags {
allow_all: true,
..Default::default()
},
allow_scripts: PackagesAllowedScripts::All,
argv: args,
node_modules_dir: Some(NodeModulesDirMode::Auto),
subcommand: DenoSubcommand::Run(RunFlags {
script: script_name,
..Default::default()
}),
..Default::default()
};
crate::tools::run::run_script(
WorkerExecutionMode::Run,
new_flags.into(),
None,
)
.await
} }
fn create_json_file( fn create_json_file(

View file

@ -3,6 +3,7 @@
use crate::args::resolve_no_prompt; use crate::args::resolve_no_prompt;
use crate::args::AddFlags; use crate::args::AddFlags;
use crate::args::CaData; use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::ConfigFlag; use crate::args::ConfigFlag;
use crate::args::Flags; use crate::args::Flags;
use crate::args::InstallFlags; use crate::args::InstallFlags;
@ -13,8 +14,11 @@ use crate::args::TypeCheckMode;
use crate::args::UninstallFlags; use crate::args::UninstallFlags;
use crate::args::UninstallKind; use crate::args::UninstallKind;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::graph_container::ModuleGraphContainer; use crate::graph_container::ModuleGraphContainer;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
@ -354,12 +358,54 @@ async fn install_global(
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
// ensure the module is cached // ensure the module is cached
let factory = CliFactory::from_flags(flags.clone()); let factory = CliFactory::from_flags(flags.clone());
let cli_options = factory.cli_options()?;
let http_client = factory.http_client_provider();
let deps_http_cache = factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new(
deps_http_cache.clone(),
CacheSetting::ReloadAll,
true,
http_client.clone(),
Default::default(),
None,
);
let npmrc = factory.cli_options().unwrap().npmrc();
deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver = Arc::new(NpmFetchResolver::new(
deps_file_fetcher.clone(),
npmrc.clone(),
));
let entry_text = install_flags_global.module_url.as_str();
if !cli_options.initial_cwd().join(entry_text).exists() {
// check for package requirement missing prefix
if let Ok(Err(package_req)) =
super::registry::AddRmPackageReq::parse(entry_text)
{
if jsr_resolver.req_to_nv(&package_req).await.is_some() {
bail!(
"{entry_text} is missing a prefix. Did you mean `{}`?",
crate::colors::yellow(format!("deno install -g jsr:{package_req}"))
);
} else if npm_resolver.req_to_nv(&package_req).await.is_some() {
bail!(
"{entry_text} is missing a prefix. Did you mean `{}`?",
crate::colors::yellow(format!("deno install -g npm:{package_req}"))
);
}
}
}
factory factory
.main_module_graph_container() .main_module_graph_container()
.await? .await?
.load_and_type_check_files(&[install_flags_global.module_url.clone()]) .load_and_type_check_files(&[install_flags_global.module_url.clone()])
.await?; .await?;
let http_client = factory.http_client_provider();
// create the install shim // create the install shim
create_install_shim(http_client, &flags, install_flags_global).await create_install_shim(http_client, &flags, install_flags_global).await
@ -1396,6 +1442,7 @@ mod tests {
.env_clear() .env_clear()
// use the deno binary in the target directory // use the deno binary in the target directory
.env("PATH", test_util::target_dir()) .env("PATH", test_util::target_dir())
.env("RUST_BACKTRACE", "1")
.spawn() .spawn()
.unwrap() .unwrap()
.wait() .wait()

View file

@ -61,7 +61,7 @@ pub async fn kernel(
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let main_module = let main_module =
resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd()) resolve_url_or_path("./$deno$jupyter.mts", cli_options.initial_cwd())
.unwrap(); .unwrap();
// TODO(bartlomieju): should we run with all permissions? // TODO(bartlomieju): should we run with all permissions?
let permissions = let permissions =

View file

@ -63,7 +63,7 @@ pub use rules::LintRuleProvider;
const JSON_SCHEMA_VERSION: u8 = 1; const JSON_SCHEMA_VERSION: u8 = 1;
static STDIN_FILE_NAME: &str = "$deno$stdin.ts"; static STDIN_FILE_NAME: &str = "$deno$stdin.mts";
pub async fn lint( pub async fn lint(
flags: Arc<Flags>, flags: Arc<Flags>,
@ -80,6 +80,7 @@ pub async fn lint(
file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen), file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| { move |flags, watcher_communicator, changed_paths| {
let lint_flags = lint_flags.clone(); let lint_flags = lint_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move { Ok(async move {
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
@ -191,7 +192,7 @@ pub async fn lint(
linter.finish() linter.finish()
}; };
if !success { if !success {
std::process::exit(1); deno_runtime::exit(1);
} }
} }
@ -435,6 +436,7 @@ fn collect_lint_files(
}) })
.ignore_git_folder() .ignore_git_folder()
.ignore_node_modules() .ignore_node_modules()
.use_gitignore()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
} }

View file

@ -175,6 +175,7 @@ struct JsonLintReporter {
version: u8, version: u8,
diagnostics: Vec<JsonLintDiagnostic>, diagnostics: Vec<JsonLintDiagnostic>,
errors: Vec<LintError>, errors: Vec<LintError>,
checked_files: Vec<String>,
} }
impl JsonLintReporter { impl JsonLintReporter {
@ -183,6 +184,7 @@ impl JsonLintReporter {
version: JSON_SCHEMA_VERSION, version: JSON_SCHEMA_VERSION,
diagnostics: Vec::new(), diagnostics: Vec::new(),
errors: Vec::new(), errors: Vec::new(),
checked_files: Vec::new(),
} }
} }
} }
@ -209,6 +211,17 @@ impl LintReporter for JsonLintReporter {
code: d.code().to_string(), code: d.code().to_string(),
hint: d.hint().map(|h| h.to_string()), hint: d.hint().map(|h| h.to_string()),
}); });
let file_path = d
.specifier
.to_file_path()
.unwrap()
.to_string_lossy()
.to_string();
if !self.checked_files.contains(&file_path) {
self.checked_files.push(file_path);
}
} }
fn visit_error(&mut self, file_path: &str, err: &AnyError) { fn visit_error(&mut self, file_path: &str, err: &AnyError) {
@ -216,10 +229,15 @@ impl LintReporter for JsonLintReporter {
file_path: file_path.to_string(), file_path: file_path.to_string(),
message: err.to_string(), message: err.to_string(),
}); });
if !self.checked_files.contains(&file_path.to_string()) {
self.checked_files.push(file_path.to_string());
}
} }
fn close(&mut self, _check_count: usize) { fn close(&mut self, _check_count: usize) {
sort_diagnostics(&mut self.diagnostics); sort_diagnostics(&mut self.diagnostics);
self.checked_files.sort();
let json = serde_json::to_string_pretty(&self); let json = serde_json::to_string_pretty(&self);
#[allow(clippy::print_stdout)] #[allow(clippy::print_stdout)]
{ {

View file

@ -8,7 +8,7 @@ use std::sync::Arc;
use deno_ast::SourceRange; use deno_ast::SourceRange;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_graph::source::ResolutionMode; use deno_graph::source::ResolutionKind;
use deno_graph::source::ResolveError; use deno_graph::source::ResolveError;
use deno_graph::Range; use deno_graph::Range;
use deno_lint::diagnostic::LintDiagnosticDetails; use deno_lint::diagnostic::LintDiagnosticDetails;
@ -17,7 +17,7 @@ use deno_lint::diagnostic::LintFix;
use deno_lint::diagnostic::LintFixChange; use deno_lint::diagnostic::LintFixChange;
use deno_lint::rules::LintRule; use deno_lint::rules::LintRule;
use deno_resolver::sloppy_imports::SloppyImportsResolution; use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode; use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use text_lines::LineAndColumnIndex; use text_lines::LineAndColumnIndex;
use crate::graph_util::CliJsrUrlProvider; use crate::graph_util::CliJsrUrlProvider;
@ -87,6 +87,7 @@ impl LintRule for NoSloppyImportsRule {
captures: Default::default(), captures: Default::default(),
}; };
// fill this and capture the sloppy imports in the resolver
deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions { deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::All, graph_kind: deno_graph::GraphKind::All,
specifier: context.specifier().clone(), specifier: context.specifier().clone(),
@ -100,16 +101,16 @@ impl LintRule for NoSloppyImportsRule {
maybe_npm_resolver: None, maybe_npm_resolver: None,
}); });
for (range, sloppy_import) in resolver.captures.borrow_mut().drain() { for (referrer, sloppy_import) in resolver.captures.borrow_mut().drain() {
let start_range = let start_range =
context.text_info().loc_to_source_pos(LineAndColumnIndex { context.text_info().loc_to_source_pos(LineAndColumnIndex {
line_index: range.start.line, line_index: referrer.range.start.line,
column_index: range.start.character, column_index: referrer.range.start.character,
}); });
let end_range = let end_range =
context.text_info().loc_to_source_pos(LineAndColumnIndex { context.text_info().loc_to_source_pos(LineAndColumnIndex {
line_index: range.end.line, line_index: referrer.range.end.line,
column_index: range.end.character, column_index: referrer.range.end.character,
}); });
let source_range = SourceRange::new(start_range, end_range); let source_range = SourceRange::new(start_range, end_range);
context.add_diagnostic_details( context.add_diagnostic_details(
@ -182,7 +183,7 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
&self, &self,
specifier_text: &str, specifier_text: &str,
referrer_range: &Range, referrer_range: &Range,
mode: ResolutionMode, resolution_kind: ResolutionKind,
) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> { ) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> {
let resolution = self let resolution = self
.workspace_resolver .workspace_resolver
@ -197,9 +198,9 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
specifier, .. specifier, ..
} => match self.sloppy_imports_resolver.resolve( } => match self.sloppy_imports_resolver.resolve(
&specifier, &specifier,
match mode { match resolution_kind {
ResolutionMode::Execution => SloppyImportsResolutionMode::Execution, ResolutionKind::Execution => SloppyImportsResolutionKind::Execution,
ResolutionMode::Types => SloppyImportsResolutionMode::Types, ResolutionKind::Types => SloppyImportsResolutionKind::Types,
}, },
) { ) {
Some(res) => { Some(res) => {

View file

@ -234,8 +234,8 @@ impl Diagnostic for PublishDiagnostic {
specifier: Cow::Borrowed(&referrer.specifier), specifier: Cow::Borrowed(&referrer.specifier),
text_info: Cow::Borrowed(text_info), text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::LineAndCol { source_pos: DiagnosticSourcePos::LineAndCol {
line: referrer.start.line, line: referrer.range.start.line,
column: referrer.start.character, column: referrer.range.start.character,
}, },
} }
} }
@ -300,7 +300,7 @@ impl Diagnostic for PublishDiagnostic {
text_info: &'a SourceTextInfo, text_info: &'a SourceTextInfo,
referrer: &'a deno_graph::Range, referrer: &'a deno_graph::Range,
) -> Option<DiagnosticSnippet<'a>> { ) -> Option<DiagnosticSnippet<'a>> {
if referrer.start.line == 0 && referrer.start.character == 0 { if referrer.range.start.line == 0 && referrer.range.start.character == 0 {
return None; // no range, probably a jsxImportSource import return None; // no range, probably a jsxImportSource import
} }
@ -310,12 +310,12 @@ impl Diagnostic for PublishDiagnostic {
style: DiagnosticSnippetHighlightStyle::Error, style: DiagnosticSnippetHighlightStyle::Error,
range: DiagnosticSourceRange { range: DiagnosticSourceRange {
start: DiagnosticSourcePos::LineAndCol { start: DiagnosticSourcePos::LineAndCol {
line: referrer.start.line, line: referrer.range.start.line,
column: referrer.start.character, column: referrer.range.start.character,
}, },
end: DiagnosticSourcePos::LineAndCol { end: DiagnosticSourcePos::LineAndCol {
line: referrer.end.line, line: referrer.range.end.line,
column: referrer.end.character, column: referrer.range.end.character,
}, },
}, },
description: Some("the specifier".into()), description: Some("the specifier".into()),

View file

@ -12,6 +12,7 @@ use std::sync::Arc;
use base64::prelude::BASE64_STANDARD; use base64::prelude::BASE64_STANDARD;
use base64::Engine; use base64::Engine;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::deno_json::ConfigFile;
use deno_config::workspace::JsrPackageConfig; use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
@ -67,8 +68,10 @@ use auth::get_auth_method;
use auth::AuthMethod; use auth::AuthMethod;
pub use pm::add; pub use pm::add;
pub use pm::cache_top_level_deps; pub use pm::cache_top_level_deps;
pub use pm::outdated;
pub use pm::remove; pub use pm::remove;
pub use pm::AddCommandName; pub use pm::AddCommandName;
pub use pm::AddRmPackageReq;
use publish_order::PublishOrderGraph; use publish_order::PublishOrderGraph;
use unfurl::SpecifierUnfurler; use unfurl::SpecifierUnfurler;
@ -89,13 +92,14 @@ pub async fn publish(
let cli_options = cli_factory.cli_options()?; let cli_options = cli_factory.cli_options()?;
let directory_path = cli_options.initial_cwd(); let directory_path = cli_options.initial_cwd();
let publish_configs = cli_options.start_dir.jsr_packages_for_publish(); let mut publish_configs = cli_options.start_dir.jsr_packages_for_publish();
if publish_configs.is_empty() { if publish_configs.is_empty() {
match cli_options.start_dir.maybe_deno_json() { match cli_options.start_dir.maybe_deno_json() {
Some(deno_json) => { Some(deno_json) => {
debug_assert!(!deno_json.is_package()); debug_assert!(!deno_json.is_package());
error_missing_exports_field(deno_json)?;
bail!( bail!(
"Missing 'name', 'version' and 'exports' field in '{}'.", "Missing 'name' or 'exports' field in '{}'.",
deno_json.specifier deno_json.specifier
); );
} }
@ -107,6 +111,18 @@ pub async fn publish(
} }
} }
} }
if let Some(version) = &publish_flags.set_version {
if publish_configs.len() > 1 {
bail!("Cannot use --set-version when publishing a workspace. Change your cwd to an individual package instead.");
}
if let Some(publish_config) = publish_configs.get_mut(0) {
let mut config_file = publish_config.config_file.as_ref().clone();
config_file.json.version = Some(version.clone());
publish_config.config_file = Arc::new(config_file);
}
}
let specifier_unfurler = Arc::new(SpecifierUnfurler::new( let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
if cli_options.unstable_sloppy_imports() { if cli_options.unstable_sloppy_imports() {
Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new( Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
@ -403,43 +419,15 @@ impl PublishPreparer {
graph: Arc<deno_graph::ModuleGraph>, graph: Arc<deno_graph::ModuleGraph>,
diagnostics_collector: &PublishDiagnosticsCollector, diagnostics_collector: &PublishDiagnosticsCollector,
) -> Result<Rc<PreparedPublishPackage>, AnyError> { ) -> Result<Rc<PreparedPublishPackage>, AnyError> {
static SUGGESTED_ENTRYPOINTS: [&str; 4] =
["mod.ts", "mod.js", "index.ts", "index.js"];
let deno_json = &package.config_file; let deno_json = &package.config_file;
let config_path = deno_json.specifier.to_file_path().unwrap(); let config_path = deno_json.specifier.to_file_path().unwrap();
let root_dir = config_path.parent().unwrap().to_path_buf(); let root_dir = config_path.parent().unwrap().to_path_buf();
let Some(version) = deno_json.json.version.clone() else { let version = deno_json.json.version.clone().ok_or_else(|| {
bail!("{} is missing 'version' field", deno_json.specifier); deno_core::anyhow::anyhow!(
}; "{} is missing 'version' field",
if deno_json.json.exports.is_none() { deno_json.specifier
let mut suggested_entrypoint = None; )
})?;
for entrypoint in SUGGESTED_ENTRYPOINTS {
if root_dir.join(entrypoint).exists() {
suggested_entrypoint = Some(entrypoint);
break;
}
}
let exports_content = format!(
r#"{{
"name": "{}",
"version": "{}",
"exports": "{}"
}}"#,
package.name,
version,
suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
);
bail!(
"You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
package.name,
deno_json.specifier,
exports_content
);
}
let Some(name_no_at) = package.name.strip_prefix('@') else { let Some(name_no_at) = package.name.strip_prefix('@') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format"); bail!("Invalid package name, use '@<scope_name>/<package_name> format");
}; };
@ -1106,9 +1094,9 @@ fn collect_excluded_module_diagnostics(
let graph_specifiers = graph let graph_specifiers = graph
.modules() .modules()
.filter_map(|m| match m { .filter_map(|m| match m {
deno_graph::Module::Js(_) | deno_graph::Module::Json(_) => { deno_graph::Module::Js(_)
Some(m.specifier()) | deno_graph::Module::Json(_)
} | deno_graph::Module::Wasm(_) => Some(m.specifier()),
deno_graph::Module::Npm(_) deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_) | deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => None, | deno_graph::Module::External(_) => None,
@ -1271,6 +1259,36 @@ fn has_license_file<'a>(
}) })
} }
fn error_missing_exports_field(deno_json: &ConfigFile) -> Result<(), AnyError> {
static SUGGESTED_ENTRYPOINTS: [&str; 4] =
["mod.ts", "mod.js", "index.ts", "index.js"];
let mut suggested_entrypoint = None;
for entrypoint in SUGGESTED_ENTRYPOINTS {
if deno_json.dir_path().join(entrypoint).exists() {
suggested_entrypoint = Some(entrypoint);
break;
}
}
let exports_content = format!(
r#"{{
"name": "{}",
"version": "{}",
"exports": "{}"
}}"#,
deno_json.json.name.as_deref().unwrap_or("@scope/name"),
deno_json.json.name.as_deref().unwrap_or("0.0.0"),
suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
);
bail!(
"You did not specify an entrypoint in {}. Add `exports` mapping in the configuration file, eg:\n{}",
deno_json.specifier,
exports_content
);
}
#[allow(clippy::print_stderr)] #[allow(clippy::print_stderr)]
fn ring_bell() { fn ring_bell() {
// ASCII code for the bell character. // ASCII code for the bell character.

View file

@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -11,8 +12,11 @@ use deno_core::futures::StreamExt;
use deno_path_util::url_to_file_path; use deno_path_util::url_to_file_path;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use deno_semver::Version;
use deno_semver::VersionReq; use deno_semver::VersionReq;
use deps::KeyPath;
use jsonc_parser::cst::CstObject; use jsonc_parser::cst::CstObject;
use jsonc_parser::cst::CstObjectProp; use jsonc_parser::cst::CstObjectProp;
use jsonc_parser::cst::CstRootNode; use jsonc_parser::cst::CstRootNode;
@ -29,10 +33,13 @@ use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver; use crate::npm::NpmFetchResolver;
mod cache_deps; mod cache_deps;
pub(crate) mod deps;
mod outdated;
pub use cache_deps::cache_top_level_deps; pub use cache_deps::cache_top_level_deps;
pub use outdated::outdated;
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone, Hash)]
enum ConfigKind { enum ConfigKind {
DenoJson, DenoJson,
PackageJson, PackageJson,
@ -83,6 +90,28 @@ impl ConfigUpdater {
self.cst.to_string() self.cst.to_string()
} }
fn get_property_for_mutation(
&mut self,
key_path: &KeyPath,
) -> Option<CstObjectProp> {
let mut current_node = self.root_object.clone();
self.modified = true;
for (i, part) in key_path.parts.iter().enumerate() {
let s = part.as_str();
if i < key_path.parts.len().saturating_sub(1) {
let object = current_node.object_value(s)?;
current_node = object;
} else {
// last part
return current_node.get(s);
}
}
None
}
fn add(&mut self, selected: SelectedPackage, dev: bool) { fn add(&mut self, selected: SelectedPackage, dev: bool) {
fn insert_index(object: &CstObject, searching_name: &str) -> usize { fn insert_index(object: &CstObject, searching_name: &str) -> usize {
object object
@ -333,6 +362,14 @@ fn load_configs(
Ok((cli_factory, npm_config, deno_config)) Ok((cli_factory, npm_config, deno_config))
} }
fn path_distance(a: &Path, b: &Path) -> usize {
let diff = pathdiff::diff_paths(a, b);
let Some(diff) = diff else {
return usize::MAX;
};
diff.components().count()
}
pub async fn add( pub async fn add(
flags: Arc<Flags>, flags: Arc<Flags>,
add_flags: AddFlags, add_flags: AddFlags,
@ -357,6 +394,21 @@ pub async fn add(
} }
} }
let start_dir = cli_factory.cli_options()?.start_dir.dir_path();
// only prefer to add npm deps to `package.json` if there isn't a closer deno.json.
// example: if deno.json is in the CWD and package.json is in the parent, we should add
// npm deps to deno.json, since it's closer
let prefer_npm_config = match (npm_config.as_ref(), deno_config.as_ref()) {
(Some(npm), Some(deno)) => {
let npm_distance = path_distance(&npm.path, &start_dir);
let deno_distance = path_distance(&deno.path, &start_dir);
npm_distance <= deno_distance
}
(Some(_), None) => true,
(None, _) => false,
};
let http_client = cli_factory.http_client_provider(); let http_client = cli_factory.http_client_provider();
let deps_http_cache = cli_factory.global_http_cache()?; let deps_http_cache = cli_factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new( let mut deps_file_fetcher = FileFetcher::new(
@ -431,15 +483,32 @@ pub async fn add(
match package_and_version { match package_and_version {
PackageAndVersion::NotFound { PackageAndVersion::NotFound {
package: package_name, package: package_name,
found_npm_package, help,
package_req, package_req,
} => { } => match help {
if found_npm_package { Some(NotFoundHelp::NpmPackage) => {
bail!("{} was not found, but a matching npm package exists. Did you mean `{}`?", crate::colors::red(package_name), crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}"))); bail!(
} else { "{} was not found, but a matching npm package exists. Did you mean `{}`?",
bail!("{} was not found.", crate::colors::red(package_name)); crate::colors::red(package_name),
crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}"))
);
} }
} Some(NotFoundHelp::JsrPackage) => {
bail!(
"{} was not found, but a matching jsr package exists. Did you mean `{}`?",
crate::colors::red(package_name),
crate::colors::yellow(format!("deno {cmd_name} jsr:{package_req}"))
)
}
Some(NotFoundHelp::PreReleaseVersion(version)) => {
bail!(
"{} has only pre-release versions available. Try specifying a version: `{}`",
crate::colors::red(&package_name),
crate::colors::yellow(format!("deno {cmd_name} {package_name}@^{version}"))
)
}
None => bail!("{} was not found.", crate::colors::red(package_name)),
},
PackageAndVersion::Selected(selected) => { PackageAndVersion::Selected(selected) => {
selected_packages.push(selected); selected_packages.push(selected);
} }
@ -455,7 +524,7 @@ pub async fn add(
selected_package.selected_version selected_package.selected_version
); );
if selected_package.package_name.starts_with("npm:") { if selected_package.package_name.starts_with("npm:") && prefer_npm_config {
if let Some(npm) = &mut npm_config { if let Some(npm) = &mut npm_config {
npm.add(selected_package, dev); npm.add(selected_package, dev);
} else { } else {
@ -487,76 +556,144 @@ struct SelectedPackage {
selected_version: String, selected_version: String,
} }
enum NotFoundHelp {
NpmPackage,
JsrPackage,
PreReleaseVersion(Version),
}
enum PackageAndVersion { enum PackageAndVersion {
NotFound { NotFound {
package: String, package: String,
found_npm_package: bool,
package_req: PackageReq, package_req: PackageReq,
help: Option<NotFoundHelp>,
}, },
Selected(SelectedPackage), Selected(SelectedPackage),
} }
fn best_version<'a>(
versions: impl Iterator<Item = &'a Version>,
) -> Option<&'a Version> {
let mut maybe_best_version: Option<&Version> = None;
for version in versions {
let is_best_version = maybe_best_version
.as_ref()
.map(|best_version| (*best_version).cmp(version).is_lt())
.unwrap_or(true);
if is_best_version {
maybe_best_version = Some(version);
}
}
maybe_best_version
}
trait PackageInfoProvider {
const SPECIFIER_PREFIX: &str;
/// The help to return if a package is found by this provider
const HELP: NotFoundHelp;
async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv>;
async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version>;
}
impl PackageInfoProvider for Arc<JsrFetchResolver> {
const HELP: NotFoundHelp = NotFoundHelp::JsrPackage;
const SPECIFIER_PREFIX: &str = "jsr";
async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
(**self).req_to_nv(req).await
}
async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
let info = self.package_info(&req.name).await?;
best_version(
info
.versions
.iter()
.filter(|(_, version_info)| !version_info.yanked)
.map(|(version, _)| version),
)
.cloned()
}
}
impl PackageInfoProvider for Arc<NpmFetchResolver> {
const HELP: NotFoundHelp = NotFoundHelp::NpmPackage;
const SPECIFIER_PREFIX: &str = "npm";
async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
(**self).req_to_nv(req).await
}
async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
let info = self.package_info(&req.name).await?;
best_version(info.versions.keys()).cloned()
}
}
async fn find_package_and_select_version_for_req( async fn find_package_and_select_version_for_req(
jsr_resolver: Arc<JsrFetchResolver>, jsr_resolver: Arc<JsrFetchResolver>,
npm_resolver: Arc<NpmFetchResolver>, npm_resolver: Arc<NpmFetchResolver>,
add_package_req: AddRmPackageReq, add_package_req: AddRmPackageReq,
) -> Result<PackageAndVersion, AnyError> { ) -> Result<PackageAndVersion, AnyError> {
match add_package_req.value { async fn select<T: PackageInfoProvider, S: PackageInfoProvider>(
AddRmPackageReqValue::Jsr(req) => { main_resolver: T,
let jsr_prefixed_name = format!("jsr:{}", &req.name); fallback_resolver: S,
let Some(nv) = jsr_resolver.req_to_nv(&req).await else { add_package_req: AddRmPackageReq,
if npm_resolver.req_to_nv(&req).await.is_some() { ) -> Result<PackageAndVersion, AnyError> {
let req = match &add_package_req.value {
AddRmPackageReqValue::Jsr(req) => req,
AddRmPackageReqValue::Npm(req) => req,
};
let prefixed_name = format!("{}:{}", T::SPECIFIER_PREFIX, req.name);
let help_if_found_in_fallback = S::HELP;
let Some(nv) = main_resolver.req_to_nv(req).await else {
if fallback_resolver.req_to_nv(req).await.is_some() {
// it's in the other registry
return Ok(PackageAndVersion::NotFound {
package: prefixed_name,
help: Some(help_if_found_in_fallback),
package_req: req.clone(),
});
}
if req.version_req.version_text() == "*" {
if let Some(pre_release_version) =
main_resolver.latest_version(req).await
{
return Ok(PackageAndVersion::NotFound { return Ok(PackageAndVersion::NotFound {
package: jsr_prefixed_name, package: prefixed_name,
found_npm_package: true, package_req: req.clone(),
package_req: req, help: Some(NotFoundHelp::PreReleaseVersion(
pre_release_version.clone(),
)),
}); });
} }
}
return Ok(PackageAndVersion::NotFound { return Ok(PackageAndVersion::NotFound {
package: jsr_prefixed_name, package: prefixed_name,
found_npm_package: false, help: None,
package_req: req, package_req: req.clone(),
}); });
}; };
let range_symbol = if req.version_req.version_text().starts_with('~') { let range_symbol = if req.version_req.version_text().starts_with('~') {
"~" "~"
} else if req.version_req.version_text() == nv.version.to_string() { } else if req.version_req.version_text() == nv.version.to_string() {
"" ""
} else { } else {
"^" "^"
}; };
Ok(PackageAndVersion::Selected(SelectedPackage { Ok(PackageAndVersion::Selected(SelectedPackage {
import_name: add_package_req.alias, import_name: add_package_req.alias,
package_name: jsr_prefixed_name, package_name: prefixed_name,
version_req: format!("{}{}", range_symbol, &nv.version), version_req: format!("{}{}", range_symbol, &nv.version),
selected_version: nv.version.to_string(), selected_version: nv.version.to_string(),
})) }))
}
match &add_package_req.value {
AddRmPackageReqValue::Jsr(_) => {
select(jsr_resolver, npm_resolver, add_package_req).await
} }
AddRmPackageReqValue::Npm(req) => { AddRmPackageReqValue::Npm(_) => {
let npm_prefixed_name = format!("npm:{}", &req.name); select(npm_resolver, jsr_resolver, add_package_req).await
let Some(nv) = npm_resolver.req_to_nv(&req).await else {
return Ok(PackageAndVersion::NotFound {
package: npm_prefixed_name,
found_npm_package: false,
package_req: req,
});
};
let range_symbol = if req.version_req.version_text().starts_with('~') {
"~"
} else if req.version_req.version_text() == nv.version.to_string() {
""
} else {
"^"
};
Ok(PackageAndVersion::Selected(SelectedPackage {
import_name: add_package_req.alias,
package_name: npm_prefixed_name,
version_req: format!("{}{}", range_symbol, &nv.version),
selected_version: nv.version.to_string(),
}))
} }
} }
} }
@ -568,7 +705,7 @@ enum AddRmPackageReqValue {
} }
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
struct AddRmPackageReq { pub struct AddRmPackageReq {
alias: String, alias: String,
value: AddRmPackageReqValue, value: AddRmPackageReqValue,
} }
@ -713,7 +850,7 @@ async fn npm_install_after_modification(
flags: Arc<Flags>, flags: Arc<Flags>,
// explicitly provided to prevent redownloading // explicitly provided to prevent redownloading
jsr_resolver: Option<Arc<crate::jsr::JsrFetchResolver>>, jsr_resolver: Option<Arc<crate::jsr::JsrFetchResolver>>,
) -> Result<(), AnyError> { ) -> Result<CliFactory, AnyError> {
// clear the previously cached package.json from memory before reloading it // clear the previously cached package.json from memory before reloading it
node_resolver::PackageJsonThreadLocalCache::clear(); node_resolver::PackageJsonThreadLocalCache::clear();
@ -731,7 +868,7 @@ async fn npm_install_after_modification(
lockfile.write_if_changed()?; lockfile.write_if_changed()?;
} }
Ok(()) Ok(cli_factory)
} }
#[cfg(test)] #[cfg(test)]

View file

@ -8,7 +8,7 @@ use crate::graph_container::ModuleGraphUpdatePermit;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_semver::package::PackageReq; use deno_semver::jsr::JsrPackageReqReference;
pub async fn cache_top_level_deps( pub async fn cache_top_level_deps(
// todo(dsherret): don't pass the factory into this function. Instead use ctor deps // todo(dsherret): don't pass the factory into this function. Instead use ctor deps
@ -44,7 +44,11 @@ pub async fn cache_top_level_deps(
let mut seen_reqs = std::collections::HashSet::new(); let mut seen_reqs = std::collections::HashSet::new();
for entry in import_map.imports().entries() { for entry in import_map.imports().entries().chain(
import_map
.scopes()
.flat_map(|scope| scope.imports.entries()),
) {
let Some(specifier) = entry.value else { let Some(specifier) = entry.value else {
continue; continue;
}; };
@ -52,15 +56,20 @@ pub async fn cache_top_level_deps(
match specifier.scheme() { match specifier.scheme() {
"jsr" => { "jsr" => {
let specifier_str = specifier.as_str(); let specifier_str = specifier.as_str();
let specifier_str = if let Ok(req) = JsrPackageReqReference::from_str(specifier_str) {
specifier_str.strip_prefix("jsr:").unwrap_or(specifier_str); if let Some(sub_path) = req.sub_path() {
if let Ok(req) = PackageReq::from_str(specifier_str) { if sub_path.ends_with('/') {
if !seen_reqs.insert(req.clone()) { continue;
}
roots.push(specifier.clone());
continue;
}
if !seen_reqs.insert(req.req().clone()) {
continue; continue;
} }
let jsr_resolver = jsr_resolver.clone(); let jsr_resolver = jsr_resolver.clone();
info_futures.push(async move { info_futures.push(async move {
if let Some(nv) = jsr_resolver.req_to_nv(&req).await { if let Some(nv) = jsr_resolver.req_to_nv(req.req()).await {
if let Some(info) = jsr_resolver.package_version_info(&nv).await if let Some(info) = jsr_resolver.package_version_info(&nv).await
{ {
return Some((specifier.clone(), info)); return Some((specifier.clone(), info));

Some files were not shown because too many files have changed in this diff Show more