
Merge branch 'main' into feat_support_ctrl_close_on_windows

David Sherret 2025-01-30 12:36:37 -05:00
commit ad281e35b1
1610 changed files with 59726 additions and 26529 deletions


@@ -13,7 +13,7 @@
 },
 "exec": {
 "commands": [{
-"command": "rustfmt --config imports_granularity=item",
+"command": "rustfmt --config imports_granularity=item --config group_imports=StdExternalCrate",
 "exts": ["rs"]
 }]
 },


@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 // This file contains the implementation of a Github Action. Github uses
 // Node.js v20.x to run actions, so this is Node code and not Deno code.


@@ -1,11 +1,11 @@
 #!/usr/bin/env -S deno run --allow-write=. --lock=./tools/deno.lock.json
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 32;
+const cacheVersion = 37;
 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
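A minimal sketch (an assumed helper, not code from this diff) of how a cacheVersion constant like the one above typically ends up as the prefix of the cache keys that appear later in the generated ci.yml (for example "37-cargo-home-..."), so bumping it invalidates every existing cache entry:

// Hypothetical TypeScript helper, for illustration only; the function name is an assumption.
const cacheVersion = 37;

function cargoHomeCacheKey(): string {
  // The ${{ ... }} parts are GitHub Actions expressions left for the runner to expand.
  return `${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`;
}

console.log(cargoHomeCacheKey());
// 37-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles('Cargo.lock') }}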
@@ -14,7 +14,7 @@ const windowsX86Runner = "windows-2022";
 const windowsX86XlRunner = "windows-2022-xl";
 const macosX86Runner = "macos-13";
 const macosArmRunner = "macos-14";
-const selfHostedMacosArmRunner = "self-hosted";
+const selfHostedMacosArmRunner = "ghcr.io/cirruslabs/macos-runner:sonoma";
 const Runners = {
 linuxX86: {
@@ -41,8 +41,14 @@ const Runners = {
 macosArm: {
 os: "macos",
 arch: "aarch64",
+runner: macosArmRunner,
+},
+macosArmSelfHosted: {
+os: "macos",
+arch: "aarch64",
+// Actually use self-hosted runner only in denoland/deno on `main` branch and for tags (release) builds.
 runner:
-`\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
+`\${{ github.repository == 'denoland/deno' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
 },
 windowsX86: {
 os: "windows",
@@ -124,9 +130,7 @@ cat /sysroot/.env
 # to build because the object formats are not compatible.
 echo "
 CARGO_PROFILE_BENCH_INCREMENTAL=false
-CARGO_PROFILE_BENCH_LTO=false
 CARGO_PROFILE_RELEASE_INCREMENTAL=false
-CARGO_PROFILE_RELEASE_LTO=false
 RUSTFLAGS<<__1
 -C linker-plugin-lto=true
 -C linker=clang-${llvmVersion}
@@ -150,7 +154,7 @@ RUSTDOCFLAGS<<__1
 $RUSTFLAGS
 __1
 CC=/usr/bin/clang-${llvmVersion}
-CFLAGS=-flto=thin $CFLAGS
+CFLAGS=$CFLAGS
 " > $GITHUB_ENV`,
 };
@@ -360,7 +364,7 @@ const ci = {
 needs: ["pre_build"],
 if: "${{ needs.pre_build.outputs.skip_build != 'true' }}",
 "runs-on": "${{ matrix.runner }}",
-"timeout-minutes": 180,
+"timeout-minutes": 240,
 defaults: {
 run: {
 // GH actions does not fail fast by default on
@@ -384,7 +388,7 @@ const ci = {
 job: "test",
 profile: "debug",
 }, {
-...Runners.macosArm,
+...Runners.macosArmSelfHosted,
 job: "test",
 profile: "release",
 skip_pr: true,
@@ -486,7 +490,7 @@ const ci = {
 },
 {
 name: "Cache Cargo home",
-uses: "actions/cache@v4",
+uses: "cirruslabs/cache@v4",
 with: {
 // See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
 // Note that with the new sparse registry format, we no longer have to cache a `.git` dir
@@ -648,6 +652,14 @@ const ci = {
 "cache-path": "./target",
 },
 },
+{
+name: "Set up playwright cache",
+uses: "actions/cache@v4",
+with: {
+path: "./.ms-playwright",
+key: "playwright-${{ runner.os }}-${{ runner.arch }}",
+},
+},
 {
 name: "test_format.js",
 if: "matrix.job == 'lint' && matrix.os == 'linux'",
@@ -1093,6 +1105,26 @@ const ci = {
 },
 ]),
 },
+wasm: {
+name: "build wasm32",
+needs: ["pre_build"],
+if: "${{ needs.pre_build.outputs.skip_build != 'true' }}",
+"runs-on": ubuntuX86Runner,
+"timeout-minutes": 30,
+steps: skipJobsIfPrAndMarkedSkip([
+...cloneRepoStep,
+installRustStep,
+{
+name: "Install wasm target",
+run: "rustup target add wasm32-unknown-unknown",
+},
+{
+name: "Cargo build",
+// we want this crate to be wasm compatible
+run: "cargo build --target wasm32-unknown-unknown -p deno_resolver",
+},
+]),
+},
 "publish-canary": {
 name: "publish canary",
 "runs-on": ubuntuX86Runner,


@@ -48,7 +48,7 @@ jobs:
 - pre_build
 if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
 runs-on: '${{ matrix.runner }}'
-timeout-minutes: 180
+timeout-minutes: 240
 defaults:
 run:
 shell: bash
@@ -68,12 +68,12 @@ jobs:
 skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
 - os: macos
 arch: aarch64
-runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
+runner: macos-14
 job: test
 profile: debug
 - os: macos
 arch: aarch64
-runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
+runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && (github.ref == ''refs/heads/main'' || startsWith(github.ref, ''refs/tags/'')) && ''ghcr.io/cirruslabs/macos-runner:sonoma'' || ''macos-14'' }}'
 job: test
 profile: release
 skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
@@ -175,7 +175,7 @@ jobs:
 tar --exclude=".git*" --exclude=target --exclude=third_party/prebuilt \
 -czvf target/release/deno_src.tar.gz -C .. deno
 - name: Cache Cargo home
-uses: actions/cache@v4
+uses: cirruslabs/cache@v4
 with:
 path: |-
 ~/.cargo/.crates.toml
@@ -184,8 +184,8 @@ jobs:
 ~/.cargo/registry/index
 ~/.cargo/registry/cache
 ~/.cargo/git/db
-key: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-restore-keys: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
+key: '37-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+restore-keys: '37-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
 if: '!(matrix.skip)'
 - uses: dsherret/rust-toolchain-file@v1
 if: '!(matrix.skip)'
@@ -307,9 +307,7 @@ jobs:
 # to build because the object formats are not compatible.
 echo "
 CARGO_PROFILE_BENCH_INCREMENTAL=false
-CARGO_PROFILE_BENCH_LTO=false
 CARGO_PROFILE_RELEASE_INCREMENTAL=false
-CARGO_PROFILE_RELEASE_LTO=false
 RUSTFLAGS<<__1
 -C linker-plugin-lto=true
 -C linker=clang-19
@@ -333,7 +331,7 @@ jobs:
 $RUSTFLAGS
 __1
 CC=/usr/bin/clang-19
-CFLAGS=-flto=thin $CFLAGS
+CFLAGS=$CFLAGS
 " > $GITHUB_ENV
 - name: Remove macOS cURL --ipv4 flag
 run: |-
@@ -379,12 +377,18 @@ jobs:
 !./target/*/*.zip
 !./target/*/*.tar.gz
 key: never_saved
-restore-keys: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+restore-keys: '37-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
 - name: Apply and update mtime cache
 if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
 uses: ./.github/mtime_cache
 with:
 cache-path: ./target
+- name: Set up playwright cache
+uses: actions/cache@v4
+with:
+path: ./.ms-playwright
+key: 'playwright-${{ runner.os }}-${{ runner.arch }}'
+if: '!(matrix.skip)'
 - name: test_format.js
 if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == ''linux'')'
 run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check
@@ -689,7 +693,34 @@ jobs:
 !./target/*/gn_root
 !./target/*/*.zip
 !./target/*/*.tar.gz
-key: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+key: '37-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+wasm:
+name: build wasm32
+needs:
+- pre_build
+if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
+runs-on: ubuntu-24.04
+timeout-minutes: 30
+steps:
+- name: Configure git
+run: |-
+git config --global core.symlinks true
+git config --global fetch.parallel 32
+if: '!(matrix.skip)'
+- name: Clone repository
+uses: actions/checkout@v4
+with:
+fetch-depth: 5
+submodules: false
+if: '!(matrix.skip)'
+- uses: dsherret/rust-toolchain-file@v1
+if: '!(matrix.skip)'
+- name: Install wasm target
+run: rustup target add wasm32-unknown-unknown
+if: '!(matrix.skip)'
+- name: Cargo build
+run: cargo build --target wasm32-unknown-unknown -p deno_resolver
+if: '!(matrix.skip)'
 publish-canary:
 name: publish canary
 runs-on: ubuntu-24.04

.gitignore

@@ -35,4 +35,7 @@ junit.xml
 # Jupyter files
 .ipynb_checkpoints/
 Untitled*.ipynb
+# playwright browser binary cache
+/.ms-playwright

Cargo.lock (generated)

File diff suppressed because it is too large.


@@ -1,10 +1,13 @@
-# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+# Copyright 2018-2025 the Deno authors. MIT license.
 [workspace]
 resolver = "2"
 members = [
 "bench_util",
 "cli",
+"cli/lib",
+"cli/rt",
+"cli/snapshot",
 "ext/broadcast_channel",
 "ext/cache",
 "ext/canvas",
@ -48,58 +51,64 @@ repository = "https://github.com/denoland/deno"
[workspace.dependencies] [workspace.dependencies]
deno_ast = { version = "=0.44.0", features = ["transpiling"] } deno_ast = { version = "=0.44.0", features = ["transpiling"] }
deno_core = { version = "0.327.0" } deno_core = { version = "0.335.0" }
deno_bench_util = { version = "0.178.0", path = "./bench_util" } deno_bench_util = { version = "0.181.0", path = "./bench_util" }
deno_config = { version = "=0.40.0", features = ["workspace", "sync"] } deno_config = { version = "=0.47.1", features = ["workspace"] }
deno_lockfile = "=0.23.2" deno_lockfile = "=0.24.0"
deno_media_type = { version = "0.2.0", features = ["module_specifier"] } deno_media_type = { version = "=0.2.5", features = ["module_specifier"] }
deno_npm = "=0.26.0" deno_npm = "=0.27.2"
deno_path_util = "=0.2.2" deno_path_util = "=0.3.1"
deno_permissions = { version = "0.43.0", path = "./runtime/permissions" } deno_permissions = { version = "0.46.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.192.0", path = "./runtime" } deno_runtime = { version = "0.195.0", path = "./runtime" }
deno_semver = "=0.6.1" deno_semver = "=0.7.1"
deno_terminal = "0.2.0" deno_terminal = "0.2.0"
napi_sym = { version = "0.114.0", path = "./ext/napi/sym" } napi_sym = { version = "0.117.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" } test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.4" denokv_proto = "0.9.0"
denokv_remote = "0.8.4" denokv_remote = "0.9.0"
# denokv_sqlite brings in bundled sqlite if we don't disable the default features # denokv_sqlite brings in bundled sqlite if we don't disable the default features
denokv_sqlite = { default-features = false, version = "0.8.4" } denokv_sqlite = { default-features = false, version = "0.9.0" }
# exts # exts
deno_broadcast_channel = { version = "0.178.0", path = "./ext/broadcast_channel" } deno_broadcast_channel = { version = "0.181.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.116.0", path = "./ext/cache" } deno_cache = { version = "0.119.0", path = "./ext/cache" }
deno_canvas = { version = "0.53.0", path = "./ext/canvas" } deno_canvas = { version = "0.56.0", path = "./ext/canvas" }
deno_console = { version = "0.184.0", path = "./ext/console" } deno_console = { version = "0.187.0", path = "./ext/console" }
deno_cron = { version = "0.64.0", path = "./ext/cron" } deno_cron = { version = "0.67.0", path = "./ext/cron" }
deno_crypto = { version = "0.198.0", path = "./ext/crypto" } deno_crypto = { version = "0.201.0", path = "./ext/crypto" }
deno_fetch = { version = "0.208.0", path = "./ext/fetch" } deno_fetch = { version = "0.211.0", path = "./ext/fetch" }
deno_ffi = { version = "0.171.0", path = "./ext/ffi" } deno_ffi = { version = "0.174.0", path = "./ext/ffi" }
deno_fs = { version = "0.94.0", path = "./ext/fs" } deno_fs = { version = "0.97.0", path = "./ext/fs" }
deno_http = { version = "0.182.0", path = "./ext/http" } deno_http = { version = "0.185.0", path = "./ext/http" }
deno_io = { version = "0.94.0", path = "./ext/io" } deno_io = { version = "0.97.0", path = "./ext/io" }
deno_kv = { version = "0.92.0", path = "./ext/kv" } deno_kv = { version = "0.95.0", path = "./ext/kv" }
deno_napi = { version = "0.115.0", path = "./ext/napi" } deno_napi = { version = "0.118.0", path = "./ext/napi" }
deno_net = { version = "0.176.0", path = "./ext/net" } deno_net = { version = "0.179.0", path = "./ext/net" }
deno_node = { version = "0.122.0", path = "./ext/node" } deno_node = { version = "0.125.0", path = "./ext/node" }
deno_telemetry = { version = "0.6.0", path = "./ext/telemetry" } deno_os = { version = "0.4.0", path = "./ext/os" }
deno_tls = { version = "0.171.0", path = "./ext/tls" } deno_process = { version = "0.2.0", path = "./ext/process" }
deno_url = { version = "0.184.0", path = "./ext/url" } deno_telemetry = { version = "0.9.0", path = "./ext/telemetry" }
deno_web = { version = "0.215.0", path = "./ext/web" } deno_tls = { version = "0.174.0", path = "./ext/tls" }
deno_webgpu = { version = "0.151.0", path = "./ext/webgpu" } deno_url = { version = "0.187.0", path = "./ext/url" }
deno_webidl = { version = "0.184.0", path = "./ext/webidl" } deno_web = { version = "0.218.0", path = "./ext/web" }
deno_websocket = { version = "0.189.0", path = "./ext/websocket" } deno_webgpu = { version = "0.154.0", path = "./ext/webgpu" }
deno_webstorage = { version = "0.179.0", path = "./ext/webstorage" } deno_webidl = { version = "0.187.0", path = "./ext/webidl" }
deno_websocket = { version = "0.192.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.182.0", path = "./ext/webstorage" }
# resolvers # workspace libraries
deno_npm_cache = { version = "0.3.0", path = "./resolvers/npm_cache" } deno_lib = { version = "0.3.0", path = "./cli/lib" }
deno_resolver = { version = "0.15.0", path = "./resolvers/deno" } deno_npm_cache = { version = "0.6.0", path = "./resolvers/npm_cache" }
node_resolver = { version = "0.22.0", path = "./resolvers/node" } deno_resolver = { version = "0.18.0", path = "./resolvers/deno" }
deno_snapshots = { version = "0.2.0", path = "./cli/snapshot" }
node_resolver = { version = "0.25.0", path = "./resolvers/node" }
aes = "=0.8.3" aes = "=0.8.3"
anyhow = "1.0.57" anyhow = "1.0.57"
async-once-cell = "0.5.4"
async-stream = "0.3"
async-trait = "0.1.73" async-trait = "0.1.73"
base32 = "=0.5.1" base32 = "=0.5.1"
base64 = "0.21.7" base64 = "0.21.7"
@ -108,19 +117,20 @@ boxed_error = "0.2.3"
brotli = "6.0.0" brotli = "6.0.0"
bytes = "1.4.0" bytes = "1.4.0"
cache_control = "=0.2.0" cache_control = "=0.2.0"
capacity_builder = "0.1.3" capacity_builder = "0.5.0"
cbc = { version = "=0.1.2", features = ["alloc"] } cbc = { version = "=0.1.2", features = ["alloc"] }
# Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS. # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
# Instead use util::time::utc_now() # Instead use util::time::utc_now()
chrono = { version = "0.4", default-features = false, features = ["std", "serde"] } chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
color-print = "0.3.5" color-print = "0.3.5"
console_static_text = "=0.8.1" console_static_text = "=0.8.1"
ctr = { version = "0.9.2", features = ["alloc"] }
dashmap = "5.5.3" dashmap = "5.5.3"
data-encoding = "2.3.3" data-encoding = "2.3.3"
data-url = "=0.3.1" data-url = "=0.3.1"
deno_cache_dir = "=0.15.0" deno_cache_dir = "=0.17.0"
deno_error = "=0.5.2" deno_error = "=0.5.5"
deno_package_json = { version = "0.2.1", default-features = false } deno_package_json = { version = "=0.4.2", default-features = false }
deno_unsync = "0.4.2" deno_unsync = "0.4.2"
dlopen2 = "0.6.1" dlopen2 = "0.6.1"
ecb = "=0.1.2" ecb = "=0.1.2"
@ -134,14 +144,14 @@ flate2 = { version = "1.0.30", default-features = false }
fs3 = "0.5.0" fs3 = "0.5.0"
futures = "0.3.21" futures = "0.3.21"
glob = "0.3.1" glob = "0.3.1"
h2 = "0.4.4" h2 = "0.4.6"
hickory-resolver = { version = "0.25.0-alpha.4", features = ["tokio-runtime", "serde"] } hickory-resolver = { version = "0.25.0-alpha.4", features = ["tokio-runtime", "serde"] }
http = "1.0" http = "1.0"
http-body = "1.0" http-body = "1.0"
http-body-util = "0.1.2" http-body-util = "0.1.2"
http_v02 = { package = "http", version = "0.2.9" } http_v02 = { package = "http", version = "0.2.9" }
httparse = "1.8.0" httparse = "1.8.0"
hyper = { version = "1.4.1", features = ["full"] } hyper = { version = "1.6.0", features = ["full"] }
hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] } hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] } hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] } hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
@ -150,6 +160,7 @@ ipnet = "2.3"
jsonc-parser = { version = "=0.26.2", features = ["serde"] } jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3" lazy-regex = "3"
libc = "0.2.168" libc = "0.2.168"
libsui = "0.5.0"
libz-sys = { version = "1.1.20", default-features = false } libz-sys = { version = "1.1.20", default-features = false }
log = { version = "0.4.20", features = ["kv"] } log = { version = "0.4.20", features = ["kv"] }
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
@ -193,6 +204,7 @@ slab = "0.4"
smallvec = "1.8" smallvec = "1.8"
socket2 = { version = "0.5.3", features = ["all"] } socket2 = { version = "0.5.3", features = ["all"] }
spki = "0.7.2" spki = "0.7.2"
sys_traits = "=0.1.8"
tar = "=0.4.40" tar = "=0.4.40"
tempfile = "3.4.0" tempfile = "3.4.0"
termcolor = "1.1.3" termcolor = "1.1.3"
@ -209,9 +221,10 @@ tower-service = "0.3.2"
twox-hash = "=1.6.3" twox-hash = "=1.6.3"
url = { version = "2.5", features = ["serde", "expose_internals"] } url = { version = "2.5", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] } uuid = { version = "1.3.0", features = ["v4"] }
walkdir = "=2.3.2"
webpki-root-certs = "0.26.5" webpki-root-certs = "0.26.5"
webpki-roots = "0.26" webpki-roots = "0.26"
which = "4.2.5" which = "6"
yoke = { version = "0.7.4", features = ["derive"] } yoke = { version = "0.7.4", features = ["derive"] }
zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] } zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4" zstd = "=0.12.4"
@ -239,7 +252,7 @@ syn = { version = "2", features = ["full", "extra-traits"] }
nix = "=0.27.1" nix = "=0.27.1"
# windows deps # windows deps
junction = "=0.2.0" junction = "=1.2.0"
winapi = "=0.3.9" winapi = "=0.3.9"
windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel", "Win32_System_Threading", "Win32_UI", "Win32_UI_Shell"] } windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel", "Win32_System_Threading", "Win32_UI", "Win32_UI_Shell"] }
winres = "=0.1.12" winres = "=0.1.12"


@@ -1,6 +1,6 @@
 MIT License
-Copyright 2018-2024 the Deno authors
+Copyright 2018-2025 the Deno authors
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in


@@ -6,8 +6,8 @@
 <img align="right" src="https://deno.land/logo.svg" height="150px" alt="the deno mascot dinosaur standing in the rain">
-[Deno](https://www.deno.com)
-([/ˈdiːnoʊ/](http://ipa-reader.xyz/?text=%CB%88di%CB%90no%CA%8A), pronounced
+[Deno](https://deno.com)
+([/ˈdiːnoʊ/](https://ipa-reader.com/?text=%CB%88di%CB%90no%CA%8A), pronounced
 `dee-no`) is a JavaScript, TypeScript, and WebAssembly runtime with secure
 defaults and a great developer experience. It's built on [V8](https://v8.dev/),
 [Rust](https://www.rust-lang.org/), and [Tokio](https://tokio.rs/).


@ -6,6 +6,124 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install
### 2.1.7 / 2025.01.21
- fix(deps): update yanked crates (#27512)
- fix(ext/node): GCM auth tag check on DechiperIv#final (#27733)
- fix(ext/node): add FileHandle#sync (#27677)
- fix(ext/node): propagate socket error to client request object (#27678)
- fix(ext/node): tls.connect regression (#27707)
- fix(ext/os): pass SignalState to web worker (#27741)
- fix(install/global): remove importMap field from specified config file
(#27744)
- fix: use 'getrandom' feature for 'sys_traits' crate
- perf(compile): remove swc from denort (#27721)
### 2.1.6 / 2025.01.16
- fix(check/lsp): correctly resolve compilerOptions.types (#27686)
- fix(check/lsp): fix bugs with tsc type resolution, allow npm packages to
augment `ImportMeta` (#27690)
- fix(compile): store embedded fs case sensitivity (#27653)
- fix(compile/windows): better handling of deno_dir on different drive letter
than code (#27654)
- fix(ext/console): change Temporal color (#27684)
- fix(ext/node): add `writev` method to `FileHandle` (#27563)
- fix(ext/node): add chown method to FileHandle class (#27638)
- fix(ext/node): apply `@npmcli/agent` workaround to `npm-check-updates`
(#27639)
- fix(ext/node): fix playwright http client (#27662)
- fix(ext/node): show bare-node-builtin hint when using an import map (#27632)
- fix(ext/node): use primordials in `ext/node/polyfills/_fs_common.ts` (#27589)
- fix(lsp): handle pathless untitled URIs (#27637)
- fix(lsp/check): don't resolve unknown media types to a `.js` extension
(#27631)
- fix(node): Prevent node:child_process from always inheriting the parent
environment (#27343) (#27340)
- fix(node/fs): add utimes method to the FileHandle class (#27582)
- fix(outdated): Use `latest` tag even when it's the same as the current version
(#27699)
- fix(outdated): retain strict semver specifier when updating (#27701)
### 2.1.5 / 2025.01.09
- feat(unstable): implement QUIC (#21942)
- feat(unstable): add JS linting plugin infrastructure (#27416)
- feat(unstable): add OTEL MeterProvider (#27240)
- feat(unstable): no config npm:@opentelemetry/api integration (#27541)
- feat(unstable): replace SpanExporter with TracerProvider (#27473)
- feat(unstable): support selectors in JS lint plugins (#27452)
- fix(check): line-break between diagnostic message chain entries (#27543)
- fix(check): move module not found errors to typescript diagnostics (#27533)
- fix(compile): analyze modules in directory specified in --include (#27296)
- fix(compile): be more deterministic when compiling the same code in different
directories (#27395)
- fix(compile): display embedded file sizes and total (#27360)
- fix(compile): output contents of embedded file system (#27302)
- fix(ext/fetch): better error message when body resource is unavailable
(#27429)
- fix(ext/fetch): retry some http/2 errors (#27417)
- fix(ext/fs): do not throw for bigint ctime/mtime/atime (#27453)
- fix(ext/http): improve error message when underlying resource of request body
unavailable (#27463)
- fix(ext/net): update moka cache to avoid potential panic in `Deno.resolveDns`
on some laptops with Ryzen CPU (#27572)
- fix(ext/node): fix `fs.access`/`fs.promises.access` with `X_OK` mode parameter
on Windows (#27407)
- fix(ext/node): fix `os.cpus()` on Linux (#27592)
- fix(ext/node): RangeError timingSafeEqual with different byteLength (#27470)
- fix(ext/node): add `truncate` method to the `FileHandle` class (#27389)
- fix(ext/node): add support of any length IV for aes-(128|256)-gcm ciphers
(#27476)
- fix(ext/node): convert brotli chunks with proper byte offset (#27455)
- fix(ext/node): do not exit worker thread when there is pending async op
(#27378)
- fix(ext/node): have `process` global available in Node context (#27562)
- fix(ext/node): make getCiphers return supported ciphers (#27466)
- fix(ext/node): sort list of built-in modules alphabetically (#27410)
- fix(ext/node): support createConnection option in node:http.request() (#25470)
- fix(ext/node): support private key export in JWK format (#27325)
- fix(ext/web): add `[[ErrorData]]` slot to `DOMException` (#27342)
- fix(ext/websocket): Fix close code without reason (#27578)
- fix(jsr): Wasm imports fail to load (#27594)
- fix(kv): improve backoff error message and inline documentation (#27537)
- fix(lint): fix single char selectors being ignored (#27576)
- fix(lockfile): include dependencies listed in external import map in lockfile
(#27337)
- fix(lsp): css preprocessor formatting (#27526)
- fix(lsp): don't skip dirs with enabled subdirs (#27580)
- fix(lsp): include "node:" prefix for node builtin auto-imports (#27404)
- fix(lsp): respect "typescript.suggestionActions.enabled" setting (#27373)
- fix(lsp): rewrite imports for 'Move to a new file' action (#27427)
- fix(lsp): sql and component file formatting (#27350)
- fix(lsp): use verbatim specifier for URL auto-imports (#27605)
- fix(no-slow-types): handle rest param with internal assignments (#27581)
- fix(node/fs): add a chmod method to the FileHandle class (#27522)
- fix(node): add missing `inspector/promises` (#27491)
- fix(node): handle cjs exports with escaped chars (#27438)
- fix(npm): deterministically output tags to initialized file (#27514)
- fix(npm): search node_modules folder for package matching npm specifier
(#27345)
- fix(outdated): ensure "Latest" version is greater than "Update" version
(#27390)
- fix(outdated): support updating dependencies in external import maps (#27339)
- fix(permissions): implicit `--allow-import` when using `--cached-only`
(#27530)
- fix(publish): infer literal types in const contexts (#27425)
- fix(task): properly handle task name wildcards with --recursive (#27396)
- fix(task): support tasks without commands (#27191)
- fix(unstable): don't error on non-existing attrs or type attr (#27456)
- fix: FastString v8_string() should error when cannot allocated (#27375)
- fix: deno_resolver crate without 'sync' feature (#27403)
- fix: incorrect memory info free/available bytes on mac (#27460)
- fix: upgrade deno_doc to 0.161.3 (#27377)
- perf(fs/windows): stat - only open file once (#27487)
- perf(node/fs/copy): reduce metadata lookups copying directory (#27495)
- perf: don't store duplicate info for ops in the snapshot (#27430)
- perf: remove now needless canonicalization getting closest package.json
(#27437)
- perf: upgrade to deno_semver 0.7 (#27426)
### 2.1.4 / 2024.12.11
- feat(unstable): support caching npm dependencies only as they're needed


@@ -1,8 +1,8 @@
-# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+# Copyright 2018-2025 the Deno authors. MIT license.
 [package]
 name = "deno_bench_util"
-version = "0.178.0"
+version = "0.181.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true


@@ -7,7 +7,6 @@ use deno_bench_util::bench_js_sync;
 use deno_bench_util::bench_or_profile;
 use deno_bench_util::bencher::benchmark_group;
 use deno_bench_util::bencher::Bencher;
 use deno_core::Extension;
 #[op2]


@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use deno_bench_util::bench_js_sync_with;
 use deno_bench_util::bench_or_profile;


@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use bencher::Bencher;
 use deno_core::v8;
 use deno_core::Extension;


@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 mod js_runtime;
 mod profiling;


@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use bencher::DynBenchFn;
 use bencher::StaticBenchFn;
 use bencher::TestDescAndFn;


@@ -1,8 +1,8 @@
-# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+# Copyright 2018-2025 the Deno authors. MIT license.
 [package]
 name = "deno"
-version = "2.1.4"
+version = "2.1.7"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -16,11 +16,6 @@ name = "deno"
 path = "main.rs"
 doc = false
-[[bin]]
-name = "denort"
-path = "mainrt.rs"
-doc = false
 [[test]]
 name = "integration"
 path = "integration_tests_runner.rs"
@@ -49,7 +44,7 @@ dhat-heap = ["dhat"]
 upgrade = []
 # A dev feature to disable creations and loading of snapshots in favor of
 # loading JS sources at runtime.
-hmr = ["deno_runtime/hmr"]
+hmr = ["deno_runtime/hmr", "deno_snapshots/disable"]
 # Vendor zlib as zlib-ng
 __vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"]
@@ -60,8 +55,11 @@ lazy-regex.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 zstd.workspace = true
-glibc_version = "0.1.2"
 flate2 = { workspace = true, features = ["default"] }
+deno_error.workspace = true
+[target.'cfg(unix)'.build-dependencies]
+glibc_version = "0.1.2"
 [target.'cfg(windows)'.build-dependencies]
 winapi.workspace = true
@ -69,25 +67,28 @@ winres.workspace = true
[dependencies] [dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir.workspace = true deno_cache_dir = { workspace = true, features = ["sync"] }
deno_config.workspace = true deno_config = { workspace = true, features = ["sync", "workspace"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] } deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] }
deno_error.workspace = true deno_error.workspace = true
deno_graph = { version = "=0.86.4" } deno_graph = { version = "=0.87.2" }
deno_lint = { version = "=0.68.2", features = ["docs"] } deno_lib.workspace = true
deno_lint = { version = "0.70.0" }
deno_lockfile.workspace = true deno_lockfile.workspace = true
deno_media_type = { workspace = true, features = ["data_url", "decoding", "module_specifier"] }
deno_npm.workspace = true deno_npm.workspace = true
deno_npm_cache.workspace = true deno_npm_cache.workspace = true
deno_package_json.workspace = true deno_package_json = { workspace = true, features = ["sync"] }
deno_path_util.workspace = true deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] } deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true deno_semver.workspace = true
deno_snapshots = { workspace = true }
deno_task_shell = "=0.20.2" deno_task_shell = "=0.20.2"
deno_telemetry.workspace = true deno_telemetry.workspace = true
deno_terminal.workspace = true deno_terminal.workspace = true
libsui = "0.5.0" libsui.workspace = true
node_resolver.workspace = true node_resolver.workspace = true
anstream = "0.6.14" anstream = "0.6.14"
@ -113,7 +114,6 @@ dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.5" dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8" dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.3" dprint-plugin-typescript = "=0.93.3"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0" fancy-regex = "=0.10.0"
faster-hex.workspace = true faster-hex.workspace = true
# If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`. # If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`.
@ -124,7 +124,7 @@ http.workspace = true
http-body.workspace = true http-body.workspace = true
http-body-util.workspace = true http-body-util.workspace = true
hyper-util.workspace = true hyper-util.workspace = true
import_map = { version = "=0.20.1", features = ["ext"] } import_map = { version = "=0.21.0", features = ["ext"] }
indexmap.workspace = true indexmap.workspace = true
jsonc-parser = { workspace = true, features = ["cst", "serde"] } jsonc-parser = { workspace = true, features = ["cst", "serde"] }
jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] } jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] }
@ -154,10 +154,10 @@ rustyline-derive = "=0.7.0"
serde.workspace = true serde.workspace = true
serde_repr.workspace = true serde_repr.workspace = true
sha2.workspace = true sha2.workspace = true
shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] } spki = { version = "0.7", features = ["pem"] }
sqlformat = "=0.3.2" sqlformat = "=0.3.2"
strsim = "0.11.1" strsim = "0.11.1"
sys_traits = { workspace = true, features = ["getrandom", "filetime", "libc", "real", "strip_unc", "winapi"] }
tar.workspace = true tar.workspace = true
tempfile.workspace = true tempfile.workspace = true
text-size = "=1.1.0" text-size = "=1.1.0"
@ -170,7 +170,7 @@ tracing = { version = "0.1", features = ["log", "default"] }
twox-hash.workspace = true twox-hash.workspace = true
typed-arena = "=2.0.2" typed-arena = "=2.0.2"
uuid = { workspace = true, features = ["serde"] } uuid = { workspace = true, features = ["serde"] }
walkdir = "=2.3.2" walkdir.workspace = true
which.workspace = true which.workspace = true
zeromq.workspace = true zeromq.workspace = true
zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] } zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] }
@ -182,10 +182,12 @@ winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
nix.workspace = true nix.workspace = true
shell-escape = "=0.1.5"
[dev-dependencies] [dev-dependencies]
deno_bench_util.workspace = true deno_bench_util.workspace = true
pretty_assertions.workspace = true pretty_assertions.workspace = true
sys_traits = { workspace = true, features = ["memory"] }
test_util.workspace = true test_util.workspace = true
[package.metadata.winres] [package.metadata.winres]


@@ -1,68 +1,28 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use std::collections::HashSet;
+use std::sync::Arc;
-use deno_config::deno_json::TsConfigForEmit;
+use deno_ast::SourceMapOption;
+use deno_config::deno_json::CompilerOptionsParseError;
+use deno_config::deno_json::TsConfig;
+use deno_config::deno_json::TsConfigType;
+use deno_config::deno_json::TsConfigWithIgnoredOptions;
+use deno_config::deno_json::TsTypeLib;
+use deno_config::workspace::Workspace;
+use deno_config::workspace::WorkspaceDirectory;
+use deno_core::error::AnyError;
 use deno_core::serde_json;
+use deno_core::unsync::sync::AtomicFlag;
+use deno_core::url::Url;
+use deno_lib::util::hash::FastInsecureHasher;
+use deno_lint::linter::LintConfig as DenoLintConfig;
 use deno_semver::jsr::JsrDepPackageReq;
 use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::npm::NpmPackageReqReference;
+use once_cell::sync::OnceCell;
-#[cfg(test)] // happens to only be used by the tests at the moment
+use crate::util::collections::FolderScopedMap;
pub struct DenoConfigFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);
#[cfg(test)]
impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
fn read_to_string_lossy(
&self,
path: &std::path::Path,
) -> Result<std::borrow::Cow<'static, str>, std::io::Error> {
self
.0
.read_text_file_lossy_sync(path, None)
.map_err(|err| err.into_io_error())
}
fn stat_sync(
&self,
path: &std::path::Path,
) -> Result<deno_config::fs::FsMetadata, std::io::Error> {
self
.0
.stat_sync(path)
.map(|stat| deno_config::fs::FsMetadata {
is_file: stat.is_file,
is_directory: stat.is_directory,
is_symlink: stat.is_symlink,
})
.map_err(|err| err.into_io_error())
}
fn read_dir(
&self,
path: &std::path::Path,
) -> Result<Vec<deno_config::fs::FsDirEntry>, std::io::Error> {
self
.0
.read_dir_sync(path)
.map_err(|err| err.into_io_error())
.map(|entries| {
entries
.into_iter()
.map(|e| deno_config::fs::FsDirEntry {
path: path.join(e.name),
metadata: deno_config::fs::FsMetadata {
is_file: e.is_file,
is_directory: e.is_directory,
is_symlink: e.is_symlink,
},
})
.collect()
})
}
}
 pub fn import_map_deps(
 import_map: &serde_json::Value,
@@ -158,17 +118,261 @@ fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
 }
 }
-pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) {
-if let Some(ignored_options) = &ts_config.maybe_ignored_options {
-log::warn!("{}", ignored_options);
+fn check_warn_tsconfig(
+ts_config: &TsConfigWithIgnoredOptions,
+logged_warnings: &LoggedWarnings,
+) {
+for ignored_options in &ts_config.ignored_options {
+if ignored_options
+.maybe_specifier
+.as_ref()
+.map(|s| logged_warnings.folders.insert(s.clone()))
+.unwrap_or(true)
+{
+log::warn!("{}", ignored_options);
+}
 }
 let serde_json::Value::Object(obj) = &ts_config.ts_config.0 else {
 return;
 };
-if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true)) {
+if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true))
+&& logged_warnings.experimental_decorators.raise()
+{
 log::warn!(
 "{} experimentalDecorators compiler option is deprecated and may be removed at any time",
 deno_runtime::colors::yellow("Warning"),
 );
 }
 }
#[derive(Debug)]
pub struct TranspileAndEmitOptions {
pub transpile: deno_ast::TranspileOptions,
pub emit: deno_ast::EmitOptions,
// stored ahead of time so we don't have to recompute this a lot
pub pre_computed_hash: u64,
}
#[derive(Debug, Default)]
struct LoggedWarnings {
experimental_decorators: AtomicFlag,
folders: dashmap::DashSet<Url>,
}
#[derive(Default, Debug)]
struct MemoizedValues {
deno_window_check_tsconfig: OnceCell<Arc<TsConfig>>,
deno_worker_check_tsconfig: OnceCell<Arc<TsConfig>>,
emit_tsconfig: OnceCell<Arc<TsConfig>>,
transpile_options: OnceCell<Arc<TranspileAndEmitOptions>>,
}
#[derive(Debug)]
pub struct TsConfigFolderInfo {
pub dir: WorkspaceDirectory,
logged_warnings: Arc<LoggedWarnings>,
memoized: MemoizedValues,
}
impl TsConfigFolderInfo {
pub fn lib_tsconfig(
&self,
lib: TsTypeLib,
) -> Result<&Arc<TsConfig>, CompilerOptionsParseError> {
let cell = match lib {
TsTypeLib::DenoWindow => &self.memoized.deno_window_check_tsconfig,
TsTypeLib::DenoWorker => &self.memoized.deno_worker_check_tsconfig,
};
cell.get_or_try_init(|| {
let tsconfig_result = self
.dir
.to_resolved_ts_config(TsConfigType::Check { lib })?;
check_warn_tsconfig(&tsconfig_result, &self.logged_warnings);
Ok(Arc::new(tsconfig_result.ts_config))
})
}
pub fn emit_tsconfig(
&self,
) -> Result<&Arc<TsConfig>, CompilerOptionsParseError> {
self.memoized.emit_tsconfig.get_or_try_init(|| {
let tsconfig_result =
self.dir.to_resolved_ts_config(TsConfigType::Emit)?;
check_warn_tsconfig(&tsconfig_result, &self.logged_warnings);
Ok(Arc::new(tsconfig_result.ts_config))
})
}
pub fn transpile_options(
&self,
) -> Result<&Arc<TranspileAndEmitOptions>, CompilerOptionsParseError> {
self.memoized.transpile_options.get_or_try_init(|| {
let ts_config = self.emit_tsconfig()?;
ts_config_to_transpile_and_emit_options(ts_config.as_ref().clone())
.map(Arc::new)
.map_err(|source| CompilerOptionsParseError {
specifier: self
.dir
.maybe_deno_json()
.map(|d| d.specifier.clone())
.unwrap_or_else(|| {
// will never happen because each dir should have a
// deno.json if we got here
debug_assert!(false);
self.dir.dir_url().as_ref().clone()
}),
source,
})
})
}
}
#[derive(Debug)]
pub struct TsConfigResolver {
map: FolderScopedMap<TsConfigFolderInfo>,
}
impl TsConfigResolver {
pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
// separate the workspace into directories that have a tsconfig
let root_dir = workspace.resolve_member_dir(workspace.root_dir());
let logged_warnings = Arc::new(LoggedWarnings::default());
let mut map = FolderScopedMap::new(TsConfigFolderInfo {
dir: root_dir,
logged_warnings: logged_warnings.clone(),
memoized: Default::default(),
});
for (url, folder) in workspace.config_folders() {
let folder_has_compiler_options = folder
.deno_json
.as_ref()
.map(|d| d.json.compiler_options.is_some())
.unwrap_or(false);
if url != workspace.root_dir() && folder_has_compiler_options {
let dir = workspace.resolve_member_dir(url);
map.insert(
url.clone(),
TsConfigFolderInfo {
dir,
logged_warnings: logged_warnings.clone(),
memoized: Default::default(),
},
);
}
}
Self { map }
}
pub fn check_js_for_specifier(&self, specifier: &Url) -> bool {
self.folder_for_specifier(specifier).dir.check_js()
}
pub fn deno_lint_config(
&self,
specifier: &Url,
) -> Result<DenoLintConfig, AnyError> {
let transpile_options =
&self.transpile_and_emit_options(specifier)?.transpile;
// don't bother storing this in a cell because deno_lint requires an owned value
Ok(DenoLintConfig {
default_jsx_factory: (!transpile_options.jsx_automatic)
.then(|| transpile_options.jsx_factory.clone()),
default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
.then(|| transpile_options.jsx_fragment_factory.clone()),
})
}
pub fn transpile_and_emit_options(
&self,
specifier: &Url,
) -> Result<&Arc<TranspileAndEmitOptions>, CompilerOptionsParseError> {
let value = self.map.get_for_specifier(specifier);
value.transpile_options()
}
pub fn folder_for_specifier(&self, specifier: &Url) -> &TsConfigFolderInfo {
self.folder_for_specifier_str(specifier.as_str())
}
pub fn folder_for_specifier_str(
&self,
specifier: &str,
) -> &TsConfigFolderInfo {
self.map.get_for_specifier_str(specifier)
}
pub fn folder_count(&self) -> usize {
self.map.count()
}
}
impl deno_graph::CheckJsResolver for TsConfigResolver {
fn resolve(&self, specifier: &deno_graph::ModuleSpecifier) -> bool {
self.check_js_for_specifier(specifier)
}
}
fn ts_config_to_transpile_and_emit_options(
config: deno_config::deno_json::TsConfig,
) -> Result<TranspileAndEmitOptions, serde_json::Error> {
let options: deno_config::deno_json::EmitConfigOptions =
serde_json::from_value(config.0)?;
let imports_not_used_as_values =
match options.imports_not_used_as_values.as_str() {
"preserve" => deno_ast::ImportsNotUsedAsValues::Preserve,
"error" => deno_ast::ImportsNotUsedAsValues::Error,
_ => deno_ast::ImportsNotUsedAsValues::Remove,
};
let (transform_jsx, jsx_automatic, jsx_development, precompile_jsx) =
match options.jsx.as_str() {
"react" => (true, false, false, false),
"react-jsx" => (true, true, false, false),
"react-jsxdev" => (true, true, true, false),
"precompile" => (false, false, false, true),
_ => (false, false, false, false),
};
let source_map = if options.inline_source_map {
SourceMapOption::Inline
} else if options.source_map {
SourceMapOption::Separate
} else {
SourceMapOption::None
};
let transpile = deno_ast::TranspileOptions {
use_ts_decorators: options.experimental_decorators,
use_decorators_proposal: !options.experimental_decorators,
emit_metadata: options.emit_decorator_metadata,
imports_not_used_as_values,
jsx_automatic,
jsx_development,
jsx_factory: options.jsx_factory,
jsx_fragment_factory: options.jsx_fragment_factory,
jsx_import_source: options.jsx_import_source,
precompile_jsx,
precompile_jsx_skip_elements: options.jsx_precompile_skip_elements,
precompile_jsx_dynamic_props: None,
transform_jsx,
var_decl_imports: false,
// todo(dsherret): support verbatim_module_syntax here properly
verbatim_module_syntax: false,
};
let emit = deno_ast::EmitOptions {
inline_sources: options.inline_sources,
remove_comments: false,
source_map,
source_map_base: None,
source_map_file: None,
};
let transpile_and_emit_options_hash = {
let mut hasher = FastInsecureHasher::new_without_deno_version();
hasher.write_hashable(&transpile);
hasher.write_hashable(&emit);
hasher.finish()
};
Ok(TranspileAndEmitOptions {
transpile,
emit,
pre_computed_hash: transpile_and_emit_options_hash,
})
}


@@ -1,6 +1,5 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
-use std::borrow::Cow;
 use std::collections::HashSet;
 use std::env;
 use std::ffi::OsString;
@@ -32,9 +31,12 @@ use deno_core::error::AnyError;
 use deno_core::resolve_url_or_path;
 use deno_core::url::Url;
 use deno_graph::GraphKind;
+use deno_lib::args::CaData;
+use deno_lib::args::UnstableConfig;
+use deno_lib::version::DENO_VERSION_INFO;
+use deno_npm::NpmSystemInfo;
 use deno_path_util::normalize_path;
 use deno_path_util::url_to_file_path;
-use deno_runtime::deno_permissions::PermissionsOptions;
 use deno_runtime::deno_permissions::SysDescriptor;
 use deno_telemetry::OtelConfig;
 use deno_telemetry::OtelConsoleConfig;
@@ -43,11 +45,8 @@ use log::Level;
 use serde::Deserialize;
 use serde::Serialize;
-use crate::args::resolve_no_prompt;
-use crate::util::fs::canonicalize_path;
 use super::flags_net;
-use super::jsr_url;
+use crate::util::fs::canonicalize_path;
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub enum ConfigFlag {
@ -502,6 +501,52 @@ impl DenoSubcommand {
| Self::Lsp | Self::Lsp
) )
} }
pub fn npm_system_info(&self) -> NpmSystemInfo {
match self {
DenoSubcommand::Compile(CompileFlags {
target: Some(target),
..
}) => {
// the values of NpmSystemInfo align with the possible values for the
// `arch` and `platform` fields of Node.js' `process` global:
// https://nodejs.org/api/process.html
match target.as_str() {
"aarch64-apple-darwin" => NpmSystemInfo {
os: "darwin".into(),
cpu: "arm64".into(),
},
"aarch64-unknown-linux-gnu" => NpmSystemInfo {
os: "linux".into(),
cpu: "arm64".into(),
},
"x86_64-apple-darwin" => NpmSystemInfo {
os: "darwin".into(),
cpu: "x64".into(),
},
"x86_64-unknown-linux-gnu" => NpmSystemInfo {
os: "linux".into(),
cpu: "x64".into(),
},
"x86_64-pc-windows-msvc" => NpmSystemInfo {
os: "win32".into(),
cpu: "x64".into(),
},
value => {
log::warn!(
concat!(
"Not implemented npm system info for target '{}'. Using current ",
"system default. This may impact architecture specific dependencies."
),
value,
);
NpmSystemInfo::default()
}
}
}
_ => NpmSystemInfo::default(),
}
}
} }
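For reference, the comment inside npm_system_info above points at the Node.js `process` docs; on each listed target triple, `process.platform` and `process.arch` report the strings used here for `os` and `cpu`. A small illustrative lookup, not part of the diff, with values taken from the Node.js documentation:

// Assumed table for illustration only; mirrors Node.js `process.platform`/`process.arch` values.
const nodePlatformArch: Record<string, { platform: string; arch: string }> = {
  "aarch64-apple-darwin": { platform: "darwin", arch: "arm64" },
  "aarch64-unknown-linux-gnu": { platform: "linux", arch: "arm64" },
  "x86_64-apple-darwin": { platform: "darwin", arch: "x64" },
  "x86_64-unknown-linux-gnu": { platform: "linux", arch: "x64" },
  "x86_64-pc-windows-msvc": { platform: "win32", arch: "x64" },
};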
impl Default for DenoSubcommand { impl Default for DenoSubcommand {
@ -551,15 +596,6 @@ impl Default for TypeCheckMode {
} }
} }
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum CaData {
/// The string is a file path
File(String),
/// This variant is not exposed as an option in the CLI, it is used internally
/// for standalone binaries.
Bytes(Vec<u8>),
}
// Info needed to run NPM lifecycle scripts // Info needed to run NPM lifecycle scripts
#[derive(Clone, Debug, Eq, PartialEq, Default)] #[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct LifecycleScriptsConfig { pub struct LifecycleScriptsConfig {
@ -587,19 +623,6 @@ fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
} }
} }
#[derive(
Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize,
)]
pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub npm_lazy_caching: bool,
pub features: Vec<String>, // --unstabe-kv --unstable-cron
}
#[derive(Clone, Debug, Eq, PartialEq, Default)] #[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct InternalFlags { pub struct InternalFlags {
/// Used when the language server is configured with an /// Used when the language server is configured with an
@ -693,97 +716,6 @@ impl PermissionFlags {
|| self.deny_write.is_some() || self.deny_write.is_some()
|| self.allow_import.is_some() || self.allow_import.is_some()
} }
pub fn to_options(&self, cli_arg_urls: &[Cow<Url>]) -> PermissionsOptions {
fn handle_allow<T: Default>(
allow_all: bool,
value: Option<T>,
) -> Option<T> {
if allow_all {
assert!(value.is_none());
Some(T::default())
} else {
value
}
}
fn handle_imports(
cli_arg_urls: &[Cow<Url>],
imports: Option<Vec<String>>,
) -> Option<Vec<String>> {
if imports.is_some() {
return imports;
}
let builtin_allowed_import_hosts = [
"jsr.io:443",
"deno.land:443",
"esm.sh:443",
"cdn.jsdelivr.net:443",
"raw.githubusercontent.com:443",
"gist.githubusercontent.com:443",
];
let mut imports =
Vec::with_capacity(builtin_allowed_import_hosts.len() + 1);
imports
.extend(builtin_allowed_import_hosts.iter().map(|s| s.to_string()));
// also add the JSR_URL env var
if let Some(jsr_host) = allow_import_host_from_url(jsr_url()) {
imports.push(jsr_host);
}
// include the cli arg urls
for url in cli_arg_urls {
if let Some(host) = allow_import_host_from_url(url) {
imports.push(host);
}
}
Some(imports)
}
PermissionsOptions {
allow_all: self.allow_all,
allow_env: handle_allow(self.allow_all, self.allow_env.clone()),
deny_env: self.deny_env.clone(),
allow_net: handle_allow(self.allow_all, self.allow_net.clone()),
deny_net: self.deny_net.clone(),
allow_ffi: handle_allow(self.allow_all, self.allow_ffi.clone()),
deny_ffi: self.deny_ffi.clone(),
allow_read: handle_allow(self.allow_all, self.allow_read.clone()),
deny_read: self.deny_read.clone(),
allow_run: handle_allow(self.allow_all, self.allow_run.clone()),
deny_run: self.deny_run.clone(),
allow_sys: handle_allow(self.allow_all, self.allow_sys.clone()),
deny_sys: self.deny_sys.clone(),
allow_write: handle_allow(self.allow_all, self.allow_write.clone()),
deny_write: self.deny_write.clone(),
allow_import: handle_imports(
cli_arg_urls,
handle_allow(self.allow_all, self.allow_import.clone()),
),
prompt: !resolve_no_prompt(self),
}
}
}
/// Gets the --allow-import host from the provided url
fn allow_import_host_from_url(url: &Url) -> Option<String> {
let host = url.host()?;
if let Some(port) = url.port() {
Some(format!("{}:{}", host, port))
} else {
use deno_core::url::Host::*;
match host {
Domain(domain) if domain == "jsr.io" && url.scheme() == "https" => None,
_ => match url.scheme() {
"https" => Some(format!("{}:443", host)),
"http" => Some(format!("{}:80", host)),
_ => None,
},
}
}
} }
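As a rough standalone sketch of the removed helpers above, this is how the implicit --allow-import host list is derived (assumptions: the plain url crate stands in for deno_core::url, and the jsr.io-over-https special case is omitted):

use url::Url;

fn default_allow_import_hosts(cli_urls: &[Url]) -> Vec<String> {
  // built-in registries that are always importable
  let mut hosts: Vec<String> = [
    "jsr.io:443",
    "deno.land:443",
    "esm.sh:443",
    "cdn.jsdelivr.net:443",
    "raw.githubusercontent.com:443",
    "gist.githubusercontent.com:443",
  ]
  .iter()
  .map(|s| s.to_string())
  .collect();
  // add hosts for any URLs passed on the command line, defaulting the
  // port from the scheme when it is not explicit
  for url in cli_urls {
    let Some(host) = url.host_str() else { continue };
    let port = url.port().or(match url.scheme() {
      "https" => Some(443),
      "http" => Some(80),
      _ => None,
    });
    if let Some(port) = port {
      hosts.push(format!("{}:{}", host, port));
    }
  }
  hosts
}

Hosts without an explicit port inherit 443 for https and 80 for http, matching the defaults in the code above.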
fn join_paths(allowlist: &[String], d: &str) -> String { fn join_paths(allowlist: &[String], d: &str) -> String {
@ -1006,6 +938,8 @@ impl Flags {
OtelConfig { OtelConfig {
tracing_enabled: !disabled tracing_enabled: !disabled
&& otel_var("OTEL_DENO_TRACING").unwrap_or(default), && otel_var("OTEL_DENO_TRACING").unwrap_or(default),
metrics_enabled: !disabled
&& otel_var("OTEL_DENO_METRICS").unwrap_or(default),
console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() { console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() {
Ok(_) if disabled => OtelConsoleConfig::Ignore, Ok(_) if disabled => OtelConsoleConfig::Ignore,
Ok("ignore") => OtelConsoleConfig::Ignore, Ok("ignore") => OtelConsoleConfig::Ignore,
@ -1578,14 +1512,15 @@ fn handle_repl_flags(flags: &mut Flags, repl_flags: ReplFlags) {
} }
pub fn clap_root() -> Command { pub fn clap_root() -> Command {
debug_assert_eq!(DENO_VERSION_INFO.typescript, deno_snapshots::TS_VERSION);
let long_version = format!( let long_version = format!(
"{} ({}, {}, {})\nv8 {}\ntypescript {}", "{} ({}, {}, {})\nv8 {}\ntypescript {}",
crate::version::DENO_VERSION_INFO.deno, DENO_VERSION_INFO.deno,
crate::version::DENO_VERSION_INFO.release_channel.name(), DENO_VERSION_INFO.release_channel.name(),
env!("PROFILE"), env!("PROFILE"),
env!("TARGET"), env!("TARGET"),
deno_core::v8::VERSION_STRING, deno_core::v8::VERSION_STRING,
crate::version::DENO_VERSION_INFO.typescript DENO_VERSION_INFO.typescript
); );
run_args(Command::new("deno"), true) run_args(Command::new("deno"), true)
@ -1601,7 +1536,7 @@ pub fn clap_root() -> Command {
) )
.color(ColorChoice::Auto) .color(ColorChoice::Auto)
.term_width(800) .term_width(800)
.version(crate::version::DENO_VERSION_INFO.deno) .version(DENO_VERSION_INFO.deno)
.long_version(long_version) .long_version(long_version)
.disable_version_flag(true) .disable_version_flag(true)
.disable_help_flag(true) .disable_help_flag(true)
@ -4368,7 +4303,7 @@ impl CommandExt for Command {
let mut cmd = self.arg( let mut cmd = self.arg(
Arg::new("unstable") Arg::new("unstable")
.long("unstable") .long("unstable")
.help(cstr!("Enable all unstable features and APIs. Instead of using this flag, consider enabling individual unstable features .help(cstr!("The `--unstable` flag has been deprecated. Use granular `--unstable-*` flags instead
<p(245)>To view the list of individual unstable feature flags, run this command again with --help=unstable</>")) <p(245)>To view the list of individual unstable feature flags, run this command again with --help=unstable</>"))
.action(ArgAction::SetTrue) .action(ArgAction::SetTrue)
.hide(matches!(cfg, UnstableArgsConfig::None)) .hide(matches!(cfg, UnstableArgsConfig::None))
@ -6057,9 +5992,10 @@ pub fn resolve_urls(urls: Vec<String>) -> Vec<String> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use super::*;
/// Creates vector of strings, Vec<String> /// Creates vector of strings, Vec<String>
macro_rules! svec { macro_rules! svec {
($($x:expr),* $(,)?) => (vec![$($x.to_string().into()),*]); ($($x:expr),* $(,)?) => (vec![$($x.to_string().into()),*]);
@ -11547,8 +11483,6 @@ mod tests {
..Default::default() ..Default::default()
} }
); );
// just make sure this doesn't panic
let _ = flags.permissions.to_options(&[]);
} }
#[test] #[test]
@ -11624,29 +11558,6 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
) )
} }
#[test]
fn test_allow_import_host_from_url() {
fn parse(text: &str) -> Option<String> {
allow_import_host_from_url(&Url::parse(text).unwrap())
}
assert_eq!(parse("https://jsr.io"), None);
assert_eq!(
parse("http://127.0.0.1:4250"),
Some("127.0.0.1:4250".to_string())
);
assert_eq!(parse("http://jsr.io"), Some("jsr.io:80".to_string()));
assert_eq!(
parse("https://example.com"),
Some("example.com:443".to_string())
);
assert_eq!(
parse("http://example.com"),
Some("example.com:80".to_string())
);
assert_eq!(parse("file:///example.com"), None);
}
#[test] #[test]
fn allow_all_conflicts_allow_perms() { fn allow_all_conflicts_allow_perms() {
let flags = [ let flags = [

View file

@ -1,9 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::net::IpAddr;
use std::str::FromStr;
use deno_core::url::Url; use deno_core::url::Url;
use deno_runtime::deno_permissions::NetDescriptor; use deno_runtime::deno_permissions::NetDescriptor;
use std::net::IpAddr;
use std::str::FromStr;
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct ParsePortError(String); pub struct ParsePortError(String);

View file

@ -1,24 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
pub async fn resolve_import_map_value_from_specifier(
specifier: &Url,
file_fetcher: &CliFileFetcher,
) -> Result<serde_json::Value, AnyError> {
if specifier.scheme() == "data" {
let data_url_text =
deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
Ok(serde_json::from_str(&data_url_text)?)
} else {
let file = TextDecodedFile::decode(
file_fetcher.fetch_bypass_permissions(specifier).await?,
)?;
Ok(serde_json::from_str(&file.source)?)
}
}

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet; use std::collections::HashSet;
use std::path::PathBuf; use std::path::PathBuf;
@ -10,20 +10,20 @@ use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard; use deno_core::parking_lot::MutexGuard;
use deno_core::serde_json; use deno_core::serde_json;
use deno_error::JsErrorBox;
use deno_lockfile::Lockfile;
use deno_lockfile::WorkspaceMemberConfig; use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_semver::jsr::JsrDepPackageReq; use deno_semver::jsr::JsrDepPackageReq;
use crate::args::deno_json::import_map_deps; use crate::args::deno_json::import_map_deps;
use crate::cache;
use crate::util::fs::atomic_write_file_with_retries;
use crate::Flags;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::InstallFlags; use crate::args::InstallFlags;
use crate::cache;
use deno_lockfile::Lockfile; use crate::sys::CliSys;
use crate::Flags;
#[derive(Debug)] #[derive(Debug)]
pub struct CliLockfileReadFromPathOptions { pub struct CliLockfileReadFromPathOptions {
@ -35,6 +35,7 @@ pub struct CliLockfileReadFromPathOptions {
#[derive(Debug)] #[derive(Debug)]
pub struct CliLockfile { pub struct CliLockfile {
sys: CliSys,
lockfile: Mutex<Lockfile>, lockfile: Mutex<Lockfile>,
pub filename: PathBuf, pub filename: PathBuf,
frozen: bool, frozen: bool,
@ -59,6 +60,16 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
} }
} }
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum AtomicWriteFileWithRetriesError {
#[class(inherit)]
#[error(transparent)]
Changed(JsErrorBox),
#[class(inherit)]
#[error("Failed writing lockfile")]
Io(#[source] std::io::Error),
}
impl CliLockfile { impl CliLockfile {
/// Get the inner deno_lockfile::Lockfile. /// Get the inner deno_lockfile::Lockfile.
pub fn lock(&self) -> Guard<Lockfile> { pub fn lock(&self) -> Guard<Lockfile> {
@ -78,12 +89,16 @@ impl CliLockfile {
self.lockfile.lock().overwrite self.lockfile.lock().overwrite
} }
pub fn write_if_changed(&self) -> Result<(), AnyError> { pub fn write_if_changed(
&self,
) -> Result<(), AtomicWriteFileWithRetriesError> {
if self.skip_write { if self.skip_write {
return Ok(()); return Ok(());
} }
self.error_if_changed()?; self
.error_if_changed()
.map_err(AtomicWriteFileWithRetriesError::Changed)?;
let mut lockfile = self.lockfile.lock(); let mut lockfile = self.lockfile.lock();
let Some(bytes) = lockfile.resolve_write_bytes() else { let Some(bytes) = lockfile.resolve_write_bytes() else {
return Ok(()); // nothing to do return Ok(()); // nothing to do
@ -91,16 +106,18 @@ impl CliLockfile {
// do an atomic write to reduce the chance of multiple deno // do an atomic write to reduce the chance of multiple deno
// processes corrupting the file // processes corrupting the file
atomic_write_file_with_retries( atomic_write_file_with_retries(
&self.sys,
&lockfile.filename, &lockfile.filename,
bytes, &bytes,
cache::CACHE_PERM, cache::CACHE_PERM,
) )
.context("Failed writing lockfile.")?; .map_err(AtomicWriteFileWithRetriesError::Io)?;
lockfile.has_content_changed = false; lockfile.has_content_changed = false;
Ok(()) Ok(())
} }
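The call above delegates the actual write to atomic_write_file_with_retries. As a rough sketch of the usual write-then-rename idea behind such helpers (the function name and temp-file suffix below are illustrative, not the crate's API):

use std::io::Write;
use std::path::Path;

fn write_file_atomically(path: &Path, bytes: &[u8]) -> std::io::Result<()> {
  // write to a sibling temp file first...
  let tmp = path.with_extension("tmp");
  let mut file = std::fs::File::create(&tmp)?;
  file.write_all(bytes)?;
  file.sync_all()?;
  // ...then rename into place; rename is atomic on the same filesystem, so
  // a concurrent deno process never observes a half-written lockfile
  std::fs::rename(&tmp, path)
}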
pub fn discover( pub fn discover(
sys: &CliSys,
flags: &Flags, flags: &Flags,
workspace: &Workspace, workspace: &Workspace,
maybe_external_import_map: Option<&serde_json::Value>, maybe_external_import_map: Option<&serde_json::Value>,
@ -163,11 +180,14 @@ impl CliLockfile {
.unwrap_or(false) .unwrap_or(false)
}); });
let lockfile = Self::read_from_path(CliLockfileReadFromPathOptions { let lockfile = Self::read_from_path(
file_path, sys,
frozen, CliLockfileReadFromPathOptions {
skip_write: flags.internal.lockfile_skip_write, file_path,
})?; frozen,
skip_write: flags.internal.lockfile_skip_write,
},
)?;
// initialize the lockfile with the workspace's configuration // initialize the lockfile with the workspace's configuration
let root_url = workspace.root_dir(); let root_url = workspace.root_dir();
@ -223,6 +243,7 @@ impl CliLockfile {
} }
pub fn read_from_path( pub fn read_from_path(
sys: &CliSys,
opts: CliLockfileReadFromPathOptions, opts: CliLockfileReadFromPathOptions,
) -> Result<CliLockfile, AnyError> { ) -> Result<CliLockfile, AnyError> {
let lockfile = match std::fs::read_to_string(&opts.file_path) { let lockfile = match std::fs::read_to_string(&opts.file_path) {
@ -241,6 +262,7 @@ impl CliLockfile {
} }
}; };
Ok(CliLockfile { Ok(CliLockfile {
sys: sys.clone(),
filename: lockfile.filename.clone(), filename: lockfile.filename.clone(),
lockfile: Mutex::new(lockfile), lockfile: Mutex::new(lockfile),
frozen: opts.frozen, frozen: opts.frozen,
@ -248,7 +270,7 @@ impl CliLockfile {
}) })
} }
pub fn error_if_changed(&self) -> Result<(), AnyError> { pub fn error_if_changed(&self) -> Result<(), JsErrorBox> {
if !self.frozen { if !self.frozen {
return Ok(()); return Ok(());
} }
@ -260,9 +282,7 @@ impl CliLockfile {
let diff = crate::util::diff::diff(&contents, &new_contents); let diff = crate::util::diff::diff(&contents, &new_contents);
// has an extra newline at the end // has an extra newline at the end
let diff = diff.trim_end(); let diff = diff.trim_end();
Err(deno_core::anyhow::anyhow!( Err(JsErrorBox::generic(format!("The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}")))
"The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
))
} else { } else {
Ok(()) Ok(())
} }

File diff suppressed because it is too large.

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -11,19 +11,20 @@ use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonDepWorkspaceReq; use deno_package_json::PackageJsonDepWorkspaceReq;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use deno_semver::StackString;
use deno_semver::VersionReq; use deno_semver::VersionReq;
use thiserror::Error; use thiserror::Error;
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmRemotePkg { pub struct InstallNpmRemotePkg {
pub alias: Option<String>, pub alias: Option<StackString>,
pub base_dir: PathBuf, pub base_dir: PathBuf,
pub req: PackageReq, pub req: PackageReq,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmWorkspacePkg { pub struct InstallNpmWorkspacePkg {
pub alias: Option<String>, pub alias: Option<StackString>,
pub target_dir: PathBuf, pub target_dir: PathBuf,
} }
@ -31,7 +32,7 @@ pub struct InstallNpmWorkspacePkg {
#[error("Failed to install '{}'\n at {}", alias, location)] #[error("Failed to install '{}'\n at {}", alias, location)]
pub struct PackageJsonDepValueParseWithLocationError { pub struct PackageJsonDepValueParseWithLocationError {
pub location: Url, pub location: Url,
pub alias: String, pub alias: StackString,
#[source] #[source]
pub source: PackageJsonDepValueParseError, pub source: PackageJsonDepValueParseError,
} }
@ -100,10 +101,8 @@ impl NpmInstallDepsProvider {
let mut pkg_pkgs = Vec::with_capacity( let mut pkg_pkgs = Vec::with_capacity(
deps.dependencies.len() + deps.dev_dependencies.len(), deps.dependencies.len() + deps.dev_dependencies.len(),
); );
for (alias, dep) in deps for (alias, dep) in
.dependencies deps.dependencies.iter().chain(deps.dev_dependencies.iter())
.into_iter()
.chain(deps.dev_dependencies.into_iter())
{ {
let dep = match dep { let dep = match dep {
Ok(dep) => dep, Ok(dep) => dep,
@ -111,8 +110,8 @@ impl NpmInstallDepsProvider {
pkg_json_dep_errors.push( pkg_json_dep_errors.push(
PackageJsonDepValueParseWithLocationError { PackageJsonDepValueParseWithLocationError {
location: pkg_json.specifier(), location: pkg_json.specifier(),
alias, alias: alias.clone(),
source: err, source: err.clone(),
}, },
); );
continue; continue;
@ -121,28 +120,28 @@ impl NpmInstallDepsProvider {
match dep { match dep {
PackageJsonDepValue::Req(pkg_req) => { PackageJsonDepValue::Req(pkg_req) => {
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| { let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_req(&pkg_req) pkg.matches_req(pkg_req)
// do not resolve to the current package // do not resolve to the current package
&& pkg.pkg_json.path != pkg_json.path && pkg.pkg_json.path != pkg_json.path
}); });
if let Some(pkg) = workspace_pkg { if let Some(pkg) = workspace_pkg {
workspace_pkgs.push(InstallNpmWorkspacePkg { workspace_pkgs.push(InstallNpmWorkspacePkg {
alias: Some(alias), alias: Some(alias.clone()),
target_dir: pkg.pkg_json.dir_path().to_path_buf(), target_dir: pkg.pkg_json.dir_path().to_path_buf(),
}); });
} else { } else {
pkg_pkgs.push(InstallNpmRemotePkg { pkg_pkgs.push(InstallNpmRemotePkg {
alias: Some(alias), alias: Some(alias.clone()),
base_dir: pkg_json.dir_path().to_path_buf(), base_dir: pkg_json.dir_path().to_path_buf(),
req: pkg_req, req: pkg_req.clone(),
}); });
} }
} }
PackageJsonDepValue::Workspace(workspace_version_req) => { PackageJsonDepValue::Workspace(workspace_version_req) => {
let version_req = match workspace_version_req { let version_req = match workspace_version_req {
PackageJsonDepWorkspaceReq::VersionReq(version_req) => { PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
version_req version_req.clone()
} }
PackageJsonDepWorkspaceReq::Tilde PackageJsonDepWorkspaceReq::Tilde
| PackageJsonDepWorkspaceReq::Caret => { | PackageJsonDepWorkspaceReq::Caret => {
@ -150,10 +149,10 @@ impl NpmInstallDepsProvider {
} }
}; };
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| { if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_name_and_version_req(&alias, &version_req) pkg.matches_name_and_version_req(alias, &version_req)
}) { }) {
workspace_pkgs.push(InstallNpmWorkspacePkg { workspace_pkgs.push(InstallNpmWorkspacePkg {
alias: Some(alias), alias: Some(alias.clone()),
target_dir: pkg.pkg_json.dir_path().to_path_buf(), target_dir: pkg.pkg_json.dir_path().to_path_buf(),
}); });
} }

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
const cacheName = "cache-v1"; const cacheName = "cache-v1";
const cache = await caches.open(cacheName); const cache = await caches.open(cacheName);

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
Deno.bench("echo deno", async () => { Deno.bench("echo deno", async () => {
await new Deno.Command("echo", { args: ["deno"] }).output(); await new Deno.Command("echo", { args: ["deno"] }).output();

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console // deno-lint-ignore-file no-console

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// v8 builtin that's close to the upper bound non-NOPs // v8 builtin that's close to the upper bound non-NOPs
Deno.bench("date_now", { n: 5e5 }, () => { Deno.bench("date_now", { n: 5e5 }, () => {

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console no-process-globals // deno-lint-ignore-file no-console no-process-globals
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
let total = 5; let total = 5;
let current = ""; let current = "";

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
/** @jsx h */ /** @jsx h */
import results from "./deno.json" assert { type: "json" }; import results from "./deno.json" assert { type: "json" };

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console no-process-globals // deno-lint-ignore-file no-console no-process-globals
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,14 +1,15 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::path::Path;
use std::str::FromStr;
use std::time::Duration;
use deno_core::serde::Deserialize; use deno_core::serde::Deserialize;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::serde_json::Value; use deno_core::serde_json::Value;
use lsp_types::Uri; use lsp_types::Uri;
use std::collections::HashMap;
use std::path::Path;
use std::str::FromStr;
use std::time::Duration;
use test_util::lsp::LspClientBuilder; use test_util::lsp::LspClientBuilder;
use test_util::PathRef; use test_util::PathRef;
use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types as lsp;

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use deno_bench_util::bencher::benchmark_group; use deno_bench_util::bencher::benchmark_group;
use deno_bench_util::bencher::benchmark_main; use deno_bench_util::bencher::benchmark_main;

View file

@ -1,11 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
#![allow(clippy::print_stdout)] #![allow(clippy::print_stdout)]
#![allow(clippy::print_stderr)] #![allow(clippy::print_stderr)]
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::From; use std::convert::From;
use std::env; use std::env;
@ -15,6 +12,10 @@ use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use std::process::Stdio; use std::process::Stdio;
use std::time::SystemTime; use std::time::SystemTime;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use test_util::PathRef; use test_util::PathRef;
mod lsp; mod lsp;

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
import { loadTestLibrary } from "../../../tests/napi/common.js"; import { loadTestLibrary } from "../../../tests/napi/common.js";

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
import { bench, run } from "mitata"; import { bench, run } from "mitata";
import { createRequire } from "module"; import { createRequire } from "module";

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console no-process-globals // deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console no-process-globals // deno-lint-ignore-file no-console no-process-globals
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

cli/bench/sqlite.js (new file, 36 lines)
View file

@ -0,0 +1,36 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console
import { DatabaseSync } from "node:sqlite";
import fs from "node:fs";
function bench(name, fun, count = 10000) {
const start = Date.now();
for (let i = 0; i < count; i++) fun();
const elapsed = Date.now() - start;
const rate = Math.floor(count / (elapsed / 1000));
console.log(` ${name}: time ${elapsed} ms rate ${rate}`);
}
for (const name of [":memory:", "test.db"]) {
console.log(`Benchmarking ${name}`);
try {
fs.unlinkSync(name);
} catch {
// Ignore
}
const db = new DatabaseSync(name);
db.exec("CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT)");
bench("prepare", () => db.prepare("SELECT * FROM test"));
bench("exec", () => db.exec("INSERT INTO test (name) VALUES ('foo')"));
const stmt = db.prepare("SELECT * FROM test");
bench("get", () => stmt.get());
const stmt2 = db.prepare("SELECT * FROM test WHERE id = ?");
bench("get (integer bind)", () => stmt2.get(1));
bench("all", () => stmt.all(), 1000);
}

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// From https://github.com/just-js/benchmarks/tree/main/01-stdio // From https://github.com/just-js/benchmarks/tree/main/01-stdio
#include <stdlib.h> #include <stdlib.h>

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// //
// From https://github.com/just-js/benchmarks/tree/main/01-stdio // From https://github.com/just-js/benchmarks/tree/main/01-stdio

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
const listener = Deno.listen({ port: 4500 }); const listener = Deno.listen({ port: 4500 });
const response = new TextEncoder().encode( const response = new TextEncoder().encode(

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console no-process-globals // deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console no-process-globals // deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console // deno-lint-ignore-file no-console

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file no-console no-process-globals // deno-lint-ignore-file no-console no-process-globals
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;

View file

@ -1,59 +1,24 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::env; use std::env;
use std::path::PathBuf; use std::path::PathBuf;
use deno_core::snapshot::*; use deno_core::snapshot::*;
use deno_runtime::*; use deno_runtime::*;
mod shared;
mod ts { mod ts {
use super::*;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use serde::Serialize;
use std::collections::HashMap; use std::collections::HashMap;
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
#[derive(Debug, Serialize)] use deno_core::op2;
#[serde(rename_all = "camelCase")] use deno_core::v8;
struct BuildInfoResponse { use deno_core::OpState;
build_specifier: String, use deno_error::JsErrorBox;
libs: Vec<String>, use serde::Serialize;
}
#[op2] use super::*;
#[serde]
fn op_build_info(state: &mut OpState) -> BuildInfoResponse {
let build_specifier = "asset:///bootstrap.ts".to_string();
let build_libs = state
.borrow::<Vec<&str>>()
.iter()
.map(|s| s.to_string())
.collect();
BuildInfoResponse {
build_specifier,
libs: build_libs,
}
}
#[op2(fast)]
fn op_is_node_file() -> bool {
false
}
#[op2]
#[string]
fn op_script_version(
_state: &mut OpState,
#[string] _arg: &str,
) -> Result<Option<String>, AnyError> {
Ok(Some("1".to_string()))
}
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@ -70,59 +35,56 @@ mod ts {
fn op_load( fn op_load(
state: &mut OpState, state: &mut OpState,
#[string] load_specifier: &str, #[string] load_specifier: &str,
) -> Result<LoadResponse, AnyError> { ) -> Result<LoadResponse, JsErrorBox> {
let op_crate_libs = state.borrow::<HashMap<&str, PathBuf>>(); let op_crate_libs = state.borrow::<HashMap<&str, PathBuf>>();
let path_dts = state.borrow::<PathBuf>(); let path_dts = state.borrow::<PathBuf>();
let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts"); let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts");
let build_specifier = "asset:///bootstrap.ts";
// we need a basic file to send to tsc to warm it up. // specifiers come across as `asset:///lib.{lib_name}.d.ts` and we need to
if load_specifier == build_specifier { // parse out just the name so we can lookup the asset.
Ok(LoadResponse { if let Some(caps) = re_asset.captures(load_specifier) {
data: r#"Deno.writeTextFile("hello.txt", "hello deno!");"#.to_string(),
version: "1".to_string(),
// this corresponds to `ts.ScriptKind.TypeScript`
script_kind: 3,
})
// specifiers come across as `asset:///lib.{lib_name}.d.ts` and we need to
// parse out just the name so we can lookup the asset.
} else if let Some(caps) = re_asset.captures(load_specifier) {
if let Some(lib) = caps.get(1).map(|m| m.as_str()) { if let Some(lib) = caps.get(1).map(|m| m.as_str()) {
// if it comes from an op crate, we were supplied with the path to the // if it comes from an op crate, we were supplied with the path to the
// file. // file.
let path = if let Some(op_crate_lib) = op_crate_libs.get(lib) { let path = if let Some(op_crate_lib) = op_crate_libs.get(lib) {
PathBuf::from(op_crate_lib).canonicalize()? PathBuf::from(op_crate_lib)
.canonicalize()
.map_err(JsErrorBox::from_err)?
// otherwise we will generate the path ourself // otherwise we will generate the path ourself
} else { } else {
path_dts.join(format!("lib.{lib}.d.ts")) path_dts.join(format!("lib.{lib}.d.ts"))
}; };
let data = std::fs::read_to_string(path)?; let data =
Ok(LoadResponse { std::fs::read_to_string(path).map_err(JsErrorBox::from_err)?;
return Ok(LoadResponse {
data, data,
version: "1".to_string(), version: "1".to_string(),
// this corresponds to `ts.ScriptKind.TypeScript` // this corresponds to `ts.ScriptKind.TypeScript`
script_kind: 3, script_kind: 3,
}) });
} else {
Err(custom_error(
"InvalidSpecifier",
format!("An invalid specifier was requested: {}", load_specifier),
))
} }
} else {
Err(custom_error(
"InvalidSpecifier",
format!("An invalid specifier was requested: {}", load_specifier),
))
} }
Err(JsErrorBox::new(
"InvalidSpecifier",
format!("An invalid specifier was requested: {}", load_specifier),
))
} }
deno_core::extension!(deno_tsc, deno_core::extension!(deno_tsc,
ops = [op_build_info, op_is_node_file, op_load, op_script_version], ops = [
op_load,
],
esm_entry_point = "ext:deno_tsc/99_main_compiler.js",
esm = [
dir "tsc",
"97_ts_host.js",
"98_lsp.js",
"99_main_compiler.js",
],
js = [ js = [
dir "tsc", dir "tsc",
"00_typescript.js", "00_typescript.js",
"99_main_compiler.js",
], ],
options = { options = {
op_crate_libs: HashMap<&'static str, PathBuf>, op_crate_libs: HashMap<&'static str, PathBuf>,
@ -268,6 +230,28 @@ mod ts {
) )
.unwrap(); .unwrap();
// Leak to satisfy type-checker. It's okay since it's only run once for a build script.
let build_libs_ = Box::leak(Box::new(build_libs.clone()));
let runtime_cb = Box::new(|rt: &mut deno_core::JsRuntimeForSnapshot| {
let scope = &mut rt.handle_scope();
let context = scope.get_current_context();
let global = context.global(scope);
let name = v8::String::new(scope, "snapshot").unwrap();
let snapshot_fn_val = global.get(scope, name.into()).unwrap();
let snapshot_fn: v8::Local<v8::Function> =
snapshot_fn_val.try_into().unwrap();
let undefined = v8::undefined(scope);
let build_libs = build_libs_.clone();
let build_libs_v8 =
deno_core::serde_v8::to_v8(scope, build_libs).unwrap();
snapshot_fn
.call(scope, undefined.into(), &[build_libs_v8])
.unwrap();
});
let output = create_snapshot( let output = create_snapshot(
CreateSnapshotOptions { CreateSnapshotOptions {
cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"), cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"),
@ -278,7 +262,7 @@ mod ts {
path_dts, path_dts,
)], )],
extension_transpiler: None, extension_transpiler: None,
with_runtime_cb: None, with_runtime_cb: Some(runtime_cb),
skip_op_registration: false, skip_op_registration: false,
}, },
None, None,
@ -306,57 +290,6 @@ mod ts {
println!("cargo:rerun-if-changed={}", path.display()); println!("cargo:rerun-if-changed={}", path.display());
} }
} }
pub(crate) fn version() -> String {
let file_text = std::fs::read_to_string("tsc/00_typescript.js").unwrap();
let version_text = " version = \"";
for line in file_text.lines() {
if let Some(index) = line.find(version_text) {
let remaining_line = &line[index + version_text.len()..];
return remaining_line[..remaining_line.find('"').unwrap()].to_string();
}
}
panic!("Could not find ts version.")
}
}
#[cfg(not(feature = "hmr"))]
fn create_cli_snapshot(snapshot_path: PathBuf) {
use deno_runtime::ops::bootstrap::SnapshotOptions;
let snapshot_options = SnapshotOptions {
ts_version: ts::version(),
v8_version: deno_core::v8::VERSION_STRING,
target: std::env::var("TARGET").unwrap(),
};
deno_runtime::snapshot::create_runtime_snapshot(
snapshot_path,
snapshot_options,
vec![],
);
}
fn git_commit_hash() -> String {
if let Ok(output) = std::process::Command::new("git")
.arg("rev-list")
.arg("-1")
.arg("HEAD")
.output()
{
if output.status.success() {
std::str::from_utf8(&output.stdout[..40])
.unwrap()
.to_string()
} else {
// When not in git repository
// (e.g. when the user install by `cargo install deno`)
"UNKNOWN".to_string()
}
} else {
// When there is no git command for some reason
"UNKNOWN".to_string()
}
} }
fn main() { fn main() {
@ -366,7 +299,7 @@ fn main() {
} }
deno_napi::print_linker_flags("deno"); deno_napi::print_linker_flags("deno");
deno_napi::print_linker_flags("denort"); deno_webgpu::print_linker_flags("deno");
// Host snapshots won't work when cross compiling. // Host snapshots won't work when cross compiling.
let target = env::var("TARGET").unwrap(); let target = env::var("TARGET").unwrap();
@ -385,51 +318,15 @@ fn main() {
} }
println!("cargo:rerun-if-env-changed=DENO_CANARY"); println!("cargo:rerun-if-env-changed=DENO_CANARY");
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash());
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
println!(
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
&git_commit_hash()[..7]
);
let ts_version = ts::version();
debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
println!("cargo:rustc-env=TS_VERSION={}", ts_version);
println!("cargo:rerun-if-env-changed=TS_VERSION");
println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap()); println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap()); println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
if cfg!(windows) {
// these DLLs load slowly, so delay loading them
let dlls = [
// webgpu
"d3dcompiler_47",
"OPENGL32",
// network related functions
"iphlpapi",
];
for dll in dlls {
println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
}
// enable delay loading
println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
}
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let o = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());
let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin"); let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin");
ts::create_compiler_snapshot(compiler_snapshot_path, &c); ts::create_compiler_snapshot(compiler_snapshot_path, &c);
#[cfg(not(feature = "hmr"))]
{
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
create_cli_snapshot(cli_snapshot_path);
}
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
{ {
let mut res = winres::WindowsResource::new(); let mut res = winres::WindowsResource::new();

cli/cache/cache_db.rs vendored (90 changed lines)
View file

@ -1,20 +1,20 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::unsync::spawn_blocking;
use deno_runtime::deno_webstorage::rusqlite;
use deno_runtime::deno_webstorage::rusqlite::Connection;
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
use deno_runtime::deno_webstorage::rusqlite::Params;
use once_cell::sync::OnceCell;
use std::io::IsTerminal; use std::io::IsTerminal;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use super::FastInsecureHasher; use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::unsync::spawn_blocking;
use deno_lib::util::hash::FastInsecureHasher;
use deno_runtime::deno_webstorage::rusqlite;
use deno_runtime::deno_webstorage::rusqlite::Connection;
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
use deno_runtime::deno_webstorage::rusqlite::Params;
use once_cell::sync::OnceCell;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CacheDBHash(u64); pub struct CacheDBHash(u64);
@ -24,12 +24,12 @@ impl CacheDBHash {
Self(hash) Self(hash)
} }
pub fn from_source(source: impl std::hash::Hash) -> Self { pub fn from_hashable(hashable: impl std::hash::Hash) -> Self {
Self::new( Self::new(
// always write in the deno version just in case // always write in the deno version just in case
// the clearing on deno version change doesn't work // the clearing on deno version change doesn't work
FastInsecureHasher::new_deno_versioned() FastInsecureHasher::new_deno_versioned()
.write_hashable(source) .write_hashable(hashable)
.finish(), .finish(),
) )
} }
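The comment above states the intent: every cache key is salted with the Deno version so stale rows are invalidated on upgrade even if the explicit clearing logic misses them. A minimal sketch of that idea, with std's DefaultHasher standing in for the in-tree FastInsecureHasher:

use std::collections::hash_map::DefaultHasher;
use std::hash::Hash;
use std::hash::Hasher;

fn versioned_cache_hash(value: impl Hash, deno_version: &str) -> u64 {
  let mut hasher = DefaultHasher::new();
  // salt with the CLI version so a version bump changes every key
  deno_version.hash(&mut hasher);
  value.hash(&mut hasher);
  hasher.finish()
}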
@ -232,7 +232,7 @@ impl CacheDB {
config: &CacheDBConfiguration, config: &CacheDBConfiguration,
conn: &Connection, conn: &Connection,
version: &str, version: &str,
) -> Result<(), AnyError> { ) -> Result<(), rusqlite::Error> {
let sql = config.create_combined_sql(); let sql = config.create_combined_sql();
conn.execute_batch(&sql)?; conn.execute_batch(&sql)?;
@ -265,7 +265,7 @@ impl CacheDB {
fn open_connection_and_init( fn open_connection_and_init(
&self, &self,
path: Option<&Path>, path: Option<&Path>,
) -> Result<Connection, AnyError> { ) -> Result<Connection, rusqlite::Error> {
let conn = self.actually_open_connection(path)?; let conn = self.actually_open_connection(path)?;
Self::initialize_connection(self.config, &conn, self.version)?; Self::initialize_connection(self.config, &conn, self.version)?;
Ok(conn) Ok(conn)
@ -368,7 +368,9 @@ impl CacheDB {
fn open_connection( fn open_connection(
config: &CacheDBConfiguration, config: &CacheDBConfiguration,
path: Option<&Path>, path: Option<&Path>,
open_connection_and_init: impl Fn(Option<&Path>) -> Result<Connection, AnyError>, open_connection_and_init: impl Fn(
Option<&Path>,
) -> Result<Connection, rusqlite::Error>,
) -> Result<ConnectionState, AnyError> { ) -> Result<ConnectionState, AnyError> {
// Success on first try? We hope that this is the case. // Success on first try? We hope that this is the case.
let err = match open_connection_and_init(path) { let err = match open_connection_and_init(path) {
@ -379,9 +381,20 @@ fn open_connection(
let Some(path) = path.as_ref() else { let Some(path) = path.as_ref() else {
// If an in-memory DB fails, that's game over // If an in-memory DB fails, that's game over
log::error!("Failed to initialize in-memory cache database."); log::error!("Failed to initialize in-memory cache database.");
return Err(err); return Err(err.into());
}; };
// reduce logging for readonly file system
if let rusqlite::Error::SqliteFailure(ffi_err, _) = &err {
if ffi_err.code == rusqlite::ErrorCode::ReadOnly {
log::debug!(
"Failed creating cache db. Folder readonly: {}",
path.display()
);
return handle_failure_mode(config, err, open_connection_and_init);
}
}
// ensure the parent directory exists // ensure the parent directory exists
if let Some(parent) = path.parent() { if let Some(parent) = path.parent() {
match std::fs::create_dir_all(parent) { match std::fs::create_dir_all(parent) {
@ -410,10 +423,11 @@ fn open_connection(
// Failed, try deleting it // Failed, try deleting it
let is_tty = std::io::stderr().is_terminal(); let is_tty = std::io::stderr().is_terminal();
log::log!( log::log!(
if is_tty { log::Level::Warn } else { log::Level::Trace }, if is_tty { log::Level::Warn } else { log::Level::Trace },
"Could not initialize cache database '{}', deleting and retrying... ({err:?})", "Could not initialize cache database '{}', deleting and retrying... ({err:?})",
path.to_string_lossy() path.to_string_lossy()
); );
if std::fs::remove_file(path).is_ok() { if std::fs::remove_file(path).is_ok() {
// Try a third time if we successfully deleted it // Try a third time if we successfully deleted it
let res = open_connection_and_init(Some(path)); let res = open_connection_and_init(Some(path));
@ -422,6 +436,11 @@ fn open_connection(
}; };
} }
log_failure_mode(path, is_tty, config);
handle_failure_mode(config, err, open_connection_and_init)
}
fn log_failure_mode(path: &Path, is_tty: bool, config: &CacheDBConfiguration) {
match config.on_failure { match config.on_failure {
CacheFailure::InMemory => { CacheFailure::InMemory => {
log::log!( log::log!(
@ -431,9 +450,8 @@ fn open_connection(
log::Level::Trace log::Level::Trace
}, },
"Failed to open cache file '{}', opening in-memory cache.", "Failed to open cache file '{}', opening in-memory cache.",
path.to_string_lossy() path.display()
); );
Ok(ConnectionState::Connected(open_connection_and_init(None)?))
} }
CacheFailure::Blackhole => { CacheFailure::Blackhole => {
log::log!( log::log!(
@ -443,23 +461,36 @@ fn open_connection(
log::Level::Trace log::Level::Trace
}, },
"Failed to open cache file '{}', performance may be degraded.", "Failed to open cache file '{}', performance may be degraded.",
path.to_string_lossy() path.display()
); );
Ok(ConnectionState::Blackhole)
} }
CacheFailure::Error => { CacheFailure::Error => {
log::error!( log::error!(
"Failed to open cache file '{}', expect further errors.", "Failed to open cache file '{}', expect further errors.",
path.to_string_lossy() path.display()
); );
Err(err)
} }
} }
} }
fn handle_failure_mode(
config: &CacheDBConfiguration,
err: rusqlite::Error,
open_connection_and_init: impl Fn(
Option<&Path>,
) -> Result<Connection, rusqlite::Error>,
) -> Result<ConnectionState, AnyError> {
match config.on_failure {
CacheFailure::InMemory => {
Ok(ConnectionState::Connected(open_connection_and_init(None)?))
}
CacheFailure::Blackhole => Ok(ConnectionState::Blackhole),
CacheFailure::Error => Err(err.into()),
}
}
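Taken together, open_connection and handle_failure_mode implement "try the on-disk database, recover if possible, otherwise degrade". A much-simplified sketch of that strategy, illustrative only; it folds the retry and in-memory fallback into one function and ignores the Blackhole and Error modes:

use std::path::Path;

use rusqlite::Connection;

fn open_cache_db(path: &Path) -> Result<Connection, rusqlite::Error> {
  match Connection::open(path) {
    Ok(conn) => Ok(conn),
    Err(_) => {
      // the file may be corrupt; remove it and try one more time, then
      // fall back to an in-memory database so caching degrades gracefully
      let _ = std::fs::remove_file(path);
      Connection::open(path).or_else(|_| Connection::open_in_memory())
    }
  }
}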
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use deno_core::anyhow::anyhow;
use test_util::TempDir; use test_util::TempDir;
use super::*; use super::*;
@ -520,7 +551,8 @@ mod tests {
let path = temp_dir.path().join("data").to_path_buf(); let path = temp_dir.path().join("data").to_path_buf();
let state = open_connection(&TEST_DB, Some(path.as_path()), |maybe_path| { let state = open_connection(&TEST_DB, Some(path.as_path()), |maybe_path| {
match maybe_path { match maybe_path {
Some(_) => Err(anyhow!("fail")), // this error was chosen because it was an error easy to construct
Some(_) => Err(rusqlite::Error::SqliteSingleThreadedMode),
None => Ok(Connection::open_in_memory().unwrap()), None => Ok(Connection::open_in_memory().unwrap()),
} }
}) })

cli/cache/caches.rs vendored (13 changed lines)
View file

@ -1,19 +1,20 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_lib::version::DENO_VERSION_INFO;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use super::cache_db::CacheDB; use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBConfiguration;
use super::check::TYPE_CHECK_CACHE_DB; use super::check::TYPE_CHECK_CACHE_DB;
use super::code_cache::CODE_CACHE_DB; use super::code_cache::CODE_CACHE_DB;
use super::deno_dir::DenoDirProvider;
use super::fast_check::FAST_CHECK_CACHE_DB; use super::fast_check::FAST_CHECK_CACHE_DB;
use super::incremental::INCREMENTAL_CACHE_DB; use super::incremental::INCREMENTAL_CACHE_DB;
use super::module_info::MODULE_INFO_CACHE_DB; use super::module_info::MODULE_INFO_CACHE_DB;
use super::node::NODE_ANALYSIS_CACHE_DB; use super::node::NODE_ANALYSIS_CACHE_DB;
use crate::cache::DenoDirProvider;
pub struct Caches { pub struct Caches {
dir_provider: Arc<DenoDirProvider>, dir_provider: Arc<DenoDirProvider>,
@ -48,13 +49,9 @@ impl Caches {
cell cell
.get_or_init(|| { .get_or_init(|| {
if let Some(path) = path { if let Some(path) = path {
CacheDB::from_path( CacheDB::from_path(config, path, DENO_VERSION_INFO.deno)
config,
path,
crate::version::DENO_VERSION_INFO.deno,
)
} else { } else {
CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno) CacheDB::in_memory(config, DENO_VERSION_INFO.deno)
} }
}) })
.clone() .clone()

cli/cache/check.rs vendored (9 changed lines)
View file

@ -1,12 +1,13 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::deno_webstorage::rusqlite::params;
use super::cache_db::CacheDB; use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash; use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure; use super::cache_db::CacheFailure;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::deno_webstorage::rusqlite::params;
pub static TYPE_CHECK_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration { pub static TYPE_CHECK_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
table_initializer: concat!( table_initializer: concat!(

View file

@ -1,14 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_runtime::code_cache; use deno_runtime::code_cache;
use deno_runtime::deno_webstorage::rusqlite::params; use deno_runtime::deno_webstorage::rusqlite::params;
use crate::worker::CliCodeCache;
use super::cache_db::CacheDB; use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash; use super::cache_db::CacheDBHash;
@ -86,12 +82,6 @@ impl CodeCache {
} }
} }
impl CliCodeCache for CodeCache {
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
self
}
}
impl code_cache::CodeCache for CodeCache { impl code_cache::CodeCache for CodeCache {
fn get_sync( fn get_sync(
&self, &self,

cli/cache/deno_dir.rs vendored (176 changed lines)
View file

@ -1,33 +1,40 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use once_cell::sync::OnceCell;
use super::DiskCache;
use std::env; use std::env;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::DenoDirResolutionError;
use super::DiskCache;
use crate::factory::CliDenoDirPathProvider;
use crate::sys::CliSys;
/// Lazily creates the deno dir which might be useful in scenarios /// Lazily creates the deno dir which might be useful in scenarios
/// where functionality wants to continue if the DENO_DIR can't be created. /// where functionality wants to continue if the DENO_DIR can't be created.
pub struct DenoDirProvider { pub struct DenoDirProvider {
maybe_custom_root: Option<PathBuf>, deno_dir_path_provider: Arc<CliDenoDirPathProvider>,
deno_dir: OnceCell<std::io::Result<DenoDir>>, sys: CliSys,
deno_dir: once_cell::sync::OnceCell<DenoDir>,
} }
impl DenoDirProvider { impl DenoDirProvider {
pub fn new(maybe_custom_root: Option<PathBuf>) -> Self { pub fn new(
sys: CliSys,
deno_dir_path_provider: Arc<CliDenoDirPathProvider>,
) -> Self {
Self { Self {
maybe_custom_root, sys,
deno_dir_path_provider,
deno_dir: Default::default(), deno_dir: Default::default(),
} }
} }
pub fn get_or_create(&self) -> Result<&DenoDir, std::io::Error> { pub fn get_or_create(&self) -> Result<&DenoDir, DenoDirResolutionError> {
self self.deno_dir.get_or_try_init(|| {
.deno_dir let path = self.deno_dir_path_provider.get_or_create()?;
.get_or_init(|| DenoDir::new(self.maybe_custom_root.clone())) Ok(DenoDir::new(self.sys.clone(), path.clone()))
.as_ref() })
.map_err(|err| std::io::Error::new(err.kind(), err.to_string()))
} }
} }
@ -42,36 +49,14 @@ pub struct DenoDir {
} }
impl DenoDir { impl DenoDir {
pub fn new(maybe_custom_root: Option<PathBuf>) -> std::io::Result<Self> { pub fn new(sys: CliSys, root: PathBuf) -> Self {
let maybe_custom_root =
maybe_custom_root.or_else(|| env::var("DENO_DIR").map(String::into).ok());
let root: PathBuf = if let Some(root) = maybe_custom_root {
root
} else if let Some(cache_dir) = dirs::cache_dir() {
// We use the OS cache dir because all files deno writes are cache files
// Once that changes we need to start using different roots if DENO_DIR
// is not set, and keep a single one if it is.
cache_dir.join("deno")
} else if let Some(home_dir) = dirs::home_dir() {
// fallback path
home_dir.join(".deno")
} else {
panic!("Could not set the Deno root directory")
};
let root = if root.is_absolute() {
root
} else {
std::env::current_dir()?.join(root)
};
assert!(root.is_absolute()); assert!(root.is_absolute());
let gen_path = root.join("gen"); let gen_path = root.join("gen");
let deno_dir = Self { Self {
root, root,
gen_cache: DiskCache::new(&gen_path), gen_cache: DiskCache::new(sys, gen_path),
}; }
Ok(deno_dir)
} }
/// The root directory of the DENO_DIR for display purposes only. /// The root directory of the DENO_DIR for display purposes only.
@ -166,112 +151,3 @@ impl DenoDir {
self.root.join("dl") self.root.join("dl")
} }
} }
/// To avoid the poorly managed dirs crate
#[cfg(not(windows))]
pub mod dirs {
use std::path::PathBuf;
pub fn cache_dir() -> Option<PathBuf> {
if cfg!(target_os = "macos") {
home_dir().map(|h| h.join("Library/Caches"))
} else {
std::env::var_os("XDG_CACHE_HOME")
.map(PathBuf::from)
.or_else(|| home_dir().map(|h| h.join(".cache")))
}
}
pub fn home_dir() -> Option<PathBuf> {
std::env::var_os("HOME")
.and_then(|h| if h.is_empty() { None } else { Some(h) })
.or_else(|| {
// TODO(bartlomieju):
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe {
fallback()
}
})
.map(PathBuf::from)
}
// This piece of code is taken from the deprecated home_dir() function in Rust's standard library: https://github.com/rust-lang/rust/blob/master/src/libstd/sys/unix/os.rs#L579
// The same code is used by the dirs crate
unsafe fn fallback() -> Option<std::ffi::OsString> {
let amt = match libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) {
n if n < 0 => 512_usize,
n => n as usize,
};
let mut buf = Vec::with_capacity(amt);
let mut passwd: libc::passwd = std::mem::zeroed();
let mut result = std::ptr::null_mut();
match libc::getpwuid_r(
libc::getuid(),
&mut passwd,
buf.as_mut_ptr(),
buf.capacity(),
&mut result,
) {
0 if !result.is_null() => {
let ptr = passwd.pw_dir as *const _;
let bytes = std::ffi::CStr::from_ptr(ptr).to_bytes().to_vec();
Some(std::os::unix::ffi::OsStringExt::from_vec(bytes))
}
_ => None,
}
}
}
/// To avoid the poorly managed dirs crate
// Copied from
// https://github.com/dirs-dev/dirs-sys-rs/blob/ec7cee0b3e8685573d847f0a0f60aae3d9e07fa2/src/lib.rs#L140-L164
// MIT license. Copyright (c) 2018-2019 dirs-rs contributors
#[cfg(windows)]
pub mod dirs {
use std::ffi::OsString;
use std::os::windows::ffi::OsStringExt;
use std::path::PathBuf;
use winapi::shared::winerror;
use winapi::um::combaseapi;
use winapi::um::knownfolders;
use winapi::um::shlobj;
use winapi::um::shtypes;
use winapi::um::winbase;
use winapi::um::winnt;
fn known_folder(folder_id: shtypes::REFKNOWNFOLDERID) -> Option<PathBuf> {
// SAFETY: winapi calls
unsafe {
let mut path_ptr: winnt::PWSTR = std::ptr::null_mut();
let result = shlobj::SHGetKnownFolderPath(
folder_id,
0,
std::ptr::null_mut(),
&mut path_ptr,
);
if result == winerror::S_OK {
let len = winbase::lstrlenW(path_ptr) as usize;
let path = std::slice::from_raw_parts(path_ptr, len);
let ostr: OsString = OsStringExt::from_wide(path);
combaseapi::CoTaskMemFree(path_ptr as *mut winapi::ctypes::c_void);
Some(PathBuf::from(ostr))
} else {
None
}
}
}
pub fn cache_dir() -> Option<PathBuf> {
known_folder(&knownfolders::FOLDERID_LocalAppData)
}
pub fn home_dir() -> Option<PathBuf> {
if let Some(userprofile) = std::env::var_os("USERPROFILE") {
if !userprofile.is_empty() {
return Some(PathBuf::from(userprofile));
}
}
known_folder(&knownfolders::FOLDERID_Profile)
}
}
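The removed dirs helpers above existed essentially to resolve the DENO_DIR root. A condensed sketch of that fallback order for Unix-like systems (the DENO_DIR env var, then the OS cache directory, then a home-relative cache path; the Windows known-folder lookup and the ~/.deno last resort are omitted here):

use std::path::PathBuf;

fn resolve_deno_dir_root() -> Option<PathBuf> {
  // an explicit DENO_DIR always wins
  if let Some(dir) = std::env::var_os("DENO_DIR") {
    return Some(PathBuf::from(dir));
  }
  let home = std::env::var_os("HOME").map(PathBuf::from)?;
  if cfg!(target_os = "macos") {
    Some(home.join("Library/Caches/deno"))
  } else if let Some(xdg) = std::env::var_os("XDG_CACHE_HOME") {
    Some(PathBuf::from(xdg).join("deno"))
  } else {
    Some(home.join(".cache/deno"))
  }
}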

View file

@ -1,31 +1,32 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use super::CACHE_PERM;
use crate::util::fs::atomic_write_file_with_retries;
use deno_cache_dir::url_to_filename;
use deno_core::url::Host;
use deno_core::url::Url;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::fs;
use std::path::Component; use std::path::Component;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::path::Prefix; use std::path::Prefix;
use std::str; use std::str;
use deno_cache_dir::url_to_filename;
use deno_cache_dir::CACHE_PERM;
use deno_core::url::Host;
use deno_core::url::Url;
use deno_path_util::fs::atomic_write_file_with_retries;
use sys_traits::FsRead;
use crate::sys::CliSys;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct DiskCache { pub struct DiskCache {
sys: CliSys,
pub location: PathBuf, pub location: PathBuf,
} }
impl DiskCache { impl DiskCache {
/// `location` must be an absolute path. /// `location` must be an absolute path.
pub fn new(location: &Path) -> Self { pub fn new(sys: CliSys, location: PathBuf) -> Self {
assert!(location.is_absolute()); assert!(location.is_absolute());
Self { Self { sys, location }
location: location.to_owned(),
}
} }
fn get_cache_filename(&self, url: &Url) -> Option<PathBuf> { fn get_cache_filename(&self, url: &Url) -> Option<PathBuf> {
@ -115,25 +116,29 @@ impl DiskCache {
pub fn get(&self, filename: &Path) -> std::io::Result<Vec<u8>> { pub fn get(&self, filename: &Path) -> std::io::Result<Vec<u8>> {
let path = self.location.join(filename); let path = self.location.join(filename);
fs::read(path) Ok(self.sys.fs_read(path)?.into_owned())
} }
pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> { pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
let path = self.location.join(filename); let path = self.location.join(filename);
atomic_write_file_with_retries(&path, data, CACHE_PERM) atomic_write_file_with_retries(&self.sys, &path, data, CACHE_PERM)
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; // ok, testing
#[allow(clippy::disallowed_types)]
use sys_traits::impls::RealSys;
use test_util::TempDir; use test_util::TempDir;
use super::*;
#[test] #[test]
fn test_set_get_cache_file() { fn test_set_get_cache_file() {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let sub_dir = temp_dir.path().join("sub_dir"); let sub_dir = temp_dir.path().join("sub_dir");
let cache = DiskCache::new(&sub_dir.to_path_buf()); let cache = DiskCache::new(RealSys, sub_dir.to_path_buf());
let path = PathBuf::from("foo/bar.txt"); let path = PathBuf::from("foo/bar.txt");
cache.set(&path, b"hello").unwrap(); cache.set(&path, b"hello").unwrap();
assert_eq!(cache.get(&path).unwrap(), b"hello"); assert_eq!(cache.get(&path).unwrap(), b"hello");
@ -147,7 +152,7 @@ mod tests {
PathBuf::from("/deno_dir/") PathBuf::from("/deno_dir/")
}; };
let cache = DiskCache::new(&cache_location); let cache = DiskCache::new(RealSys, cache_location);
let mut test_cases = vec![ let mut test_cases = vec![
( (
@ -203,7 +208,7 @@ mod tests {
} else { } else {
"/foo" "/foo"
}; };
let cache = DiskCache::new(&PathBuf::from(p)); let cache = DiskCache::new(RealSys, PathBuf::from(p));
let mut test_cases = vec![ let mut test_cases = vec![
( (
@ -251,7 +256,7 @@ mod tests {
PathBuf::from("/deno_dir/") PathBuf::from("/deno_dir/")
}; };
let cache = DiskCache::new(&cache_location); let cache = DiskCache::new(RealSys, cache_location);
let mut test_cases = vec!["unknown://localhost/test.ts"]; let mut test_cases = vec!["unknown://localhost/test.ts"];
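
The constructor change above threads a CliSys handle through DiskCache instead of borrowing a &Path. A minimal sketch of a call site under the new signature (not part of the diff; the paths are illustrative and mirror the updated tests, which reach for RealSys only under #[allow(clippy::disallowed_types)]):

    // hypothetical call site; CliSys::default() as used in the emit cache test below
    let cache = DiskCache::new(CliSys::default(), PathBuf::from("/deno_dir/"));
    let path = PathBuf::from("foo/bar.txt");
    cache.set(&path, b"hello").unwrap();              // atomic write with CACHE_PERM via the sys handle
    assert_eq!(cache.get(&path).unwrap(), b"hello");  // read back through the same handle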

11
cli/cache/emit.rs vendored
View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf; use std::path::PathBuf;
@ -6,6 +6,7 @@ use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::unsync::sync::AtomicFlag; use deno_core::unsync::sync::AtomicFlag;
use deno_lib::version::DENO_VERSION_INFO;
use super::DiskCache; use super::DiskCache;
@ -23,7 +24,7 @@ impl EmitCache {
disk_cache, disk_cache,
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
file_serializer: EmitFileSerializer { file_serializer: EmitFileSerializer {
cli_version: crate::version::DENO_VERSION_INFO.deno, cli_version: DENO_VERSION_INFO.deno,
}, },
} }
} }
@ -147,7 +148,7 @@ impl EmitFileSerializer {
// it's ok to use an insecure hash here because // it's ok to use an insecure hash here because
// if someone can change the emit source then they // if someone can change the emit source then they
// can also change the version hash // can also change the version hash
crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead deno_lib::util::hash::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
.write(bytes) .write(bytes)
// emit should not be re-used between cli versions // emit should not be re-used between cli versions
.write_str(self.cli_version) .write_str(self.cli_version)
@ -160,11 +161,13 @@ mod test {
use test_util::TempDir; use test_util::TempDir;
use super::*; use super::*;
use crate::sys::CliSys;
#[test] #[test]
pub fn emit_cache_general_use() { pub fn emit_cache_general_use() {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let disk_cache = DiskCache::new(temp_dir.path().as_path()); let disk_cache =
DiskCache::new(CliSys::default(), temp_dir.path().to_path_buf());
let cache = EmitCache { let cache = EmitCache {
disk_cache: disk_cache.clone(), disk_cache: disk_cache.clone(),
file_serializer: EmitFileSerializer { file_serializer: EmitFileSerializer {

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_graph::FastCheckCacheItem; use deno_graph::FastCheckCacheItem;

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
@ -34,12 +34,16 @@ pub static INCREMENTAL_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
pub struct IncrementalCache(IncrementalCacheInner); pub struct IncrementalCache(IncrementalCacheInner);
impl IncrementalCache { impl IncrementalCache {
pub fn new<TState: std::hash::Hash>( pub fn new(
db: CacheDB, db: CacheDB,
state: &TState, state_hash: CacheDBHash,
initial_file_paths: &[PathBuf], initial_file_paths: &[PathBuf],
) -> Self { ) -> Self {
IncrementalCache(IncrementalCacheInner::new(db, state, initial_file_paths)) IncrementalCache(IncrementalCacheInner::new(
db,
state_hash,
initial_file_paths,
))
} }
pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool { pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
@ -67,12 +71,11 @@ struct IncrementalCacheInner {
} }
impl IncrementalCacheInner { impl IncrementalCacheInner {
pub fn new<TState: std::hash::Hash>( pub fn new(
db: CacheDB, db: CacheDB,
state: &TState, state_hash: CacheDBHash,
initial_file_paths: &[PathBuf], initial_file_paths: &[PathBuf],
) -> Self { ) -> Self {
let state_hash = CacheDBHash::from_source(state);
let sql_cache = SqlIncrementalCache::new(db, state_hash); let sql_cache = SqlIncrementalCache::new(db, state_hash);
Self::from_sql_incremental_cache(sql_cache, initial_file_paths) Self::from_sql_incremental_cache(sql_cache, initial_file_paths)
} }
@ -112,13 +115,13 @@ impl IncrementalCacheInner {
pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool { pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
match self.previous_hashes.get(file_path) { match self.previous_hashes.get(file_path) {
Some(hash) => *hash == CacheDBHash::from_source(file_text), Some(hash) => *hash == CacheDBHash::from_hashable(file_text),
None => false, None => false,
} }
} }
pub fn update_file(&self, file_path: &Path, file_text: &str) { pub fn update_file(&self, file_path: &Path, file_text: &str) {
let hash = CacheDBHash::from_source(file_text); let hash = CacheDBHash::from_hashable(file_text);
if let Some(previous_hash) = self.previous_hashes.get(file_path) { if let Some(previous_hash) = self.previous_hashes.get(file_path) {
if *previous_hash == hash { if *previous_hash == hash {
return; // do not bother updating the db file because nothing has changed return; // do not bother updating the db file because nothing has changed
@ -262,7 +265,7 @@ mod test {
let sql_cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1)); let sql_cache = SqlIncrementalCache::new(conn, CacheDBHash::new(1));
let file_path = PathBuf::from("/mod.ts"); let file_path = PathBuf::from("/mod.ts");
let file_text = "test"; let file_text = "test";
let file_hash = CacheDBHash::from_source(file_text); let file_hash = CacheDBHash::from_hashable(file_text);
sql_cache.set_source_hash(&file_path, file_hash).unwrap(); sql_cache.set_source_hash(&file_path, file_hash).unwrap();
let cache = IncrementalCacheInner::from_sql_incremental_cache( let cache = IncrementalCacheInner::from_sql_incremental_cache(
sql_cache, sql_cache,
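
With this change callers pre-compute the state hash themselves instead of handing IncrementalCache::new an arbitrary hashable state. A hedged sketch of the new call shape (not from the diff; db, fmt_options, file_paths, file_path, and file_text are illustrative names):

    let state_hash = CacheDBHash::from_hashable(&fmt_options);
    let cache = IncrementalCache::new(db, state_hash, &file_paths);
    if !cache.is_file_same(&file_path, &file_text) {
      cache.update_file(&file_path, &file_text);
    }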

191
cli/cache/mod.rs vendored
View file

@ -1,19 +1,13 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use crate::args::jsr_url; use std::collections::HashMap;
use crate::file_fetcher::CliFetchNoFollowErrorKind; use std::path::PathBuf;
use crate::file_fetcher::CliFileFetcher; use std::sync::Arc;
use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting; use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind; use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind;
use deno_cache_dir::file_fetcher::FileOrRedirect; use deno_cache_dir::file_fetcher::FileOrRedirect;
use deno_core::error::AnyError;
use deno_core::futures; use deno_core::futures;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
@ -21,21 +15,21 @@ use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture; use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse; use deno_graph::source::LoadResponse;
use deno_graph::source::Loader; use deno_graph::source::Loader;
use deno_runtime::deno_fs; use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use node_resolver::InNpmPackageChecker; use node_resolver::InNpmPackageChecker;
use std::borrow::Cow;
use std::collections::HashMap; use crate::args::jsr_url;
use std::path::Path; use crate::file_fetcher::CliFetchNoFollowErrorKind;
use std::path::PathBuf; use crate::file_fetcher::CliFileFetcher;
use std::sync::Arc; use crate::file_fetcher::FetchNoFollowOptions;
use std::time::SystemTime; use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::sys::CliSys;
mod cache_db; mod cache_db;
mod caches; mod caches;
mod check; mod check;
mod code_cache; mod code_cache;
mod common;
mod deno_dir; mod deno_dir;
mod disk_cache; mod disk_cache;
mod emit; mod emit;
@ -49,8 +43,8 @@ pub use cache_db::CacheDBHash;
pub use caches::Caches; pub use caches::Caches;
pub use check::TypeCheckCache; pub use check::TypeCheckCache;
pub use code_cache::CodeCache; pub use code_cache::CodeCache;
pub use common::FastInsecureHasher; /// Permissions used to save a file in the disk caches.
pub use deno_dir::dirs::home_dir; pub use deno_cache_dir::CACHE_PERM;
pub use deno_dir::DenoDir; pub use deno_dir::DenoDir;
pub use deno_dir::DenoDirProvider; pub use deno_dir::DenoDirProvider;
pub use disk_cache::DiskCache; pub use disk_cache::DiskCache;
@ -62,123 +56,10 @@ pub use node::NodeAnalysisCache;
pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::LazyGraphSourceParser;
pub use parsed_source::ParsedSourceCache; pub use parsed_source::ParsedSourceCache;
/// Permissions used to save a file in the disk caches. pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<CliSys>;
pub const CACHE_PERM: u32 = 0o644; pub type LocalLspHttpCache = deno_cache_dir::LocalLspHttpCache<CliSys>;
#[derive(Debug, Clone)]
pub struct RealDenoCacheEnv;
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
fn read_file_bytes(
&self,
path: &Path,
) -> std::io::Result<Cow<'static, [u8]>> {
std::fs::read(path).map(Cow::Owned)
}
fn atomic_write_file(
&self,
path: &Path,
bytes: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries(path, bytes, CACHE_PERM)
}
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
crate::util::fs::canonicalize_path(path)
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
std::fs::create_dir_all(path)
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
match std::fs::metadata(path) {
Ok(metadata) => Ok(Some(
metadata.modified().unwrap_or_else(|_| SystemTime::now()),
)),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(err),
}
}
fn is_file(&self, path: &Path) -> bool {
path.is_file()
}
fn time_now(&self) -> SystemTime {
SystemTime::now()
}
}
#[derive(Debug, Clone)]
pub struct DenoCacheEnvFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
fn read_file_bytes(
&self,
path: &Path,
) -> std::io::Result<Cow<'static, [u8]>> {
self
.0
.read_file_sync(path, None)
.map_err(|err| err.into_io_error())
}
fn atomic_write_file(
&self,
path: &Path,
bytes: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries_and_fs(
&AtomicWriteFileFsAdapter {
fs: self.0,
write_mode: CACHE_PERM,
},
path,
bytes,
)
}
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
}
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
self
.0
.mkdir_sync(path, true, None)
.map_err(|e| e.into_io_error())
}
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
self
.0
.stat_sync(path)
.map(|stat| {
stat
.mtime
.map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
})
.map_err(|e| e.into_io_error())
}
fn is_file(&self, path: &Path) -> bool {
self.0.is_file_sync(path)
}
fn time_now(&self) -> SystemTime {
SystemTime::now()
}
}
pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
pub type LocalLspHttpCache =
deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
pub use deno_cache_dir::HttpCache; pub use deno_cache_dir::HttpCache;
use deno_error::JsErrorBox;
pub struct FetchCacherOptions { pub struct FetchCacherOptions {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>, pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
@ -192,11 +73,11 @@ pub struct FetchCacherOptions {
pub struct FetchCacher { pub struct FetchCacher {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>, pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
file_fetcher: Arc<CliFileFetcher>, file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, in_npm_pkg_checker: DenoInNpmPackageChecker,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer, permissions: PermissionsContainer,
sys: CliSys,
is_deno_publish: bool, is_deno_publish: bool,
cache_info_enabled: bool, cache_info_enabled: bool,
} }
@ -204,18 +85,18 @@ pub struct FetchCacher {
impl FetchCacher { impl FetchCacher {
pub fn new( pub fn new(
file_fetcher: Arc<CliFileFetcher>, file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, in_npm_pkg_checker: DenoInNpmPackageChecker,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
sys: CliSys,
options: FetchCacherOptions, options: FetchCacherOptions,
) -> Self { ) -> Self {
Self { Self {
file_fetcher, file_fetcher,
fs,
global_http_cache, global_http_cache,
in_npm_pkg_checker, in_npm_pkg_checker,
module_info_cache, module_info_cache,
sys,
file_header_overrides: options.file_header_overrides, file_header_overrides: options.file_header_overrides,
permissions: options.permissions, permissions: options.permissions,
is_deno_publish: options.is_deno_publish, is_deno_publish: options.is_deno_publish,
@ -238,11 +119,7 @@ impl FetchCacher {
} else if specifier.scheme() == "file" { } else if specifier.scheme() == "file" {
specifier.to_file_path().ok() specifier.to_file_path().ok()
} else { } else {
#[allow(deprecated)] self.global_http_cache.local_path_for_url(specifier).ok()
self
.global_http_cache
.get_global_cache_filepath(specifier)
.ok()
} }
} }
} }
@ -277,9 +154,8 @@ impl Loader for FetchCacher {
// symlinked to `/my-project-2/node_modules`), so first we checked if the path // symlinked to `/my-project-2/node_modules`), so first we checked if the path
// is in a node_modules dir to avoid needlessly canonicalizing, then now compare // is in a node_modules dir to avoid needlessly canonicalizing, then now compare
// against the canonicalized specifier. // against the canonicalized specifier.
let specifier = crate::node::resolve_specifier_into_node_modules( let specifier = node_resolver::resolve_specifier_into_node_modules(
specifier, &self.sys, specifier,
self.fs.as_ref(),
); );
if self.in_npm_pkg_checker.in_npm_package(&specifier) { if self.in_npm_pkg_checker.in_npm_package(&specifier) {
return Box::pin(futures::future::ready(Ok(Some( return Box::pin(futures::future::ready(Ok(Some(
@ -312,9 +188,9 @@ impl Loader for FetchCacher {
LoaderCacheSetting::Use => None, LoaderCacheSetting::Use => None,
LoaderCacheSetting::Reload => { LoaderCacheSetting::Reload => {
if matches!(file_fetcher.cache_setting(), CacheSetting::Only) { if matches!(file_fetcher.cache_setting(), CacheSetting::Only) {
return Err(deno_core::anyhow::anyhow!( return Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::generic(
"Could not resolve version constraint using only cached data. Try running again without --cached-only" "Could not resolve version constraint using only cached data. Try running again without --cached-only"
)); ))));
} }
Some(CacheSetting::ReloadAll) Some(CacheSetting::ReloadAll)
} }
@ -380,28 +256,27 @@ impl Loader for FetchCacher {
FetchNoFollowErrorKind::CacheSave { .. } | FetchNoFollowErrorKind::CacheSave { .. } |
FetchNoFollowErrorKind::UnsupportedScheme { .. } | FetchNoFollowErrorKind::UnsupportedScheme { .. } |
FetchNoFollowErrorKind::RedirectHeaderParse { .. } | FetchNoFollowErrorKind::RedirectHeaderParse { .. } |
FetchNoFollowErrorKind::InvalidHeader { .. } => Err(AnyError::from(err)), FetchNoFollowErrorKind::InvalidHeader { .. } => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err)))),
FetchNoFollowErrorKind::NotCached { .. } => { FetchNoFollowErrorKind::NotCached { .. } => {
if options.cache_setting == LoaderCacheSetting::Only { if options.cache_setting == LoaderCacheSetting::Only {
Ok(None) Ok(None)
} else { } else {
Err(AnyError::from(err)) Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err))))
} }
}, },
FetchNoFollowErrorKind::ChecksumIntegrity(err) => { FetchNoFollowErrorKind::ChecksumIntegrity(err) => {
// convert to the equivalent deno_graph error so that it // convert to the equivalent deno_graph error so that it
// enhances it if this is passed to deno_graph // enhances it if this is passed to deno_graph
Err( Err(
deno_graph::source::ChecksumIntegrityError { deno_graph::source::LoadError::ChecksumIntegrity(deno_graph::source::ChecksumIntegrityError {
actual: err.actual, actual: err.actual,
expected: err.expected, expected: err.expected,
} }),
.into(),
) )
} }
} }
}, },
CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(AnyError::from(permission_check_error)), CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(permission_check_error)))),
} }
}) })
} }
@ -416,7 +291,7 @@ impl Loader for FetchCacher {
module_info: &deno_graph::ModuleInfo, module_info: &deno_graph::ModuleInfo,
) { ) {
log::debug!("Caching module info for {}", specifier); log::debug!("Caching module info for {}", specifier);
let source_hash = CacheDBHash::from_source(source); let source_hash = CacheDBHash::from_hashable(source);
let result = self.module_info_cache.set_module_info( let result = self.module_info_cache.set_module_info(
specifier, specifier,
media_type, media_type,
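
Both hand-rolled DenoCacheEnv adapters disappear here; the cache aliases are parameterized over CliSys directly, and FetchCacher takes the same handle in place of an Arc<dyn FileSystem>. A sketch of construction under the new shape (not part of the diff; deno_dir_path and the other bindings are illustrative, the GlobalHttpCache form mirrors the updated file_fetcher tests):

    let http_cache = Arc::new(GlobalHttpCache::new(CliSys::default(), deno_dir_path.join("remote")));
    let cacher = FetchCacher::new(
      file_fetcher,
      http_cache.clone(),
      in_npm_pkg_checker,     // DenoInNpmPackageChecker instead of Arc<dyn InNpmPackageChecker>
      module_info_cache,
      CliSys::default(),
      options,                // FetchCacherOptions
    );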

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc; use std::sync::Arc;
@ -194,7 +194,7 @@ impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache // attempt to load from the cache
let source_hash = CacheDBHash::from_source(source); let source_hash = CacheDBHash::from_hashable(source);
if let Some(info) = if let Some(info) =
self.load_cached_module_info(specifier, media_type, source_hash) self.load_cached_module_info(specifier, media_type, source_hash)
{ {
@ -228,7 +228,7 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
media_type: MediaType, media_type: MediaType,
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache // attempt to load from the cache
let source_hash = CacheDBHash::from_source(&source); let source_hash = CacheDBHash::from_hashable(&source);
if let Some(info) = if let Some(info) =
self.load_cached_module_info(specifier, media_type, source_hash) self.load_cached_module_info(specifier, media_type, source_hash)
{ {

5
cli/cache/node.rs vendored
View file

@ -1,15 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_runtime::deno_webstorage::rusqlite::params; use deno_runtime::deno_webstorage::rusqlite::params;
use crate::node::CliCjsAnalysis;
use super::cache_db::CacheDB; use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheFailure; use super::cache_db::CacheFailure;
use super::CacheDBHash; use super::CacheDBHash;
use crate::node::CliCjsAnalysis;
pub static NODE_ANALYSIS_CACHE_DB: CacheDBConfiguration = pub static NODE_ANALYSIS_CACHE_DB: CacheDBConfiguration =
CacheDBConfiguration { CacheDBConfiguration {

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
/// <https://chromedevtools.github.io/devtools-protocol/tot/> /// <https://chromedevtools.github.io/devtools-protocol/tot/>
use deno_core::serde_json::Value; use deno_core::serde_json::Value;

View file

@ -4,6 +4,7 @@ disallowed-methods = [
] ]
disallowed-types = [ disallowed-types = [
{ path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" }, { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },
{ path = "sys_traits::impls::RealSys", reason = "use crate::sys::CliSys instead" },
] ]
ignore-interior-mutability = [ ignore-interior-mutability = [
"lsp_types::Uri", "lsp_types::Uri",

View file

@ -1,9 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use crate::cache::EmitCache; use std::sync::Arc;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::resolver::CjsTracker;
use deno_ast::EmittedSourceText; use deno_ast::EmittedSourceText;
use deno_ast::ModuleKind; use deno_ast::ModuleKind;
@ -14,46 +11,43 @@ use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileModuleOptions; use deno_ast::TranspileModuleOptions;
use deno_ast::TranspileResult; use deno_ast::TranspileResult;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::MediaType; use deno_graph::MediaType;
use deno_graph::Module; use deno_graph::Module;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use std::sync::Arc; use deno_lib::util::hash::FastInsecureHasher;
use crate::args::deno_json::TranspileAndEmitOptions;
use crate::args::deno_json::TsConfigResolver;
use crate::cache::EmitCache;
use crate::cache::ParsedSourceCache;
use crate::resolver::CliCjsTracker;
#[derive(Debug)] #[derive(Debug)]
pub struct Emitter { pub struct Emitter {
cjs_tracker: Arc<CjsTracker>, cjs_tracker: Arc<CliCjsTracker>,
emit_cache: Arc<EmitCache>, emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_and_emit_options: tsconfig_resolver: Arc<TsConfigResolver>,
Arc<(deno_ast::TranspileOptions, deno_ast::EmitOptions)>,
// cached hash of the transpile and emit options
transpile_and_emit_options_hash: u64,
} }
impl Emitter { impl Emitter {
pub fn new( pub fn new(
cjs_tracker: Arc<CjsTracker>, cjs_tracker: Arc<CliCjsTracker>,
emit_cache: Arc<EmitCache>, emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions, tsconfig_resolver: Arc<TsConfigResolver>,
emit_options: deno_ast::EmitOptions,
) -> Self { ) -> Self {
let transpile_and_emit_options_hash = {
let mut hasher = FastInsecureHasher::new_without_deno_version();
hasher.write_hashable(&transpile_options);
hasher.write_hashable(&emit_options);
hasher.finish()
};
Self { Self {
cjs_tracker, cjs_tracker,
emit_cache, emit_cache,
parsed_source_cache, parsed_source_cache,
transpile_and_emit_options: Arc::new((transpile_options, emit_options)), tsconfig_resolver,
transpile_and_emit_options_hash,
} }
} }
@ -100,38 +94,49 @@ impl Emitter {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind, module_kind: deno_ast::ModuleKind,
source: &str, source: &str,
) -> Option<String> { ) -> Result<Option<String>, AnyError> {
let source_hash = self.get_source_hash(module_kind, source); let transpile_and_emit_options = self
self.emit_cache.get_emit_code(specifier, source_hash) .tsconfig_resolver
.transpile_and_emit_options(specifier)?;
let source_hash =
self.get_source_hash(module_kind, transpile_and_emit_options, source);
Ok(self.emit_cache.get_emit_code(specifier, source_hash))
} }
pub async fn emit_parsed_source( pub async fn emit_parsed_source(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind, module_kind: ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<String, AnyError> { ) -> Result<String, EmitParsedSourceHelperError> {
let transpile_and_emit_options = self
.tsconfig_resolver
.transpile_and_emit_options(specifier)?;
// Note: keep this in sync with the sync version below // Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, module_kind, source) { match helper.pre_emit_parsed_source(
specifier,
module_kind,
transpile_and_emit_options,
source,
) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let parsed_source_cache = self.parsed_source_cache.clone(); let parsed_source_cache = self.parsed_source_cache.clone();
let transpile_and_emit_options = let transpile_and_emit_options = transpile_and_emit_options.clone();
self.transpile_and_emit_options.clone();
let transpiled_source = deno_core::unsync::spawn_blocking({ let transpiled_source = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone(); let specifier = specifier.clone();
let source = source.clone(); let source = source.clone();
move || -> Result<_, AnyError> { move || {
EmitParsedSourceHelper::transpile( EmitParsedSourceHelper::transpile(
&parsed_source_cache, &parsed_source_cache,
&specifier, &specifier,
media_type, media_type,
module_kind, module_kind,
source.clone(), source.clone(),
&transpile_and_emit_options.0, &transpile_and_emit_options.transpile,
&transpile_and_emit_options.1, &transpile_and_emit_options.emit,
) )
.map(|r| r.text) .map(|r| r.text)
} }
@ -154,10 +159,18 @@ impl Emitter {
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind, module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<String, AnyError> { ) -> Result<String, EmitParsedSourceHelperError> {
let transpile_and_emit_options = self
.tsconfig_resolver
.transpile_and_emit_options(specifier)?;
// Note: keep this in sync with the async version above // Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, module_kind, source) { match helper.pre_emit_parsed_source(
specifier,
module_kind,
transpile_and_emit_options,
source,
) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let transpiled_source = EmitParsedSourceHelper::transpile( let transpiled_source = EmitParsedSourceHelper::transpile(
@ -166,8 +179,8 @@ impl Emitter {
media_type, media_type,
module_kind, module_kind,
source.clone(), source.clone(),
&self.transpile_and_emit_options.0, &transpile_and_emit_options.transpile,
&self.transpile_and_emit_options.1, &transpile_and_emit_options.emit,
)? )?
.text; .text;
helper.post_emit_parsed_source( helper.post_emit_parsed_source(
@ -187,7 +200,10 @@ impl Emitter {
module_kind: deno_ast::ModuleKind, module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<(String, String), AnyError> { ) -> Result<(String, String), AnyError> {
let mut emit_options = self.transpile_and_emit_options.1.clone(); let transpile_and_emit_options = self
.tsconfig_resolver
.transpile_and_emit_options(specifier)?;
let mut emit_options = transpile_and_emit_options.emit.clone();
emit_options.inline_sources = false; emit_options.inline_sources = false;
emit_options.source_map = SourceMapOption::Separate; emit_options.source_map = SourceMapOption::Separate;
// strip off the path to have more deterministic builds as we don't care // strip off the path to have more deterministic builds as we don't care
@ -199,7 +215,7 @@ impl Emitter {
media_type, media_type,
module_kind, module_kind,
source.clone(), source.clone(),
&self.transpile_and_emit_options.0, &transpile_and_emit_options.transpile,
&emit_options, &emit_options,
)?; )?;
Ok((source.text, source.source_map.unwrap())) Ok((source.text, source.source_map.unwrap()))
@ -209,7 +225,7 @@ impl Emitter {
pub async fn load_and_emit_for_hmr( pub async fn load_and_emit_for_hmr(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Result<String, AnyError> { ) -> Result<String, CoreError> {
let media_type = MediaType::from_specifier(specifier); let media_type = MediaType::from_specifier(specifier);
let source_code = tokio::fs::read_to_string( let source_code = tokio::fs::read_to_string(
ModuleSpecifier::to_file_path(specifier).unwrap(), ModuleSpecifier::to_file_path(specifier).unwrap(),
@ -224,25 +240,34 @@ impl Emitter {
let source_arc: Arc<str> = source_code.into(); let source_arc: Arc<str> = source_code.into();
let parsed_source = self let parsed_source = self
.parsed_source_cache .parsed_source_cache
.remove_or_parse_module(specifier, source_arc, media_type)?; .remove_or_parse_module(specifier, source_arc, media_type)
.map_err(JsErrorBox::from_err)?;
// HMR doesn't work with embedded source maps for some reason, so set // HMR doesn't work with embedded source maps for some reason, so set
// the option to not use them (though you should test this out because // the option to not use them (though you should test this out because
// this statement is probably wrong) // this statement is probably wrong)
let mut options = self.transpile_and_emit_options.1.clone(); let transpile_and_emit_options = self
.tsconfig_resolver
.transpile_and_emit_options(specifier)
.map_err(JsErrorBox::from_err)?;
let mut options = transpile_and_emit_options.emit.clone();
options.source_map = SourceMapOption::None; options.source_map = SourceMapOption::None;
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( let is_cjs = self
specifier, .cjs_tracker
media_type, .is_cjs_with_known_is_script(
parsed_source.compute_is_script(), specifier,
)?; media_type,
parsed_source.compute_is_script(),
)
.map_err(JsErrorBox::from_err)?;
let transpiled_source = parsed_source let transpiled_source = parsed_source
.transpile( .transpile(
&self.transpile_and_emit_options.0, &transpile_and_emit_options.transpile,
&deno_ast::TranspileModuleOptions { &deno_ast::TranspileModuleOptions {
module_kind: Some(ModuleKind::from_is_cjs(is_cjs)), module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
}, },
&options, &options,
)? )
.map_err(JsErrorBox::from_err)?
.into_source(); .into_source();
Ok(transpiled_source.text) Ok(transpiled_source.text)
} }
@ -267,10 +292,15 @@ impl Emitter {
/// A hashing function that takes the source code and uses the global emit /// A hashing function that takes the source code and uses the global emit
/// options then generates a string hash which can be stored to /// options then generates a string hash which can be stored to
/// determine if the cached emit is valid or not. /// determine if the cached emit is valid or not.
fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 { fn get_source_hash(
&self,
module_kind: ModuleKind,
transpile_and_emit: &TranspileAndEmitOptions,
source_text: &str,
) -> u64 {
FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash
.write_str(source_text) .write_str(source_text)
.write_u64(self.transpile_and_emit_options_hash) .write_u64(transpile_and_emit.pre_computed_hash)
.write_hashable(module_kind) .write_hashable(module_kind)
.finish() .finish()
} }
@ -281,6 +311,24 @@ enum PreEmitResult {
NotCached { source_hash: u64 }, NotCached { source_hash: u64 },
} }
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum EmitParsedSourceHelperError {
#[class(inherit)]
#[error(transparent)]
CompilerOptionsParse(
#[from] deno_config::deno_json::CompilerOptionsParseError,
),
#[class(inherit)]
#[error(transparent)]
ParseDiagnostic(#[from] deno_ast::ParseDiagnostic),
#[class(inherit)]
#[error(transparent)]
Transpile(#[from] deno_ast::TranspileError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
/// Helper to share code between async and sync emit_parsed_source methods. /// Helper to share code between async and sync emit_parsed_source methods.
struct EmitParsedSourceHelper<'a>(&'a Emitter); struct EmitParsedSourceHelper<'a>(&'a Emitter);
@ -289,9 +337,13 @@ impl<'a> EmitParsedSourceHelper<'a> {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind, module_kind: deno_ast::ModuleKind,
transpile_and_emit_options: &TranspileAndEmitOptions,
source: &Arc<str>, source: &Arc<str>,
) -> PreEmitResult { ) -> PreEmitResult {
let source_hash = self.0.get_source_hash(module_kind, source); let source_hash =
self
.0
.get_source_hash(module_kind, transpile_and_emit_options, source);
if let Some(emit_code) = if let Some(emit_code) =
self.0.emit_cache.get_emit_code(specifier, source_hash) self.0.emit_cache.get_emit_code(specifier, source_hash)
@ -310,7 +362,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
source: Arc<str>, source: Arc<str>,
transpile_options: &deno_ast::TranspileOptions, transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions, emit_options: &deno_ast::EmitOptions,
) -> Result<EmittedSourceText, AnyError> { ) -> Result<EmittedSourceText, EmitParsedSourceHelperError> {
// nothing else needs the parsed source at this point, so remove from // nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned // the cache in order to not transpile owned
let parsed_source = parsed_source_cache let parsed_source = parsed_source_cache
@ -350,7 +402,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
// todo(dsherret): this is a temporary measure until we have swc erroring for this // todo(dsherret): this is a temporary measure until we have swc erroring for this
fn ensure_no_import_assertion( fn ensure_no_import_assertion(
parsed_source: &deno_ast::ParsedSource, parsed_source: &deno_ast::ParsedSource,
) -> Result<(), AnyError> { ) -> Result<(), JsErrorBox> {
fn has_import_assertion(text: &str) -> bool { fn has_import_assertion(text: &str) -> bool {
// good enough // good enough
text.contains(" assert ") && !text.contains(" with ") text.contains(" assert ") && !text.contains(" with ")
@ -359,7 +411,7 @@ fn ensure_no_import_assertion(
fn create_err( fn create_err(
parsed_source: &deno_ast::ParsedSource, parsed_source: &deno_ast::ParsedSource,
range: SourceRange, range: SourceRange,
) -> AnyError { ) -> JsErrorBox {
let text_info = parsed_source.text_info_lazy(); let text_info = parsed_source.text_info_lazy();
let loc = text_info.line_and_column_display(range.start); let loc = text_info.line_and_column_display(range.start);
let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string(); let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string();
@ -372,7 +424,7 @@ fn ensure_no_import_assertion(
loc.line_number, loc.line_number,
loc.column_number, loc.column_number,
)); ));
deno_core::anyhow::anyhow!("{}", msg) JsErrorBox::generic(msg)
} }
let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else { let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else {
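
Because transpile/emit options are now resolved per specifier through TsConfigResolver, the cached-emit lookup becomes fallible and emit errors are typed. A hedged sketch of the new flow (not from the diff; emitter, specifier, media_type, module_kind, source, and source_arc are illustrative):

    // maybe_cached_emit now returns Result since resolving compiler options can fail
    if let Some(code) = emitter.maybe_cached_emit(&specifier, module_kind, &source)? {
      return Ok(code);
    }
    let code = emitter
      .emit_parsed_source(&specifier, media_type, module_kind, &source_arc)
      .await?;   // error type is EmitParsedSourceHelperError rather than AnyError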

View file

@ -1,119 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
//! There are many types of errors in Deno:
//! - AnyError: a generic wrapper that can encapsulate any type of error.
//! - JsError: a container for the error message and stack trace for exceptions
//! thrown in JavaScript code. We use this to pretty-print stack traces.
//! - Diagnostic: these are errors that originate in TypeScript's compiler.
//! They're similar to JsError, in that they have line numbers. But
//! Diagnostics are compile-time type errors, whereas JsErrors are runtime
//! exceptions.
use deno_ast::ParseDiagnostic;
use deno_core::error::AnyError;
use deno_graph::source::ResolveError;
use deno_graph::ModuleError;
use deno_graph::ModuleGraphError;
use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError;
use import_map::ImportMapError;
fn get_import_map_error_class(_: &ImportMapError) -> &'static str {
"URIError"
}
fn get_diagnostic_class(_: &ParseDiagnostic) -> &'static str {
"SyntaxError"
}
fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
use deno_graph::JsrLoadError;
use deno_graph::NpmLoadError;
match err {
ModuleGraphError::ResolutionError(err)
| ModuleGraphError::TypesResolutionError(err) => {
get_resolution_error_class(err)
}
ModuleGraphError::ModuleError(err) => match err {
ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
ModuleError::WasmParseErr(..) => "SyntaxError",
ModuleError::UnsupportedMediaType { .. }
| ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
"NotFound"
}
ModuleError::LoadingErr(_, _, err) => match err {
ModuleLoadError::Loader(err) => get_error_class_name(err.as_ref()),
ModuleLoadError::HttpsChecksumIntegrity(_)
| ModuleLoadError::TooManyRedirects => "Error",
ModuleLoadError::NodeUnknownBuiltinModule(_) => "NotFound",
ModuleLoadError::Decode(_) => "TypeError",
ModuleLoadError::Npm(err) => match err {
NpmLoadError::NotSupportedEnvironment
| NpmLoadError::PackageReqResolution(_)
| NpmLoadError::RegistryInfo(_) => "Error",
NpmLoadError::PackageReqReferenceParse(_) => "TypeError",
},
ModuleLoadError::Jsr(err) => match err {
JsrLoadError::UnsupportedManifestChecksum
| JsrLoadError::PackageFormat(_) => "TypeError",
JsrLoadError::ContentLoadExternalSpecifier
| JsrLoadError::ContentLoad(_)
| JsrLoadError::ContentChecksumIntegrity(_)
| JsrLoadError::PackageManifestLoad(_, _)
| JsrLoadError::PackageVersionManifestChecksumIntegrity(..)
| JsrLoadError::PackageVersionManifestLoad(_, _)
| JsrLoadError::RedirectInPackage(_) => "Error",
JsrLoadError::PackageNotFound(_)
| JsrLoadError::PackageReqNotFound(_)
| JsrLoadError::PackageVersionNotFound(_)
| JsrLoadError::UnknownExport { .. } => "NotFound",
},
},
},
}
}
fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
match err {
ResolutionError::ResolverError { error, .. } => {
use ResolveError::*;
match error.as_ref() {
Specifier(_) => "TypeError",
Other(e) => get_error_class_name(e),
}
}
_ => "TypeError",
}
}
fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
"TypeError"
}
pub fn get_error_class_name(e: &AnyError) -> &'static str {
deno_runtime::errors::get_error_class_name(e)
.or_else(|| {
e.downcast_ref::<ImportMapError>()
.map(get_import_map_error_class)
})
.or_else(|| {
e.downcast_ref::<ParseDiagnostic>()
.map(get_diagnostic_class)
})
.or_else(|| {
e.downcast_ref::<ModuleGraphError>()
.map(get_module_graph_error_class)
})
.or_else(|| {
e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class)
})
.or_else(|| {
e.downcast_ref::<std::num::TryFromIntError>()
.map(get_try_from_int_error_class)
})
.unwrap_or("Error")
}

File diff suppressed because it is too large.

View file

@ -1,11 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use crate::cache::HttpCache; use std::borrow::Cow;
use crate::cache::RealDenoCacheEnv; use std::collections::HashMap;
use crate::colors; use std::sync::Arc;
use crate::http_util::get_response_body_with_progress;
use crate::http_util::HttpClientProvider;
use crate::util::progress_bar::ProgressBar;
use boxed_error::Boxed; use boxed_error::Boxed;
use deno_ast::MediaType; use deno_ast::MediaType;
@ -27,7 +24,6 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_error::JsError; use deno_error::JsError;
use deno_graph::source::LoaderChecksum; use deno_graph::source::LoaderChecksum;
use deno_runtime::deno_permissions::CheckSpecifierKind; use deno_runtime::deno_permissions::CheckSpecifierKind;
use deno_runtime::deno_permissions::PermissionCheckError; use deno_runtime::deno_permissions::PermissionCheckError;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
@ -35,12 +31,15 @@ use deno_runtime::deno_web::BlobStore;
use http::header; use http::header;
use http::HeaderMap; use http::HeaderMap;
use http::StatusCode; use http::StatusCode;
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::sync::Arc;
use thiserror::Error; use thiserror::Error;
use crate::cache::HttpCache;
use crate::colors;
use crate::http_util::get_response_body_with_progress;
use crate::http_util::HttpClientProvider;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub struct TextDecodedFile { pub struct TextDecodedFile {
pub media_type: MediaType, pub media_type: MediaType,
@ -61,11 +60,10 @@ impl TextDecodedFile {
file.maybe_headers.as_ref(), file.maybe_headers.as_ref(),
); );
let specifier = file.url; let specifier = file.url;
match deno_graph::source::decode_source( let charset = maybe_charset.unwrap_or_else(|| {
&specifier, deno_media_type::encoding::detect_charset(&specifier, &file.source)
file.source, });
maybe_charset, match deno_media_type::encoding::decode_arc_source(charset, file.source) {
) {
Ok(source) => Ok(TextDecodedFile { Ok(source) => Ok(TextDecodedFile {
media_type, media_type,
specifier, specifier,
@ -268,7 +266,7 @@ pub struct FetchNoFollowOptions<'a> {
type DenoCacheDirFileFetcher = deno_cache_dir::file_fetcher::FileFetcher< type DenoCacheDirFileFetcher = deno_cache_dir::file_fetcher::FileFetcher<
BlobStoreAdapter, BlobStoreAdapter,
RealDenoCacheEnv, CliSys,
HttpClientAdapter, HttpClientAdapter,
>; >;
@ -280,9 +278,11 @@ pub struct CliFileFetcher {
} }
impl CliFileFetcher { impl CliFileFetcher {
#[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
http_cache: Arc<dyn HttpCache>, http_cache: Arc<dyn HttpCache>,
http_client_provider: Arc<HttpClientProvider>, http_client_provider: Arc<HttpClientProvider>,
sys: CliSys,
blob_store: Arc<BlobStore>, blob_store: Arc<BlobStore>,
progress_bar: Option<ProgressBar>, progress_bar: Option<ProgressBar>,
allow_remote: bool, allow_remote: bool,
@ -290,9 +290,10 @@ impl CliFileFetcher {
download_log_level: log::Level, download_log_level: log::Level,
) -> Self { ) -> Self {
let memory_files = Arc::new(MemoryFiles::default()); let memory_files = Arc::new(MemoryFiles::default());
let auth_tokens = AuthTokens::new_from_sys(&sys);
let file_fetcher = DenoCacheDirFileFetcher::new( let file_fetcher = DenoCacheDirFileFetcher::new(
BlobStoreAdapter(blob_store), BlobStoreAdapter(blob_store),
RealDenoCacheEnv, sys,
http_cache, http_cache,
HttpClientAdapter { HttpClientAdapter {
http_client_provider: http_client_provider.clone(), http_client_provider: http_client_provider.clone(),
@ -303,7 +304,7 @@ impl CliFileFetcher {
FileFetcherOptions { FileFetcherOptions {
allow_remote, allow_remote,
cache_setting, cache_setting,
auth_tokens: AuthTokens::new(env::var("DENO_AUTH_TOKENS").ok()), auth_tokens,
}, },
); );
Self { Self {
@ -496,11 +497,6 @@ fn validate_scheme(specifier: &Url) -> Result<(), UnsupportedSchemeError> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::cache::GlobalHttpCache;
use crate::cache::RealDenoCacheEnv;
use crate::http_util::HttpClientProvider;
use super::*;
use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind; use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind;
use deno_cache_dir::file_fetcher::HttpClient; use deno_cache_dir::file_fetcher::HttpClient;
use deno_core::resolve_url; use deno_core::resolve_url;
@ -508,6 +504,10 @@ mod tests {
use deno_runtime::deno_web::InMemoryBlobPart; use deno_runtime::deno_web::InMemoryBlobPart;
use test_util::TempDir; use test_util::TempDir;
use super::*;
use crate::cache::GlobalHttpCache;
use crate::http_util::HttpClientProvider;
fn setup( fn setup(
cache_setting: CacheSetting, cache_setting: CacheSetting,
maybe_temp_dir: Option<TempDir>, maybe_temp_dir: Option<TempDir>,
@ -538,10 +538,11 @@ mod tests {
let temp_dir = maybe_temp_dir.unwrap_or_default(); let temp_dir = maybe_temp_dir.unwrap_or_default();
let location = temp_dir.path().join("remote").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let blob_store: Arc<BlobStore> = Default::default(); let blob_store: Arc<BlobStore> = Default::default();
let cache = Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)); let cache = Arc::new(GlobalHttpCache::new(CliSys::default(), location));
let file_fetcher = CliFileFetcher::new( let file_fetcher = CliFileFetcher::new(
cache.clone(), cache.clone(),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
blob_store.clone(), blob_store.clone(),
None, None,
true, true,
@ -584,7 +585,7 @@ mod tests {
// in deno_graph // in deno_graph
async fn test_fetch_remote_encoded( async fn test_fetch_remote_encoded(
fixture: &str, fixture: &str,
charset: &str, expected_charset: &str,
expected: &str, expected: &str,
) { ) {
let url_str = format!("http://127.0.0.1:4545/encoding/{fixture}"); let url_str = format!("http://127.0.0.1:4545/encoding/{fixture}");
@ -596,15 +597,20 @@ mod tests {
Some(&headers), Some(&headers),
); );
assert_eq!( assert_eq!(
deno_graph::source::decode_source(&specifier, file.source, maybe_charset) deno_media_type::encoding::decode_arc_source(
.unwrap() maybe_charset.unwrap_or_else(|| {
.as_ref(), deno_media_type::encoding::detect_charset(&specifier, &file.source)
}),
file.source
)
.unwrap()
.as_ref(),
expected expected
); );
assert_eq!(media_type, MediaType::TypeScript); assert_eq!(media_type, MediaType::TypeScript);
assert_eq!( assert_eq!(
headers.get("content-type").unwrap(), headers.get("content-type").unwrap(),
&format!("application/typescript;charset={charset}") &format!("application/typescript;charset={expected_charset}")
); );
} }
@ -613,9 +619,12 @@ mod tests {
let specifier = ModuleSpecifier::from_file_path(p).unwrap(); let specifier = ModuleSpecifier::from_file_path(p).unwrap();
let (file, _) = test_fetch(&specifier).await; let (file, _) = test_fetch(&specifier).await;
assert_eq!( assert_eq!(
deno_graph::source::decode_source(&specifier, file.source, None) deno_media_type::encoding::decode_arc_source(
.unwrap() deno_media_type::encoding::detect_charset(&specifier, &file.source),
.as_ref(), file.source
)
.unwrap()
.as_ref(),
expected expected
); );
} }
@ -751,11 +760,9 @@ mod tests {
// invocation and indicates to "cache bust". // invocation and indicates to "cache bust".
let location = temp_dir.path().join("remote").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = CliFileFetcher::new( let file_fetcher = CliFileFetcher::new(
Arc::new(GlobalHttpCache::new( Arc::new(GlobalHttpCache::new(CliSys::default(), location)),
location,
crate::cache::RealDenoCacheEnv,
)),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
true, true,
@ -780,14 +787,13 @@ mod tests {
let specifier = let specifier =
resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap(); resolve_url("http://localhost:4545/subdir/mismatch_ext.ts").unwrap();
let http_cache = Arc::new(GlobalHttpCache::new( let http_cache =
location.clone(), Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone()));
crate::cache::RealDenoCacheEnv,
));
let file_modified_01 = { let file_modified_01 = {
let file_fetcher = CliFileFetcher::new( let file_fetcher = CliFileFetcher::new(
http_cache.clone(), http_cache.clone(),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
true, true,
@ -807,11 +813,9 @@ mod tests {
let file_modified_02 = { let file_modified_02 = {
let file_fetcher = CliFileFetcher::new( let file_fetcher = CliFileFetcher::new(
Arc::new(GlobalHttpCache::new( Arc::new(GlobalHttpCache::new(CliSys::default(), location)),
location,
crate::cache::RealDenoCacheEnv,
)),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
true, true,
@ -937,15 +941,14 @@ mod tests {
resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap(); resolve_url("http://localhost:4548/subdir/mismatch_ext.ts").unwrap();
let redirected_specifier = let redirected_specifier =
resolve_url("http://localhost:4546/subdir/mismatch_ext.ts").unwrap(); resolve_url("http://localhost:4546/subdir/mismatch_ext.ts").unwrap();
let http_cache = Arc::new(GlobalHttpCache::new( let http_cache =
location.clone(), Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone()));
crate::cache::RealDenoCacheEnv,
));
let metadata_file_modified_01 = { let metadata_file_modified_01 = {
let file_fetcher = CliFileFetcher::new( let file_fetcher = CliFileFetcher::new(
http_cache.clone(), http_cache.clone(),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
true, true,
@ -968,6 +971,7 @@ mod tests {
let file_fetcher = CliFileFetcher::new( let file_fetcher = CliFileFetcher::new(
http_cache.clone(), http_cache.clone(),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
true, true,
@ -1072,11 +1076,9 @@ mod tests {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("remote").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher = CliFileFetcher::new( let file_fetcher = CliFileFetcher::new(
Arc::new(GlobalHttpCache::new( Arc::new(GlobalHttpCache::new(CliSys::default(), location)),
location,
crate::cache::RealDenoCacheEnv,
)),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
false, false,
@ -1110,8 +1112,9 @@ mod tests {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
let location = temp_dir.path().join("remote").to_path_buf(); let location = temp_dir.path().join("remote").to_path_buf();
let file_fetcher_01 = CliFileFetcher::new( let file_fetcher_01 = CliFileFetcher::new(
Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)), Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone())),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
true, true,
@ -1119,8 +1122,9 @@ mod tests {
log::Level::Info, log::Level::Info,
); );
let file_fetcher_02 = CliFileFetcher::new( let file_fetcher_02 = CliFileFetcher::new(
Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)), Arc::new(GlobalHttpCache::new(CliSys::default(), location)),
Arc::new(HttpClientProvider::new(None, None)), Arc::new(HttpClientProvider::new(None, None)),
CliSys::default(),
Default::default(), Default::default(),
None, None,
true, true,
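
CliFileFetcher::new now receives the CliSys handle and derives auth tokens from it via AuthTokens::new_from_sys instead of reading DENO_AUTH_TOKENS itself. A sketch of the updated construction, mirroring the test setup above (not part of the diff; location and cache_setting are illustrative placeholders):

    let http_cache = Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone()));
    let file_fetcher = CliFileFetcher::new(
      http_cache,
      Arc::new(HttpClientProvider::new(None, None)),
      CliSys::default(),
      Default::default(),   // Arc<BlobStore>
      None,                 // Option<ProgressBar>
      true,                 // allow_remote
      cache_setting,        // any CacheSetting value
      log::Level::Info,
    );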

View file

@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc; use std::sync::Arc;

View file

@ -1,6 +1,49 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::error::Error;
use std::path::PathBuf;
use std::sync::Arc;
use deno_config::deno_json;
use deno_config::deno_json::CompilerOptionTypesDeserializeError;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::JsrPackageConfig;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;
use deno_graph::source::Loader;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::ResolutionKind;
use deno_graph::source::ResolveError;
use deno_graph::CheckJsOption;
use deno_graph::FillFromLockfileOptions;
use deno_graph::GraphKind;
use deno_graph::JsrLoadError;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_graph::WorkspaceFastCheckOption;
use deno_path_util::url_to_file_path;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::SmallStackString;
use crate::args::config_to_deno_graph_workspace_member; use crate::args::config_to_deno_graph_workspace_member;
use crate::args::deno_json::TsConfigResolver;
use crate::args::jsr_url; use crate::args::jsr_url;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::CliOptions; use crate::args::CliOptions;
@ -12,57 +55,24 @@ use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache; use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use crate::colors; use crate::colors;
use crate::errors::get_error_class_name;
use crate::file_fetcher::CliFileFetcher; use crate::file_fetcher::CliFileFetcher;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker; use crate::resolver::CliCjsTracker;
use crate::resolver::CliNpmGraphResolver;
use crate::resolver::CliResolver; use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs; use crate::sys::CliSys;
use crate::tools::check; use crate::tools::check;
use crate::tools::check::CheckError;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path; use crate::util::fs::canonicalize_path;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::ResolutionKind;
use deno_graph::FillFromLockfileOptions;
use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError;
use deno_graph::WorkspaceFastCheckOption;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier;
use deno_graph::source::Loader;
use deno_graph::source::ResolveError;
use deno_graph::GraphKind;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_path_util::url_to_file_path;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use import_map::ImportMapError;
use node_resolver::InNpmPackageChecker;
use std::collections::HashSet;
use std::error::Error;
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
#[derive(Clone)] #[derive(Clone)]
pub struct GraphValidOptions { pub struct GraphValidOptions<'a> {
pub check_js: bool, pub check_js: CheckJsOption<'a>,
pub kind: GraphKind, pub kind: GraphKind,
/// Whether to exit the process for integrity check errors such as /// Whether to exit the process for integrity check errors such as
/// lockfile checksum mismatches and JSR integrity failures. /// lockfile checksum mismatches and JSR integrity failures.
@ -79,17 +89,17 @@ pub struct GraphValidOptions {
/// for the CLI. /// for the CLI.
pub fn graph_valid( pub fn graph_valid(
graph: &ModuleGraph, graph: &ModuleGraph,
fs: &Arc<dyn FileSystem>, sys: &CliSys,
roots: &[ModuleSpecifier], roots: &[ModuleSpecifier],
options: GraphValidOptions, options: GraphValidOptions,
) -> Result<(), AnyError> { ) -> Result<(), JsErrorBox> {
if options.exit_integrity_errors { if options.exit_integrity_errors {
graph_exit_integrity_errors(graph); graph_exit_integrity_errors(graph);
} }
let mut errors = graph_walk_errors( let mut errors = graph_walk_errors(
graph, graph,
fs, sys,
roots, roots,
GraphWalkErrorsOptions { GraphWalkErrorsOptions {
check_js: options.check_js, check_js: options.check_js,
@ -101,9 +111,9 @@ pub fn graph_valid(
} else { } else {
// finally surface the npm resolution result // finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result { if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error( return Err(JsErrorBox::new(
get_error_class_name(err), err.get_class(),
format_deno_graph_error(err.as_ref().deref()), format_deno_graph_error(err),
)); ));
} }
Ok(()) Ok(())
@ -130,8 +140,8 @@ pub fn fill_graph_from_lockfile(
} }
#[derive(Clone)] #[derive(Clone)]
pub struct GraphWalkErrorsOptions { pub struct GraphWalkErrorsOptions<'a> {
pub check_js: bool, pub check_js: CheckJsOption<'a>,
pub kind: GraphKind, pub kind: GraphKind,
} }
@ -139,10 +149,10 @@ pub struct GraphWalkErrorsOptions {
/// and enhances them with CLI information. /// and enhances them with CLI information.
pub fn graph_walk_errors<'a>( pub fn graph_walk_errors<'a>(
graph: &'a ModuleGraph, graph: &'a ModuleGraph,
fs: &'a Arc<dyn FileSystem>, sys: &'a CliSys,
roots: &'a [ModuleSpecifier], roots: &'a [ModuleSpecifier],
options: GraphWalkErrorsOptions, options: GraphWalkErrorsOptions<'a>,
) -> impl Iterator<Item = AnyError> + 'a { ) -> impl Iterator<Item = JsErrorBox> + 'a {
graph graph
.walk( .walk(
roots.iter(), roots.iter(),
@ -162,29 +172,15 @@ pub fn graph_walk_errors<'a>(
roots.contains(error.specifier()) roots.contains(error.specifier())
} }
}; };
let mut message = match &error { let message = enhance_graph_error(
ModuleGraphError::ResolutionError(resolution_error) => { sys,
enhanced_resolution_error_message(resolution_error) &error,
} if is_root {
ModuleGraphError::TypesResolutionError(resolution_error) => { EnhanceGraphErrorMode::HideRange
format!( } else {
"Failed resolving types. {}", EnhanceGraphErrorMode::ShowRange
enhanced_resolution_error_message(resolution_error) },
) );
}
ModuleGraphError::ModuleError(error) => {
enhanced_integrity_error_message(error)
.or_else(|| enhanced_sloppy_imports_error_message(fs, error))
.unwrap_or_else(|| format_deno_graph_error(error))
}
};
if let Some(range) = error.maybe_range() {
if !is_root && !range.specifier.as_str().contains("/$deno$eval") {
message.push_str("\n at ");
message.push_str(&format_range_with_colors(range));
}
}
if graph.graph_kind() == GraphKind::TypesOnly if graph.graph_kind() == GraphKind::TypesOnly
&& matches!( && matches!(
@ -196,10 +192,61 @@ pub fn graph_walk_errors<'a>(
return None; return None;
} }
Some(custom_error(get_error_class_name(&error.into()), message)) if graph.graph_kind().include_types()
&& (message.contains(RUN_WITH_SLOPPY_IMPORTS_MSG)
|| matches!(
error,
ModuleGraphError::ModuleError(ModuleError::Missing(..))
))
{
// ignore and let typescript surface this as a diagnostic instead
log::debug!("Ignoring: {}", message);
return None;
}
Some(JsErrorBox::new(error.get_class(), message))
}) })
} }
#[derive(Debug, PartialEq, Eq)]
pub enum EnhanceGraphErrorMode {
ShowRange,
HideRange,
}
pub fn enhance_graph_error(
sys: &CliSys,
error: &ModuleGraphError,
mode: EnhanceGraphErrorMode,
) -> String {
let mut message = match &error {
ModuleGraphError::ResolutionError(resolution_error) => {
enhanced_resolution_error_message(resolution_error)
}
ModuleGraphError::TypesResolutionError(resolution_error) => {
format!(
"Failed resolving types. {}",
enhanced_resolution_error_message(resolution_error)
)
}
ModuleGraphError::ModuleError(error) => {
enhanced_integrity_error_message(error)
.or_else(|| enhanced_sloppy_imports_error_message(sys, error))
.unwrap_or_else(|| format_deno_graph_error(error))
}
};
if let Some(range) = error.maybe_range() {
if mode == EnhanceGraphErrorMode::ShowRange
&& !range.specifier.as_str().contains("/$deno$eval")
{
message.push_str("\n at ");
message.push_str(&format_range_with_colors(range));
}
}
message
}
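
// Note (illustrative, not part of this commit): a minimal sketch of how the
// extracted helper is meant to be driven. It mirrors the call in
// graph_walk_errors above; `sys` and `error` are assumed to come from the
// surrounding CLI code.
fn graph_error_to_log_message(
  sys: &CliSys,
  error: &ModuleGraphError,
  is_root: bool,
) -> String {
  // Hide the range for root modules, show it otherwise.
  let mode = if is_root {
    EnhanceGraphErrorMode::HideRange
  } else {
    EnhanceGraphErrorMode::ShowRange
  };
  enhance_graph_error(sys, error, mode)
}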
pub fn graph_exit_integrity_errors(graph: &ModuleGraph) { pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
for error in graph.module_errors() { for error in graph.module_errors() {
exit_for_integrity_error(error); exit_for_integrity_error(error);
@ -224,7 +271,7 @@ pub struct CreateGraphOptions<'a> {
pub struct ModuleGraphCreator { pub struct ModuleGraphCreator {
options: Arc<CliOptions>, options: Arc<CliOptions>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_installer: Option<Arc<NpmInstaller>>,
module_graph_builder: Arc<ModuleGraphBuilder>, module_graph_builder: Arc<ModuleGraphBuilder>,
type_checker: Arc<TypeChecker>, type_checker: Arc<TypeChecker>,
} }
@ -232,13 +279,13 @@ pub struct ModuleGraphCreator {
impl ModuleGraphCreator { impl ModuleGraphCreator {
pub fn new( pub fn new(
options: Arc<CliOptions>, options: Arc<CliOptions>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_installer: Option<Arc<NpmInstaller>>,
module_graph_builder: Arc<ModuleGraphBuilder>, module_graph_builder: Arc<ModuleGraphBuilder>,
type_checker: Arc<TypeChecker>, type_checker: Arc<TypeChecker>,
) -> Self { ) -> Self {
Self { Self {
options, options,
npm_resolver, npm_installer,
module_graph_builder, module_graph_builder,
type_checker, type_checker,
} }
@ -361,9 +408,9 @@ impl ModuleGraphCreator {
.build_graph_with_npm_resolution(&mut graph, options) .build_graph_with_npm_resolution(&mut graph, options)
.await?; .await?;
if let Some(npm_resolver) = self.npm_resolver.as_managed() { if let Some(npm_installer) = &self.npm_installer {
if graph.has_node_specifier && self.options.type_check_mode().is_true() { if graph.has_node_specifier && self.options.type_check_mode().is_true() {
npm_resolver.inject_synthetic_types_node_package().await?; npm_installer.inject_synthetic_types_node_package().await?;
} }
} }
@ -397,14 +444,14 @@ impl ModuleGraphCreator {
} }
} }
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> { pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> {
self.module_graph_builder.graph_valid(graph) self.module_graph_builder.graph_valid(graph)
} }
async fn type_check_graph( async fn type_check_graph(
&self, &self,
graph: ModuleGraph, graph: ModuleGraph,
) -> Result<Arc<ModuleGraph>, AnyError> { ) -> Result<Arc<ModuleGraph>, CheckError> {
self self
.type_checker .type_checker
.check( .check(
@ -412,7 +459,6 @@ impl ModuleGraphCreator {
check::CheckOptions { check::CheckOptions {
build_fast_check_graph: true, build_fast_check_graph: true,
lib: self.options.ts_type_lib_window(), lib: self.options.ts_type_lib_window(),
log_ignored_options: true,
reload: self.options.reload_flag(), reload: self.options.reload_flag(),
type_check_mode: self.options.type_check_mode(), type_check_mode: self.options.type_check_mode(),
}, },
@ -427,56 +473,89 @@ pub struct BuildFastCheckGraphOptions<'a> {
pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>, pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>,
} }
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum BuildGraphWithNpmResolutionError {
#[class(inherit)]
#[error(transparent)]
CompilerOptionTypesDeserialize(#[from] CompilerOptionTypesDeserializeError),
#[class(inherit)]
#[error(transparent)]
SerdeJson(#[from] serde_json::Error),
#[class(inherit)]
#[error(transparent)]
ToMaybeJsxImportSourceConfig(
#[from] deno_json::ToMaybeJsxImportSourceConfigError,
),
#[class(inherit)]
#[error(transparent)]
NodeModulesDirParse(#[from] deno_json::NodeModulesDirParseError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
#[class(generic)]
#[error("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead")]
UnsupportedNpmSpecifierEntrypointResolutionWay,
}
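
// Note (illustrative, not part of this commit): a small sketch of what the
// move away from AnyError buys callers; the byonm entrypoint failure can be
// matched structurally instead of by string. The `explain` helper is
// hypothetical.
fn explain(err: &BuildGraphWithNpmResolutionError) -> String {
  match err {
    BuildGraphWithNpmResolutionError::UnsupportedNpmSpecifierEntrypointResolutionWay => {
      "re-run with --node-modules-dir=auto".to_string()
    }
    other => other.to_string(),
  }
}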
pub struct ModuleGraphBuilder { pub struct ModuleGraphBuilder {
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>, cjs_tracker: Arc<CliCjsTracker>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
file_fetcher: Arc<CliFileFetcher>, file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, in_npm_pkg_checker: DenoInNpmPackageChecker,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: CliNpmResolver,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>, resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer, root_permissions_container: PermissionsContainer,
sys: CliSys,
tsconfig_resolver: Arc<TsConfigResolver>,
} }
impl ModuleGraphBuilder { impl ModuleGraphBuilder {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>, cjs_tracker: Arc<CliCjsTracker>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
file_fetcher: Arc<CliFileFetcher>, file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, in_npm_pkg_checker: DenoInNpmPackageChecker,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: CliNpmResolver,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>, resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer, root_permissions_container: PermissionsContainer,
sys: CliSys,
tsconfig_resolver: Arc<TsConfigResolver>,
) -> Self { ) -> Self {
Self { Self {
caches, caches,
cjs_tracker, cjs_tracker,
cli_options, cli_options,
file_fetcher, file_fetcher,
fs,
global_http_cache, global_http_cache,
in_npm_pkg_checker, in_npm_pkg_checker,
lockfile, lockfile,
maybe_file_watcher_reporter, maybe_file_watcher_reporter,
module_info_cache, module_info_cache,
npm_graph_resolver,
npm_installer,
npm_resolver, npm_resolver,
parsed_source_cache, parsed_source_cache,
resolver, resolver,
root_permissions_container, root_permissions_container,
sys,
tsconfig_resolver,
} }
} }
@ -484,7 +563,7 @@ impl ModuleGraphBuilder {
&self, &self,
graph: &mut ModuleGraph, graph: &mut ModuleGraph,
options: CreateGraphOptions<'a>, options: CreateGraphOptions<'a>,
) -> Result<(), AnyError> { ) -> Result<(), BuildGraphWithNpmResolutionError> {
enum MutLoaderRef<'a> { enum MutLoaderRef<'a> {
Borrowed(&'a mut dyn Loader), Borrowed(&'a mut dyn Loader),
Owned(cache::FetchCacher), Owned(cache::FetchCacher),
@ -561,7 +640,16 @@ impl ModuleGraphBuilder {
} }
let maybe_imports = if options.graph_kind.include_types() { let maybe_imports = if options.graph_kind.include_types() {
self.cli_options.to_compiler_option_types()? // Resolve all the imports from every deno.json. We'll separate
// them later based on the folder we're type checking.
let mut imports = Vec::new();
for deno_json in self.cli_options.workspace().deno_jsons() {
let maybe_imports = deno_json.to_compiler_option_types()?;
imports.extend(maybe_imports.into_iter().map(|(referrer, imports)| {
deno_graph::ReferrerImports { referrer, imports }
}));
}
imports
} else { } else {
Vec::new() Vec::new()
}; };
@ -570,10 +658,7 @@ impl ModuleGraphBuilder {
Some(loader) => MutLoaderRef::Borrowed(loader), Some(loader) => MutLoaderRef::Borrowed(loader),
None => MutLoaderRef::Owned(self.create_graph_loader()), None => MutLoaderRef::Owned(self.create_graph_loader()),
}; };
let cli_resolver = &self.resolver;
let graph_resolver = self.create_graph_resolver()?; let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver =
cli_resolver.create_graph_npm_resolver(options.npm_caching);
let maybe_file_watcher_reporter = self let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter .maybe_file_watcher_reporter
.as_ref() .as_ref()
@ -592,9 +677,9 @@ impl ModuleGraphBuilder {
is_dynamic: options.is_dynamic, is_dynamic: options.is_dynamic,
passthrough_jsr_specifiers: false, passthrough_jsr_specifiers: false,
executor: Default::default(), executor: Default::default(),
file_system: &DenoGraphFsAdapter(self.fs.as_ref()), file_system: &self.sys,
jsr_url_provider: &CliJsrUrlProvider, jsr_url_provider: &CliJsrUrlProvider,
npm_resolver: Some(&graph_npm_resolver), npm_resolver: Some(self.npm_graph_resolver.as_ref()),
module_analyzer: &analyzer, module_analyzer: &analyzer,
reporter: maybe_file_watcher_reporter, reporter: maybe_file_watcher_reporter,
resolver: Some(&graph_resolver), resolver: Some(&graph_resolver),
@ -612,22 +697,21 @@ impl ModuleGraphBuilder {
loader: &'a mut dyn deno_graph::source::Loader, loader: &'a mut dyn deno_graph::source::Loader,
options: deno_graph::BuildOptions<'a>, options: deno_graph::BuildOptions<'a>,
npm_caching: NpmCachingStrategy, npm_caching: NpmCachingStrategy,
) -> Result<(), AnyError> { ) -> Result<(), BuildGraphWithNpmResolutionError> {
// ensure an "npm install" is done if the user has explicitly // ensure an "npm install" is done if the user has explicitly
// opted into using a node_modules directory // opted into using a node_modules directory
if self if self
.cli_options .cli_options
.node_modules_dir()? .node_modules_dir()?
.map(|m| m.uses_node_modules_dir()) .map(|m| m == NodeModulesDirMode::Auto)
.unwrap_or(false) .unwrap_or(false)
{ {
if let Some(npm_resolver) = self.npm_resolver.as_managed() { if let Some(npm_installer) = &self.npm_installer {
let already_done = let already_done = npm_installer
npm_resolver.ensure_top_level_package_json_install().await?; .ensure_top_level_package_json_install()
.await?;
if !already_done && matches!(npm_caching, NpmCachingStrategy::Eager) { if !already_done && matches!(npm_caching, NpmCachingStrategy::Eager) {
npm_resolver npm_installer.cache_packages(PackageCaching::All).await?;
.cache_packages(crate::npm::PackageCaching::All)
.await?;
} }
} }
} }
@ -646,10 +730,9 @@ impl ModuleGraphBuilder {
let initial_package_deps_len = graph.packages.package_deps_sum(); let initial_package_deps_len = graph.packages.package_deps_sum();
let initial_package_mappings_len = graph.packages.mappings().len(); let initial_package_mappings_len = graph.packages.mappings().len();
if roots.iter().any(|r| r.scheme() == "npm") if roots.iter().any(|r| r.scheme() == "npm") && self.npm_resolver.is_byonm()
&& self.npm_resolver.as_byonm().is_some()
{ {
bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead"); return Err(BuildGraphWithNpmResolutionError::UnsupportedNpmSpecifierEntrypointResolutionWay);
} }
graph.build(roots, loader, options).await; graph.build(roots, loader, options).await;
@ -680,7 +763,7 @@ impl ModuleGraphBuilder {
for (from, to) in graph.packages.mappings() { for (from, to) in graph.packages.mappings() {
lockfile.insert_package_specifier( lockfile.insert_package_specifier(
JsrDepPackageReq::jsr(from.clone()), JsrDepPackageReq::jsr(from.clone()),
to.version.to_string(), to.version.to_custom_string::<SmallStackString>(),
); );
} }
} }
@ -700,7 +783,7 @@ impl ModuleGraphBuilder {
&self, &self,
graph: &mut ModuleGraph, graph: &mut ModuleGraph,
options: BuildFastCheckGraphOptions, options: BuildFastCheckGraphOptions,
) -> Result<(), AnyError> { ) -> Result<(), deno_json::ToMaybeJsxImportSourceConfigError> {
if !graph.graph_kind().include_types() { if !graph.graph_kind().include_types() {
return Ok(()); return Ok(());
} }
@ -715,11 +798,7 @@ impl ModuleGraphBuilder {
None None
}; };
let parser = self.parsed_source_cache.as_capturing_parser(); let parser = self.parsed_source_cache.as_capturing_parser();
let cli_resolver = &self.resolver;
let graph_resolver = self.create_graph_resolver()?; let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(
self.cli_options.default_npm_caching_strategy(),
);
graph.build_fast_check_type_graph( graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions { deno_graph::BuildFastCheckTypeGraphOptions {
@ -728,7 +807,7 @@ impl ModuleGraphBuilder {
fast_check_dts: false, fast_check_dts: false,
jsr_url_provider: &CliJsrUrlProvider, jsr_url_provider: &CliJsrUrlProvider,
resolver: Some(&graph_resolver), resolver: Some(&graph_resolver),
npm_resolver: Some(&graph_npm_resolver), npm_resolver: Some(self.npm_graph_resolver.as_ref()),
workspace_fast_check: options.workspace_fast_check, workspace_fast_check: options.workspace_fast_check,
}, },
); );
@ -746,10 +825,10 @@ impl ModuleGraphBuilder {
) -> cache::FetchCacher { ) -> cache::FetchCacher {
cache::FetchCacher::new( cache::FetchCacher::new(
self.file_fetcher.clone(), self.file_fetcher.clone(),
self.fs.clone(),
self.global_http_cache.clone(), self.global_http_cache.clone(),
self.in_npm_pkg_checker.clone(), self.in_npm_pkg_checker.clone(),
self.module_info_cache.clone(), self.module_info_cache.clone(),
self.sys.clone(),
cache::FetchCacherOptions { cache::FetchCacherOptions {
file_header_overrides: self.cli_options.resolve_file_header_overrides(), file_header_overrides: self.cli_options.resolve_file_header_overrides(),
permissions, permissions,
@ -764,7 +843,7 @@ impl ModuleGraphBuilder {
/// Check if `roots` and their deps are available. Returns `Ok(())` if /// Check if `roots` and their deps are available. Returns `Ok(())` if
/// so. Returns `Err(_)` if there is a known module graph or resolution /// so. Returns `Err(_)` if there is a known module graph or resolution
/// error statically reachable from `roots` and not a dynamic import. /// error statically reachable from `roots` and not a dynamic import.
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> { pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> {
self.graph_roots_valid( self.graph_roots_valid(
graph, graph,
&graph.roots.iter().cloned().collect::<Vec<_>>(), &graph.roots.iter().cloned().collect::<Vec<_>>(),
@ -775,10 +854,10 @@ impl ModuleGraphBuilder {
&self, &self,
graph: &ModuleGraph, graph: &ModuleGraph,
roots: &[ModuleSpecifier], roots: &[ModuleSpecifier],
) -> Result<(), AnyError> { ) -> Result<(), JsErrorBox> {
graph_valid( graph_valid(
graph, graph,
&self.fs, &self.sys,
roots, roots,
GraphValidOptions { GraphValidOptions {
kind: if self.cli_options.type_check_mode().is_true() { kind: if self.cli_options.type_check_mode().is_true() {
@ -786,21 +865,33 @@ impl ModuleGraphBuilder {
} else { } else {
GraphKind::CodeOnly GraphKind::CodeOnly
}, },
check_js: self.cli_options.check_js(), check_js: CheckJsOption::Custom(self.tsconfig_resolver.as_ref()),
exit_integrity_errors: true, exit_integrity_errors: true,
}, },
) )
} }
fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> { fn create_graph_resolver(
let jsx_import_source_config = self &self,
) -> Result<CliGraphResolver, deno_json::ToMaybeJsxImportSourceConfigError>
{
let jsx_import_source_config_unscoped = self
.cli_options .cli_options
.workspace() .start_dir
.to_maybe_jsx_import_source_config()?; .to_maybe_jsx_import_source_config()?;
let mut jsx_import_source_config_by_scope = BTreeMap::default();
for (dir_url, _) in self.cli_options.workspace().config_folders() {
let dir = self.cli_options.workspace().resolve_member_dir(dir_url);
let jsx_import_source_config_unscoped =
dir.to_maybe_jsx_import_source_config()?;
jsx_import_source_config_by_scope
.insert(dir_url.clone(), jsx_import_source_config_unscoped);
}
Ok(CliGraphResolver { Ok(CliGraphResolver {
cjs_tracker: &self.cjs_tracker, cjs_tracker: &self.cjs_tracker,
resolver: &self.resolver, resolver: &self.resolver,
jsx_import_source_config, jsx_import_source_config_unscoped,
jsx_import_source_config_by_scope,
}) })
} }
} }
@ -833,18 +924,19 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
message message
} }
static RUN_WITH_SLOPPY_IMPORTS_MSG: &str =
"or run with --unstable-sloppy-imports";
fn enhanced_sloppy_imports_error_message( fn enhanced_sloppy_imports_error_message(
fs: &Arc<dyn FileSystem>, sys: &CliSys,
error: &ModuleError, error: &ModuleError,
) -> Option<String> { ) -> Option<String> {
match error { match error {
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => { | ModuleError::Missing(specifier, _) => {
let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone())) let additional_message = maybe_additional_sloppy_imports_message(sys, specifier)?;
.resolve(specifier, SloppyImportsResolutionKind::Execution)?
.as_suggestion_message();
Some(format!( Some(format!(
"{} {} or run with --unstable-sloppy-imports", "{} {}",
error, error,
additional_message, additional_message,
)) ))
@ -853,6 +945,19 @@ fn enhanced_sloppy_imports_error_message(
} }
} }
pub fn maybe_additional_sloppy_imports_message(
sys: &CliSys,
specifier: &ModuleSpecifier,
) -> Option<String> {
Some(format!(
"{} {}",
CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone()))
.resolve(specifier, SloppyImportsResolutionKind::Execution)?
.as_suggestion_message(),
RUN_WITH_SLOPPY_IMPORTS_MSG
))
}
fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> { fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
match err { match err {
ModuleError::LoadingErr( ModuleError::LoadingErr(
@ -946,9 +1051,11 @@ fn get_resolution_error_bare_specifier(
{ {
Some(specifier.as_str()) Some(specifier.as_str())
} else if let ResolutionError::ResolverError { error, .. } = error { } else if let ResolutionError::ResolverError { error, .. } = error {
if let ResolveError::Other(error) = (*error).as_ref() { if let ResolveError::ImportMap(error) = (*error).as_ref() {
if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) = if let import_map::ImportMapErrorKind::UnmappedBareSpecifier(
error.downcast_ref::<ImportMapError>() specifier,
_,
) = error.as_kind()
{ {
Some(specifier.as_str()) Some(specifier.as_str())
} else { } else {
@ -985,11 +1092,12 @@ fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> {
ResolveError::Other(other_error) => { ResolveError::Other(other_error) => {
if let Some(SpecifierError::ImportPrefixMissing { if let Some(SpecifierError::ImportPrefixMissing {
specifier, .. specifier, ..
}) = other_error.downcast_ref::<SpecifierError>() }) = other_error.as_any().downcast_ref::<SpecifierError>()
{ {
maybe_specifier = Some(specifier); maybe_specifier = Some(specifier);
} }
} }
ResolveError::ImportMap(_) => {}
} }
} }
} }
@ -1019,7 +1127,7 @@ pub fn has_graph_root_local_dependent_changed(
follow_dynamic: true, follow_dynamic: true,
kind: GraphKind::All, kind: GraphKind::All,
prefer_fast_check_graph: true, prefer_fast_check_graph: true,
check_js: true, check_js: CheckJsOption::True,
}, },
); );
while let Some((s, _)) = dependent_specifiers.next() { while let Some((s, _)) = dependent_specifiers.next() {
@ -1081,71 +1189,6 @@ impl deno_graph::source::Reporter for FileWatcherReporter {
} }
} }
pub struct DenoGraphFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);
impl<'a> deno_graph::source::FileSystem for DenoGraphFsAdapter<'a> {
fn read_dir(
&self,
dir_url: &deno_graph::ModuleSpecifier,
) -> Vec<deno_graph::source::DirEntry> {
use deno_core::anyhow;
use deno_graph::source::DirEntry;
use deno_graph::source::DirEntryKind;
let dir_path = match dir_url.to_file_path() {
Ok(path) => path,
// ignore, treat as non-analyzable
Err(()) => return vec![],
};
let entries = match self.0.read_dir_sync(&dir_path) {
Ok(dir) => dir,
Err(err)
if matches!(
err.kind(),
std::io::ErrorKind::PermissionDenied | std::io::ErrorKind::NotFound
) =>
{
return vec![];
}
Err(err) => {
return vec![DirEntry {
kind: DirEntryKind::Error(
anyhow::Error::from(err)
.context("Failed to read directory.".to_string()),
),
url: dir_url.clone(),
}];
}
};
let mut dir_entries = Vec::with_capacity(entries.len());
for entry in entries {
let entry_path = dir_path.join(&entry.name);
dir_entries.push(if entry.is_directory {
DirEntry {
kind: DirEntryKind::Dir,
url: ModuleSpecifier::from_directory_path(&entry_path).unwrap(),
}
} else if entry.is_file {
DirEntry {
kind: DirEntryKind::File,
url: ModuleSpecifier::from_file_path(&entry_path).unwrap(),
}
} else if entry.is_symlink {
DirEntry {
kind: DirEntryKind::Symlink,
url: ModuleSpecifier::from_file_path(&entry_path).unwrap(),
}
} else {
continue;
});
}
dir_entries
}
}
pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String { pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String {
format!( format!(
"{}:{}:{}", "{}:{}:{}",
@ -1209,30 +1252,49 @@ fn format_deno_graph_error(err: &dyn Error) -> String {
#[derive(Debug)] #[derive(Debug)]
struct CliGraphResolver<'a> { struct CliGraphResolver<'a> {
cjs_tracker: &'a CjsTracker, cjs_tracker: &'a CliCjsTracker,
resolver: &'a CliResolver, resolver: &'a CliResolver,
jsx_import_source_config: Option<JsxImportSourceConfig>, jsx_import_source_config_unscoped: Option<JsxImportSourceConfig>,
jsx_import_source_config_by_scope:
BTreeMap<Arc<ModuleSpecifier>, Option<JsxImportSourceConfig>>,
}
impl<'a> CliGraphResolver<'a> {
fn resolve_jsx_import_source_config(
&self,
referrer: &ModuleSpecifier,
) -> Option<&JsxImportSourceConfig> {
self
.jsx_import_source_config_by_scope
.iter()
.rfind(|(s, _)| referrer.as_str().starts_with(s.as_str()))
.map(|(_, c)| c.as_ref())
.unwrap_or(self.jsx_import_source_config_unscoped.as_ref())
}
} }
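
// Note (illustrative, not part of this commit): the longest-matching-scope
// behavior this helper implements, with hypothetical workspace folder URLs.
// Given scopes keyed by config folder URL:
//   file:///project/          -> root deno.json JSX config
//   file:///project/member/   -> member deno.json JSX config
// a referrer such as file:///project/member/src/App.tsx starts with both
// keys; the reverse search (`rfind`) over the BTreeMap hits the later, more
// specific entry first, so the member config wins. A referrer outside every
// config folder falls back to `jsx_import_source_config_unscoped`.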
impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> { impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
fn default_jsx_import_source(&self) -> Option<String> { fn default_jsx_import_source(
&self,
referrer: &ModuleSpecifier,
) -> Option<String> {
self self
.jsx_import_source_config .resolve_jsx_import_source_config(referrer)
.as_ref()
.and_then(|c| c.default_specifier.clone()) .and_then(|c| c.default_specifier.clone())
} }
fn default_jsx_import_source_types(&self) -> Option<String> { fn default_jsx_import_source_types(
&self,
referrer: &ModuleSpecifier,
) -> Option<String> {
self self
.jsx_import_source_config .resolve_jsx_import_source_config(referrer)
.as_ref()
.and_then(|c| c.default_types_specifier.clone()) .and_then(|c| c.default_types_specifier.clone())
} }
fn jsx_import_source_module(&self) -> &str { fn jsx_import_source_module(&self, referrer: &ModuleSpecifier) -> &str {
self self
.jsx_import_source_config .resolve_jsx_import_source_config(referrer)
.as_ref()
.map(|c| c.module.as_str()) .map(|c| c.module.as_str())
.unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE) .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
} }
@ -1305,7 +1367,7 @@ mod test {
let specifier = ModuleSpecifier::parse("file:///file.ts").unwrap(); let specifier = ModuleSpecifier::parse("file:///file.ts").unwrap();
let err = import_map.resolve(input, &specifier).err().unwrap(); let err = import_map.resolve(input, &specifier).err().unwrap();
let err = ResolutionError::ResolverError { let err = ResolutionError::ResolverError {
error: Arc::new(ResolveError::Other(err.into())), error: Arc::new(ResolveError::ImportMap(err)),
specifier: input.to_string(), specifier: input.to_string(),
range: Range { range: Range {
specifier, specifier,


@ -1,17 +1,20 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use crate::util::progress_bar::UpdateGuard; use std::collections::HashMap;
use crate::version; use std::sync::Arc;
use std::thread::ThreadId;
use boxed_error::Boxed; use boxed_error::Boxed;
use deno_cache_dir::file_fetcher::RedirectHeaderParseError; use deno_cache_dir::file_fetcher::RedirectHeaderParseError;
use deno_core::error::custom_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_core::serde; use deno_core::serde;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url; use deno_core::url::Url;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_lib::version::DENO_VERSION_INFO;
use deno_runtime::deno_fetch; use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions; use deno_runtime::deno_fetch::CreateHttpClientOptions;
@ -23,12 +26,10 @@ use http::header::CONTENT_LENGTH;
use http::HeaderMap; use http::HeaderMap;
use http::StatusCode; use http::StatusCode;
use http_body_util::BodyExt; use http_body_util::BodyExt;
use std::collections::HashMap;
use std::sync::Arc;
use std::thread::ThreadId;
use thiserror::Error; use thiserror::Error;
use crate::util::progress_bar::UpdateGuard;
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum SendError { pub enum SendError {
#[error(transparent)] #[error(transparent)]
@ -69,7 +70,7 @@ impl HttpClientProvider {
} }
} }
pub fn get_or_create(&self) -> Result<HttpClient, AnyError> { pub fn get_or_create(&self) -> Result<HttpClient, JsErrorBox> {
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
let thread_id = std::thread::current().id(); let thread_id = std::thread::current().id();
let mut clients = self.clients_by_thread_id.lock(); let mut clients = self.clients_by_thread_id.lock();
@ -78,7 +79,7 @@ impl HttpClientProvider {
Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())), Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())),
Entry::Vacant(entry) => { Entry::Vacant(entry) => {
let client = create_http_client( let client = create_http_client(
version::DENO_VERSION_INFO.user_agent, DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
root_cert_store: match &self.root_cert_store_provider { root_cert_store: match &self.root_cert_store_provider {
Some(provider) => Some(provider.get_or_try_init()?.clone()), Some(provider) => Some(provider.get_or_try_init()?.clone()),
@ -86,7 +87,8 @@ impl HttpClientProvider {
}, },
..self.options.clone() ..self.options.clone()
}, },
)?; )
.map_err(JsErrorBox::from_err)?;
entry.insert(client.clone()); entry.insert(client.clone());
Ok(HttpClient::new(client)) Ok(HttpClient::new(client))
} }
@ -94,34 +96,49 @@ impl HttpClientProvider {
} }
} }
#[derive(Debug, Error)] #[derive(Debug, Error, JsError)]
#[class(type)]
#[error("Bad response: {:?}{}", .status_code, .response_text.as_ref().map(|s| format!("\n\n{}", s)).unwrap_or_else(String::new))] #[error("Bad response: {:?}{}", .status_code, .response_text.as_ref().map(|s| format!("\n\n{}", s)).unwrap_or_else(String::new))]
pub struct BadResponseError { pub struct BadResponseError {
pub status_code: StatusCode, pub status_code: StatusCode,
pub response_text: Option<String>, pub response_text: Option<String>,
} }
#[derive(Debug, Boxed)] #[derive(Debug, Boxed, JsError)]
pub struct DownloadError(pub Box<DownloadErrorKind>); pub struct DownloadError(pub Box<DownloadErrorKind>);
#[derive(Debug, Error)] #[derive(Debug, Error, JsError)]
pub enum DownloadErrorKind { pub enum DownloadErrorKind {
#[class(inherit)]
#[error(transparent)] #[error(transparent)]
Fetch(AnyError), Fetch(deno_fetch::ClientSendError),
#[class(inherit)]
#[error(transparent)] #[error(transparent)]
UrlParse(#[from] deno_core::url::ParseError), UrlParse(#[from] deno_core::url::ParseError),
#[class(generic)]
#[error(transparent)] #[error(transparent)]
HttpParse(#[from] http::Error), HttpParse(#[from] http::Error),
#[class(inherit)]
#[error(transparent)] #[error(transparent)]
Json(#[from] serde_json::Error), Json(#[from] serde_json::Error),
#[class(generic)]
#[error(transparent)] #[error(transparent)]
ToStr(#[from] http::header::ToStrError), ToStr(#[from] http::header::ToStrError),
#[class(inherit)]
#[error(transparent)] #[error(transparent)]
RedirectHeaderParse(RedirectHeaderParseError), RedirectHeaderParse(RedirectHeaderParseError),
#[class(type)]
#[error("Too many redirects.")] #[error("Too many redirects.")]
TooManyRedirects, TooManyRedirects,
#[class(inherit)]
#[error(transparent)] #[error(transparent)]
BadResponse(#[from] BadResponseError), BadResponse(#[from] BadResponseError),
#[class("Http")]
#[error("Not Found.")]
NotFound,
#[class(inherit)]
#[error(transparent)]
Other(JsErrorBox),
} }
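
// Note (illustrative, not part of this commit): a hedged sketch of consuming
// the typed download error; the fallback behavior and the surrounding
// function are made up for illustration. It uses the public
// `Box<DownloadErrorKind>` field to distinguish a 404 without string matching.
async fn download_or_empty(client: &HttpClient, url: Url) -> Vec<u8> {
  match client.download(url).await {
    Ok(bytes) => bytes,
    Err(err) => match *err.0 {
      DownloadErrorKind::NotFound => Vec::new(),
      other => panic!("download failed: {other}"),
    },
  }
}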
#[derive(Debug)] #[derive(Debug)]
@ -208,11 +225,11 @@ impl HttpClient {
Ok(String::from_utf8(bytes)?) Ok(String::from_utf8(bytes)?)
} }
pub async fn download(&self, url: Url) -> Result<Vec<u8>, AnyError> { pub async fn download(&self, url: Url) -> Result<Vec<u8>, DownloadError> {
let maybe_bytes = self.download_inner(url, None, None).await?; let maybe_bytes = self.download_inner(url, None, None).await?;
match maybe_bytes { match maybe_bytes {
Some(bytes) => Ok(bytes), Some(bytes) => Ok(bytes),
None => Err(custom_error("Http", "Not found.")), None => Err(DownloadErrorKind::NotFound.into_box()),
} }
} }
@ -276,7 +293,7 @@ impl HttpClient {
get_response_body_with_progress(response, progress_guard) get_response_body_with_progress(response, progress_guard)
.await .await
.map(|(_, body)| Some(body)) .map(|(_, body)| Some(body))
.map_err(|err| DownloadErrorKind::Fetch(err).into_box()) .map_err(|err| DownloadErrorKind::Other(err).into_box())
} }
async fn get_redirected_response( async fn get_redirected_response(
@ -293,7 +310,7 @@ impl HttpClient {
.clone() .clone()
.send(req) .send(req)
.await .await
.map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?; .map_err(|e| DownloadErrorKind::Fetch(e).into_box())?;
let status = response.status(); let status = response.status();
if status.is_redirection() { if status.is_redirection() {
for _ in 0..5 { for _ in 0..5 {
@ -313,7 +330,7 @@ impl HttpClient {
.clone() .clone()
.send(req) .send(req)
.await .await
.map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?; .map_err(|e| DownloadErrorKind::Fetch(e).into_box())?;
let status = new_response.status(); let status = new_response.status();
if status.is_redirection() { if status.is_redirection() {
response = new_response; response = new_response;
@ -332,7 +349,7 @@ impl HttpClient {
pub async fn get_response_body_with_progress( pub async fn get_response_body_with_progress(
response: http::Response<deno_fetch::ResBody>, response: http::Response<deno_fetch::ResBody>,
progress_guard: Option<&UpdateGuard>, progress_guard: Option<&UpdateGuard>,
) -> Result<(HeaderMap, Vec<u8>), AnyError> { ) -> Result<(HeaderMap, Vec<u8>), JsErrorBox> {
use http_body::Body as _; use http_body::Body as _;
if let Some(progress_guard) = progress_guard { if let Some(progress_guard) = progress_guard {
let mut total_size = response.body().size_hint().exact(); let mut total_size = response.body().size_hint().exact();
@ -464,7 +481,7 @@ mod test {
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::DENO_VERSION_INFO.user_agent, DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path().join("tls/RootCA.pem"), test_util::testdata_path().join("tls/RootCA.pem"),
@ -508,7 +525,7 @@ mod test {
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::DENO_VERSION_INFO.user_agent, DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions::default(), CreateHttpClientOptions::default(),
) )
.unwrap(), .unwrap(),
@ -549,7 +566,7 @@ mod test {
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::DENO_VERSION_INFO.user_agent, DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
root_cert_store: Some(root_cert_store), root_cert_store: Some(root_cert_store),
..Default::default() ..Default::default()
@ -570,7 +587,7 @@ mod test {
.unwrap(); .unwrap();
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::DENO_VERSION_INFO.user_agent, DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path() test_util::testdata_path()
@ -603,7 +620,7 @@ mod test {
let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap(); let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap();
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::DENO_VERSION_INFO.user_agent, DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path() test_util::testdata_path()
@ -644,7 +661,7 @@ mod test {
.unwrap(); .unwrap();
let client = HttpClient::new( let client = HttpClient::new(
create_http_client( create_http_client(
version::DENO_VERSION_INFO.user_agent, DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions { CreateHttpClientOptions {
ca_certs: vec![std::fs::read( ca_certs: vec![std::fs::read(
test_util::testdata_path() test_util::testdata_path()


@ -1,18 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
pub fn main() { pub fn main() {
let mut args = vec!["cargo", "test", "-p", "cli_tests", "--features", "run"]; // this file exists to cause the executable to be built when running cargo test
if !cfg!(debug_assertions) {
args.push("--release");
}
args.push("--");
// If any args were passed to this process, pass them through to the child
let orig_args = std::env::args().skip(1).collect::<Vec<_>>();
let orig_args: Vec<&str> =
orig_args.iter().map(|x| x.as_ref()).collect::<Vec<_>>();
args.extend(orig_args);
test_util::spawn::exec_replace("cargo", &args).unwrap();
} }


@ -1,19 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use log::debug; use log::debug;
#[cfg(not(feature = "hmr"))]
static CLI_SNAPSHOT: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
pub fn deno_isolate_init() -> Option<&'static [u8]> { pub fn deno_isolate_init() -> Option<&'static [u8]> {
debug!("Deno isolate init with snapshots."); debug!("Deno isolate init with snapshots.");
#[cfg(not(feature = "hmr"))] deno_snapshots::CLI_SNAPSHOT
{
Some(CLI_SNAPSHOT)
}
#[cfg(feature = "hmr")]
{
None
}
} }
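
// Note (an assumption, not shown in this diff): the `deno_snapshots` crate
// referenced here presumably exposes roughly the cfg-gated constant that used
// to live inline, along these lines; the real crate may differ.
#[cfg(not(feature = "hmr"))]
static CLI_SNAPSHOT_BYTES: &[u8] =
  include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
#[cfg(not(feature = "hmr"))]
pub static CLI_SNAPSHOT: Option<&'static [u8]> = Some(CLI_SNAPSHOT_BYTES);
#[cfg(feature = "hmr")]
pub static CLI_SNAPSHOT: Option<&'static [u8]> = None;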


@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file // deno-lint-ignore-file
import { core, primordials } from "ext:core/mod.js"; import { core, primordials } from "ext:core/mod.js";
@ -8,7 +8,7 @@ import {
restorePermissions, restorePermissions,
} from "ext:cli/40_test_common.js"; } from "ext:cli/40_test_common.js";
import { Console } from "ext:deno_console/01_console.js"; import { Console } from "ext:deno_console/01_console.js";
import { setExitHandler } from "ext:runtime/30_os.js"; import { setExitHandler } from "ext:deno_os/30_os.js";
const { const {
op_register_bench, op_register_bench,
op_bench_get_origin, op_bench_get_origin,


@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
// deno-lint-ignore-file // deno-lint-ignore-file
/* /*

cli/js/40_lint.js (new file, 1089 lines; diff too large to display)

cli/js/40_lint_selector.js (new file, 1029 lines; diff too large to display)

cli/js/40_lint_types.d.ts (new vendored file, 139 lines)

@ -0,0 +1,139 @@
// Copyright 2018-2025 the Deno authors. MIT license.
export interface NodeFacade {
type: string;
range: [number, number];
[key: string]: unknown;
}
export interface AstContext {
buf: Uint8Array;
strTable: Map<number, string>;
strTableOffset: number;
rootOffset: number;
nodes: Map<number, NodeFacade>;
spansOffset: number;
propsOffset: number;
strByType: number[];
strByProp: number[];
typeByStr: Map<string, number>;
propByStr: Map<string, number>;
matcher: MatchContext;
}
export interface Node {
range: Range;
}
export type Range = [number, number];
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface RuleContext {
id: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintRule {
create(ctx: RuleContext): Record<string, (node: unknown) => void>;
destroy?(ctx: RuleContext): void;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintPlugin {
name: string;
rules: Record<string, LintRule>;
}
export interface LintState {
plugins: LintPlugin[];
installedPlugins: Set<string>;
}
export type VisitorFn = (node: unknown) => void;
export interface CompiledVisitor {
matcher: (ctx: MatchContext, offset: number) => boolean;
info: { enter: VisitorFn; exit: VisitorFn };
}
export interface AttrExists {
type: 3;
prop: number[];
}
export interface AttrBin {
type: 4;
prop: number[];
op: number;
// deno-lint-ignore no-explicit-any
value: any;
}
export type AttrSelector = AttrExists | AttrBin;
export interface ElemSelector {
type: 1;
wildcard: boolean;
elem: number;
}
export interface PseudoNthChild {
type: 5;
op: string | null;
step: number;
stepOffset: number;
of: Selector | null;
repeat: boolean;
}
export interface PseudoHas {
type: 6;
selectors: Selector[];
}
export interface PseudoNot {
type: 7;
selectors: Selector[];
}
export interface PseudoFirstChild {
type: 8;
}
export interface PseudoLastChild {
type: 9;
}
export interface Relation {
type: 2;
op: number;
}
export type Selector = Array<
| ElemSelector
| Relation
| AttrExists
| AttrBin
| PseudoNthChild
| PseudoNot
| PseudoHas
| PseudoFirstChild
| PseudoLastChild
>;
export interface SelectorParseCtx {
root: Selector;
current: Selector;
}
export interface MatchContext {
getFirstChild(id: number): number;
getLastChild(id: number): number;
getSiblings(id: number): number[];
getParent(id: number): number;
getType(id: number): number;
getAttrPathValue(id: number, propIds: number[], idx: number): unknown;
}
export type NextFn = (ctx: MatchContext, id: number) => boolean;
export type MatcherFn = (ctx: MatchContext, id: number) => boolean;
export type TransformFn = (value: string) => number;
export {};


@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
import { core, primordials } from "ext:core/mod.js"; import { core, primordials } from "ext:core/mod.js";
import { escapeName, withPermissions } from "ext:cli/40_test_common.js"; import { escapeName, withPermissions } from "ext:cli/40_test_common.js";
@ -26,7 +26,7 @@ const {
TypeError, TypeError,
} = primordials; } = primordials;
import { setExitHandler } from "ext:runtime/30_os.js"; import { setExitHandler } from "ext:deno_os/30_os.js";
// Capture `Deno` global so that users deleting or mangling it, won't // Capture `Deno` global so that users deleting or mangling it, won't
// have impact on our sanitizers. // have impact on our sanitizers.


@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
import { core, primordials } from "ext:core/mod.js"; import { core, primordials } from "ext:core/mod.js";
import { serializePermissions } from "ext:runtime/10_permissions.js"; import { serializePermissions } from "ext:runtime/10_permissions.js";
const ops = core.ops; const ops = core.ops;


@ -1,14 +1,16 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use crate::args::jsr_url;
use crate::file_fetcher::CliFileFetcher;
use dashmap::DashMap; use dashmap::DashMap;
use deno_core::serde_json; use deno_core::serde_json;
use deno_graph::packages::JsrPackageInfo; use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageVersionInfo; use deno_graph::packages::JsrPackageVersionInfo;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use std::sync::Arc;
use crate::args::jsr_url;
use crate::file_fetcher::CliFileFetcher;
/// This is similar to a subset of `JsrCacheResolver` which fetches rather than /// This is similar to a subset of `JsrCacheResolver` which fetches rather than
/// just reads the cache. Keep in sync! /// just reads the cache. Keep in sync!

cli/lib/Cargo.toml (new file, 46 lines)

@ -0,0 +1,46 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "deno_lib"
version = "0.3.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "Shared code between the Deno CLI and denort"
[lib]
path = "lib.rs"
[dependencies]
capacity_builder.workspace = true
deno_config = { workspace = true, features = ["sync", "workspace"] }
deno_error.workspace = true
deno_fs = { workspace = true, features = ["sync_fs"] }
deno_media_type.workspace = true
deno_node = { workspace = true, features = ["sync_fs"] }
deno_npm.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime.workspace = true
deno_semver.workspace = true
deno_terminal.workspace = true
env_logger = "=0.10.0"
faster-hex.workspace = true
indexmap.workspace = true
libsui.workspace = true
log = { workspace = true, features = ["serde"] }
node_resolver = { workspace = true, features = ["sync"] }
parking_lot.workspace = true
ring.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sys_traits = { workspace = true, features = ["getrandom"] }
thiserror.workspace = true
tokio.workspace = true
twox-hash.workspace = true
url.workspace = true
[dev-dependencies]
test_util.workspace = true

cli/lib/README.md (new file, 4 lines)

@ -0,0 +1,4 @@
# deno_lib
This crate contains the shared code between the Deno CLI and denort. It is
highly unstable.

cli/lib/args.rs (new file, 216 lines)

@ -0,0 +1,216 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::io::BufReader;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::path::PathBuf;
use std::sync::LazyLock;
use deno_npm::resolution::PackageIdNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_runtime::colors;
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
use deno_runtime::deno_tls::rustls;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::rustls_pemfile;
use deno_runtime::deno_tls::webpki_roots;
use deno_semver::npm::NpmPackageReqReference;
use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;
pub fn npm_pkg_req_ref_to_binary_command(
req_ref: &NpmPackageReqReference,
) -> String {
req_ref
.sub_path()
.map(|s| s.to_string())
.unwrap_or_else(|| req_ref.req().name.to_string())
}
pub fn has_trace_permissions_enabled() -> bool {
has_flag_env_var("DENO_TRACE_PERMISSIONS")
}
pub fn has_flag_env_var(name: &str) -> bool {
match std::env::var_os(name) {
Some(value) => value == "1",
None => false,
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum CaData {
/// The string is a file path
File(String),
/// This variant is not exposed as an option in the CLI, it is used internally
/// for standalone binaries.
Bytes(Vec<u8>),
}
#[derive(Error, Debug, Clone, deno_error::JsError)]
#[class(generic)]
pub enum RootCertStoreLoadError {
#[error(
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
)]
UnknownStore(String),
#[error("Unable to add pem file to certificate store: {0}")]
FailedAddPemFile(String),
#[error("Failed opening CA file: {0}")]
CaFileOpenError(String),
}
/// Create and populate a root cert store based on the passed options and
/// environment.
pub fn get_root_cert_store(
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
) -> Result<RootCertStore, RootCertStoreLoadError> {
let mut root_cert_store = RootCertStore::empty();
let ca_stores: Vec<String> = maybe_ca_stores
.or_else(|| {
let env_ca_store = std::env::var("DENO_TLS_CA_STORE").ok()?;
Some(
env_ca_store
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect(),
)
})
.unwrap_or_else(|| vec!["mozilla".to_string()]);
for store in ca_stores.iter() {
match store.as_str() {
"mozilla" => {
root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
}
"system" => {
let roots = load_native_certs().expect("could not load platform certs");
for root in roots {
if let Err(err) = root_cert_store
.add(rustls::pki_types::CertificateDer::from(root.0.clone()))
{
log::error!(
"{}",
colors::yellow(&format!(
"Unable to add system certificate to certificate store: {:?}",
err
))
);
let hex_encoded_root = faster_hex::hex_string(&root.0);
log::error!("{}", colors::gray(&hex_encoded_root));
}
}
}
_ => {
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
}
}
}
let ca_data =
maybe_ca_data.or_else(|| std::env::var("DENO_CERT").ok().map(CaData::File));
if let Some(ca_data) = ca_data {
let result = match ca_data {
CaData::File(ca_file) => {
let ca_file = if let Some(root) = &maybe_root_path {
root.join(&ca_file)
} else {
PathBuf::from(ca_file)
};
let certfile = std::fs::File::open(ca_file).map_err(|err| {
RootCertStoreLoadError::CaFileOpenError(err.to_string())
})?;
let mut reader = BufReader::new(certfile);
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
CaData::Bytes(data) => {
let mut reader = BufReader::new(Cursor::new(data));
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
};
match result {
Ok(certs) => {
root_cert_store.add_parsable_certificates(certs);
}
Err(e) => {
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
}
}
}
Ok(root_cert_store)
}
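
// Note (illustrative, not part of the new file): a brief usage sketch of the
// function above; the CA file path is hypothetical.
fn example_store() -> Result<RootCertStore, RootCertStoreLoadError> {
  // Load system and mozilla roots, plus one extra PEM file.
  get_root_cert_store(
    None,
    Some(vec!["system".to_string(), "mozilla".to_string()]),
    Some(CaData::File("testdata/tls/RootCA.pem".to_string())),
  )
}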
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
pub kind: NpmProcessStateKind,
pub local_node_modules_path: Option<String>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
Byonm,
}
pub static NPM_PROCESS_STATE: LazyLock<Option<NpmProcessState>> =
LazyLock::new(|| {
use deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
let fd = fd.parse::<usize>().ok()?;
let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// seek to beginning. after the file is written the position will be inherited by this subprocess,
// and also this file might have been read before
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state)
});
pub fn resolve_npm_resolution_snapshot(
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, PackageIdNotFoundError>
{
if let Some(NpmProcessStateKind::Snapshot(snapshot)) =
NPM_PROCESS_STATE.as_ref().map(|s| &s.kind)
{
// TODO(bartlomieju): remove this clone
Ok(Some(snapshot.clone().into_valid()?))
} else {
Ok(None)
}
}
#[derive(Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub npm_lazy_caching: bool,
pub features: Vec<String>, // --unstable-kv --unstable-cron
}

cli/lib/build.rs (new file, 42 lines)

@ -0,0 +1,42 @@
// Copyright 2018-2025 the Deno authors. MIT license.
fn main() {
// todo(dsherret): remove this after Deno 2.2.0 is published and then
// align the version of this crate with Deno then. We need to wait because
// there was previously a deno_lib 2.2.0 published (https://crates.io/crates/deno_lib/versions)
let version_path = std::path::Path::new(".").join("version.txt");
println!("cargo:rerun-if-changed={}", version_path.display());
#[allow(clippy::disallowed_methods)]
let text = std::fs::read_to_string(version_path).unwrap();
println!("cargo:rustc-env=DENO_VERSION={}", text);
let commit_hash = git_commit_hash();
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", commit_hash);
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
println!(
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
&commit_hash[..7]
);
}
fn git_commit_hash() -> String {
if let Ok(output) = std::process::Command::new("git")
.arg("rev-list")
.arg("-1")
.arg("HEAD")
.output()
{
if output.status.success() {
std::str::from_utf8(&output.stdout[..40])
.unwrap()
.to_string()
} else {
// When not in git repository
// (e.g. when the user install by `cargo install deno`)
"UNKNOWN".to_string()
}
} else {
// When there is no git command for some reason
"UNKNOWN".to_string()
}
}
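
// Note (an assumption about the consuming crate, not shown here): the
// `cargo:rustc-env` values emitted above are presumably read back at compile
// time along these lines.
pub const DENO_VERSION: &str = env!("DENO_VERSION");
pub const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
pub const GIT_COMMIT_HASH_SHORT: &str = env!("GIT_COMMIT_HASH_SHORT");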

cli/lib/clippy.toml (new file, 48 lines)

@ -0,0 +1,48 @@
disallowed-methods = [
{ path = "std::env::current_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::env::set_current_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::env::temp_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::copy", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::create_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::DirBuilder::new", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::hard_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::OpenOptions::new", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_to_string", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::rename", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::set_permissions", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::write", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "url::Url::to_file_path", reason = "Use deno_path_util instead" },
{ path = "url::Url::from_file_path", reason = "Use deno_path_util instead" },
{ path = "url::Url::from_directory_path", reason = "Use deno_path_util instead" },
]
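
Not part of the diff: a sketch of the pattern this lint config enforces, namely that file-system access goes through an injected sys abstraction instead of `std::fs`. The `ReadSys` trait below is a hypothetical stand-in for the real `DenoLibSys`/`sys_traits` surface (see cli/lib/sys.rs later in this diff).

use std::io;
use std::path::Path;

// Hypothetical stand-in for the DenoLibSys-style abstraction; the real bound
// comes from sys_traits and has a much richer surface.
trait ReadSys {
  fn fs_read_to_string(&self, path: &Path) -> io::Result<String>;
}

// Allowed: the caller injects the file system, so tests can swap in a fake.
fn load_config<TSys: ReadSys>(sys: &TSys, path: &Path) -> io::Result<String> {
  sys.fs_read_to_string(path)
  // Disallowed by the config above: std::fs::read_to_string(path)
}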

cli/lib/lib.rs Normal file
View file

@ -0,0 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod args;
pub mod loader;
pub mod npm;
pub mod shared;
pub mod standalone;
pub mod sys;
pub mod util;
pub mod version;
pub mod worker;

cli/lib/loader.rs Normal file
View file

@ -0,0 +1,217 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use deno_media_type::MediaType;
use deno_resolver::cjs::CjsTracker;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::deno_core::ModuleSourceCode;
use node_resolver::analyze::CjsCodeAnalyzer;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::InNpmPackageChecker;
use node_resolver::IsBuiltInNodeModuleChecker;
use node_resolver::NpmPackageFolderResolver;
use thiserror::Error;
use url::Url;
use crate::sys::DenoLibSys;
use crate::util::text_encoding::from_utf8_lossy_cow;
pub struct ModuleCodeStringSource {
pub code: ModuleSourceCode,
pub found_url: Url,
pub media_type: MediaType,
}
#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("[{}]: Stripping types is currently unsupported for files under node_modules, for \"{}\"", self.code(), specifier)]
pub struct StrippingTypesNodeModulesError {
pub specifier: Url,
}
impl StrippingTypesNodeModulesError {
pub fn code(&self) -> &'static str {
"ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING"
}
}
#[derive(Debug, Error, deno_error::JsError)]
pub enum NpmModuleLoadError {
#[class(inherit)]
#[error(transparent)]
UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
#[class(inherit)]
#[error(transparent)]
StrippingTypesNodeModules(#[from] StrippingTypesNodeModulesError),
#[class(inherit)]
#[error(transparent)]
ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
#[class(inherit)]
#[error(transparent)]
TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
#[class(inherit)]
#[error("Unable to load {}{}", file_path.display(), maybe_referrer.as_ref().map(|r| format!(" imported from {}", r)).unwrap_or_default())]
UnableToLoad {
file_path: PathBuf,
maybe_referrer: Option<Url>,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error(
"{}",
format_dir_import_message(file_path, maybe_referrer, suggestion)
)]
DirImport {
file_path: PathBuf,
maybe_referrer: Option<Url>,
suggestion: Option<&'static str>,
#[source]
#[inherit]
source: std::io::Error,
},
}
fn format_dir_import_message(
file_path: &std::path::Path,
maybe_referrer: &Option<Url>,
suggestion: &Option<&'static str>,
) -> String {
// directory imports are not allowed when importing from an
// ES module, so provide the user with a helpful error message
let dir_path = file_path;
let mut msg = "Directory import ".to_string();
msg.push_str(&dir_path.to_string_lossy());
if let Some(referrer) = maybe_referrer {
msg.push_str(" is not supported resolving import from ");
msg.push_str(referrer.as_str());
if let Some(entrypoint_name) = suggestion {
msg.push_str("\nDid you mean to import ");
msg.push_str(entrypoint_name);
msg.push_str(" within the directory?");
}
}
msg
}
#[derive(Clone)]
pub struct NpmModuleLoader<
TCjsCodeAnalyzer: CjsCodeAnalyzer,
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: DenoLibSys,
> {
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
sys: TSys,
node_code_translator: Arc<
NodeCodeTranslator<
TCjsCodeAnalyzer,
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
>,
}
impl<
TCjsCodeAnalyzer: CjsCodeAnalyzer,
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: DenoLibSys,
>
NpmModuleLoader<
TCjsCodeAnalyzer,
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>
{
pub fn new(
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
node_code_translator: Arc<
NodeCodeTranslator<
TCjsCodeAnalyzer,
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
>,
sys: TSys,
) -> Self {
Self {
cjs_tracker,
node_code_translator,
sys,
}
}
pub async fn load(
&self,
specifier: &Url,
maybe_referrer: Option<&Url>,
) -> Result<ModuleCodeStringSource, NpmModuleLoadError> {
let file_path = deno_path_util::url_to_file_path(specifier)?;
let code = self.sys.fs_read(&file_path).map_err(|source| {
if self.sys.fs_is_dir_no_err(&file_path) {
let suggestion = ["index.mjs", "index.js", "index.cjs"]
.into_iter()
.find(|e| self.sys.fs_is_file_no_err(file_path.join(e)));
NpmModuleLoadError::DirImport {
file_path,
maybe_referrer: maybe_referrer.cloned(),
suggestion,
source,
}
} else {
NpmModuleLoadError::UnableToLoad {
file_path,
maybe_referrer: maybe_referrer.cloned(),
source,
}
}
})?;
let media_type = MediaType::from_specifier(specifier);
if media_type.is_emittable() {
return Err(NpmModuleLoadError::StrippingTypesNodeModules(
StrippingTypesNodeModulesError {
specifier: specifier.clone(),
},
));
}
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals
let code = from_utf8_lossy_cow(code);
ModuleSourceCode::String(
self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(code))
.await?
.into_owned()
.into(),
)
} else {
// esm and json code is untouched
ModuleSourceCode::Bytes(match code {
Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
Cow::Borrowed(bytes) => bytes.into(),
})
};
Ok(ModuleCodeStringSource {
code,
found_url: specifier.clone(),
media_type: MediaType::from_specifier(specifier),
})
}
}
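
Not part of the diff: a standalone, std-only sketch of the entrypoint probing behind the `DirImport` suggestion above (the real code goes through the injected sys, not `std::fs`).

use std::path::Path;

// Probes for a likely entrypoint when a directory is imported, mirroring the
// suggestion logic in NpmModuleLoader::load above.
fn suggest_dir_entrypoint(dir: &Path) -> Option<&'static str> {
  ["index.mjs", "index.js", "index.cjs"]
    .into_iter()
    .find(|name| dir.join(name).is_file())
}

fn main() {
  // e.g. prints Some("index.js") for a typical npm package directory
  println!("{:?}", suggest_dir_entrypoint(Path::new("./node_modules/example")));
}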

cli/lib/npm/mod.rs Normal file
View file

@ -0,0 +1,80 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod permission_checker;
use std::path::Path;
use std::sync::Arc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ManagedNpmResolverRc;
use deno_resolver::npm::NpmResolver;
use deno_runtime::deno_process::NpmProcessStateProvider;
use deno_runtime::deno_process::NpmProcessStateProviderRc;
pub use permission_checker::NpmRegistryReadPermissionChecker;
pub use permission_checker::NpmRegistryReadPermissionCheckerMode;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::sys::DenoLibSys;
pub fn create_npm_process_state_provider<TSys: DenoLibSys>(
npm_resolver: &NpmResolver<TSys>,
) -> NpmProcessStateProviderRc {
match npm_resolver {
NpmResolver::Byonm(byonm_npm_resolver) => {
Arc::new(ByonmNpmProcessStateProvider(byonm_npm_resolver.clone()))
}
NpmResolver::Managed(managed_npm_resolver) => {
Arc::new(ManagedNpmProcessStateProvider(managed_npm_resolver.clone()))
}
}
}
pub fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
#[derive(Debug)]
pub struct ManagedNpmProcessStateProvider<TSys: DenoLibSys>(
pub ManagedNpmResolverRc<TSys>,
);
impl<TSys: DenoLibSys> NpmProcessStateProvider
for ManagedNpmProcessStateProvider<TSys>
{
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.0.resolution().serialized_valid_snapshot(),
self.0.root_node_modules_path(),
)
}
}
#[derive(Debug)]
pub struct ByonmNpmProcessStateProvider<TSys: DenoLibSys>(
pub Arc<ByonmNpmResolver<TSys>>,
);
impl<TSys: DenoLibSys> NpmProcessStateProvider
for ByonmNpmProcessStateProvider<TSys>
{
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self
.0
.root_node_modules_path()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
}
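
Not part of the diff: a hypothetical sketch of the JSON shape `npm_process_state()` produces. The field names follow the code above, but the mirror struct and the string used for `kind` are assumptions; the real `NpmProcessStateKind` enum's serde encoding is not shown in this diff.

use serde::Serialize;

// Hypothetical mirror of the process-state JSON; the real types live in
// crate::args and the `kind` field is an enum whose encoding is not shown.
#[derive(Serialize)]
struct ProcessStateMirror {
  kind: String,
  local_node_modules_path: Option<String>,
}

fn main() {
  let state = ProcessStateMirror {
    kind: "Byonm".to_string(),
    local_node_modules_path: Some("/app/node_modules".to_string()),
  };
  // The real code hands a string like this to a child process, which reads it
  // back from an inherited file descriptor (see the fd-reading code earlier
  // in this diff).
  println!("{}", serde_json::to_string(&state).unwrap());
}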

View file

@ -0,0 +1,120 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use deno_error::JsErrorBox;
use deno_runtime::deno_node::NodePermissions;
use parking_lot::Mutex;
use crate::sys::DenoLibSys;
#[derive(Debug)]
pub enum NpmRegistryReadPermissionCheckerMode {
Byonm,
Global(PathBuf),
Local(PathBuf),
}
#[derive(Debug)]
pub struct NpmRegistryReadPermissionChecker<TSys: DenoLibSys> {
sys: TSys,
cache: Mutex<HashMap<PathBuf, PathBuf>>,
mode: NpmRegistryReadPermissionCheckerMode,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(inherit)]
#[error("failed canonicalizing '{path}'")]
struct EnsureRegistryReadPermissionError {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
}
impl<TSys: DenoLibSys> NpmRegistryReadPermissionChecker<TSys> {
pub fn new(sys: TSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
Self {
sys,
cache: Default::default(),
mode,
}
}
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
pub fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, JsErrorBox> {
if permissions.query_read_all() {
return Ok(Cow::Borrowed(path)); // skip permissions checks below
}
match &self.mode {
NpmRegistryReadPermissionCheckerMode::Byonm => {
if path.components().any(|c| c.as_os_str() == "node_modules") {
Ok(Cow::Borrowed(path))
} else {
permissions
.check_read_path(path)
.map_err(JsErrorBox::from_err)
}
}
NpmRegistryReadPermissionCheckerMode::Global(registry_path)
| NpmRegistryReadPermissionCheckerMode::Local(registry_path) => {
// allow reading if it's in the node_modules
let is_path_in_node_modules = path.starts_with(registry_path)
&& path
.components()
.all(|c| !matches!(c, std::path::Component::ParentDir));
if is_path_in_node_modules {
let mut cache = self.cache.lock();
let mut canonicalize =
|path: &Path| -> Result<Option<PathBuf>, JsErrorBox> {
match cache.get(path) {
Some(canon) => Ok(Some(canon.clone())),
None => match self.sys.fs_canonicalize(path) {
Ok(canon) => {
cache.insert(path.to_path_buf(), canon.clone());
Ok(Some(canon))
}
Err(e) => {
if e.kind() == ErrorKind::NotFound {
return Ok(None);
}
Err(JsErrorBox::from_err(
EnsureRegistryReadPermissionError {
path: path.to_path_buf(),
source: e,
},
))
}
},
}
};
if let Some(registry_path_canon) = canonicalize(registry_path)? {
if let Some(path_canon) = canonicalize(path)? {
if path_canon.starts_with(registry_path_canon) {
return Ok(Cow::Owned(path_canon));
}
} else if path.starts_with(registry_path_canon)
|| path.starts_with(registry_path)
{
return Ok(Cow::Borrowed(path));
}
}
}
permissions
.check_read_path(path)
.map_err(JsErrorBox::from_err)
}
}
}
}
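
Not part of the diff: a std-only sketch of the predicate used above in the Global/Local modes to decide whether a path lies inside the npm registry directory before falling back to an explicit permission check.

use std::path::{Component, Path};

// The path must sit under the registry directory and contain no `..`
// components before the canonicalization step runs.
fn is_path_in_node_modules(path: &Path, registry_path: &Path) -> bool {
  path.starts_with(registry_path)
    && path.components().all(|c| !matches!(c, Component::ParentDir))
}

fn main() {
  assert!(is_path_in_node_modules(
    Path::new("/proj/node_modules/pkg/index.js"),
    Path::new("/proj/node_modules"),
  ));
  assert!(!is_path_in_node_modules(
    Path::new("/proj/node_modules/../secrets.txt"),
    Path::new("/proj/node_modules"),
  ));
}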

View file

@ -1,8 +1,11 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 /// This module is shared between build script and the binaries. Use it sparsely.
-use deno_core::anyhow::bail;
-use deno_core::error::AnyError;
+use thiserror::Error;
+#[derive(Debug, Error)]
+#[error("Unrecognized release channel: {0}")]
+pub struct UnrecognizedReleaseChannelError(pub String);
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub enum ReleaseChannel {
@ -50,13 +53,17 @@ impl ReleaseChannel {
   // NOTE(bartlomieju): do not ever change these values, tools like `patchver`
   // rely on them.
   #[allow(unused)]
-  pub fn deserialize(str_: &str) -> Result<Self, AnyError> {
+  pub fn deserialize(
+    str_: &str,
+  ) -> Result<Self, UnrecognizedReleaseChannelError> {
     Ok(match str_ {
       "stable" => Self::Stable,
       "canary" => Self::Canary,
       "rc" => Self::Rc,
       "lts" => Self::Lts,
-      unknown => bail!("Unrecognized release channel: {}", unknown),
+      unknown => {
+        return Err(UnrecognizedReleaseChannelError(unknown.to_string()))
+      }
     })
   }
 }
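
Not part of the diff: a hypothetical caller showing what the switch from `bail!` to a dedicated error type buys, namely that the failure can be matched structurally. This assumes `ReleaseChannel` and `UnrecognizedReleaseChannelError` from the hunk above are in scope; the real call sites are not in this hunk.

// Assumes ReleaseChannel and UnrecognizedReleaseChannelError are in scope.
fn parse_channel_or_default(raw: &str) -> ReleaseChannel {
  match ReleaseChannel::deserialize(raw) {
    Ok(channel) => channel,
    Err(UnrecognizedReleaseChannelError(unknown)) => {
      eprintln!("unknown release channel '{unknown}', defaulting to stable");
      ReleaseChannel::Stable
    }
  }
}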

View file

@ -0,0 +1,389 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use deno_config::workspace::PackageJsonDepResolution;
use deno_media_type::MediaType;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_semver::Version;
use indexmap::IndexMap;
use node_resolver::analyze::CjsAnalysisExports;
use serde::Deserialize;
use serde::Serialize;
use url::Url;
use super::virtual_fs::FileSystemCaseSensitivity;
use crate::args::UnstableConfig;
pub const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
pub trait DenoRtDeserializable<'a>: Sized {
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)>;
}
impl<'a> DenoRtDeserializable<'a> for Cow<'a, [u8]> {
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
let (input, data) = read_bytes_with_u32_len(input)?;
Ok((input, Cow::Borrowed(data)))
}
}
pub trait DenoRtSerializable<'a> {
fn serialize(
&'a self,
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
);
}
#[derive(Deserialize, Serialize)]
pub enum NodeModules {
Managed {
/// Relative path for the node_modules directory in the vfs.
node_modules_dir: Option<String>,
},
Byonm {
root_node_modules_dir: Option<String>,
},
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolverImportMap {
pub specifier: String,
pub json: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SerializedResolverWorkspaceJsrPackage {
pub relative_base: String,
pub name: String,
pub version: Option<Version>,
pub exports: IndexMap<String, String>,
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
pub package_jsons: BTreeMap<String, serde_json::Value>,
pub pkg_json_resolution: PackageJsonDepResolution,
}
// Note: Don't use hashmaps/hashsets. Ensure the serialization
// is deterministic.
#[derive(Deserialize, Serialize)]
pub struct Metadata {
pub argv: Vec<String>,
pub seed: Option<u64>,
pub code_cache_key: Option<u64>,
pub permissions: PermissionsOptions,
pub location: Option<Url>,
pub v8_flags: Vec<String>,
pub log_level: Option<log::Level>,
pub ca_stores: Option<Vec<String>>,
pub ca_data: Option<Vec<u8>>,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub env_vars_from_env_file: IndexMap<String, String>,
pub workspace_resolver: SerializedWorkspaceResolver,
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig,
pub otel_config: OtelConfig,
pub vfs_case_sensitivity: FileSystemCaseSensitivity,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpecifierId(u32);
impl SpecifierId {
pub fn new(id: u32) -> Self {
Self(id)
}
}
impl<'a> capacity_builder::BytesAppendable<'a> for SpecifierId {
fn append_to_builder<TBytes: capacity_builder::BytesType>(
self,
builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
) {
builder.append_le(self.0);
}
}
impl<'a> DenoRtSerializable<'a> for SpecifierId {
fn serialize(
&'a self,
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
) {
builder.append_le(self.0);
}
}
impl<'a> DenoRtDeserializable<'a> for SpecifierId {
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
let (input, id) = read_u32(input)?;
Ok((input, Self(id)))
}
}
#[derive(Deserialize, Serialize)]
pub enum CjsExportAnalysisEntry {
Esm,
Cjs(CjsAnalysisExports),
}
const HAS_TRANSPILED_FLAG: u8 = 1 << 0;
const HAS_SOURCE_MAP_FLAG: u8 = 1 << 1;
const HAS_CJS_EXPORT_ANALYSIS_FLAG: u8 = 1 << 2;
pub struct RemoteModuleEntry<'a> {
pub media_type: MediaType,
pub data: Cow<'a, [u8]>,
pub maybe_transpiled: Option<Cow<'a, [u8]>>,
pub maybe_source_map: Option<Cow<'a, [u8]>>,
pub maybe_cjs_export_analysis: Option<Cow<'a, [u8]>>,
}
impl<'a> DenoRtSerializable<'a> for RemoteModuleEntry<'a> {
fn serialize(
&'a self,
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
) {
fn append_maybe_data<'a>(
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
maybe_data: Option<&'a [u8]>,
) {
if let Some(data) = maybe_data {
builder.append_le(data.len() as u32);
builder.append(data);
}
}
let mut has_data_flags = 0;
if self.maybe_transpiled.is_some() {
has_data_flags |= HAS_TRANSPILED_FLAG;
}
if self.maybe_source_map.is_some() {
has_data_flags |= HAS_SOURCE_MAP_FLAG;
}
if self.maybe_cjs_export_analysis.is_some() {
has_data_flags |= HAS_CJS_EXPORT_ANALYSIS_FLAG;
}
builder.append(serialize_media_type(self.media_type));
builder.append_le(self.data.len() as u32);
builder.append(self.data.as_ref());
builder.append(has_data_flags);
append_maybe_data(builder, self.maybe_transpiled.as_deref());
append_maybe_data(builder, self.maybe_source_map.as_deref());
append_maybe_data(builder, self.maybe_cjs_export_analysis.as_deref());
}
}
impl<'a> DenoRtDeserializable<'a> for RemoteModuleEntry<'a> {
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
#[allow(clippy::type_complexity)]
fn deserialize_data_if_has_flag(
input: &[u8],
has_data_flags: u8,
flag: u8,
) -> std::io::Result<(&[u8], Option<Cow<[u8]>>)> {
if has_data_flags & flag != 0 {
let (input, bytes) = read_bytes_with_u32_len(input)?;
Ok((input, Some(Cow::Borrowed(bytes))))
} else {
Ok((input, None))
}
}
let (input, media_type) = MediaType::deserialize(input)?;
let (input, data) = read_bytes_with_u32_len(input)?;
let (input, has_data_flags) = read_u8(input)?;
let (input, maybe_transpiled) =
deserialize_data_if_has_flag(input, has_data_flags, HAS_TRANSPILED_FLAG)?;
let (input, maybe_source_map) =
deserialize_data_if_has_flag(input, has_data_flags, HAS_SOURCE_MAP_FLAG)?;
let (input, maybe_cjs_export_analysis) = deserialize_data_if_has_flag(
input,
has_data_flags,
HAS_CJS_EXPORT_ANALYSIS_FLAG,
)?;
Ok((
input,
Self {
media_type,
data: Cow::Borrowed(data),
maybe_transpiled,
maybe_source_map,
maybe_cjs_export_analysis,
},
))
}
}
fn serialize_media_type(media_type: MediaType) -> u8 {
match media_type {
MediaType::JavaScript => 0,
MediaType::Jsx => 1,
MediaType::Mjs => 2,
MediaType::Cjs => 3,
MediaType::TypeScript => 4,
MediaType::Mts => 5,
MediaType::Cts => 6,
MediaType::Dts => 7,
MediaType::Dmts => 8,
MediaType::Dcts => 9,
MediaType::Tsx => 10,
MediaType::Json => 11,
MediaType::Wasm => 12,
MediaType::Css => 13,
MediaType::SourceMap => 14,
MediaType::Unknown => 15,
}
}
impl<'a> DenoRtDeserializable<'a> for MediaType {
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
let (input, value) = read_u8(input)?;
let value = match value {
0 => MediaType::JavaScript,
1 => MediaType::Jsx,
2 => MediaType::Mjs,
3 => MediaType::Cjs,
4 => MediaType::TypeScript,
5 => MediaType::Mts,
6 => MediaType::Cts,
7 => MediaType::Dts,
8 => MediaType::Dmts,
9 => MediaType::Dcts,
10 => MediaType::Tsx,
11 => MediaType::Json,
12 => MediaType::Wasm,
13 => MediaType::Css,
14 => MediaType::SourceMap,
15 => MediaType::Unknown,
value => {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("Unknown media type value: {value}"),
))
}
};
Ok((input, value))
}
}
/// Data stored keyed by specifier.
pub struct SpecifierDataStore<TData> {
data: IndexMap<SpecifierId, TData>,
}
impl<TData> Default for SpecifierDataStore<TData> {
fn default() -> Self {
Self {
data: IndexMap::new(),
}
}
}
impl<TData> SpecifierDataStore<TData> {
pub fn with_capacity(capacity: usize) -> Self {
Self {
data: IndexMap::with_capacity(capacity),
}
}
pub fn iter(&self) -> impl Iterator<Item = (SpecifierId, &TData)> {
self.data.iter().map(|(k, v)| (*k, v))
}
#[allow(clippy::len_without_is_empty)]
pub fn len(&self) -> usize {
self.data.len()
}
pub fn contains(&self, specifier: SpecifierId) -> bool {
self.data.contains_key(&specifier)
}
pub fn add(&mut self, specifier: SpecifierId, value: TData) {
self.data.insert(specifier, value);
}
pub fn get(&self, specifier: SpecifierId) -> Option<&TData> {
self.data.get(&specifier)
}
}
impl<'a, TData> SpecifierDataStore<TData>
where
TData: DenoRtSerializable<'a> + 'a,
{
pub fn serialize(
&'a self,
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
) {
builder.append_le(self.len() as u32);
for (specifier, value) in self.iter() {
builder.append(specifier);
value.serialize(builder);
}
}
}
impl<'a, TData> DenoRtDeserializable<'a> for SpecifierDataStore<TData>
where
TData: DenoRtDeserializable<'a>,
{
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
let (input, len) = read_u32_as_usize(input)?;
let mut data = IndexMap::with_capacity(len);
let mut input = input;
for _ in 0..len {
let (new_input, specifier) = SpecifierId::deserialize(input)?;
let (new_input, value) = TData::deserialize(new_input)?;
data.insert(specifier, value);
input = new_input;
}
Ok((input, Self { data }))
}
}
fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
let (input, len) = read_u32_as_usize(input)?;
let (input, data) = read_bytes(input, len)?;
Ok((input, data))
}
fn read_u32_as_usize(input: &[u8]) -> std::io::Result<(&[u8], usize)> {
read_u32(input).map(|(input, len)| (input, len as usize))
}
fn read_u32(input: &[u8]) -> std::io::Result<(&[u8], u32)> {
let (input, len_bytes) = read_bytes(input, 4)?;
let len = u32::from_le_bytes(len_bytes.try_into().unwrap());
Ok((input, len))
}
fn read_u8(input: &[u8]) -> std::io::Result<(&[u8], u8)> {
check_has_len(input, 1)?;
Ok((&input[1..], input[0]))
}
fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> {
check_has_len(input, len)?;
let (len_bytes, input) = input.split_at(len);
Ok((input, len_bytes))
}
#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> {
if input.len() < len {
Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Unexpected end of data",
))
} else {
Ok(())
}
}
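
Not part of the diff: a std-only round trip of the little-endian, u32-length-prefixed framing used above. The writer is a hand-rolled stand-in for the builder side; the reader mirrors `read_u32`/`read_bytes_with_u32_len`.

// Writes a u32 little-endian length prefix followed by the raw bytes.
fn write_with_u32_len(out: &mut Vec<u8>, data: &[u8]) {
  out.extend_from_slice(&(data.len() as u32).to_le_bytes());
  out.extend_from_slice(data);
}

// Reads one length-prefixed field, returning (remaining input, data).
fn read_with_u32_len(input: &[u8]) -> Option<(&[u8], &[u8])> {
  if input.len() < 4 {
    return None;
  }
  let (len_bytes, rest) = input.split_at(4);
  let len = u32::from_le_bytes(len_bytes.try_into().ok()?) as usize;
  if rest.len() < len {
    return None;
  }
  let (data, rest) = rest.split_at(len);
  Some((rest, data))
}

fn main() {
  let mut buf = Vec::new();
  write_with_u32_len(&mut buf, b"export {};");
  let (rest, data) = read_with_u32_len(&buf).unwrap();
  assert_eq!(data, b"export {};".as_slice());
  assert!(rest.is_empty());
}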

View file

@ -0,0 +1,4 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod binary;
pub mod virtual_fs;

View file

@ -0,0 +1,999 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use deno_path_util::normalize_path;
use deno_path_util::strip_unc_prefix;
use deno_runtime::colors;
use deno_runtime::deno_core::anyhow::bail;
use deno_runtime::deno_core::anyhow::Context;
use deno_runtime::deno_core::error::AnyError;
use indexmap::IndexSet;
use serde::de;
use serde::de::SeqAccess;
use serde::de::Visitor;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::Serializer;
#[derive(Debug, PartialEq, Eq)]
pub enum WindowsSystemRootablePath {
/// The root of the system above any drive letters.
WindowSystemRoot,
Path(PathBuf),
}
impl WindowsSystemRootablePath {
pub fn root_for_current_os() -> Self {
if cfg!(windows) {
WindowsSystemRootablePath::WindowSystemRoot
} else {
WindowsSystemRootablePath::Path(PathBuf::from("/"))
}
}
pub fn join(&self, name_component: &str) -> PathBuf {
// this method doesn't handle multiple components
debug_assert!(
!name_component.contains('\\'),
"Invalid component: {}",
name_component
);
debug_assert!(
!name_component.contains('/'),
"Invalid component: {}",
name_component
);
match self {
WindowsSystemRootablePath::WindowSystemRoot => {
// windows drive letter
PathBuf::from(&format!("{}\\", name_component))
}
WindowsSystemRootablePath::Path(path) => path.join(name_component),
}
}
}
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum FileSystemCaseSensitivity {
#[serde(rename = "s")]
Sensitive,
#[serde(rename = "i")]
Insensitive,
}
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct VirtualDirectoryEntries(Vec<VfsEntry>);
impl VirtualDirectoryEntries {
pub fn new(mut entries: Vec<VfsEntry>) -> Self {
// needs to be sorted by name
entries.sort_by(|a, b| a.name().cmp(b.name()));
Self(entries)
}
pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, VfsEntry> {
self.0.iter_mut()
}
pub fn iter(&self) -> std::slice::Iter<'_, VfsEntry> {
self.0.iter()
}
pub fn take_inner(&mut self) -> Vec<VfsEntry> {
std::mem::take(&mut self.0)
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn get_by_name(
&self,
name: &str,
case_sensitivity: FileSystemCaseSensitivity,
) -> Option<&VfsEntry> {
self
.binary_search(name, case_sensitivity)
.ok()
.map(|index| &self.0[index])
}
pub fn get_mut_by_name(
&mut self,
name: &str,
case_sensitivity: FileSystemCaseSensitivity,
) -> Option<&mut VfsEntry> {
self
.binary_search(name, case_sensitivity)
.ok()
.map(|index| &mut self.0[index])
}
pub fn get_mut_by_index(&mut self, index: usize) -> Option<&mut VfsEntry> {
self.0.get_mut(index)
}
pub fn get_by_index(&self, index: usize) -> Option<&VfsEntry> {
self.0.get(index)
}
pub fn binary_search(
&self,
name: &str,
case_sensitivity: FileSystemCaseSensitivity,
) -> Result<usize, usize> {
match case_sensitivity {
FileSystemCaseSensitivity::Sensitive => {
self.0.binary_search_by(|e| e.name().cmp(name))
}
FileSystemCaseSensitivity::Insensitive => self.0.binary_search_by(|e| {
e.name()
.chars()
.zip(name.chars())
.map(|(a, b)| a.to_ascii_lowercase().cmp(&b.to_ascii_lowercase()))
.find(|&ord| ord != Ordering::Equal)
.unwrap_or_else(|| e.name().len().cmp(&name.len()))
}),
}
}
pub fn insert(
&mut self,
entry: VfsEntry,
case_sensitivity: FileSystemCaseSensitivity,
) -> usize {
match self.binary_search(entry.name(), case_sensitivity) {
Ok(index) => {
self.0[index] = entry;
index
}
Err(insert_index) => {
self.0.insert(insert_index, entry);
insert_index
}
}
}
pub fn insert_or_modify(
&mut self,
name: &str,
case_sensitivity: FileSystemCaseSensitivity,
on_insert: impl FnOnce() -> VfsEntry,
on_modify: impl FnOnce(&mut VfsEntry),
) -> usize {
match self.binary_search(name, case_sensitivity) {
Ok(index) => {
on_modify(&mut self.0[index]);
index
}
Err(insert_index) => {
self.0.insert(insert_index, on_insert());
insert_index
}
}
}
pub fn remove(&mut self, index: usize) -> VfsEntry {
self.0.remove(index)
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualDirectory {
#[serde(rename = "n")]
pub name: String,
// should be sorted by name
#[serde(rename = "e")]
pub entries: VirtualDirectoryEntries,
}
#[derive(Debug, Clone, Copy)]
pub struct OffsetWithLength {
pub offset: u64,
pub len: u64,
}
// serialize as an array in order to save space
impl Serialize for OffsetWithLength {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let array = [self.offset, self.len];
array.serialize(serializer)
}
}
impl<'de> Deserialize<'de> for OffsetWithLength {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct OffsetWithLengthVisitor;
impl<'de> Visitor<'de> for OffsetWithLengthVisitor {
type Value = OffsetWithLength;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("an array with two elements: [offset, len]")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
let offset = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(0, &self))?;
let len = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(1, &self))?;
Ok(OffsetWithLength { offset, len })
}
}
deserializer.deserialize_seq(OffsetWithLengthVisitor)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VirtualFile {
#[serde(rename = "n")]
pub name: String,
#[serde(rename = "o")]
pub offset: OffsetWithLength,
#[serde(rename = "m", skip_serializing_if = "Option::is_none")]
pub transpiled_offset: Option<OffsetWithLength>,
#[serde(rename = "c", skip_serializing_if = "Option::is_none")]
pub cjs_export_analysis_offset: Option<OffsetWithLength>,
#[serde(rename = "s", skip_serializing_if = "Option::is_none")]
pub source_map_offset: Option<OffsetWithLength>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualSymlinkParts(Vec<String>);
impl VirtualSymlinkParts {
pub fn from_path(path: &Path) -> Self {
Self(
path
.components()
.filter(|c| !matches!(c, std::path::Component::RootDir))
.map(|c| c.as_os_str().to_string_lossy().to_string())
.collect(),
)
}
pub fn take_parts(&mut self) -> Vec<String> {
std::mem::take(&mut self.0)
}
pub fn parts(&self) -> &[String] {
&self.0
}
pub fn set_parts(&mut self, parts: Vec<String>) {
self.0 = parts;
}
pub fn display(&self) -> String {
self.0.join("/")
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualSymlink {
#[serde(rename = "n")]
pub name: String,
#[serde(rename = "p")]
pub dest_parts: VirtualSymlinkParts,
}
impl VirtualSymlink {
pub fn resolve_dest_from_root(&self, root: &Path) -> PathBuf {
let mut dest = root.to_path_buf();
for part in &self.dest_parts.0 {
dest.push(part);
}
dest
}
}
#[derive(Debug, Copy, Clone)]
pub enum VfsEntryRef<'a> {
Dir(&'a VirtualDirectory),
File(&'a VirtualFile),
Symlink(&'a VirtualSymlink),
}
impl VfsEntryRef<'_> {
pub fn name(&self) -> &str {
match self {
Self::Dir(dir) => &dir.name,
Self::File(file) => &file.name,
Self::Symlink(symlink) => &symlink.name,
}
}
}
// todo(dsherret): we should store this more efficiently in the binary
#[derive(Debug, Serialize, Deserialize)]
pub enum VfsEntry {
Dir(VirtualDirectory),
File(VirtualFile),
Symlink(VirtualSymlink),
}
impl VfsEntry {
pub fn name(&self) -> &str {
match self {
Self::Dir(dir) => &dir.name,
Self::File(file) => &file.name,
Self::Symlink(symlink) => &symlink.name,
}
}
pub fn as_ref(&self) -> VfsEntryRef {
match self {
VfsEntry::Dir(dir) => VfsEntryRef::Dir(dir),
VfsEntry::File(file) => VfsEntryRef::File(file),
VfsEntry::Symlink(symlink) => VfsEntryRef::Symlink(symlink),
}
}
}
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
".deno_compile_node_modules";
#[derive(Debug)]
pub struct BuiltVfs {
pub root_path: WindowsSystemRootablePath,
pub case_sensitivity: FileSystemCaseSensitivity,
pub entries: VirtualDirectoryEntries,
pub files: Vec<Vec<u8>>,
}
#[derive(Debug, Default)]
struct FilesData {
files: Vec<Vec<u8>>,
current_offset: u64,
file_offsets: HashMap<(String, usize), OffsetWithLength>,
}
impl FilesData {
pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
if offset.len == 0 {
return Some(&[]);
}
// the debug assertions in this method should never happen
// because it would indicate providing an offset not in the vfs
let mut count: u64 = 0;
for file in &self.files {
// clippy wanted a match
match count.cmp(&offset.offset) {
Ordering::Equal => {
debug_assert_eq!(offset.len, file.len() as u64);
if offset.len == file.len() as u64 {
return Some(file);
} else {
return None;
}
}
Ordering::Less => {
count += file.len() as u64;
}
Ordering::Greater => {
debug_assert!(false);
return None;
}
}
}
debug_assert!(false);
None
}
pub fn add_data(&mut self, data: Vec<u8>) -> OffsetWithLength {
if data.is_empty() {
return OffsetWithLength { offset: 0, len: 0 };
}
let checksum = crate::util::checksum::gen(&[&data]);
match self.file_offsets.entry((checksum, data.len())) {
Entry::Occupied(occupied_entry) => {
let offset_and_len = *occupied_entry.get();
debug_assert_eq!(data.len() as u64, offset_and_len.len);
offset_and_len
}
Entry::Vacant(vacant_entry) => {
let offset_and_len = OffsetWithLength {
offset: self.current_offset,
len: data.len() as u64,
};
vacant_entry.insert(offset_and_len);
self.current_offset += offset_and_len.len;
self.files.push(data);
offset_and_len
}
}
}
}
pub struct AddFileDataOptions {
pub data: Vec<u8>,
pub maybe_transpiled: Option<Vec<u8>>,
pub maybe_source_map: Option<Vec<u8>>,
pub maybe_cjs_export_analysis: Option<Vec<u8>>,
}
#[derive(Debug)]
pub struct VfsBuilder {
executable_root: VirtualDirectory,
files: FilesData,
/// The minimum root directory that should be included in the VFS.
min_root_dir: Option<WindowsSystemRootablePath>,
case_sensitivity: FileSystemCaseSensitivity,
}
impl Default for VfsBuilder {
fn default() -> Self {
Self::new()
}
}
impl VfsBuilder {
pub fn new() -> Self {
Self {
executable_root: VirtualDirectory {
name: "/".to_string(),
entries: Default::default(),
},
files: Default::default(),
min_root_dir: Default::default(),
// This is not exactly correct because file systems on these OSes
// may be case-sensitive or not based on the directory, but this
// is a good enough approximation and limitation. In the future,
// we may want to store this information per directory instead
// depending on the feedback we get.
case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") {
FileSystemCaseSensitivity::Insensitive
} else {
FileSystemCaseSensitivity::Sensitive
},
}
}
pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity {
self.case_sensitivity
}
pub fn files_len(&self) -> usize {
self.files.files.len()
}
pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
self.files.file_bytes(offset)
}
/// Add a directory that might be the minimum root directory
/// of the VFS.
///
/// For example, say the user has a deno.json and specifies an
/// import map in a parent directory. The import map won't be
/// included in the VFS, but its base will meaning we need to
/// tell the VFS builder to include the base of the import map
/// by calling this method.
pub fn add_possible_min_root_dir(&mut self, path: &Path) {
self.add_dir_raw(path);
match &self.min_root_dir {
Some(WindowsSystemRootablePath::WindowSystemRoot) => {
// already the root dir
}
Some(WindowsSystemRootablePath::Path(current_path)) => {
let mut common_components = Vec::new();
for (a, b) in current_path.components().zip(path.components()) {
if a != b {
break;
}
common_components.push(a);
}
if common_components.is_empty() {
self.min_root_dir =
Some(WindowsSystemRootablePath::root_for_current_os());
} else {
self.min_root_dir = Some(WindowsSystemRootablePath::Path(
common_components.iter().collect(),
));
}
}
None => {
self.min_root_dir =
Some(WindowsSystemRootablePath::Path(path.to_path_buf()));
}
}
}
pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
let target_path = self.resolve_target_path(path)?;
self.add_dir_recursive_not_symlink(&target_path)
}
fn add_dir_recursive_not_symlink(
&mut self,
path: &Path,
) -> Result<(), AnyError> {
self.add_dir_raw(path);
// ok, building fs implementation
#[allow(clippy::disallowed_methods)]
let read_dir = std::fs::read_dir(path)
.with_context(|| format!("Reading {}", path.display()))?;
let mut dir_entries =
read_dir.into_iter().collect::<Result<Vec<_>, _>>()?;
dir_entries.sort_by_cached_key(|entry| entry.file_name()); // determinism
for entry in dir_entries {
let file_type = entry.file_type()?;
let path = entry.path();
if file_type.is_dir() {
self.add_dir_recursive_not_symlink(&path)?;
} else if file_type.is_file() {
self.add_file_at_path_not_symlink(&path)?;
} else if file_type.is_symlink() {
match self.add_symlink(&path) {
Ok(target) => match target {
SymlinkTarget::File(target) => {
self.add_file_at_path_not_symlink(&target)?
}
SymlinkTarget::Dir(target) => {
self.add_dir_recursive_not_symlink(&target)?;
}
},
Err(err) => {
log::warn!(
"{} Failed resolving symlink. Ignoring.\n Path: {}\n Message: {:#}",
colors::yellow("Warning"),
path.display(),
err
);
}
}
}
}
Ok(())
}
fn add_dir_raw(&mut self, path: &Path) -> &mut VirtualDirectory {
log::debug!("Ensuring directory '{}'", path.display());
debug_assert!(path.is_absolute());
let mut current_dir = &mut self.executable_root;
for component in path.components() {
if matches!(component, std::path::Component::RootDir) {
continue;
}
let name = component.as_os_str().to_string_lossy();
let index = current_dir.entries.insert_or_modify(
&name,
self.case_sensitivity,
|| {
VfsEntry::Dir(VirtualDirectory {
name: name.to_string(),
entries: Default::default(),
})
},
|_| {
// ignore
},
);
match current_dir.entries.get_mut_by_index(index) {
Some(VfsEntry::Dir(dir)) => {
current_dir = dir;
}
_ => unreachable!(),
};
}
current_dir
}
pub fn get_system_root_dir_mut(&mut self) -> &mut VirtualDirectory {
&mut self.executable_root
}
pub fn get_dir_mut(&mut self, path: &Path) -> Option<&mut VirtualDirectory> {
debug_assert!(path.is_absolute());
let mut current_dir = &mut self.executable_root;
for component in path.components() {
if matches!(component, std::path::Component::RootDir) {
continue;
}
let name = component.as_os_str().to_string_lossy();
let entry = current_dir
.entries
.get_mut_by_name(&name, self.case_sensitivity)?;
match entry {
VfsEntry::Dir(dir) => {
current_dir = dir;
}
_ => unreachable!("{}", path.display()),
};
}
Some(current_dir)
}
pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
// ok, building fs implementation
#[allow(clippy::disallowed_methods)]
let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?;
self.add_file_with_data(
path,
AddFileDataOptions {
data: file_bytes,
maybe_cjs_export_analysis: None,
maybe_transpiled: None,
maybe_source_map: None,
},
)
}
fn add_file_at_path_not_symlink(
&mut self,
path: &Path,
) -> Result<(), AnyError> {
// ok, building fs implementation
#[allow(clippy::disallowed_methods)]
let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?;
self.add_file_with_data_raw(path, file_bytes)
}
pub fn add_file_with_data(
&mut self,
path: &Path,
options: AddFileDataOptions,
) -> Result<(), AnyError> {
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
let metadata = std::fs::symlink_metadata(path).with_context(|| {
format!("Resolving target path for '{}'", path.display())
})?;
if metadata.is_symlink() {
let target = self.add_symlink(path)?.into_path_buf();
self.add_file_with_data_raw_options(&target, options)
} else {
self.add_file_with_data_raw_options(path, options)
}
}
pub fn add_file_with_data_raw(
&mut self,
path: &Path,
data: Vec<u8>,
) -> Result<(), AnyError> {
self.add_file_with_data_raw_options(
path,
AddFileDataOptions {
data,
maybe_transpiled: None,
maybe_cjs_export_analysis: None,
maybe_source_map: None,
},
)
}
fn add_file_with_data_raw_options(
&mut self,
path: &Path,
options: AddFileDataOptions,
) -> Result<(), AnyError> {
log::debug!("Adding file '{}'", path.display());
let case_sensitivity = self.case_sensitivity;
let offset_and_len = self.files.add_data(options.data);
let transpiled_offset = options
.maybe_transpiled
.map(|data| self.files.add_data(data));
let source_map_offset = options
.maybe_source_map
.map(|data| self.files.add_data(data));
let cjs_export_analysis_offset = options
.maybe_cjs_export_analysis
.map(|data| self.files.add_data(data));
let dir = self.add_dir_raw(path.parent().unwrap());
let name = path.file_name().unwrap().to_string_lossy();
dir.entries.insert_or_modify(
&name,
case_sensitivity,
|| {
VfsEntry::File(VirtualFile {
name: name.to_string(),
offset: offset_and_len,
transpiled_offset,
cjs_export_analysis_offset,
source_map_offset,
})
},
|entry| match entry {
VfsEntry::File(virtual_file) => {
virtual_file.offset = offset_and_len;
// doesn't overwrite to None
if transpiled_offset.is_some() {
virtual_file.transpiled_offset = transpiled_offset;
}
if source_map_offset.is_some() {
virtual_file.source_map_offset = source_map_offset;
}
if cjs_export_analysis_offset.is_some() {
virtual_file.cjs_export_analysis_offset =
cjs_export_analysis_offset;
}
}
VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(),
},
);
Ok(())
}
fn resolve_target_path(&mut self, path: &Path) -> Result<PathBuf, AnyError> {
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
let metadata = std::fs::symlink_metadata(path).with_context(|| {
format!("Resolving target path for '{}'", path.display())
})?;
if metadata.is_symlink() {
Ok(self.add_symlink(path)?.into_path_buf())
} else {
Ok(path.to_path_buf())
}
}
pub fn add_symlink(
&mut self,
path: &Path,
) -> Result<SymlinkTarget, AnyError> {
self.add_symlink_inner(path, &mut IndexSet::new())
}
fn add_symlink_inner(
&mut self,
path: &Path,
visited: &mut IndexSet<PathBuf>,
) -> Result<SymlinkTarget, AnyError> {
log::debug!("Adding symlink '{}'", path.display());
let target = strip_unc_prefix(
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
std::fs::read_link(path)
.with_context(|| format!("Reading symlink '{}'", path.display()))?,
);
let case_sensitivity = self.case_sensitivity;
let target = normalize_path(path.parent().unwrap().join(&target));
let dir = self.add_dir_raw(path.parent().unwrap());
let name = path.file_name().unwrap().to_string_lossy();
dir.entries.insert_or_modify(
&name,
case_sensitivity,
|| {
VfsEntry::Symlink(VirtualSymlink {
name: name.to_string(),
dest_parts: VirtualSymlinkParts::from_path(&target),
})
},
|_| {
// ignore previously inserted
},
);
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
let target_metadata =
std::fs::symlink_metadata(&target).with_context(|| {
format!("Reading symlink target '{}'", target.display())
})?;
if target_metadata.is_symlink() {
if !visited.insert(target.clone()) {
// todo: probably don't error in this scenario
bail!(
"Circular symlink detected: {} -> {}",
visited
.iter()
.map(|p| p.display().to_string())
.collect::<Vec<_>>()
.join(" -> "),
target.display()
);
}
self.add_symlink_inner(&target, visited)
} else if target_metadata.is_dir() {
Ok(SymlinkTarget::Dir(target))
} else {
Ok(SymlinkTarget::File(target))
}
}
/// Adds the CJS export analysis to the provided file.
///
/// Warning: This will panic if the file wasn't properly
/// setup before calling this.
pub fn add_cjs_export_analysis(&mut self, path: &Path, data: Vec<u8>) {
self.add_data_for_file_or_panic(path, data, |file, offset_with_length| {
file.cjs_export_analysis_offset = Some(offset_with_length);
})
}
fn add_data_for_file_or_panic(
&mut self,
path: &Path,
data: Vec<u8>,
update_file: impl FnOnce(&mut VirtualFile, OffsetWithLength),
) {
let offset_with_length = self.files.add_data(data);
let case_sensitivity = self.case_sensitivity;
let dir = self.get_dir_mut(path.parent().unwrap()).unwrap();
let name = path.file_name().unwrap().to_string_lossy();
let file = dir
.entries
.get_mut_by_name(&name, case_sensitivity)
.unwrap();
match file {
VfsEntry::File(virtual_file) => {
update_file(virtual_file, offset_with_length);
}
VfsEntry::Dir(_) | VfsEntry::Symlink(_) => {
unreachable!()
}
}
}
/// Iterates through all the files in the virtual file system.
pub fn iter_files(
&self,
) -> impl Iterator<Item = (PathBuf, &VirtualFile)> + '_ {
FileIterator {
pending_dirs: VecDeque::from([(
WindowsSystemRootablePath::root_for_current_os(),
&self.executable_root,
)]),
current_dir_index: 0,
}
}
pub fn build(self) -> BuiltVfs {
fn strip_prefix_from_symlinks(
dir: &mut VirtualDirectory,
parts: &[String],
) {
for entry in dir.entries.iter_mut() {
match entry {
VfsEntry::Dir(dir) => {
strip_prefix_from_symlinks(dir, parts);
}
VfsEntry::File(_) => {}
VfsEntry::Symlink(symlink) => {
let parts = symlink
.dest_parts
.take_parts()
.into_iter()
.skip(parts.len())
.collect();
symlink.dest_parts.set_parts(parts);
}
}
}
}
let mut current_dir = self.executable_root;
let mut current_path = WindowsSystemRootablePath::root_for_current_os();
loop {
if current_dir.entries.len() != 1 {
break;
}
if self.min_root_dir.as_ref() == Some(&current_path) {
break;
}
match current_dir.entries.iter().next().unwrap() {
VfsEntry::Dir(dir) => {
if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME {
// special directory we want to maintain
break;
}
match current_dir.entries.remove(0) {
VfsEntry::Dir(dir) => {
current_path =
WindowsSystemRootablePath::Path(current_path.join(&dir.name));
current_dir = dir;
}
_ => unreachable!(),
};
}
VfsEntry::File(_) | VfsEntry::Symlink(_) => break,
}
}
if let WindowsSystemRootablePath::Path(path) = &current_path {
strip_prefix_from_symlinks(
&mut current_dir,
VirtualSymlinkParts::from_path(path).parts(),
);
}
BuiltVfs {
root_path: current_path,
case_sensitivity: self.case_sensitivity,
entries: current_dir.entries,
files: self.files.files,
}
}
}
struct FileIterator<'a> {
pending_dirs: VecDeque<(WindowsSystemRootablePath, &'a VirtualDirectory)>,
current_dir_index: usize,
}
impl<'a> Iterator for FileIterator<'a> {
type Item = (PathBuf, &'a VirtualFile);
fn next(&mut self) -> Option<Self::Item> {
while !self.pending_dirs.is_empty() {
let (dir_path, current_dir) = self.pending_dirs.front()?;
if let Some(entry) =
current_dir.entries.get_by_index(self.current_dir_index)
{
self.current_dir_index += 1;
match entry {
VfsEntry::Dir(virtual_directory) => {
self.pending_dirs.push_back((
WindowsSystemRootablePath::Path(
dir_path.join(&virtual_directory.name),
),
virtual_directory,
));
}
VfsEntry::File(virtual_file) => {
return Some((dir_path.join(&virtual_file.name), virtual_file));
}
VfsEntry::Symlink(_) => {
// ignore
}
}
} else {
self.pending_dirs.pop_front();
self.current_dir_index = 0;
}
}
None
}
}
#[derive(Debug)]
pub enum SymlinkTarget {
File(PathBuf),
Dir(PathBuf),
}
impl SymlinkTarget {
pub fn into_path_buf(self) -> PathBuf {
match self {
Self::File(path) => path,
Self::Dir(path) => path,
}
}
}
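
Not part of the diff: a std-only sketch of the case-insensitive ordering used by `VirtualDirectoryEntries::binary_search` above. It compares ASCII-lowercased chars pairwise and falls back to comparing lengths.

use std::cmp::Ordering;

// Same comparison strategy as the Insensitive branch above, standalone.
fn insensitive_cmp(a: &str, b: &str) -> Ordering {
  a.chars()
    .zip(b.chars())
    .map(|(x, y)| x.to_ascii_lowercase().cmp(&y.to_ascii_lowercase()))
    .find(|&ord| ord != Ordering::Equal)
    .unwrap_or_else(|| a.len().cmp(&b.len()))
}

fn main() {
  // Entries must already be sorted under the same ordering for the search.
  let names = ["Cargo.toml", "README.md", "src"];
  let idx = names.binary_search_by(|n| insensitive_cmp(n, "readme.MD"));
  assert_eq!(idx, Ok(1));
}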

cli/lib/sys.rs Normal file
View file

@ -0,0 +1,37 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_node::ExtNodeSys;
use sys_traits::FsCanonicalize;
use sys_traits::FsCreateDirAll;
use sys_traits::FsMetadata;
use sys_traits::FsOpen;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use sys_traits::FsRemoveFile;
use sys_traits::FsRename;
use sys_traits::SystemRandom;
use sys_traits::ThreadSleep;
pub trait DenoLibSys:
FsCanonicalize
+ FsCreateDirAll
+ FsReadDir
+ FsMetadata
+ FsOpen
+ FsRemoveFile
+ FsRename
+ FsRead
+ ThreadSleep
+ SystemRandom
+ ExtNodeSys
+ Clone
+ Send
+ Sync
+ std::fmt::Debug
+ 'static
{
}
// ok, implementation
#[allow(clippy::disallowed_types)]
impl DenoLibSys for sys_traits::impls::RealSys {}
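
Not part of the diff: a standalone illustration of the pattern `DenoLibSys` uses above, a marker trait that bundles a long list of supertrait bounds so downstream code can write a single bound. The names below are hypothetical.

// A marker trait bundling the bounds (names are hypothetical).
trait SmallSys: Clone + Send + Sync + std::fmt::Debug + 'static {}

#[derive(Clone, Debug)]
struct RealSmallSys;
impl SmallSys for RealSmallSys {}

// Downstream code writes one bound instead of repeating the whole list.
fn spawn_with_sys<TSys: SmallSys>(sys: TSys) {
  std::thread::spawn(move || println!("{sys:?}")).join().unwrap();
}

fn main() {
  spawn_with_sys(RealSmallSys);
}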

View file

@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use ring::digest::Context;
 use ring::digest::SHA256;

View file

@ -1,8 +1,9 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
 use std::hash::Hasher;
 /// A very fast insecure hasher that uses the xxHash algorithm.
+#[derive(Debug, Clone)]
 pub struct FastInsecureHasher(twox_hash::XxHash64);
 impl FastInsecureHasher {

View file

@ -1,43 +1,34 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. MIT license.
-use super::draw_thread::DrawThread;
-use deno_telemetry::OtelConfig;
-use deno_telemetry::OtelConsoleConfig;
 use std::io::Write;
+use deno_runtime::deno_telemetry;
+use deno_runtime::deno_telemetry::OtelConfig;
+use deno_runtime::deno_telemetry::OtelConsoleConfig;
-struct CliLogger {
+struct CliLogger<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> {
   otel_console_config: OtelConsoleConfig,
   logger: env_logger::Logger,
+  on_log_start: FnOnLogStart,
+  on_log_end: FnOnLogEnd,
 }
-impl CliLogger {
-  pub fn new(
-    logger: env_logger::Logger,
-    otel_console_config: OtelConsoleConfig,
-  ) -> Self {
-    Self {
-      logger,
-      otel_console_config,
-    }
-  }
+impl<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> CliLogger<FnOnLogStart, FnOnLogEnd> {
   pub fn filter(&self) -> log::LevelFilter {
     self.logger.filter()
   }
 }
-impl log::Log for CliLogger {
+impl<FnOnLogStart: Fn() + Send + Sync, FnOnLogEnd: Fn() + Send + Sync> log::Log
+  for CliLogger<FnOnLogStart, FnOnLogEnd>
+{
   fn enabled(&self, metadata: &log::Metadata) -> bool {
     self.logger.enabled(metadata)
   }
   fn log(&self, record: &log::Record) {
     if self.enabled(record.metadata()) {
-      // it was considered to hold the draw thread's internal lock
-      // across logging, but if outputting to stderr blocks then that
-      // could potentially block other threads that access the draw
-      // thread's state
-      DrawThread::hide();
+      (self.on_log_start)();
       match self.otel_console_config {
         OtelConsoleConfig::Ignore => {
@ -52,7 +43,7 @@ impl log::Log for CliLogger {
       }
     }
-    DrawThread::show();
+    (self.on_log_end)();
   }
 }
@ -61,8 +52,20 @@ impl log::Log for CliLogger {
   }
 }
-pub fn init(maybe_level: Option<log::Level>, otel_config: Option<OtelConfig>) {
-  let log_level = maybe_level.unwrap_or(log::Level::Info);
+pub struct InitLoggingOptions<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> {
+  pub on_log_start: FnOnLogStart,
+  pub on_log_end: FnOnLogEnd,
+  pub maybe_level: Option<log::Level>,
+  pub otel_config: Option<OtelConfig>,
+}
+pub fn init<
+  FOnLogStart: Fn() + Send + Sync + 'static,
+  FnOnLogEnd: Fn() + Send + Sync + 'static,
+>(
+  options: InitLoggingOptions<FOnLogStart, FnOnLogEnd>,
+) {
+  let log_level = options.maybe_level.unwrap_or(log::Level::Info);
   let logger = env_logger::Builder::from_env(
     env_logger::Env::new()
       // Use `DENO_LOG` and `DENO_LOG_STYLE` instead of `RUST_` prefix
@ -115,12 +118,15 @@ pub fn init(maybe_level: Option<log::Level>, otel_config: Option<OtelConfig>) {
     })
     .build();
-  let cli_logger = CliLogger::new(
+  let cli_logger = CliLogger {
+    on_log_start: options.on_log_start,
+    on_log_end: options.on_log_end,
     logger,
-    otel_config
+    otel_console_config: options
+      .otel_config
       .map(|c| c.console)
       .unwrap_or(OtelConsoleConfig::Ignore),
-  );
+  };
   let max_level = cli_logger.filter();
   let r = log::set_boxed_logger(Box::new(cli_logger));
   if r.is_ok() {
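
Not part of the diff: a hypothetical call site for the new hook-based `init`. The real caller (not shown here) wires the hooks to the CLI draw thread that `DrawThread::hide()`/`show()` previously handled; the `deno_lib` crate and module path are assumed from this diff's layout.

use deno_lib::util::logger::{init, InitLoggingOptions};

fn main() {
  init(InitLoggingOptions {
    // Previously init() called DrawThread::hide()/show() directly; now the
    // embedder decides what happens around each log write.
    on_log_start: || { /* e.g. hide an in-progress spinner */ },
    on_log_end: || { /* e.g. restore it */ },
    maybe_level: Some(log::Level::Info),
    otel_config: None,
  });
}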

cli/lib/util/mod.rs Normal file
View file

@ -0,0 +1,8 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod checksum;
pub mod hash;
pub mod logger;
pub mod result;
pub mod text_encoding;
pub mod v8;

cli/lib/util/result.rs Normal file
View file

@ -0,0 +1,43 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::convert::Infallible;
use std::fmt::Debug;
use std::fmt::Display;
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;
use deno_runtime::deno_core::error::AnyError;
use deno_runtime::deno_core::error::CoreError;
pub trait InfallibleResultExt<T> {
fn unwrap_infallible(self) -> T;
}
impl<T> InfallibleResultExt<T> for Result<T, Infallible> {
fn unwrap_infallible(self) -> T {
match self {
Ok(value) => value,
Err(never) => match never {},
}
}
}
pub fn any_and_jserrorbox_downcast_ref<
E: Display + Debug + Send + Sync + 'static,
>(
err: &AnyError,
) -> Option<&E> {
err
.downcast_ref::<E>()
.or_else(|| {
err
.downcast_ref::<JsErrorBox>()
.and_then(|e| e.as_any().downcast_ref::<E>())
})
.or_else(|| {
err.downcast_ref::<CoreError>().and_then(|e| match e {
CoreError::JsBox(e) => e.as_any().downcast_ref::<E>(),
_ => None,
})
})
}
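
Not part of the diff: the same pattern as `InfallibleResultExt` above shown standalone. A `Result` whose error type is `Infallible` can be unwrapped without a panic path.

use std::convert::Infallible;

fn unwrap_infallible<T>(result: Result<T, Infallible>) -> T {
  match result {
    Ok(value) => value,
    // `Infallible` has no variants, so this arm can never be reached.
    Err(never) => match never {},
  }
}

fn main() {
  // Widening integer conversions use Infallible as their TryFrom error type.
  let n = unwrap_infallible(u64::try_from(7u32));
  assert_eq!(n, 7);
}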

View file

@ -0,0 +1,45 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
#[inline(always)]
pub fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String {
match String::from_utf8_lossy(&bytes) {
Cow::Owned(code) => code,
// SAFETY: `String::from_utf8_lossy` guarantees that the result is valid
// UTF-8 if `Cow::Borrowed` is returned.
Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) },
}
}
#[inline(always)]
pub fn from_utf8_lossy_cow(bytes: Cow<[u8]>) -> Cow<str> {
match bytes {
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
Cow::Owned(bytes) => Cow::Owned(from_utf8_lossy_owned(bytes)),
}
}
/// Converts an `Arc<str>` to an `Arc<[u8]>`.
#[allow(dead_code)]
pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
let raw = Arc::into_raw(arc_str);
// SAFETY: This is safe because they have the same memory layout.
unsafe { Arc::from_raw(raw as *const [u8]) }
}
/// Converts an `Arc<u8>` to an `Arc<str>` if able.
#[allow(dead_code)]
pub fn arc_u8_to_arc_str(
arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
// Check that the string is valid UTF-8.
std::str::from_utf8(&arc_u8)?;
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
// standard library.
Ok(unsafe {
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
})
}
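
Not part of the diff: a usage sketch of `from_utf8_lossy_cow`, assuming the helpers above are in scope. Valid UTF-8 stays borrowed; invalid UTF-8 becomes an owned, lossily converted `String`.

use std::borrow::Cow;

fn main() {
  // Valid UTF-8 keeps the borrow.
  let valid: Cow<[u8]> = Cow::Borrowed(b"hello".as_slice());
  assert!(matches!(from_utf8_lossy_cow(valid), Cow::Borrowed("hello")));

  // Invalid UTF-8 (0xFF) falls back to an owned, lossily converted String.
  let invalid: Cow<[u8]> = Cow::Owned(vec![0x68, 0x69, 0xFF]);
  assert_eq!(from_utf8_lossy_cow(invalid), "hi\u{FFFD}");
}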

Some files were not shown because too many files have changed in this diff.