
Merge remote-tracking branch 'upstream/main' into check-workspace-member-compiler-options

Nayeem Rahman 2025-01-14 03:16:38 +00:00
commit 5a1bb6b854
284 changed files with 7261 additions and 6403 deletions


@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 32;
const cacheVersion = 33;
const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-24.04-xl";


@ -184,8 +184,8 @@ jobs:
~/.cargo/registry/index
~/.cargo/registry/cache
~/.cargo/git/db
key: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
key: '33-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '33-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
if: '!(matrix.skip)'
- uses: dsherret/rust-toolchain-file@v1
if: '!(matrix.skip)'
@ -379,7 +379,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '33-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache
@ -689,7 +689,7 @@ jobs:
!./target/*/gn_root
!./target/*/*.zip
!./target/*/*.tar.gz
key: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '33-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-24.04

Cargo.lock generated

@ -1244,7 +1244,7 @@ dependencies = [
[[package]]
name = "deno"
version = "2.1.4"
version = "2.1.5"
dependencies = [
"anstream",
"async-trait",
@ -1275,7 +1275,7 @@ dependencies = [
"deno_npm",
"deno_npm_cache",
"deno_package_json",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_resolver",
"deno_runtime",
"deno_semver",
@ -1421,7 +1421,7 @@ dependencies = [
[[package]]
name = "deno_bench_util"
version = "0.178.0"
version = "0.179.0"
dependencies = [
"bencher",
"deno_core",
@ -1430,10 +1430,11 @@ dependencies = [
[[package]]
name = "deno_broadcast_channel"
version = "0.178.0"
version = "0.179.0"
dependencies = [
"async-trait",
"deno_core",
"deno_error",
"thiserror 2.0.3",
"tokio",
"uuid",
@ -1441,10 +1442,11 @@ dependencies = [
[[package]]
name = "deno_cache"
version = "0.116.0"
version = "0.117.0"
dependencies = [
"async-trait",
"deno_core",
"deno_error",
"rusqlite",
"serde",
"sha2",
@ -1467,7 +1469,7 @@ dependencies = [
"data-url",
"deno_error",
"deno_media_type",
"deno_path_util 0.3.0",
"deno_path_util",
"http 1.1.0",
"indexmap 2.3.0",
"log",
@ -1483,9 +1485,10 @@ dependencies = [
[[package]]
name = "deno_canvas"
version = "0.53.0"
version = "0.54.0"
dependencies = [
"deno_core",
"deno_error",
"deno_webgpu",
"image",
"serde",
@ -1494,12 +1497,14 @@ dependencies = [
[[package]]
name = "deno_config"
version = "0.42.0"
source = "git+https://github.com/denoland/deno_config.git?rev=4cbb63704442a7834dc6bed2e7e310a0d46ade09#4cbb63704442a7834dc6bed2e7e310a0d46ade09"
version = "0.45.0"
source = "git+https://github.com/denoland/deno_config.git?rev=39be71a5936221bf23e438c11cfcffe56ce54690#39be71a5936221bf23e438c11cfcffe56ce54690"
dependencies = [
"anyhow",
"boxed_error",
"capacity_builder 0.5.0",
"deno_error",
"deno_package_json",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_semver",
"glob",
"ignore",
@ -1512,22 +1517,22 @@ dependencies = [
"serde",
"serde_json",
"sys_traits",
"thiserror 1.0.64",
"thiserror 2.0.3",
"url",
]
[[package]]
name = "deno_console"
version = "0.184.0"
version = "0.185.0"
dependencies = [
"deno_core",
]
[[package]]
name = "deno_core"
version = "0.327.0"
version = "0.330.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaf8dff204b9c2415deb47b9f30d4d38b0925d0d88f1f9074e8e76f59e6d7ded"
checksum = "fd38bbbd68ed873165ccb630322704b44140d3a8c8d50f898beac4d1a8a3358c"
dependencies = [
"anyhow",
"az",
@ -1538,6 +1543,7 @@ dependencies = [
"capacity_builder 0.1.3",
"cooked-waker",
"deno_core_icudata",
"deno_error",
"deno_ops",
"deno_unsync",
"futures",
@ -1553,6 +1559,7 @@ dependencies = [
"smallvec",
"sourcemap 8.0.1",
"static_assertions",
"thiserror 2.0.3",
"tokio",
"url",
"v8",
@ -1567,12 +1574,13 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695"
[[package]]
name = "deno_cron"
version = "0.64.0"
version = "0.65.0"
dependencies = [
"anyhow",
"async-trait",
"chrono",
"deno_core",
"deno_error",
"saffron",
"thiserror 2.0.3",
"tokio",
@ -1580,7 +1588,7 @@ dependencies = [
[[package]]
name = "deno_crypto"
version = "0.198.0"
version = "0.199.0"
dependencies = [
"aes",
"aes-gcm",
@ -1591,6 +1599,7 @@ dependencies = [
"ctr",
"curve25519-dalek",
"deno_core",
"deno_error",
"deno_web",
"ed448-goldilocks",
"elliptic-curve",
@ -1617,16 +1626,17 @@ dependencies = [
[[package]]
name = "deno_doc"
version = "0.161.3"
version = "0.164.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353a39c70d248af04600928cefc8066a9e4535fb6e7d7c518411e5efc822819f"
checksum = "ad1edb02603c7e8a4003c84af2482a05e5eda3a14f1af275434fda89223f054d"
dependencies = [
"anyhow",
"cfg-if",
"comrak",
"deno_ast",
"deno_graph",
"deno_path_util 0.2.2",
"deno_path_util",
"deno_terminal 0.2.0",
"handlebars",
"html-escape",
"import_map",
@ -1646,22 +1656,23 @@ dependencies = [
[[package]]
name = "deno_error"
version = "0.5.2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "199c66ffd17ee1a948904d33f3d3f364573951c1f9fb3f859bfe7770bf33862a"
checksum = "c4da6a58de6932a96f84e133c072fd3b525966ee122a71f3efd48bbff2eed5ac"
dependencies = [
"deno_error_macro",
"libc",
"serde",
"serde_json",
"tokio",
"url",
]
[[package]]
name = "deno_error_macro"
version = "0.5.2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cd99df6ae75443907e1f959fc42ec6dcea67a7bd083e76cf23a117102c9a2ce"
checksum = "46351dff93aed2039407c91e2ded2a5591e42d2795ab3d111288625bb710d3d2"
dependencies = [
"proc-macro2",
"quote",
@ -1670,13 +1681,14 @@ dependencies = [
[[package]]
name = "deno_fetch"
version = "0.208.0"
version = "0.209.0"
dependencies = [
"base64 0.21.7",
"bytes",
"data-url",
"deno_core",
"deno_path_util 0.3.0",
"deno_error",
"deno_path_util",
"deno_permissions",
"deno_tls",
"dyn-clone",
@ -1706,9 +1718,10 @@ dependencies = [
[[package]]
name = "deno_ffi"
version = "0.171.0"
version = "0.172.0"
dependencies = [
"deno_core",
"deno_error",
"deno_permissions",
"dlopen2",
"dynasmrt",
@ -1726,14 +1739,15 @@ dependencies = [
[[package]]
name = "deno_fs"
version = "0.94.0"
version = "0.95.0"
dependencies = [
"async-trait",
"base32",
"boxed_error",
"deno_core",
"deno_error",
"deno_io",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_permissions",
"filetime",
"junction",
@ -1749,16 +1763,17 @@ dependencies = [
[[package]]
name = "deno_graph"
version = "0.86.7"
version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace3acf321fac446636ae605b01723f2120b40ab3d32c6836aeb7d603a8e08f9"
checksum = "f56d4eb4b7c81ae920b6d18c45a1866924f93110caee80bbbc362dc28143f2bb"
dependencies = [
"anyhow",
"async-trait",
"capacity_builder 0.5.0",
"data-url",
"deno_ast",
"deno_path_util 0.3.0",
"deno_error",
"deno_media_type",
"deno_path_util",
"deno_semver",
"deno_unsync",
"encoding_rs",
@ -1782,7 +1797,7 @@ dependencies = [
[[package]]
name = "deno_http"
version = "0.182.0"
version = "0.183.0"
dependencies = [
"async-compression",
"async-trait",
@ -1792,6 +1807,7 @@ dependencies = [
"bytes",
"cache_control",
"deno_core",
"deno_error",
"deno_net",
"deno_websocket",
"flate2",
@ -1821,10 +1837,11 @@ dependencies = [
[[package]]
name = "deno_io"
version = "0.94.0"
version = "0.95.0"
dependencies = [
"async-trait",
"deno_core",
"deno_error",
"filetime",
"fs3",
"libc",
@ -1842,7 +1859,7 @@ dependencies = [
[[package]]
name = "deno_kv"
version = "0.92.0"
version = "0.93.0"
dependencies = [
"anyhow",
"async-trait",
@ -1851,8 +1868,9 @@ dependencies = [
"bytes",
"chrono",
"deno_core",
"deno_error",
"deno_fetch",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_permissions",
"deno_tls",
"denokv_proto",
@ -1915,9 +1933,10 @@ dependencies = [
[[package]]
name = "deno_napi"
version = "0.115.0"
version = "0.116.0"
dependencies = [
"deno_core",
"deno_error",
"deno_permissions",
"libc",
"libloading 0.7.4",
@ -1943,9 +1962,10 @@ dependencies = [
[[package]]
name = "deno_net"
version = "0.176.0"
version = "0.177.0"
dependencies = [
"deno_core",
"deno_error",
"deno_permissions",
"deno_tls",
"hickory-proto",
@ -1961,7 +1981,7 @@ dependencies = [
[[package]]
name = "deno_node"
version = "0.122.0"
version = "0.123.0"
dependencies = [
"aead-gcm-stream",
"aes",
@ -1975,13 +1995,14 @@ dependencies = [
"const-oid",
"data-encoding",
"deno_core",
"deno_error",
"deno_fetch",
"deno_fs",
"deno_io",
"deno_media_type",
"deno_net",
"deno_package_json",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_permissions",
"deno_whoami",
"der",
@ -2053,9 +2074,9 @@ dependencies = [
[[package]]
name = "deno_npm"
version = "0.27.0"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f818ad5dc4c206b50b5cfa6f10b4b94b127e15c8342c152768eba40c225ca23"
checksum = "4adceb4c34f10e837d0e3ae76e88dddefb13e83c05c1ef1699fa5519241c9d27"
dependencies = [
"async-trait",
"capacity_builder 0.5.0",
@ -2073,17 +2094,15 @@ dependencies = [
[[package]]
name = "deno_npm_cache"
version = "0.3.0"
version = "0.4.0"
dependencies = [
"anyhow",
"async-trait",
"base64 0.21.7",
"boxed_error",
"deno_cache_dir",
"deno_core",
"deno_error",
"deno_npm",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_semver",
"deno_unsync",
"faster-hex",
@ -2105,10 +2124,11 @@ dependencies = [
[[package]]
name = "deno_ops"
version = "0.203.0"
version = "0.206.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b146ca74cac431843486ade58e2accc16c11315fb2c6934590a52a73c56b7ec3"
checksum = "4c25ffa9d088ea00748dbef870bba110ac22ebf8cf7b2e9eb288409c5d852af3"
dependencies = [
"indexmap 2.3.0",
"proc-macro-rules",
"proc-macro2",
"quote",
@ -2116,7 +2136,7 @@ dependencies = [
"strum",
"strum_macros",
"syn 2.0.87",
"thiserror 1.0.64",
"thiserror 2.0.3",
]
[[package]]
@ -2127,7 +2147,7 @@ checksum = "e1d3c0f699ba2040669204ce24ab73720499fc290af843e4ce0fc8a9b3d67735"
dependencies = [
"boxed_error",
"deno_error",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_semver",
"indexmap 2.3.0",
"serde",
@ -2137,18 +2157,6 @@ dependencies = [
"url",
]
[[package]]
name = "deno_path_util"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b02c7d341e1b2cf089daff0f4fb2b4be8f3b5511b1d96040b3f7ed63a66c737b"
dependencies = [
"deno_error",
"percent-encoding",
"thiserror 2.0.3",
"url",
]
[[package]]
name = "deno_path_util"
version = "0.3.0"
@ -2164,11 +2172,12 @@ dependencies = [
[[package]]
name = "deno_permissions"
version = "0.43.0"
version = "0.44.0"
dependencies = [
"capacity_builder 0.5.0",
"deno_core",
"deno_path_util 0.3.0",
"deno_error",
"deno_path_util",
"deno_terminal 0.2.0",
"fqdn",
"libc",
@ -2183,18 +2192,24 @@ dependencies = [
[[package]]
name = "deno_resolver"
version = "0.15.0"
version = "0.16.0"
dependencies = [
"anyhow",
"async-trait",
"base32",
"boxed_error",
"dashmap",
"deno_cache_dir",
"deno_config",
"deno_error",
"deno_media_type",
"deno_npm",
"deno_package_json",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_semver",
"log",
"node_resolver",
"parking_lot",
"sys_traits",
"test_server",
"thiserror 2.0.3",
@ -2203,7 +2218,7 @@ dependencies = [
[[package]]
name = "deno_runtime"
version = "0.192.0"
version = "0.193.0"
dependencies = [
"color-print",
"deno_ast",
@ -2214,6 +2229,7 @@ dependencies = [
"deno_core",
"deno_cron",
"deno_crypto",
"deno_error",
"deno_fetch",
"deno_ffi",
"deno_fs",
@ -2223,7 +2239,7 @@ dependencies = [
"deno_napi",
"deno_net",
"deno_node",
"deno_path_util 0.3.0",
"deno_path_util",
"deno_permissions",
"deno_telemetry",
"deno_terminal 0.2.0",
@ -2308,10 +2324,11 @@ dependencies = [
[[package]]
name = "deno_telemetry"
version = "0.6.0"
version = "0.7.0"
dependencies = [
"async-trait",
"deno_core",
"deno_error",
"http-body-util",
"hyper 1.4.1",
"hyper-util",
@ -2324,6 +2341,7 @@ dependencies = [
"opentelemetry_sdk",
"pin-project",
"serde",
"thiserror 2.0.3",
"tokio",
]
@ -2349,9 +2367,10 @@ dependencies = [
[[package]]
name = "deno_tls"
version = "0.171.0"
version = "0.172.0"
dependencies = [
"deno_core",
"deno_error",
"deno_native_certs",
"rustls",
"rustls-pemfile",
@ -2399,11 +2418,12 @@ dependencies = [
[[package]]
name = "deno_url"
version = "0.184.0"
version = "0.185.0"
dependencies = [
"deno_bench_util",
"deno_console",
"deno_core",
"deno_error",
"deno_webidl",
"thiserror 2.0.3",
"urlpattern",
@ -2411,7 +2431,7 @@ dependencies = [
[[package]]
name = "deno_web"
version = "0.215.0"
version = "0.216.0"
dependencies = [
"async-trait",
"base64-simd 0.8.0",
@ -2419,6 +2439,7 @@ dependencies = [
"deno_bench_util",
"deno_console",
"deno_core",
"deno_error",
"deno_permissions",
"deno_url",
"deno_webidl",
@ -2433,9 +2454,10 @@ dependencies = [
[[package]]
name = "deno_webgpu"
version = "0.151.0"
version = "0.152.0"
dependencies = [
"deno_core",
"deno_error",
"raw-window-handle",
"serde",
"thiserror 2.0.3",
@ -2446,7 +2468,7 @@ dependencies = [
[[package]]
name = "deno_webidl"
version = "0.184.0"
version = "0.185.0"
dependencies = [
"deno_bench_util",
"deno_core",
@ -2454,10 +2476,11 @@ dependencies = [
[[package]]
name = "deno_websocket"
version = "0.189.0"
version = "0.190.0"
dependencies = [
"bytes",
"deno_core",
"deno_error",
"deno_net",
"deno_permissions",
"deno_tls",
@ -2476,9 +2499,10 @@ dependencies = [
[[package]]
name = "deno_webstorage"
version = "0.179.0"
version = "0.180.0"
dependencies = [
"deno_core",
"deno_error",
"deno_web",
"rusqlite",
"thiserror 2.0.3",
@ -2496,13 +2520,13 @@ dependencies = [
[[package]]
name = "denokv_proto"
version = "0.8.4"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7ba1f99ed11a9c11e868a8521b1f71a7e1aba785d7f42ea9ecbdc01146c89ec"
checksum = "d5b77de4d3b9215e14624d4f4eb16cb38c0810e3f5860ba3b3fc47d0537f9a4d"
dependencies = [
"anyhow",
"async-trait",
"chrono",
"deno_error",
"futures",
"num-bigint",
"prost",
@ -2512,15 +2536,15 @@ dependencies = [
[[package]]
name = "denokv_remote"
version = "0.8.4"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08ed833073189e8f6d03155fe3b05a024e75e29d8a28a4c2e9ec3b5c925e727b"
checksum = "c6497c28eec268ed99f1e8664f0842935f02d1508529c67d94c57ca5d893d743"
dependencies = [
"anyhow",
"async-stream",
"async-trait",
"bytes",
"chrono",
"deno_error",
"denokv_proto",
"futures",
"http 1.1.0",
@ -2529,6 +2553,7 @@ dependencies = [
"rand",
"serde",
"serde_json",
"thiserror 2.0.3",
"tokio",
"tokio-util",
"url",
@ -2537,14 +2562,14 @@ dependencies = [
[[package]]
name = "denokv_sqlite"
version = "0.8.4"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b790f01d1302d53a0c3cbd27de88a06b3abd64ec8ab8673924e490541c7c713"
checksum = "dc0f21a450a35eb85760761401fddf9bfff9840127be07a6ca5c31863127913d"
dependencies = [
"anyhow",
"async-stream",
"async-trait",
"chrono",
"deno_error",
"denokv_proto",
"futures",
"hex",
@ -2553,7 +2578,7 @@ dependencies = [
"rand",
"rusqlite",
"serde_json",
"thiserror 1.0.64",
"thiserror 2.0.3",
"tokio",
"tokio-stream",
"uuid",
@ -4316,16 +4341,18 @@ dependencies = [
[[package]]
name = "import_map"
version = "0.20.1"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "351a787decc56f38d65d16d32687265045d6d6a4531b4a0e1b649def3590354e"
checksum = "1215d4d92511fbbdaea50e750e91f2429598ef817f02b579158e92803b52c00a"
dependencies = [
"boxed_error",
"deno_error",
"indexmap 2.3.0",
"log",
"percent-encoding",
"serde",
"serde_json",
"thiserror 1.0.64",
"thiserror 2.0.3",
"url",
]
@ -5010,7 +5037,7 @@ dependencies = [
[[package]]
name = "napi_sym"
version = "0.114.0"
version = "0.115.0"
dependencies = [
"quote",
"serde",
@ -5065,14 +5092,15 @@ dependencies = [
[[package]]
name = "node_resolver"
version = "0.22.0"
version = "0.23.0"
dependencies = [
"anyhow",
"async-trait",
"boxed_error",
"deno_error",
"deno_media_type",
"deno_package_json",
"deno_path_util 0.3.0",
"deno_path_util",
"futures",
"lazy-regex",
"once_cell",
@ -6797,14 +6825,15 @@ dependencies = [
[[package]]
name = "serde_v8"
version = "0.236.0"
version = "0.239.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e23b3abce64010612f88f4ff689a959736f99eb3dc0dbf1c7903434b8bd8cda5"
checksum = "3caa6d882827148e5d9052d9d8d6d1c9d6ad426ed00cab46cafb8c07a0e7126a"
dependencies = [
"deno_error",
"num-bigint",
"serde",
"smallvec",
"thiserror 1.0.64",
"thiserror 2.0.3",
"v8",
]
@ -7678,9 +7707,9 @@ dependencies = [
[[package]]
name = "sys_traits"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1c12873696bde6de3aea3cd27de8e52897177c5b368a6a30987fd4926e30f85"
checksum = "5b46ac05dfbe9fd3a9703eff20e17f5b31e7b6a54daf27a421dcd56c7a27ecdd"
dependencies = [
"filetime",
"getrandom",
@ -8418,9 +8447,9 @@ dependencies = [
[[package]]
name = "v8"
version = "130.0.2"
version = "130.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ee0be58935708fa4d7efb970c6cf9f2d9511d24ee24246481a65b6ee167348d"
checksum = "a511192602f7b435b0a241c1947aa743eb7717f20a9195f4b5e8ed1952e01db1"
dependencies = [
"bindgen",
"bitflags 2.6.0",
@ -8612,11 +8641,12 @@ dependencies = [
[[package]]
name = "wasm_dep_analyzer"
version = "0.1.0"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f270206a91783fd90625c8bb0d8fbd459d0b1d1bf209b656f713f01ae7c04b8"
checksum = "2eeee3bdea6257cc36d756fa745a70f9d393571e47d69e0ed97581676a5369ca"
dependencies = [
"thiserror 1.0.64",
"deno_error",
"thiserror 2.0.3",
]
[[package]]


@ -48,56 +48,56 @@ repository = "https://github.com/denoland/deno"
[workspace.dependencies]
deno_ast = { version = "=0.44.0", features = ["transpiling"] }
deno_core = { version = "0.327.0" }
deno_core = { version = "0.330.0" }
deno_bench_util = { version = "0.178.0", path = "./bench_util" }
deno_bench_util = { version = "0.179.0", path = "./bench_util" }
# TODO(nayeemrmn): Use proper version when https://github.com/denoland/deno_config/pull/143 lands!
deno_config = { git = "https://github.com/denoland/deno_config.git", rev = "4cbb63704442a7834dc6bed2e7e310a0d46ade09", features = ["workspace", "sync"] }
deno_config = { git = "https://github.com/denoland/deno_config.git", rev = "39be71a5936221bf23e438c11cfcffe56ce54690", features = ["workspace", "sync"] }
deno_lockfile = "=0.24.0"
deno_media_type = { version = "0.2.3", features = ["module_specifier"] }
deno_npm = "=0.27.0"
deno_npm = "=0.27.2"
deno_path_util = "=0.3.0"
deno_permissions = { version = "0.43.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.192.0", path = "./runtime" }
deno_permissions = { version = "0.44.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.193.0", path = "./runtime" }
deno_semver = "=0.7.1"
deno_terminal = "0.2.0"
napi_sym = { version = "0.114.0", path = "./ext/napi/sym" }
napi_sym = { version = "0.115.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.4"
denokv_remote = "0.8.4"
denokv_proto = "0.9.0"
denokv_remote = "0.9.0"
# denokv_sqlite brings in bundled sqlite if we don't disable the default features
denokv_sqlite = { default-features = false, version = "0.8.4" }
denokv_sqlite = { default-features = false, version = "0.9.0" }
# exts
deno_broadcast_channel = { version = "0.178.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.116.0", path = "./ext/cache" }
deno_canvas = { version = "0.53.0", path = "./ext/canvas" }
deno_console = { version = "0.184.0", path = "./ext/console" }
deno_cron = { version = "0.64.0", path = "./ext/cron" }
deno_crypto = { version = "0.198.0", path = "./ext/crypto" }
deno_fetch = { version = "0.208.0", path = "./ext/fetch" }
deno_ffi = { version = "0.171.0", path = "./ext/ffi" }
deno_fs = { version = "0.94.0", path = "./ext/fs" }
deno_http = { version = "0.182.0", path = "./ext/http" }
deno_io = { version = "0.94.0", path = "./ext/io" }
deno_kv = { version = "0.92.0", path = "./ext/kv" }
deno_napi = { version = "0.115.0", path = "./ext/napi" }
deno_net = { version = "0.176.0", path = "./ext/net" }
deno_node = { version = "0.122.0", path = "./ext/node" }
deno_telemetry = { version = "0.6.0", path = "./ext/telemetry" }
deno_tls = { version = "0.171.0", path = "./ext/tls" }
deno_url = { version = "0.184.0", path = "./ext/url" }
deno_web = { version = "0.215.0", path = "./ext/web" }
deno_webgpu = { version = "0.151.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.184.0", path = "./ext/webidl" }
deno_websocket = { version = "0.189.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.179.0", path = "./ext/webstorage" }
deno_broadcast_channel = { version = "0.179.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.117.0", path = "./ext/cache" }
deno_canvas = { version = "0.54.0", path = "./ext/canvas" }
deno_console = { version = "0.185.0", path = "./ext/console" }
deno_cron = { version = "0.65.0", path = "./ext/cron" }
deno_crypto = { version = "0.199.0", path = "./ext/crypto" }
deno_fetch = { version = "0.209.0", path = "./ext/fetch" }
deno_ffi = { version = "0.172.0", path = "./ext/ffi" }
deno_fs = { version = "0.95.0", path = "./ext/fs" }
deno_http = { version = "0.183.0", path = "./ext/http" }
deno_io = { version = "0.95.0", path = "./ext/io" }
deno_kv = { version = "0.93.0", path = "./ext/kv" }
deno_napi = { version = "0.116.0", path = "./ext/napi" }
deno_net = { version = "0.177.0", path = "./ext/net" }
deno_node = { version = "0.123.0", path = "./ext/node" }
deno_telemetry = { version = "0.7.0", path = "./ext/telemetry" }
deno_tls = { version = "0.172.0", path = "./ext/tls" }
deno_url = { version = "0.185.0", path = "./ext/url" }
deno_web = { version = "0.216.0", path = "./ext/web" }
deno_webgpu = { version = "0.152.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.185.0", path = "./ext/webidl" }
deno_websocket = { version = "0.190.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.180.0", path = "./ext/webstorage" }
# resolvers
deno_npm_cache = { version = "0.3.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.15.0", path = "./resolvers/deno" }
node_resolver = { version = "0.22.0", path = "./resolvers/node" }
deno_npm_cache = { version = "0.4.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.16.0", path = "./resolvers/deno" }
node_resolver = { version = "0.23.0", path = "./resolvers/node" }
aes = "=0.8.3"
anyhow = "1.0.57"
@ -120,7 +120,7 @@ dashmap = "5.5.3"
data-encoding = "2.3.3"
data-url = "=0.3.1"
deno_cache_dir = "=0.16.0"
deno_error = "=0.5.2"
deno_error = "=0.5.3"
deno_package_json = { version = "0.4.0", default-features = false }
deno_unsync = "0.4.2"
dlopen2 = "0.6.1"
@ -194,7 +194,7 @@ slab = "0.4"
smallvec = "1.8"
socket2 = { version = "0.5.3", features = ["all"] }
spki = "0.7.2"
sys_traits = "=0.1.6"
sys_traits = "=0.1.7"
tar = "=0.4.40"
tempfile = "3.4.0"
termcolor = "1.1.3"


@ -6,6 +6,85 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install
### 2.1.5 / 2025.01.09
- feat(unstable): implement QUIC (#21942)
- feat(unstable): add JS linting plugin infrastructure (#27416)
- feat(unstable): add OTEL MeterProvider (#27240)
- feat(unstable): no config npm:@opentelemetry/api integration (#27541)
- feat(unstable): replace SpanExporter with TracerProvider (#27473)
- feat(unstable): support selectors in JS lint plugins (#27452)
- fix(check): line-break between diagnostic message chain entries (#27543)
- fix(check): move module not found errors to typescript diagnostics (#27533)
- fix(compile): analyze modules in directory specified in --include (#27296)
- fix(compile): be more deterministic when compiling the same code in different
directories (#27395)
- fix(compile): display embedded file sizes and total (#27360)
- fix(compile): output contents of embedded file system (#27302)
- fix(ext/fetch): better error message when body resource is unavailable
(#27429)
- fix(ext/fetch): retry some http/2 errors (#27417)
- fix(ext/fs): do not throw for bigint ctime/mtime/atime (#27453)
- fix(ext/http): improve error message when underlying resource of request body
unavailable (#27463)
- fix(ext/net): update moka cache to avoid potential panic in `Deno.resolveDns`
on some laptops with Ryzen CPU (#27572)
- fix(ext/node): fix `fs.access`/`fs.promises.access` with `X_OK` mode parameter
on Windows (#27407)
- fix(ext/node): fix `os.cpus()` on Linux (#27592)
- fix(ext/node): RangeError timingSafeEqual with different byteLength (#27470)
- fix(ext/node): add `truncate` method to the `FileHandle` class (#27389)
- fix(ext/node): add support of any length IV for aes-(128|256)-gcm ciphers
(#27476)
- fix(ext/node): convert brotli chunks with proper byte offset (#27455)
- fix(ext/node): do not exit worker thread when there is pending async op
(#27378)
- fix(ext/node): have `process` global available in Node context (#27562)
- fix(ext/node): make getCiphers return supported ciphers (#27466)
- fix(ext/node): sort list of built-in modules alphabetically (#27410)
- fix(ext/node): support createConnection option in node:http.request() (#25470)
- fix(ext/node): support private key export in JWK format (#27325)
- fix(ext/web): add `[[ErrorData]]` slot to `DOMException` (#27342)
- fix(ext/websocket): Fix close code without reason (#27578)
- fix(jsr): Wasm imports fail to load (#27594)
- fix(kv): improve backoff error message and inline documentation (#27537)
- fix(lint): fix single char selectors being ignored (#27576)
- fix(lockfile): include dependencies listed in external import map in lockfile
(#27337)
- fix(lsp): css preprocessor formatting (#27526)
- fix(lsp): don't skip dirs with enabled subdirs (#27580)
- fix(lsp): include "node:" prefix for node builtin auto-imports (#27404)
- fix(lsp): respect "typescript.suggestionActions.enabled" setting (#27373)
- fix(lsp): rewrite imports for 'Move to a new file' action (#27427)
- fix(lsp): sql and component file formatting (#27350)
- fix(lsp): use verbatim specifier for URL auto-imports (#27605)
- fix(no-slow-types): handle rest param with internal assignments (#27581)
- fix(node/fs): add a chmod method to the FileHandle class (#27522)
- fix(node): add missing `inspector/promises` (#27491)
- fix(node): handle cjs exports with escaped chars (#27438)
- fix(npm): deterministically output tags to initialized file (#27514)
- fix(npm): search node_modules folder for package matching npm specifier
(#27345)
- fix(outdated): ensure "Latest" version is greater than "Update" version
(#27390)
- fix(outdated): support updating dependencies in external import maps (#27339)
- fix(permissions): implicit `--allow-import` when using `--cached-only`
(#27530)
- fix(publish): infer literal types in const contexts (#27425)
- fix(task): properly handle task name wildcards with --recursive (#27396)
- fix(task): support tasks without commands (#27191)
- fix(unstable): don't error on non-existing attrs or type attr (#27456)
- fix: FastString v8_string() should error when cannot allocated (#27375)
- fix: deno_resolver crate without 'sync' feature (#27403)
- fix: incorrect memory info free/available bytes on mac (#27460)
- fix: upgrade deno_doc to 0.161.3 (#27377)
- perf(fs/windows): stat - only open file once (#27487)
- perf(node/fs/copy): reduce metadata lookups copying directory (#27495)
- perf: don't store duplicate info for ops in the snapshot (#27430)
- perf: remove now needless canonicalization getting closest package.json
(#27437)
- perf: upgrade to deno_semver 0.7 (#27426)
### 2.1.4 / 2024.12.11
- feat(unstable): support caching npm dependencies only as they're needed


@ -2,7 +2,7 @@
[package]
name = "deno_bench_util"
version = "0.178.0"
version = "0.179.0"
authors.workspace = true
edition.workspace = true
license.workspace = true


@ -2,7 +2,7 @@
[package]
name = "deno"
version = "2.1.4"
version = "2.1.5"
authors.workspace = true
default-run = "deno"
edition.workspace = true
@ -62,6 +62,7 @@ serde_json.workspace = true
zstd.workspace = true
glibc_version = "0.1.2"
flate2 = { workspace = true, features = ["default"] }
deno_error.workspace = true
[target.'cfg(windows)'.build-dependencies]
winapi.workspace = true
@ -72,9 +73,9 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
deno_cache_dir.workspace = true
deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] }
deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] }
deno_error.workspace = true
deno_graph = { version = "=0.86.7" }
deno_graph = { version = "=0.87.0" }
deno_lint = { version = "=0.68.2", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm.workspace = true
@ -124,7 +125,7 @@ http.workspace = true
http-body.workspace = true
http-body-util.workspace = true
hyper-util.workspace = true
import_map = { version = "=0.20.1", features = ["ext"] }
import_map = { version = "=0.21.0", features = ["ext"] }
indexmap.workspace = true
jsonc-parser = { workspace = true, features = ["cst", "serde"] }
jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] }


@ -10,6 +10,7 @@ use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::serde_json;
use deno_error::JsErrorBox;
use deno_lockfile::Lockfile;
use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue;
@ -59,6 +60,14 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
}
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[error("Failed writing lockfile")]
#[class(inherit)]
struct AtomicWriteFileWithRetriesError {
#[source]
source: std::io::Error,
}
impl CliLockfile {
/// Get the inner deno_lockfile::Lockfile.
pub fn lock(&self) -> Guard<Lockfile> {
@ -78,7 +87,7 @@ impl CliLockfile {
self.lockfile.lock().overwrite
}
pub fn write_if_changed(&self) -> Result<(), AnyError> {
pub fn write_if_changed(&self) -> Result<(), JsErrorBox> {
if self.skip_write {
return Ok(());
}
@ -96,7 +105,9 @@ impl CliLockfile {
&bytes,
cache::CACHE_PERM,
)
.context("Failed writing lockfile.")?;
.map_err(|source| {
JsErrorBox::from_err(AtomicWriteFileWithRetriesError { source })
})?;
lockfile.has_content_changed = false;
Ok(())
}
@ -255,7 +266,7 @@ impl CliLockfile {
})
}
pub fn error_if_changed(&self) -> Result<(), AnyError> {
pub fn error_if_changed(&self) -> Result<(), JsErrorBox> {
if !self.frozen {
return Ok(());
}
@ -267,9 +278,7 @@ impl CliLockfile {
let diff = crate::util::diff::diff(&contents, &new_contents);
// has an extra newline at the end
let diff = diff.trim_end();
Err(deno_core::anyhow::anyhow!(
"The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
))
Err(JsErrorBox::generic(format!("The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}")))
} else {
Ok(())
}
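The hunks above show the error-handling pattern this merge applies across the CLI: `anyhow`-style errors are replaced with concrete error types that derive `deno_error::JsError`, then boxed into a `JsErrorBox` so they keep a JavaScript error class. Below is a minimal sketch of that pattern, assuming only the `deno_error` and `thiserror` crates already used in this workspace; `WriteConfigError`, `write_config`, and `check_frozen` are illustrative names, not part of the diff.

use deno_error::JsErrorBox;

// Derive attributes mirror AtomicWriteFileWithRetriesError above.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[error("Failed writing config file")]
#[class(inherit)]
struct WriteConfigError {
  #[source]
  source: std::io::Error,
}

fn write_config(path: &std::path::Path, bytes: &[u8]) -> Result<(), JsErrorBox> {
  // Wrap the io::Error in the typed error, then box it as a JS-visible error.
  std::fs::write(path, bytes)
    .map_err(|source| JsErrorBox::from_err(WriteConfigError { source }))
}

fn check_frozen(has_changed: bool) -> Result<(), JsErrorBox> {
  if has_changed {
    // For one-off messages, JsErrorBox::generic takes the place of anyhow!(...).
    return Err(JsErrorBox::generic("config file is out of date"));
  }
  Ok(())
}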


@ -26,6 +26,7 @@ use deno_ast::SourceMapOption;
use deno_cache_dir::file_fetcher::CacheSetting;
pub use deno_config::deno_json::BenchConfig;
pub use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::ConfigFileError;
use deno_config::deno_json::FmtConfig;
pub use deno_config::deno_json::FmtOptionsConfig;
use deno_config::deno_json::LintConfig;
@ -553,7 +554,8 @@ pub fn create_default_npmrc() -> Arc<ResolvedNpmRc> {
})
}
#[derive(Error, Debug, Clone)]
#[derive(Error, Debug, Clone, deno_error::JsError)]
#[class(generic)]
pub enum RootCertStoreLoadError {
#[error(
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
@ -817,8 +819,6 @@ impl CliOptions {
} else {
&[]
};
let config_parse_options =
deno_config::deno_json::ConfigParseOptions::default();
let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled
&& !flags.no_npm
&& !has_flag_env_var("DENO_NO_PACKAGE_JSON");
@ -829,7 +829,6 @@ impl CliOptions {
deno_json_cache: None,
pkg_json_cache: Some(&node_resolver::PackageJsonThreadLocalCache),
workspace_cache: None,
config_parse_options,
additional_config_file_names,
discover_pkg_json,
maybe_vendor_override,
@ -1095,11 +1094,11 @@ impl CliOptions {
}
};
Ok(self.workspace().create_resolver(
&CliSys::default(),
CreateResolverOptions {
pkg_json_dep_resolution,
specified_import_map: cli_arg_specified_import_map,
},
|path| Ok(std::fs::read_to_string(path)?),
)?)
}
@ -1241,11 +1240,14 @@ impl CliOptions {
pub fn node_modules_dir(
&self,
) -> Result<Option<NodeModulesDirMode>, AnyError> {
) -> Result<
Option<NodeModulesDirMode>,
deno_config::deno_json::NodeModulesDirParseError,
> {
if let Some(flag) = self.flags.node_modules_dir {
return Ok(Some(flag));
}
self.workspace().node_modules_dir().map_err(Into::into)
self.workspace().node_modules_dir()
}
pub fn vendor_dir_path(&self) -> Option<&PathBuf> {
@ -1255,7 +1257,7 @@ impl CliOptions {
pub fn resolve_ts_config_for_emit(
&self,
config_type: TsConfigType,
) -> Result<TsConfigForEmit, AnyError> {
) -> Result<TsConfigForEmit, ConfigFileError> {
self.start_dir.to_ts_config_for_emit(config_type)
}
@ -1284,7 +1286,7 @@ impl CliOptions {
pub fn to_compiler_option_types(
&self,
) -> Result<Vec<deno_graph::ReferrerImports>, AnyError> {
) -> Result<Vec<deno_graph::ReferrerImports>, serde_json::Error> {
self
.start_dir
.to_compiler_option_types()
@ -2158,12 +2160,7 @@ mod test {
let cwd = &std::env::current_dir().unwrap();
let config_specifier =
ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap();
let config_file = ConfigFile::new(
config_text,
config_specifier,
&deno_config::deno_json::ConfigParseOptions::default(),
)
.unwrap();
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
let actual = resolve_import_map_specifier(
Some("import-map.json"),
Some(&config_file),
@ -2182,12 +2179,7 @@ mod test {
let config_text = r#"{}"#;
let config_specifier =
ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap();
let config_file = ConfigFile::new(
config_text,
config_specifier,
&deno_config::deno_json::ConfigParseOptions::default(),
)
.unwrap();
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
let actual = resolve_import_map_specifier(
None,
Some(&config_file),


@ -13,10 +13,9 @@ mod ts {
use std::path::Path;
use std::path::PathBuf;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use deno_error::JsErrorBox;
use serde::Serialize;
use super::*;
@ -53,7 +52,7 @@ mod ts {
fn op_script_version(
_state: &mut OpState,
#[string] _arg: &str,
) -> Result<Option<String>, AnyError> {
) -> Result<Option<String>, JsErrorBox> {
Ok(Some("1".to_string()))
}
@ -72,7 +71,7 @@ mod ts {
fn op_load(
state: &mut OpState,
#[string] load_specifier: &str,
) -> Result<LoadResponse, AnyError> {
) -> Result<LoadResponse, JsErrorBox> {
let op_crate_libs = state.borrow::<HashMap<&str, PathBuf>>();
let path_dts = state.borrow::<PathBuf>();
let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts");
@ -93,12 +92,15 @@ mod ts {
// if it comes from an op crate, we were supplied with the path to the
// file.
let path = if let Some(op_crate_lib) = op_crate_libs.get(lib) {
PathBuf::from(op_crate_lib).canonicalize()?
PathBuf::from(op_crate_lib)
.canonicalize()
.map_err(JsErrorBox::from_err)?
// otherwise we will generate the path ourself
} else {
path_dts.join(format!("lib.{lib}.d.ts"))
};
let data = std::fs::read_to_string(path)?;
let data =
std::fs::read_to_string(path).map_err(JsErrorBox::from_err)?;
Ok(LoadResponse {
data,
version: "1".to_string(),
@ -106,13 +108,13 @@ mod ts {
script_kind: 3,
})
} else {
Err(custom_error(
Err(JsErrorBox::new(
"InvalidSpecifier",
format!("An invalid specifier was requested: {}", load_specifier),
))
}
} else {
Err(custom_error(
Err(JsErrorBox::new(
"InvalidSpecifier",
format!("An invalid specifier was requested: {}", load_specifier),
))

cli/cache/mod.rs vendored

@ -8,7 +8,6 @@ use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind;
use deno_cache_dir::file_fetcher::FileOrRedirect;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::FutureExt;
use deno_core::ModuleSpecifier;
@ -62,6 +61,7 @@ pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<CliSys>;
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<CliSys>;
pub type LocalLspHttpCache = deno_cache_dir::LocalLspHttpCache<CliSys>;
pub use deno_cache_dir::HttpCache;
use deno_error::JsErrorBox;
pub struct FetchCacherOptions {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
@ -194,9 +194,9 @@ impl Loader for FetchCacher {
LoaderCacheSetting::Use => None,
LoaderCacheSetting::Reload => {
if matches!(file_fetcher.cache_setting(), CacheSetting::Only) {
return Err(deno_core::anyhow::anyhow!(
return Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::generic(
"Could not resolve version constraint using only cached data. Try running again without --cached-only"
));
))));
}
Some(CacheSetting::ReloadAll)
}
@ -262,28 +262,27 @@ impl Loader for FetchCacher {
FetchNoFollowErrorKind::CacheSave { .. } |
FetchNoFollowErrorKind::UnsupportedScheme { .. } |
FetchNoFollowErrorKind::RedirectHeaderParse { .. } |
FetchNoFollowErrorKind::InvalidHeader { .. } => Err(AnyError::from(err)),
FetchNoFollowErrorKind::InvalidHeader { .. } => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err)))),
FetchNoFollowErrorKind::NotCached { .. } => {
if options.cache_setting == LoaderCacheSetting::Only {
Ok(None)
} else {
Err(AnyError::from(err))
Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err))))
}
},
FetchNoFollowErrorKind::ChecksumIntegrity(err) => {
// convert to the equivalent deno_graph error so that it
// enhances it if this is passed to deno_graph
Err(
deno_graph::source::ChecksumIntegrityError {
deno_graph::source::LoadError::ChecksumIntegrity(deno_graph::source::ChecksumIntegrityError {
actual: err.actual,
expected: err.expected,
}
.into(),
}),
)
}
}
},
CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(AnyError::from(permission_check_error)),
CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(permission_check_error)))),
}
})
}


@ -11,10 +11,12 @@ use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileModuleOptions;
use deno_ast::TranspileResult;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::MediaType;
use deno_graph::Module;
use deno_graph::ModuleGraph;
@ -124,7 +126,7 @@ impl Emitter {
let transpiled_source = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source = source.clone();
move || -> Result<_, AnyError> {
move || {
EmitParsedSourceHelper::transpile(
&parsed_source_cache,
&specifier,
@ -155,7 +157,7 @@ impl Emitter {
media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>,
) -> Result<String, AnyError> {
) -> Result<String, EmitParsedSourceHelperError> {
// Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, module_kind, source) {
@ -210,7 +212,7 @@ impl Emitter {
pub async fn load_and_emit_for_hmr(
&self,
specifier: &ModuleSpecifier,
) -> Result<String, AnyError> {
) -> Result<String, CoreError> {
let media_type = MediaType::from_specifier(specifier);
let source_code = tokio::fs::read_to_string(
ModuleSpecifier::to_file_path(specifier).unwrap(),
@ -225,17 +227,21 @@ impl Emitter {
let source_arc: Arc<str> = source_code.into();
let parsed_source = self
.parsed_source_cache
.remove_or_parse_module(specifier, source_arc, media_type)?;
.remove_or_parse_module(specifier, source_arc, media_type)
.map_err(JsErrorBox::from_err)?;
// HMR doesn't work with embedded source maps for some reason, so set
// the option to not use them (though you should test this out because
// this statement is probably wrong)
let mut options = self.transpile_and_emit_options.1.clone();
options.source_map = SourceMapOption::None;
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
specifier,
media_type,
parsed_source.compute_is_script(),
)?;
let is_cjs = self
.cjs_tracker
.is_cjs_with_known_is_script(
specifier,
media_type,
parsed_source.compute_is_script(),
)
.map_err(JsErrorBox::from_err)?;
let transpiled_source = parsed_source
.transpile(
&self.transpile_and_emit_options.0,
@ -243,7 +249,8 @@ impl Emitter {
module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
},
&options,
)?
)
.map_err(JsErrorBox::from_err)?
.into_source();
Ok(transpiled_source.text)
}
@ -282,6 +289,19 @@ enum PreEmitResult {
NotCached { source_hash: u64 },
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum EmitParsedSourceHelperError {
#[class(inherit)]
#[error(transparent)]
ParseDiagnostic(#[from] deno_ast::ParseDiagnostic),
#[class(inherit)]
#[error(transparent)]
Transpile(#[from] deno_ast::TranspileError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
/// Helper to share code between async and sync emit_parsed_source methods.
struct EmitParsedSourceHelper<'a>(&'a Emitter);
@ -311,7 +331,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
source: Arc<str>,
transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions,
) -> Result<EmittedSourceText, AnyError> {
) -> Result<EmittedSourceText, EmitParsedSourceHelperError> {
// nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned
let parsed_source = parsed_source_cache
@ -351,7 +371,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
// todo(dsherret): this is a temporary measure until we have swc erroring for this
fn ensure_no_import_assertion(
parsed_source: &deno_ast::ParsedSource,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
fn has_import_assertion(text: &str) -> bool {
// good enough
text.contains(" assert ") && !text.contains(" with ")
@ -360,7 +380,7 @@ fn ensure_no_import_assertion(
fn create_err(
parsed_source: &deno_ast::ParsedSource,
range: SourceRange,
) -> AnyError {
) -> JsErrorBox {
let text_info = parsed_source.text_info_lazy();
let loc = text_info.line_and_column_display(range.start);
let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string();
@ -373,7 +393,7 @@ fn ensure_no_import_assertion(
loc.line_number,
loc.column_number,
));
deno_core::anyhow::anyhow!("{}", msg)
JsErrorBox::generic(msg)
}
let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else {


@ -1,123 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
//! There are many types of errors in Deno:
//! - AnyError: a generic wrapper that can encapsulate any type of error.
//! - JsError: a container for the error message and stack trace for exceptions
//! thrown in JavaScript code. We use this to pretty-print stack traces.
//! - Diagnostic: these are errors that originate in TypeScript's compiler.
//! They're similar to JsError, in that they have line numbers. But
//! Diagnostics are compile-time type errors, whereas JsErrors are runtime
//! exceptions.
use deno_ast::ParseDiagnostic;
use deno_core::error::AnyError;
use deno_graph::source::ResolveError;
use deno_graph::ModuleError;
use deno_graph::ModuleGraphError;
use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError;
use import_map::ImportMapError;
fn get_import_map_error_class(_: &ImportMapError) -> &'static str {
"URIError"
}
fn get_diagnostic_class(_: &ParseDiagnostic) -> &'static str {
"SyntaxError"
}
pub fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
match err {
ModuleGraphError::ResolutionError(err)
| ModuleGraphError::TypesResolutionError(err) => {
get_resolution_error_class(err)
}
ModuleGraphError::ModuleError(err) => get_module_error_class(err),
}
}
pub fn get_module_error_class(err: &ModuleError) -> &'static str {
use deno_graph::JsrLoadError;
use deno_graph::NpmLoadError;
match err {
ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
ModuleError::WasmParseErr(..) => "SyntaxError",
ModuleError::UnsupportedMediaType { .. }
| ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
"NotFound"
}
ModuleError::LoadingErr(_, _, err) => match err {
ModuleLoadError::Loader(err) => get_error_class_name(err.as_ref()),
ModuleLoadError::HttpsChecksumIntegrity(_)
| ModuleLoadError::TooManyRedirects => "Error",
ModuleLoadError::NodeUnknownBuiltinModule(_) => "NotFound",
ModuleLoadError::Decode(_) => "TypeError",
ModuleLoadError::Npm(err) => match err {
NpmLoadError::NotSupportedEnvironment
| NpmLoadError::PackageReqResolution(_)
| NpmLoadError::RegistryInfo(_) => "Error",
NpmLoadError::PackageReqReferenceParse(_) => "TypeError",
},
ModuleLoadError::Jsr(err) => match err {
JsrLoadError::UnsupportedManifestChecksum
| JsrLoadError::PackageFormat(_) => "TypeError",
JsrLoadError::ContentLoadExternalSpecifier
| JsrLoadError::ContentLoad(_)
| JsrLoadError::ContentChecksumIntegrity(_)
| JsrLoadError::PackageManifestLoad(_, _)
| JsrLoadError::PackageVersionManifestChecksumIntegrity(..)
| JsrLoadError::PackageVersionManifestLoad(_, _)
| JsrLoadError::RedirectInPackage(_) => "Error",
JsrLoadError::PackageNotFound(_)
| JsrLoadError::PackageReqNotFound(_)
| JsrLoadError::PackageVersionNotFound(_)
| JsrLoadError::UnknownExport { .. } => "NotFound",
},
},
}
}
fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
match err {
ResolutionError::ResolverError { error, .. } => {
use ResolveError::*;
match error.as_ref() {
Specifier(_) => "TypeError",
Other(e) => get_error_class_name(e),
}
}
_ => "TypeError",
}
}
fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
"TypeError"
}
pub fn get_error_class_name(e: &AnyError) -> &'static str {
deno_runtime::errors::get_error_class_name(e)
.or_else(|| {
e.downcast_ref::<ImportMapError>()
.map(get_import_map_error_class)
})
.or_else(|| {
e.downcast_ref::<ParseDiagnostic>()
.map(get_diagnostic_class)
})
.or_else(|| {
e.downcast_ref::<ModuleGraphError>()
.map(get_module_graph_error_class)
})
.or_else(|| {
e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class)
})
.or_else(|| {
e.downcast_ref::<std::num::TryFromIntError>()
.map(get_try_from_int_error_class)
})
.unwrap_or("Error")
}


@ -17,9 +17,15 @@ use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::FeatureChecker;
use deno_error::JsErrorBox;
use deno_npm_cache::NpmCacheSetting;
use deno_path_util::url_to_file_path;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
use deno_runtime::deno_fs;
@ -68,32 +74,37 @@ use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClientProvider;
use crate::module_loader::CliModuleLoaderFactory;
use crate::module_loader::ModuleLoadPreparer;
use crate::module_loader::PrepareModuleLoadError;
use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::create_cli_npm_resolver;
use crate::npm::create_in_npm_pkg_checker;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::NpmResolutionInstaller;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmCacheHttpClient;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::CliNpmTarballCache;
use crate::npm::NpmRegistryReadPermissionChecker;
use crate::npm::NpmRegistryReadPermissionCheckerMode;
use crate::npm::NpmResolutionInitializer;
use crate::resolver::CjsTracker;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliNpmGraphResolver;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::FoundPackageJsonDepFlag;
use crate::resolver::NpmModuleLoader;
use crate::resolver::SloppyImportsCachedFs;
use crate::standalone::binary::DenoCompileBinaryWriter;
use crate::sys::CliSys;
use crate::tools::check::MaybeDiagnostics;
use crate::tools::check::CheckError;
use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector;
use crate::tools::lint::LintRuleProvider;
@ -131,7 +142,7 @@ impl CliRootCertStoreProvider {
}
impl RootCertStoreProvider for CliRootCertStoreProvider {
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> {
self
.cell
.get_or_try_init(|| {
@ -141,7 +152,7 @@ impl RootCertStoreProvider for CliRootCertStoreProvider {
self.maybe_ca_data.clone(),
)
})
.map_err(|e| e.into())
.map_err(JsErrorBox::from_err)
}
}
@ -201,6 +212,7 @@ struct CliFactoryServices {
emitter: Deferred<Arc<Emitter>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
file_fetcher: Deferred<Arc<CliFileFetcher>>,
found_pkg_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>,
@ -215,9 +227,18 @@ struct CliFactoryServices {
module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<CliNodeResolver>>,
npm_cache: Deferred<Arc<CliNpmCache>>,
npm_cache_dir: Deferred<Arc<NpmCacheDir>>,
npm_cache_http_client: Deferred<Arc<CliNpmCacheHttpClient>>,
npm_graph_resolver: Deferred<Arc<CliNpmGraphResolver>>,
npm_installer: Deferred<Arc<NpmInstaller>>,
npm_registry_info_provider: Deferred<Arc<CliNpmRegistryInfoProvider>>,
npm_req_resolver: Deferred<Arc<CliNpmReqResolver>>,
npm_resolution: Arc<NpmResolutionCell>,
npm_resolution_initializer: Deferred<Arc<NpmResolutionInitializer>>,
npm_resolution_installer: Deferred<Arc<NpmResolutionInstaller>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
npm_tarball_cache: Deferred<Arc<CliNpmTarballCache>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
permission_desc_parser:
Deferred<Arc<RuntimePermissionDescriptorParser<CliSys>>>,
@ -399,7 +420,7 @@ impl CliFactory {
CreateInNpmPkgCheckerOptions::Byonm
} else {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
ManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(),
maybe_node_modules_path: cli_options
.node_modules_dir_path()
@ -407,7 +428,19 @@ impl CliFactory {
},
)
};
Ok(create_in_npm_pkg_checker(options))
Ok(deno_resolver::npm::create_in_npm_pkg_checker(options))
})
}
pub fn npm_cache(&self) -> Result<&Arc<CliNpmCache>, AnyError> {
self.services.npm_cache.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliNpmCache::new(
self.npm_cache_dir()?.clone(),
self.sys(),
NpmCacheSetting::from_cache_setting(&cli_options.cache_setting()),
cli_options.npmrc().clone(),
)))
})
}
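The getter above follows the factory's lazy-initialization pattern: each service sits in a deferred cell inside `CliFactoryServices`, is constructed on first access via `get_or_try_init`, and is then shared as an `Arc` on every later call. Below is a rough approximation using the `once_cell` crate; the `Deferred` type is internal to the CLI, and `NpmCache`, `Services`, and `Factory` are placeholder names for illustration only.

use std::sync::Arc;
use once_cell::sync::OnceCell;

struct NpmCache; // stand-in for the real service type

#[derive(Default)]
struct Services {
  npm_cache: OnceCell<Arc<NpmCache>>,
}

struct Factory {
  services: Services,
}

impl Factory {
  // Built on the first call, then returned by reference on every later call.
  fn npm_cache(&self) -> Result<&Arc<NpmCache>, std::io::Error> {
    self
      .services
      .npm_cache
      .get_or_try_init(|| Ok(Arc::new(NpmCache)))
  }
}

fn main() -> Result<(), std::io::Error> {
  let factory = Factory { services: Services::default() };
  let first = factory.npm_cache()?;
  let second = factory.npm_cache()?; // same Arc; the closure ran only once
  assert!(Arc::ptr_eq(first, second));
  Ok(())
}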
@ -423,6 +456,123 @@ impl CliFactory {
})
}
pub fn npm_cache_http_client(&self) -> &Arc<CliNpmCacheHttpClient> {
self.services.npm_cache_http_client.get_or_init(|| {
Arc::new(CliNpmCacheHttpClient::new(
self.http_client_provider().clone(),
self.text_only_progress_bar().clone(),
))
})
}
pub fn npm_graph_resolver(
&self,
) -> Result<&Arc<CliNpmGraphResolver>, AnyError> {
self.services.npm_graph_resolver.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliNpmGraphResolver::new(
self.npm_installer_if_managed()?.cloned(),
self.services.found_pkg_json_dep_flag.clone(),
cli_options.unstable_bare_node_builtins(),
cli_options.default_npm_caching_strategy(),
)))
})
}
pub fn npm_installer_if_managed(
&self,
) -> Result<Option<&Arc<NpmInstaller>>, AnyError> {
let options = self.cli_options()?;
if options.use_byonm() || options.no_npm() {
Ok(None)
} else {
Ok(Some(self.npm_installer()?))
}
}
pub fn npm_installer(&self) -> Result<&Arc<NpmInstaller>, AnyError> {
self.services.npm_installer.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(NpmInstaller::new(
self.npm_cache()?.clone(),
Arc::new(NpmInstallDepsProvider::from_workspace(
cli_options.workspace(),
)),
self.npm_resolution().clone(),
self.npm_resolution_initializer()?.clone(),
self.npm_resolution_installer()?.clone(),
self.text_only_progress_bar(),
self.sys(),
self.npm_tarball_cache()?.clone(),
cli_options.maybe_lockfile().cloned(),
cli_options.node_modules_dir_path().cloned(),
cli_options.lifecycle_scripts_config(),
cli_options.npm_system_info(),
)))
})
}
pub fn npm_registry_info_provider(
&self,
) -> Result<&Arc<CliNpmRegistryInfoProvider>, AnyError> {
self
.services
.npm_registry_info_provider
.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliNpmRegistryInfoProvider::new(
self.npm_cache()?.clone(),
self.npm_cache_http_client().clone(),
cli_options.npmrc().clone(),
)))
})
}
pub fn npm_resolution(&self) -> &Arc<NpmResolutionCell> {
&self.services.npm_resolution
}
pub fn npm_resolution_initializer(
&self,
) -> Result<&Arc<NpmResolutionInitializer>, AnyError> {
self
.services
.npm_resolution_initializer
.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(NpmResolutionInitializer::new(
self.npm_registry_info_provider()?.clone(),
self.npm_resolution().clone(),
match cli_options.resolve_npm_resolution_snapshot()? {
Some(snapshot) => {
CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot))
}
None => match cli_options.maybe_lockfile() {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
},
},
)))
})
}
pub fn npm_resolution_installer(
&self,
) -> Result<&Arc<NpmResolutionInstaller>, AnyError> {
self.services.npm_resolution_installer.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(NpmResolutionInstaller::new(
self.npm_registry_info_provider()?.clone(),
self.npm_resolution().clone(),
cli_options.maybe_lockfile().cloned(),
)))
})
}
pub async fn npm_resolver(
&self,
) -> Result<&Arc<dyn CliNpmResolver>, AnyError> {
@ -432,7 +582,7 @@ impl CliFactory {
.get_or_try_init_async(
async {
let cli_options = self.cli_options()?;
create_cli_npm_resolver(if cli_options.use_byonm() {
Ok(create_cli_npm_resolver(if cli_options.use_byonm() {
CliNpmResolverCreateOptions::Byonm(
CliByonmNpmResolverCreateOptions {
sys: self.sys(),
@ -451,52 +601,43 @@ impl CliFactory {
},
)
} else {
self
.npm_resolution_initializer()?
.ensure_initialized()
.await?;
CliNpmResolverCreateOptions::Managed(
CliManagedNpmResolverCreateOptions {
http_client_provider: self.http_client_provider().clone(),
npm_install_deps_provider: Arc::new(
NpmInstallDepsProvider::from_workspace(
cli_options.workspace(),
),
),
sys: self.sys(),
snapshot: match cli_options.resolve_npm_resolution_snapshot()? {
Some(snapshot) => {
CliNpmResolverManagedSnapshotOption::Specified(Some(
snapshot,
))
}
None => match cli_options.maybe_lockfile() {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => {
CliNpmResolverManagedSnapshotOption::Specified(None)
}
},
},
maybe_lockfile: cli_options.maybe_lockfile().cloned(),
npm_resolution: self.npm_resolution().clone(),
npm_cache_dir: self.npm_cache_dir()?.clone(),
cache_setting: cli_options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: cli_options
.node_modules_dir_path()
.cloned(),
npm_system_info: cli_options.npm_system_info(),
npmrc: cli_options.npmrc().clone(),
lifecycle_scripts: cli_options.lifecycle_scripts_config(),
},
)
})
.await
}))
}
.boxed_local(),
)
.await
}
pub fn npm_tarball_cache(
&self,
) -> Result<&Arc<CliNpmTarballCache>, AnyError> {
self.services.npm_tarball_cache.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliNpmTarballCache::new(
self.npm_cache()?.clone(),
self.npm_cache_http_client().clone(),
self.sys(),
cli_options.npmrc().clone(),
)))
})
}
pub fn sloppy_imports_resolver(
&self,
) -> Result<Option<&Arc<CliSloppyImportsResolver>>, AnyError> {
@ -584,17 +725,10 @@ impl CliFactory {
.resolver
.get_or_try_init_async(
async {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliResolver::new(CliResolverOptions {
npm_resolver: if cli_options.no_npm() {
None
} else {
Some(self.npm_resolver().await?.clone())
},
bare_node_builtins_enabled: cli_options
.unstable_bare_node_builtins(),
deno_resolver: self.deno_resolver().await?.clone(),
})))
Ok(Arc::new(CliResolver::new(
self.deno_resolver().await?.clone(),
self.services.found_pkg_json_dep_flag.clone(),
)))
}
.boxed_local(),
)
@ -683,6 +817,7 @@ impl CliFactory {
.into_npm_pkg_folder_resolver(),
self.pkg_json_resolver().clone(),
self.sys(),
node_resolver::ConditionsFromResolutionMode::default(),
)))
}
.boxed_local(),
@ -733,11 +868,10 @@ impl CliFactory {
.get_or_try_init_async(async {
let npm_resolver = self.npm_resolver().await?;
Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
sys: self.sys(),
in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
node_resolver: self.node_resolver().await?.clone(),
npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
npm_resolver: npm_resolver.clone().into_byonm_or_managed(),
})))
})
.await
@ -765,6 +899,7 @@ impl CliFactory {
cli_options.clone(),
self.module_graph_builder().await?.clone(),
self.node_resolver().await?.clone(),
self.npm_installer_if_managed()?.cloned(),
self.npm_resolver().await?.clone(),
self.sys(),
)))
@ -790,6 +925,8 @@ impl CliFactory {
cli_options.maybe_lockfile().cloned(),
self.maybe_file_watcher_reporter().clone(),
self.module_info_cache()?.clone(),
self.npm_graph_resolver()?.clone(),
self.npm_installer_if_managed()?.cloned(),
self.npm_resolver().await?.clone(),
self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
@ -810,7 +947,7 @@ impl CliFactory {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphCreator::new(
cli_options.clone(),
self.npm_resolver().await?.clone(),
self.npm_installer_if_managed()?.cloned(),
self.module_graph_builder().await?.clone(),
self.type_checker().await?.clone(),
)))
@ -1010,6 +1147,7 @@ impl CliFactory {
self.sys(),
)),
node_resolver.clone(),
self.npm_installer_if_managed()?.cloned(),
npm_resolver.clone(),
pkg_json_resolver,
self.root_cert_store_provider().clone(),
@ -1308,15 +1446,17 @@ impl WorkspaceFilesFactory {
.await
{
match err {
MaybeDiagnostics::Diagnostics(Diagnostics(d)) => {
diagnostics.extend(d)
}
MaybeDiagnostics::Other(err) => all_errors.push(err),
PrepareModuleLoadError::Check(CheckError::Diagnostics(
Diagnostics(d),
)) => diagnostics.extend(d),
err => all_errors.push(err),
}
}
}
if !diagnostics.is_empty() {
all_errors.push(AnyError::from(Diagnostics(diagnostics)));
all_errors.push(PrepareModuleLoadError::Check(CheckError::Diagnostics(
Diagnostics(diagnostics),
)));
}
if !all_errors.is_empty() {
return Err(anyhow!(
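The factory methods in this file all follow the same lazy-initialization pattern: each service lives in a deferred once-cell and is built on first access through `get_or_try_init`, so construction can fail and later callers reuse the cached instance. A minimal, single-threaded sketch of that pattern, assuming a hypothetical `Deferred` wrapper over `std::cell::OnceCell` (the real `Deferred` in the CLI factory is its own type with async variants):

```rust
use std::cell::OnceCell;

// Hypothetical single-threaded stand-in for the factory's `Deferred<T>`.
struct Deferred<T>(OnceCell<T>);

impl<T> Deferred<T> {
    fn new() -> Self {
        Deferred(OnceCell::new())
    }

    // Build the value on first access; every later call returns the cached one.
    fn get_or_try_init<E>(
        &self,
        init: impl FnOnce() -> Result<T, E>,
    ) -> Result<&T, E> {
        if self.0.get().is_none() {
            let value = init()?;
            // `set` only fails if a value is already present, which we just
            // ruled out (fine for this single-threaded sketch).
            let _ = self.0.set(value);
        }
        Ok(self.0.get().expect("initialized above"))
    }
}

fn main() -> Result<(), String> {
    let service: Deferred<String> = Deferred::new();
    // The first call runs its closure; the second never evaluates its closure.
    let first = service.get_or_try_init(|| Ok::<_, String>("npm installer".into()))?;
    let second = service.get_or_try_init(|| Err("never evaluated".to_string()))?;
    assert!(std::ptr::eq(first, second));
    println!("lazily initialized: {first}");
    Ok(())
}
```

Because the cell is written at most once, every later `get_or_try_init` call skips its closure entirely and hands back the same reference.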
View file
@ -13,7 +13,7 @@ use deno_runtime::deno_permissions::PermissionsContainer;
use crate::args::CliOptions;
use crate::module_loader::ModuleLoadPreparer;
use crate::tools::check::MaybeDiagnostics;
use crate::module_loader::PrepareModuleLoadError;
use crate::util::fs::collect_specifiers;
use crate::util::path::is_script_ext;
@ -70,7 +70,7 @@ impl MainModuleGraphContainer {
&self,
specifiers: &[ModuleSpecifier],
ext_overwrite: Option<&String>,
) -> Result<(), MaybeDiagnostics> {
) -> Result<(), PrepareModuleLoadError> {
let mut graph_permit = self.acquire_update_permit().await;
let graph = graph_permit.graph_mut();
self
View file
@ -1,17 +1,19 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::error::Error;
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
use deno_config::deno_json;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;
use deno_graph::source::Loader;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::ResolutionKind;
@ -26,13 +28,13 @@ use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_graph::WorkspaceFastCheckOption;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::SmallStackString;
use import_map::ImportMapError;
use node_resolver::InNpmPackageChecker;
use crate::args::config_to_deno_graph_workspace_member;
@ -47,16 +49,17 @@ use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::errors::get_error_class_name;
use crate::errors::get_module_graph_error_class;
use crate::file_fetcher::CliFileFetcher;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CliNpmGraphResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::sys::CliSys;
use crate::tools::check;
use crate::tools::check::CheckError;
use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator;
@ -82,7 +85,7 @@ pub fn graph_valid(
sys: &CliSys,
roots: &[ModuleSpecifier],
options: GraphValidOptions,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
if options.exit_integrity_errors {
graph_exit_integrity_errors(graph);
}
@ -101,9 +104,9 @@ pub fn graph_valid(
} else {
// finally surface the npm resolution result
if let Err(err) = &graph.npm_dep_graph_result {
return Err(custom_error(
get_error_class_name(err),
format_deno_graph_error(err.as_ref().deref()),
return Err(JsErrorBox::new(
err.get_class(),
format_deno_graph_error(err),
));
}
Ok(())
@ -142,7 +145,7 @@ pub fn graph_walk_errors<'a>(
sys: &'a CliSys,
roots: &'a [ModuleSpecifier],
options: GraphWalkErrorsOptions,
) -> impl Iterator<Item = AnyError> + 'a {
) -> impl Iterator<Item = JsErrorBox> + 'a {
graph
.walk(
roots.iter(),
@ -194,7 +197,7 @@ pub fn graph_walk_errors<'a>(
return None;
}
Some(custom_error(get_module_graph_error_class(&error), message))
Some(JsErrorBox::new(error.get_class(), message))
})
}
@ -261,7 +264,7 @@ pub struct CreateGraphOptions<'a> {
pub struct ModuleGraphCreator {
options: Arc<CliOptions>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
module_graph_builder: Arc<ModuleGraphBuilder>,
type_checker: Arc<TypeChecker>,
}
@ -269,13 +272,13 @@ pub struct ModuleGraphCreator {
impl ModuleGraphCreator {
pub fn new(
options: Arc<CliOptions>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
module_graph_builder: Arc<ModuleGraphBuilder>,
type_checker: Arc<TypeChecker>,
) -> Self {
Self {
options,
npm_resolver,
npm_installer,
module_graph_builder,
type_checker,
}
@ -398,9 +401,9 @@ impl ModuleGraphCreator {
.build_graph_with_npm_resolution(&mut graph, options)
.await?;
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
if let Some(npm_installer) = &self.npm_installer {
if graph.has_node_specifier && self.options.type_check_mode().is_true() {
npm_resolver.inject_synthetic_types_node_package().await?;
npm_installer.inject_synthetic_types_node_package().await?;
}
}
@ -434,14 +437,14 @@ impl ModuleGraphCreator {
}
}
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> {
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> {
self.module_graph_builder.graph_valid(graph)
}
async fn type_check_graph(
&self,
graph: ModuleGraph,
) -> Result<Arc<ModuleGraph>, AnyError> {
) -> Result<Arc<ModuleGraph>, CheckError> {
self
.type_checker
.check(
@ -455,7 +458,6 @@ impl ModuleGraphCreator {
},
)
.await
.map_err(AnyError::from)
}
}
@ -465,6 +467,27 @@ pub struct BuildFastCheckGraphOptions<'a> {
pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum BuildGraphWithNpmResolutionError {
#[class(inherit)]
#[error(transparent)]
SerdeJson(#[from] serde_json::Error),
#[class(inherit)]
#[error(transparent)]
ToMaybeJsxImportSourceConfig(
#[from] deno_json::ToMaybeJsxImportSourceConfigError,
),
#[class(inherit)]
#[error(transparent)]
NodeModulesDirParse(#[from] deno_json::NodeModulesDirParseError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
#[class(generic)]
#[error("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead")]
UnsupportedNpmSpecifierEntrypointResolutionWay,
}
pub struct ModuleGraphBuilder {
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
@ -475,6 +498,8 @@ pub struct ModuleGraphBuilder {
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
@ -494,6 +519,8 @@ impl ModuleGraphBuilder {
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
@ -510,6 +537,8 @@ impl ModuleGraphBuilder {
lockfile,
maybe_file_watcher_reporter,
module_info_cache,
npm_graph_resolver,
npm_installer,
npm_resolver,
parsed_source_cache,
resolver,
@ -522,7 +551,7 @@ impl ModuleGraphBuilder {
&self,
graph: &mut ModuleGraph,
options: CreateGraphOptions<'a>,
) -> Result<(), AnyError> {
) -> Result<(), BuildGraphWithNpmResolutionError> {
enum MutLoaderRef<'a> {
Borrowed(&'a mut dyn Loader),
Owned(cache::FetchCacher),
@ -608,10 +637,7 @@ impl ModuleGraphBuilder {
Some(loader) => MutLoaderRef::Borrowed(loader),
None => MutLoaderRef::Owned(self.create_graph_loader()),
};
let cli_resolver = &self.resolver;
let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver =
cli_resolver.create_graph_npm_resolver(options.npm_caching);
let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter
.as_ref()
@ -632,7 +658,7 @@ impl ModuleGraphBuilder {
executor: Default::default(),
file_system: &self.sys,
jsr_url_provider: &CliJsrUrlProvider,
npm_resolver: Some(&graph_npm_resolver),
npm_resolver: Some(self.npm_graph_resolver.as_ref()),
module_analyzer: &analyzer,
reporter: maybe_file_watcher_reporter,
resolver: Some(&graph_resolver),
@ -650,22 +676,21 @@ impl ModuleGraphBuilder {
loader: &'a mut dyn deno_graph::source::Loader,
options: deno_graph::BuildOptions<'a>,
npm_caching: NpmCachingStrategy,
) -> Result<(), AnyError> {
) -> Result<(), BuildGraphWithNpmResolutionError> {
// ensure an "npm install" is done if the user has explicitly
// opted into using a node_modules directory
if self
.cli_options
.node_modules_dir()?
.map(|m| m.uses_node_modules_dir())
.map(|m| m == NodeModulesDirMode::Auto)
.unwrap_or(false)
{
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
let already_done =
npm_resolver.ensure_top_level_package_json_install().await?;
if let Some(npm_installer) = &self.npm_installer {
let already_done = npm_installer
.ensure_top_level_package_json_install()
.await?;
if !already_done && matches!(npm_caching, NpmCachingStrategy::Eager) {
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
npm_installer.cache_packages(PackageCaching::All).await?;
}
}
}
@ -687,7 +712,7 @@ impl ModuleGraphBuilder {
if roots.iter().any(|r| r.scheme() == "npm")
&& self.npm_resolver.as_byonm().is_some()
{
bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead");
return Err(BuildGraphWithNpmResolutionError::UnsupportedNpmSpecifierEntrypointResolutionWay);
}
graph.build(roots, loader, options).await;
@ -738,7 +763,7 @@ impl ModuleGraphBuilder {
&self,
graph: &mut ModuleGraph,
options: BuildFastCheckGraphOptions,
) -> Result<(), AnyError> {
) -> Result<(), deno_json::ToMaybeJsxImportSourceConfigError> {
if !graph.graph_kind().include_types() {
return Ok(());
}
@ -753,11 +778,7 @@ impl ModuleGraphBuilder {
None
};
let parser = self.parsed_source_cache.as_capturing_parser();
let cli_resolver = &self.resolver;
let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(
self.cli_options.default_npm_caching_strategy(),
);
graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions {
@ -766,7 +787,7 @@ impl ModuleGraphBuilder {
fast_check_dts: false,
jsr_url_provider: &CliJsrUrlProvider,
resolver: Some(&graph_resolver),
npm_resolver: Some(&graph_npm_resolver),
npm_resolver: Some(self.npm_graph_resolver.as_ref()),
workspace_fast_check: options.workspace_fast_check,
},
);
@ -802,7 +823,7 @@ impl ModuleGraphBuilder {
/// Check if `roots` and their deps are available. Returns `Ok(())` if
/// so. Returns `Err(_)` if there is a known module graph or resolution
/// error statically reachable from `roots` and not a dynamic import.
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> {
pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> {
self.graph_roots_valid(
graph,
&graph.roots.iter().cloned().collect::<Vec<_>>(),
@ -813,7 +834,7 @@ impl ModuleGraphBuilder {
&self,
graph: &ModuleGraph,
roots: &[ModuleSpecifier],
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
graph_valid(
graph,
&self.sys,
@ -830,7 +851,10 @@ impl ModuleGraphBuilder {
)
}
fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> {
fn create_graph_resolver(
&self,
) -> Result<CliGraphResolver, deno_json::ToMaybeJsxImportSourceConfigError>
{
let jsx_import_source_config = self
.cli_options
.start_dir
@ -998,9 +1022,11 @@ fn get_resolution_error_bare_specifier(
{
Some(specifier.as_str())
} else if let ResolutionError::ResolverError { error, .. } = error {
if let ResolveError::Other(error) = (*error).as_ref() {
if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) =
error.downcast_ref::<ImportMapError>()
if let ResolveError::ImportMap(error) = (*error).as_ref() {
if let import_map::ImportMapErrorKind::UnmappedBareSpecifier(
specifier,
_,
) = error.as_kind()
{
Some(specifier.as_str())
} else {
@ -1037,11 +1063,12 @@ fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> {
ResolveError::Other(other_error) => {
if let Some(SpecifierError::ImportPrefixMissing {
specifier, ..
}) = other_error.downcast_ref::<SpecifierError>()
}) = other_error.as_any().downcast_ref::<SpecifierError>()
{
maybe_specifier = Some(specifier);
}
}
ResolveError::ImportMap(_) => {}
}
}
}
@ -1261,7 +1288,7 @@ mod test {
let specifier = ModuleSpecifier::parse("file:///file.ts").unwrap();
let err = import_map.resolve(input, &specifier).err().unwrap();
let err = ResolutionError::ResolverError {
error: Arc::new(ResolveError::Other(err.into())),
error: Arc::new(ResolveError::ImportMap(err)),
specifier: input.to_string(),
range: Range {
specifier,
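Several hunks above replace `Result<_, AnyError>` returns with structured error enums such as `BuildGraphWithNpmResolutionError`, where most variants forward transparently to an underlying error. A trimmed-down sketch of that `thiserror` pattern, assuming only the `thiserror` and `serde_json` crates and a hypothetical `BuildGraphError` (the real enum additionally derives `deno_error::JsError` with `#[class(...)]` attributes):

```rust
use thiserror::Error;

// Hypothetical, trimmed-down version of the structured error enum pattern.
#[derive(Debug, Error)]
enum BuildGraphError {
    // `transparent` forwards Display/source; `from` enables `?` conversion.
    #[error(transparent)]
    SerdeJson(#[from] serde_json::Error),
    #[error("Resolving npm specifier entrypoints this way is currently not supported")]
    UnsupportedNpmEntrypoint,
}

fn parse_import_map(raw: &str) -> Result<serde_json::Value, BuildGraphError> {
    // `?` lifts serde_json::Error into BuildGraphError::SerdeJson.
    Ok(serde_json::from_str(raw)?)
}

fn main() {
    match parse_import_map("{ not json") {
        Ok(value) => println!("parsed: {value}"),
        Err(err) => eprintln!("build graph error: {err}"),
    }
    // Plain variants work like any other enum value.
    let unsupported = BuildGraphError::UnsupportedNpmEntrypoint;
    eprintln!("{unsupported}");
}
```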
View file
@ -6,13 +6,14 @@ use std::thread::ThreadId;
use boxed_error::Boxed;
use deno_cache_dir::file_fetcher::RedirectHeaderParseError;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::parking_lot::Mutex;
use deno_core::serde;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions;
@ -69,7 +70,7 @@ impl HttpClientProvider {
}
}
pub fn get_or_create(&self) -> Result<HttpClient, AnyError> {
pub fn get_or_create(&self) -> Result<HttpClient, JsErrorBox> {
use std::collections::hash_map::Entry;
let thread_id = std::thread::current().id();
let mut clients = self.clients_by_thread_id.lock();
@ -86,7 +87,8 @@ impl HttpClientProvider {
},
..self.options.clone()
},
)?;
)
.map_err(JsErrorBox::from_err)?;
entry.insert(client.clone());
Ok(HttpClient::new(client))
}
@ -94,34 +96,49 @@ impl HttpClientProvider {
}
}
#[derive(Debug, Error)]
#[derive(Debug, Error, JsError)]
#[class(type)]
#[error("Bad response: {:?}{}", .status_code, .response_text.as_ref().map(|s| format!("\n\n{}", s)).unwrap_or_else(String::new))]
pub struct BadResponseError {
pub status_code: StatusCode,
pub response_text: Option<String>,
}
#[derive(Debug, Boxed)]
#[derive(Debug, Boxed, JsError)]
pub struct DownloadError(pub Box<DownloadErrorKind>);
#[derive(Debug, Error)]
#[derive(Debug, Error, JsError)]
pub enum DownloadErrorKind {
#[class(inherit)]
#[error(transparent)]
Fetch(AnyError),
Fetch(deno_fetch::ClientSendError),
#[class(inherit)]
#[error(transparent)]
UrlParse(#[from] deno_core::url::ParseError),
#[class(generic)]
#[error(transparent)]
HttpParse(#[from] http::Error),
#[class(inherit)]
#[error(transparent)]
Json(#[from] serde_json::Error),
#[class(generic)]
#[error(transparent)]
ToStr(#[from] http::header::ToStrError),
#[class(inherit)]
#[error(transparent)]
RedirectHeaderParse(RedirectHeaderParseError),
#[class(type)]
#[error("Too many redirects.")]
TooManyRedirects,
#[class(inherit)]
#[error(transparent)]
BadResponse(#[from] BadResponseError),
#[class("Http")]
#[error("Not Found.")]
NotFound,
#[class(inherit)]
#[error(transparent)]
Other(JsErrorBox),
}
#[derive(Debug)]
@ -208,11 +225,11 @@ impl HttpClient {
Ok(String::from_utf8(bytes)?)
}
pub async fn download(&self, url: Url) -> Result<Vec<u8>, AnyError> {
pub async fn download(&self, url: Url) -> Result<Vec<u8>, DownloadError> {
let maybe_bytes = self.download_inner(url, None, None).await?;
match maybe_bytes {
Some(bytes) => Ok(bytes),
None => Err(custom_error("Http", "Not found.")),
None => Err(DownloadErrorKind::NotFound.into_box()),
}
}
@ -276,7 +293,7 @@ impl HttpClient {
get_response_body_with_progress(response, progress_guard)
.await
.map(|(_, body)| Some(body))
.map_err(|err| DownloadErrorKind::Fetch(err).into_box())
.map_err(|err| DownloadErrorKind::Other(err).into_box())
}
async fn get_redirected_response(
@ -293,7 +310,7 @@ impl HttpClient {
.clone()
.send(req)
.await
.map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?;
.map_err(|e| DownloadErrorKind::Fetch(e).into_box())?;
let status = response.status();
if status.is_redirection() {
for _ in 0..5 {
@ -313,7 +330,7 @@ impl HttpClient {
.clone()
.send(req)
.await
.map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?;
.map_err(|e| DownloadErrorKind::Fetch(e).into_box())?;
let status = new_response.status();
if status.is_redirection() {
response = new_response;
@ -332,7 +349,7 @@ impl HttpClient {
pub async fn get_response_body_with_progress(
response: http::Response<deno_fetch::ResBody>,
progress_guard: Option<&UpdateGuard>,
) -> Result<(HeaderMap, Vec<u8>), AnyError> {
) -> Result<(HeaderMap, Vec<u8>), JsErrorBox> {
use http_body::Body as _;
if let Some(progress_guard) = progress_guard {
let mut total_size = response.body().size_hint().exact();
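`DownloadError` above is a `Boxed` wrapper around a `DownloadErrorKind` enum, so the error side of every `Result` stays one pointer wide while the kind enum is free to carry larger payloads. A hand-rolled sketch of that pattern with hypothetical variants (the real code generates the wrapper with the `boxed_error::Boxed` derive and adds `JsError` classes):

```rust
use std::fmt;

// Hypothetical, trimmed-down version of the boxed error-kind pattern.
#[derive(Debug)]
pub struct DownloadError(pub Box<DownloadErrorKind>);

#[derive(Debug)]
pub enum DownloadErrorKind {
    TooManyRedirects,
    NotFound,
    BadResponse { status_code: u16, response_text: Option<String> },
}

impl DownloadErrorKind {
    pub fn into_box(self) -> DownloadError {
        DownloadError(Box::new(self))
    }
}

impl fmt::Display for DownloadError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &*self.0 {
            DownloadErrorKind::TooManyRedirects => write!(f, "Too many redirects."),
            DownloadErrorKind::NotFound => write!(f, "Not Found."),
            DownloadErrorKind::BadResponse { status_code, .. } => {
                write!(f, "Bad response: {status_code}")
            }
        }
    }
}

impl std::error::Error for DownloadError {}

fn download(found: bool) -> Result<Vec<u8>, DownloadError> {
    if found {
        Ok(b"bytes".to_vec())
    } else {
        Err(DownloadErrorKind::NotFound.into_box())
    }
}

fn main() {
    // The error half of the Result is just one Box wide.
    assert_eq!(
        std::mem::size_of::<DownloadError>(),
        std::mem::size_of::<Box<DownloadErrorKind>>()
    );
    if let Err(err) = download(false) {
        eprintln!("{err}");
    }
}
```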
View file
@ -406,8 +406,9 @@ export function splitSelectors(input) {
}
}
if (last < input.length - 1) {
out.push(input.slice(last).trim());
const remaining = input.slice(last).trim();
if (remaining.length > 0) {
out.push(remaining);
}
return out;
View file
@ -10,13 +10,13 @@ use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_config::workspace::MappedResolution;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PathClean;
@ -1070,10 +1070,13 @@ impl CodeActionCollection {
// we wrap tsc, we can't handle the asynchronous response, so it is
// actually easier to return errors if we ever encounter one of these,
// which we really wouldn't expect from the Deno lsp.
return Err(custom_error(
"UnsupportedFix",
"The action returned from TypeScript is unsupported.",
));
return Err(
JsErrorBox::new(
"UnsupportedFix",
"The action returned from TypeScript is unsupported.",
)
.into(),
);
}
let Some(action) =
fix_ts_import_action(specifier, resolution_mode, action, language_server)
View file
@ -45,6 +45,7 @@ use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache;
use deno_path_util::url_to_file_path;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_runtime::deno_node::PackageJson;
use indexmap::IndexSet;
use lsp_types::ClientCapabilities;
@ -64,7 +65,6 @@ use crate::cache::FastInsecureHasher;
use crate::file_fetcher::CliFileFetcher;
use crate::lsp::logging::lsp_warn;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::sys::CliSys;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
@ -853,7 +853,8 @@ impl Settings {
Some(false)
} else if let Some(enable_paths) = &enable_paths {
for enable_path in enable_paths {
if path.starts_with(enable_path) {
// Also enable if the checked path is a dir containing an enabled path.
if path.starts_with(enable_path) || enable_path.starts_with(&path) {
return Some(true);
}
}
@ -1245,7 +1246,6 @@ impl ConfigData {
pkg_json_cache: Some(pkg_json_cache),
workspace_cache: Some(workspace_cache),
discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"),
config_parse_options: Default::default(),
maybe_vendor_override: None,
},
)
@ -1572,11 +1572,11 @@ impl ConfigData {
let resolver = member_dir
.workspace
.create_resolver(
&CliSys::default(),
CreateResolverOptions {
pkg_json_dep_resolution,
specified_import_map,
},
|path| Ok(std::fs::read_to_string(path)?),
)
.inspect_err(|err| {
lsp_warn!(
@ -2077,7 +2077,6 @@ impl deno_config::workspace::WorkspaceCache for WorkspaceMemCache {
#[cfg(test)]
mod tests {
use deno_config::deno_json::ConfigParseOptions;
use deno_core::resolve_url;
use deno_core::serde_json;
use deno_core::serde_json::json;
@ -2351,12 +2350,7 @@ mod tests {
config
.tree
.inject_config_file(
ConfigFile::new(
"{}",
root_uri.join("deno.json").unwrap(),
&ConfigParseOptions::default(),
)
.unwrap(),
ConfigFile::new("{}", root_uri.join("deno.json").unwrap()).unwrap(),
)
.await;
assert!(config.specifier_enabled(&root_uri));
@ -2412,7 +2406,6 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
&ConfigParseOptions::default(),
)
.unwrap(),
)
@ -2438,7 +2431,6 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
&ConfigParseOptions::default(),
)
.unwrap(),
)
@ -2456,7 +2448,6 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
&ConfigParseOptions::default(),
)
.unwrap(),
)
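The `enable_paths` change above makes the check symmetric: a path counts as enabled either when it sits under an enabled path or when it is an ancestor directory containing one, so the walker can descend into `folder2` to reach `folder2/inner_folder`. A small sketch of that two-way prefix test, using hypothetical paths that mirror the test fixture:

```rust
use std::path::Path;

// Enabled if the checked path is inside an enabled path, or is a directory
// that contains one.
fn is_enabled(checked: &Path, enable_paths: &[&Path]) -> bool {
    enable_paths
        .iter()
        .any(|&enable| checked.starts_with(enable) || enable.starts_with(checked))
}

fn main() {
    let enabled = [Path::new("/root2/folder2/inner_folder")];
    // A file inside an enabled folder is enabled.
    assert!(is_enabled(
        Path::new("/root2/folder2/inner_folder/main.ts"),
        &enabled
    ));
    // An ancestor directory of an enabled path is also treated as enabled,
    // so the preload walker descends into it.
    assert!(is_enabled(Path::new("/root2/folder2"), &enabled));
    // Unrelated siblings stay disabled.
    assert!(!is_enabled(Path::new("/root2/sub_folder"), &enabled));
    println!("enable_paths checks passed");
}
```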
View file
@ -26,6 +26,7 @@ use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_node;
@ -34,7 +35,7 @@ use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use import_map::ImportMap;
use import_map::ImportMapError;
use import_map::ImportMapErrorKind;
use log::error;
use tokio::sync::mpsc;
use tokio::sync::Mutex;
@ -61,7 +62,6 @@ use crate::graph_util;
use crate::graph_util::enhanced_resolution_error_message;
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::sys::CliSys;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
@ -1297,8 +1297,8 @@ impl DenoDiagnostic {
let mut message;
message = enhanced_resolution_error_message(err);
if let deno_graph::ResolutionError::ResolverError {error, ..} = err{
if let ResolveError::Other(resolve_error, ..) = (*error).as_ref() {
if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) = resolve_error.downcast_ref::<ImportMapError>() {
if let ResolveError::ImportMap(importmap) = (*error).as_ref() {
if let ImportMapErrorKind::UnmappedBareSpecifier(specifier, _) = &**importmap {
if specifier.chars().next().unwrap_or('\0') == '@'{
let hint = format!("\nHint: Use [deno add {}] to add the dependency.", specifier);
message.push_str(hint.as_str());
@ -1695,12 +1695,7 @@ mod tests {
let mut config = Config::new_with_roots([root_uri.clone()]);
if let Some((relative_path, json_string)) = maybe_import_map {
let base_url = root_uri.join(relative_path).unwrap();
let config_file = ConfigFile::new(
json_string,
base_url,
&deno_config::deno_json::ConfigParseOptions::default(),
)
.unwrap();
let config_file = ConfigFile::new(json_string, base_url).unwrap();
config.tree.inject_config_file(config_file).await;
}
let resolver =
View file
@ -18,13 +18,13 @@ use deno_ast::swc::visit::VisitWith;
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::futures::future;
use deno_core::futures::future::Shared;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::Resolution;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node;
@ -480,7 +480,7 @@ impl Document {
let is_cjs_resolver =
resolver.as_is_cjs_resolver(self.file_referrer.as_ref());
let npm_resolver =
resolver.create_graph_npm_resolver(self.file_referrer.as_ref());
resolver.as_graph_npm_resolver(self.file_referrer.as_ref());
let config_data = resolver.as_config_data(self.file_referrer.as_ref());
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
@ -503,7 +503,7 @@ impl Document {
s,
&CliJsrUrlProvider,
Some(&resolver),
Some(&npm_resolver),
Some(npm_resolver.as_ref()),
),
)
})
@ -513,7 +513,7 @@ impl Document {
Arc::new(d.with_new_resolver(
&CliJsrUrlProvider,
Some(&resolver),
Some(&npm_resolver),
Some(npm_resolver.as_ref()),
))
});
is_script = self.is_script;
@ -1081,7 +1081,7 @@ impl Documents {
.or_else(|| self.file_system_docs.remove_document(specifier))
.map(Ok)
.unwrap_or_else(|| {
Err(custom_error(
Err(JsErrorBox::new(
"NotFound",
format!("The specifier \"{specifier}\" was not found."),
))
@ -1702,7 +1702,7 @@ fn analyze_module(
) -> (ModuleResult, ResolutionMode) {
match parsed_source_result {
Ok(parsed_source) => {
let npm_resolver = resolver.create_graph_npm_resolver(file_referrer);
let npm_resolver = resolver.as_graph_npm_resolver(file_referrer);
let cli_resolver = resolver.as_cli_resolver(file_referrer);
let is_cjs_resolver = resolver.as_is_cjs_resolver(file_referrer);
let config_data = resolver.as_config_data(file_referrer);
@ -1731,7 +1731,7 @@ fn analyze_module(
file_system: &deno_graph::source::NullFileSystem,
jsr_url_provider: &CliJsrUrlProvider,
maybe_resolver: Some(&resolver),
maybe_npm_resolver: Some(&npm_resolver),
maybe_npm_resolver: Some(npm_resolver.as_ref()),
},
)),
module_resolution_mode,
@ -1767,7 +1767,6 @@ fn bytes_to_content(
#[cfg(test)]
mod tests {
use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::ConfigParseOptions;
use deno_core::serde_json;
use deno_core::serde_json::json;
use pretty_assertions::assert_eq;
@ -1924,7 +1923,6 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&ConfigParseOptions::default(),
)
.unwrap(),
)
@ -1968,7 +1966,6 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&ConfigParseOptions::default(),
)
.unwrap(),
)
View file
@ -122,7 +122,7 @@ use crate::util::sync::AsyncFlag;
struct LspRootCertStoreProvider(RootCertStore);
impl RootCertStoreProvider for LspRootCertStoreProvider {
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
fn get_or_try_init(&self) -> Result<&RootCertStore, deno_error::JsErrorBox> {
Ok(&self.0)
}
}
@ -1419,18 +1419,16 @@ impl Inner {
// the file path is only used to determine what formatter should
// be used to format the file, so give the filepath an extension
// that matches what the user selected as the language
let file_path = document
let ext = document
.maybe_language_id()
.and_then(|id| id.as_extension())
.map(|ext| file_path.with_extension(ext))
.unwrap_or(file_path);
.and_then(|id| id.as_extension().map(|s| s.to_string()));
// it's not a js/ts file, so attempt to format its contents
format_file(
&file_path,
document.content(),
&fmt_options,
&unstable_options,
None,
ext,
)
}
};
@ -3623,8 +3621,6 @@ impl Inner {
deno_json_cache: None,
pkg_json_cache: None,
workspace_cache: None,
config_parse_options:
deno_config::deno_json::ConfigParseOptions::default(),
additional_config_file_names: &[],
discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"),
maybe_vendor_override: if force_global_cache {
@ -4006,12 +4002,14 @@ mod tests {
temp_dir.write("root1/target/main.ts", ""); // no, because there is a Cargo.toml in the root directory
temp_dir.create_dir_all("root2/folder");
temp_dir.create_dir_all("root2/folder2/inner_folder");
temp_dir.create_dir_all("root2/sub_folder");
temp_dir.create_dir_all("root2/root2.1");
temp_dir.write("root2/file1.ts", ""); // yes, enabled
temp_dir.write("root2/file2.ts", ""); // no, not enabled
temp_dir.write("root2/folder/main.ts", ""); // yes, enabled
temp_dir.write("root2/folder/other.ts", ""); // no, disabled
temp_dir.write("root2/folder2/inner_folder/main.ts", ""); // yes, enabled (regression test for https://github.com/denoland/vscode_deno/issues/1239)
temp_dir.write("root2/sub_folder/a.js", ""); // no, not enabled
temp_dir.write("root2/sub_folder/b.ts", ""); // no, not enabled
temp_dir.write("root2/sub_folder/c.js", ""); // no, not enabled
@ -4052,6 +4050,7 @@ mod tests {
enable_paths: Some(vec![
"file1.ts".to_string(),
"folder".to_string(),
"folder2/inner_folder".to_string(),
]),
disable_paths: vec!["folder/other.ts".to_string()],
..Default::default()
@ -4102,6 +4101,10 @@ mod tests {
temp_dir.url().join("root1/folder/mod.ts").unwrap(),
temp_dir.url().join("root2/folder/main.ts").unwrap(),
temp_dir.url().join("root2/root2.1/main.ts").unwrap(),
temp_dir
.url()
.join("root2/folder2/inner_folder/main.ts")
.unwrap(),
])
);
}
View file
@ -9,7 +9,6 @@ use std::sync::Arc;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::npm::NpmCacheDir;
use deno_cache_dir::HttpCache;
use deno_config::deno_json::JsxImportSourceConfig;
@ -21,8 +20,12 @@ use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::TarballCache;
use deno_path_util::url_to_file_path;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
@ -40,6 +43,8 @@ use super::cache::LspCache;
use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc;
use crate::args::CliLockfile;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmCachingStrategy;
use crate::args::NpmInstallDepsProvider;
use crate::factory::Deferred;
use crate::graph_util::to_node_resolution_kind;
@ -51,21 +56,25 @@ use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::create_cli_npm_resolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::NpmResolutionInstaller;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmCacheHttpClient;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::ManagedCliNpmResolver;
use crate::npm::NpmResolutionInitializer;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliNpmGraphResolver;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::FoundPackageJsonDepFlag;
use crate::resolver::IsCjsResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::sys::CliSys;
use crate::tsc::into_specifier_and_media_type;
use crate::util::progress_bar::ProgressBar;
@ -77,6 +86,8 @@ struct LspScopeResolver {
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
is_cjs_resolver: Arc<IsCjsResolver>,
jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>,
npm_pkg_req_resolver: Option<Arc<CliNpmReqResolver>>,
@ -96,6 +107,8 @@ impl Default for LspScopeResolver {
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
is_cjs_resolver: factory.is_cjs_resolver().clone(),
jsr_resolver: None,
npm_graph_resolver: factory.npm_graph_resolver().clone(),
npm_installer: None,
npm_resolver: None,
node_resolver: None,
npm_pkg_req_resolver: None,
@ -121,6 +134,7 @@ impl LspScopeResolver {
}
let in_npm_pkg_checker = factory.in_npm_pkg_checker().clone();
let npm_resolver = factory.npm_resolver().cloned();
let npm_installer = factory.npm_installer().cloned();
let node_resolver = factory.node_resolver().cloned();
let npm_pkg_req_resolver = factory.npm_pkg_req_resolver().cloned();
let cli_resolver = factory.cli_resolver().clone();
@ -133,8 +147,7 @@ impl LspScopeResolver {
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()),
)));
let npm_graph_resolver = cli_resolver
.create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager);
let npm_graph_resolver = factory.npm_graph_resolver();
let maybe_jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let graph_imports = config_data
@ -156,7 +169,7 @@ impl LspScopeResolver {
imports,
&CliJsrUrlProvider,
Some(&resolver),
Some(&npm_graph_resolver),
Some(npm_graph_resolver.as_ref()),
);
(referrer, graph_import)
})
@ -207,8 +220,10 @@ impl LspScopeResolver {
in_npm_pkg_checker,
is_cjs_resolver: factory.is_cjs_resolver().clone(),
jsr_resolver,
npm_graph_resolver: factory.npm_graph_resolver().clone(),
npm_pkg_req_resolver,
npm_resolver,
npm_installer,
node_resolver,
pkg_json_resolver,
redirect_resolver,
@ -231,6 +246,9 @@ impl LspScopeResolver {
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
is_cjs_resolver: factory.is_cjs_resolver().clone(),
jsr_resolver: self.jsr_resolver.clone(),
npm_graph_resolver: factory.npm_graph_resolver().clone(),
// npm installer isn't necessary for a snapshot
npm_installer: None,
npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
npm_resolver: factory.npm_resolver().cloned(),
node_resolver: factory.node_resolver().cloned(),
@ -318,14 +336,12 @@ impl LspResolver {
if let Some(dep_info) = dep_info {
*resolver.dep_info.lock() = dep_info.clone();
}
if let Some(npm_resolver) = resolver.npm_resolver.as_ref() {
if let Some(npm_resolver) = npm_resolver.as_managed() {
let reqs = dep_info
.map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default();
if let Err(err) = npm_resolver.set_package_reqs(&reqs).await {
lsp_warn!("Could not set npm package requirements: {:#}", err);
}
if let Some(npm_installer) = resolver.npm_installer.as_ref() {
let reqs = dep_info
.map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default();
if let Err(err) = npm_installer.set_package_reqs(&reqs).await {
lsp_warn!("Could not set npm package requirements: {:#}", err);
}
}
}
@ -339,14 +355,12 @@ impl LspResolver {
resolver.resolver.as_ref()
}
pub fn create_graph_npm_resolver(
pub fn as_graph_npm_resolver(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> WorkerCliNpmGraphResolver {
) -> &Arc<CliNpmGraphResolver> {
let resolver = self.get_scope_resolver(file_referrer);
resolver
.resolver
.create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager)
&resolver.npm_graph_resolver
}
pub fn as_is_cjs_resolver(
@ -590,9 +604,12 @@ pub struct ScopeDepInfo {
#[derive(Default)]
struct ResolverFactoryServices {
cli_resolver: Deferred<Arc<CliResolver>>,
found_pkg_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
is_cjs_resolver: Deferred<Arc<IsCjsResolver>>,
node_resolver: Deferred<Option<Arc<CliNodeResolver>>>,
npm_graph_resolver: Deferred<Arc<CliNpmGraphResolver>>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
}
@ -616,6 +633,10 @@ impl<'a> ResolverFactory<'a> {
}
}
// todo(dsherret): probably this method could be removed in the future
// and instead just `npm_resolution_initializer.ensure_initialized()` could
// be called. The reason this exists is because creating the npm resolvers
// used to be async.
async fn init_npm_resolver(
&mut self,
http_client_provider: &Arc<HttpClientProvider>,
@ -645,11 +666,31 @@ impl<'a> ResolverFactory<'a> {
cache.deno_dir().npm_folder_path(),
npmrc.get_all_known_registries_urls(),
));
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions {
http_client_provider: http_client_provider.clone(),
// only used for top level install, so we can ignore this
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()),
snapshot: match self.config_data.and_then(|d| d.lockfile.as_ref()) {
let npm_cache = Arc::new(CliNpmCache::new(
npm_cache_dir.clone(),
sys.clone(),
// Use an "only" cache setting in order to make the
// user do an explicit "cache" command and prevent
// the cache from being filled with lots of packages while
// the user is typing.
deno_npm_cache::NpmCacheSetting::Only,
npmrc.clone(),
));
let pb = ProgressBar::new(ProgressBarStyle::TextOnly);
let npm_client = Arc::new(CliNpmCacheHttpClient::new(
http_client_provider.clone(),
pb.clone(),
));
let registry_info_provider = Arc::new(CliNpmRegistryInfoProvider::new(
npm_cache.clone(),
npm_client.clone(),
npmrc.clone(),
));
let npm_resolution = Arc::new(NpmResolutionCell::default());
let npm_resolution_initializer = Arc::new(NpmResolutionInitializer::new(
registry_info_provider.clone(),
npm_resolution.clone(),
match self.config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
@ -657,26 +698,62 @@ impl<'a> ResolverFactory<'a> {
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
},
));
// Don't provide the lockfile. We don't want these resolvers
// updating it. Only the cache request should update the lockfile.
let maybe_lockfile: Option<Arc<CliLockfile>> = None;
let maybe_node_modules_path =
self.config_data.and_then(|d| d.node_modules_dir.clone());
let tarball_cache = Arc::new(TarballCache::new(
npm_cache.clone(),
npm_client.clone(),
sys.clone(),
npmrc.clone(),
));
let npm_resolution_installer = Arc::new(NpmResolutionInstaller::new(
registry_info_provider,
npm_resolution.clone(),
maybe_lockfile.clone(),
));
let npm_installer = Arc::new(NpmInstaller::new(
npm_cache.clone(),
Arc::new(NpmInstallDepsProvider::empty()),
npm_resolution.clone(),
npm_resolution_initializer.clone(),
npm_resolution_installer,
&pb,
sys.clone(),
tarball_cache.clone(),
maybe_lockfile,
maybe_node_modules_path.clone(),
LifecycleScriptsConfig::default(),
NpmSystemInfo::default(),
));
self.set_npm_installer(npm_installer);
// spawn due to the lsp's `Send` requirement
deno_core::unsync::spawn(async move {
if let Err(err) = npm_resolution_initializer.ensure_initialized().await
{
log::warn!("failed to initialize npm resolution: {}", err);
}
})
.await
.unwrap();
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions {
sys: CliSys::default(),
npm_cache_dir,
// Use an "only" cache setting in order to make the
// user do an explicit "cache" command and prevent
// the cache from being filled with lots of packages while
// the user is typing.
cache_setting: CacheSetting::Only,
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly),
// Don't provide the lockfile. We don't want these resolvers
// updating it. Only the cache request should update the lockfile.
maybe_lockfile: None,
maybe_node_modules_path: self
.config_data
.and_then(|d| d.node_modules_dir.clone()),
maybe_node_modules_path,
npmrc,
npm_resolution,
npm_system_info: NpmSystemInfo::default(),
lifecycle_scripts: Default::default(),
})
};
self.set_npm_resolver(create_cli_npm_resolver_for_lsp(options).await);
self.set_npm_resolver(create_cli_npm_resolver(options));
}
pub fn set_npm_installer(&mut self, npm_installer: Arc<NpmInstaller>) {
self.services.npm_installer = Some(npm_installer);
}
pub fn set_npm_resolver(&mut self, npm_resolver: Arc<dyn CliNpmResolver>) {
@ -720,13 +797,27 @@ impl<'a> ResolverFactory<'a> {
is_byonm: self.config_data.map(|d| d.byonm).unwrap_or(false),
maybe_vendor_dir: self.config_data.and_then(|d| d.vendor_dir.as_ref()),
}));
Arc::new(CliResolver::new(CliResolverOptions {
Arc::new(CliResolver::new(
deno_resolver,
npm_resolver: self.npm_resolver().cloned(),
bare_node_builtins_enabled: self
self.services.found_pkg_json_dep_flag.clone(),
))
})
}
pub fn npm_installer(&self) -> Option<&Arc<NpmInstaller>> {
self.services.npm_installer.as_ref()
}
pub fn npm_graph_resolver(&self) -> &Arc<CliNpmGraphResolver> {
self.services.npm_graph_resolver.get_or_init(|| {
Arc::new(CliNpmGraphResolver::new(
None,
self.services.found_pkg_json_dep_flag.clone(),
self
.config_data
.is_some_and(|d| d.unstable.contains("bare-node-builtins")),
}))
NpmCachingStrategy::Eager,
))
})
}
@ -736,14 +827,14 @@ impl<'a> ResolverFactory<'a> {
pub fn in_npm_pkg_checker(&self) -> &Arc<dyn InNpmPackageChecker> {
self.services.in_npm_pkg_checker.get_or_init(|| {
crate::npm::create_in_npm_pkg_checker(
deno_resolver::npm::create_in_npm_pkg_checker(
match self.services.npm_resolver.as_ref().map(|r| r.as_inner()) {
Some(crate::npm::InnerCliNpmResolverRef::Byonm(_)) | None => {
CreateInNpmPkgCheckerOptions::Byonm
}
Some(crate::npm::InnerCliNpmResolverRef::Managed(m)) => {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
ManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.maybe_node_modules_path(),
},
@ -783,6 +874,7 @@ impl<'a> ResolverFactory<'a> {
npm_resolver.clone().into_npm_pkg_folder_resolver(),
self.pkg_json_resolver.clone(),
self.sys.clone(),
node_resolver::ConditionsFromResolutionMode::default(),
)))
})
.as_ref()
@ -796,10 +888,9 @@ impl<'a> ResolverFactory<'a> {
let node_resolver = self.node_resolver()?;
let npm_resolver = self.npm_resolver()?;
Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
node_resolver: node_resolver.clone(),
npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
npm_resolver: npm_resolver.clone().into_byonm_or_managed(),
sys: self.sys.clone(),
})))
})
View file
@ -2,8 +2,8 @@
use std::collections::HashMap;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_error::JsErrorBox;
use dissimilar::diff;
use dissimilar::Chunk;
use text_size::TextRange;
@ -137,7 +137,7 @@ impl LineIndex {
if let Some(line_offset) = self.utf8_offsets.get(position.line as usize) {
Ok(line_offset + col)
} else {
Err(custom_error("OutOfRange", "The position is out of range."))
Err(JsErrorBox::new("OutOfRange", "The position is out of range.").into())
}
}
@ -157,7 +157,7 @@ impl LineIndex {
if let Some(line_offset) = self.utf16_offsets.get(position.line as usize) {
Ok(line_offset + TextSize::from(position.character))
} else {
Err(custom_error("OutOfRange", "The position is out of range."))
Err(JsErrorBox::new("OutOfRange", "The position is out of range.").into())
}
}
View file
@ -20,7 +20,6 @@ use deno_core::anyhow::Context as _;
use deno_core::convert::Smi;
use deno_core::convert::ToV8;
use deno_core::error::AnyError;
use deno_core::error::StdAnyError;
use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
@ -3973,6 +3972,11 @@ impl CompletionEntry {
if let Some(mut new_specifier) = import_mapper
.check_specifier(&import_data.normalized, specifier)
.or_else(|| relative_specifier(specifier, &import_data.normalized))
.or_else(|| {
ModuleSpecifier::parse(&import_data.raw.module_specifier)
.is_ok()
.then(|| import_data.normalized.to_string())
})
{
if new_specifier.contains("/node_modules/") {
return None;
@ -4331,7 +4335,7 @@ impl TscSpecifierMap {
pub fn normalize<S: AsRef<str>>(
&self,
specifier: S,
) -> Result<ModuleSpecifier, AnyError> {
) -> Result<ModuleSpecifier, deno_core::url::ParseError> {
let original = specifier.as_ref();
if let Some(specifier) = self.normalized_specifiers.get(original) {
return Ok(specifier.clone());
@ -4339,7 +4343,7 @@ impl TscSpecifierMap {
let specifier_str = original.replace(".d.ts.d.ts", ".d.ts");
let specifier = match ModuleSpecifier::parse(&specifier_str) {
Ok(s) => s,
Err(err) => return Err(err.into()),
Err(err) => return Err(err),
};
if specifier.as_str() != original {
self
@ -4437,6 +4441,16 @@ fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool {
r
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
enum LoadError {
#[error("{0}")]
#[class(inherit)]
UrlParse(#[from] deno_core::url::ParseError),
#[error("{0}")]
#[class(inherit)]
SerdeV8(#[from] serde_v8::Error),
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct LoadResponse {
@ -4451,7 +4465,7 @@ fn op_load<'s>(
scope: &'s mut v8::HandleScope,
state: &mut OpState,
#[string] specifier: &str,
) -> Result<v8::Local<'s, v8::Value>, AnyError> {
) -> Result<v8::Local<'s, v8::Value>, LoadError> {
let state = state.borrow_mut::<State>();
let mark = state
.performance
@ -4482,7 +4496,7 @@ fn op_load<'s>(
fn op_release(
state: &mut OpState,
#[string] specifier: &str,
) -> Result<(), AnyError> {
) -> Result<(), deno_core::url::ParseError> {
let state = state.borrow_mut::<State>();
let mark = state
.performance
@ -4495,11 +4509,12 @@ fn op_release(
#[op2]
#[serde]
#[allow(clippy::type_complexity)]
fn op_resolve(
state: &mut OpState,
#[string] base: String,
#[serde] specifiers: Vec<(bool, String)>,
) -> Result<Vec<Option<(String, String)>>, AnyError> {
) -> Result<Vec<Option<(String, Option<String>)>>, deno_core::url::ParseError> {
op_resolve_inner(state, ResolveArgs { base, specifiers })
}
@ -4511,7 +4526,7 @@ struct TscRequestArray {
}
impl<'a> ToV8<'a> for TscRequestArray {
type Error = StdAnyError;
type Error = serde_v8::Error;
fn to_v8(
self,
@ -4526,9 +4541,7 @@ impl<'a> ToV8<'a> for TscRequestArray {
.unwrap()
.into();
let args = args.unwrap_or_else(|| v8::Array::new(scope, 0).into());
let scope_url = serde_v8::to_v8(scope, self.scope)
.map_err(AnyError::from)
.map_err(StdAnyError::from)?;
let scope_url = serde_v8::to_v8(scope, self.scope)?;
let change = self.change.to_v8(scope).unwrap_infallible();
@ -4583,10 +4596,11 @@ async fn op_poll_requests(
}
#[inline]
#[allow(clippy::type_complexity)]
fn op_resolve_inner(
state: &mut OpState,
args: ResolveArgs,
) -> Result<Vec<Option<(String, String)>>, AnyError> {
) -> Result<Vec<Option<(String, Option<String>)>>, deno_core::url::ParseError> {
let state = state.borrow_mut::<State>();
let mark = state.performance.mark_with_args("tsc.op.op_resolve", &args);
let referrer = state.specifier_map.normalize(&args.base)?;
@ -4599,7 +4613,11 @@ fn op_resolve_inner(
o.map(|(s, mt)| {
(
state.specifier_map.denormalize(&s),
mt.as_ts_extension().to_string(),
if matches!(mt, MediaType::Unknown) {
None
} else {
Some(mt.as_ts_extension().to_string())
},
)
})
})
@ -4743,7 +4761,7 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
fn op_script_version(
state: &mut OpState,
#[string] specifier: &str,
) -> Result<Option<String>, AnyError> {
) -> Result<Option<String>, deno_core::url::ParseError> {
let state = state.borrow_mut::<State>();
let mark = state.performance.mark("tsc.op.op_script_version");
let specifier = state.specifier_map.normalize(specifier)?;
@ -5398,7 +5416,8 @@ impl TscRequest {
fn to_server_request<'s>(
&self,
scope: &mut v8::HandleScope<'s>,
) -> Result<(&'static str, Option<v8::Local<'s, v8::Value>>), AnyError> {
) -> Result<(&'static str, Option<v8::Local<'s, v8::Value>>), serde_v8::Error>
{
let args = match self {
TscRequest::GetDiagnostics(args) => {
("$getDiagnostics", Some(serde_v8::to_v8(scope, args)?))
@ -5570,7 +5589,6 @@ mod tests {
})
.to_string(),
temp_dir.url().join("deno.json").unwrap(),
&Default::default(),
)
.unwrap(),
)
@ -6448,7 +6466,7 @@ mod tests {
resolved,
vec![Some((
temp_dir.url().join("b.ts").unwrap().to_string(),
MediaType::TypeScript.as_ts_extension().to_string()
Some(MediaType::TypeScript.as_ts_extension().to_string())
))]
);
}
View file
@ -282,24 +282,26 @@ impl LspUrlMap {
}
}
/// Convert a e.g. `deno-notebook-cell:` specifier to a `file:` specifier.
/// Convert a e.g. `vscode-notebook-cell:` specifier to a `file:` specifier.
/// ```rust
/// assert_eq!(
/// file_like_to_file_specifier(
/// &Url::parse("deno-notebook-cell:/path/to/file.ipynb#abc").unwrap(),
/// &Url::parse("vscode-notebook-cell:/path/to/file.ipynb#abc").unwrap(),
/// ),
/// Some(Url::parse("file:///path/to/file.ipynb.ts?scheme=deno-notebook-cell#abc").unwrap()),
/// Some(Url::parse("file:///path/to/file.ipynb?scheme=untitled#abc").unwrap()),
/// );
fn file_like_to_file_specifier(specifier: &Url) -> Option<Url> {
if matches!(specifier.scheme(), "untitled" | "deno-notebook-cell") {
if matches!(
specifier.scheme(),
"untitled" | "vscode-notebook-cell" | "deno-notebook-cell"
) {
if let Ok(mut s) = ModuleSpecifier::parse(&format!(
"file://{}",
"file:///{}",
&specifier.as_str()[deno_core::url::quirks::internal_components(specifier)
.host_end as usize..],
.host_end as usize..].trim_start_matches('/'),
)) {
s.query_pairs_mut()
.append_pair("scheme", specifier.scheme());
s.set_path(&format!("{}.ts", s.path()));
return Some(s);
}
}
@ -432,11 +434,11 @@ mod tests {
fn test_file_like_to_file_specifier() {
assert_eq!(
file_like_to_file_specifier(
&Url::parse("deno-notebook-cell:/path/to/file.ipynb#abc").unwrap(),
&Url::parse("vscode-notebook-cell:/path/to/file.ipynb#abc").unwrap(),
),
Some(
Url::parse(
"file:///path/to/file.ipynb.ts?scheme=deno-notebook-cell#abc"
"file:///path/to/file.ipynb?scheme=vscode-notebook-cell#abc"
)
.unwrap()
),
@ -446,8 +448,7 @@ mod tests {
&Url::parse("untitled:/path/to/file.ipynb#123").unwrap(),
),
Some(
Url::parse("file:///path/to/file.ipynb.ts?scheme=untitled#123")
.unwrap()
Url::parse("file:///path/to/file.ipynb?scheme=untitled#123").unwrap()
),
);
}
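`file_like_to_file_specifier` above maps notebook-cell and `untitled:` specifiers onto `file:` URLs, remembering the original scheme in a query parameter and keeping the fragment. A simplified sketch of that conversion, assuming the `url` crate and skipping the host-trimming edge case handled in the real function:

```rust
use url::Url;

// Simplified sketch; the real code also accepts `deno-notebook-cell:` and
// trims the host component via url internals.
fn file_like_to_file_specifier(specifier: &Url) -> Option<Url> {
    if !matches!(specifier.scheme(), "untitled" | "vscode-notebook-cell") {
        return None;
    }
    // Rebuild as a file: URL, remembering the original scheme in the query
    // string and preserving the fragment.
    let mut out = Url::parse(&format!(
        "file:///{}",
        specifier.path().trim_start_matches('/')
    ))
    .ok()?;
    out.query_pairs_mut().append_pair("scheme", specifier.scheme());
    out.set_fragment(specifier.fragment());
    Some(out)
}

fn main() {
    let cell = Url::parse("vscode-notebook-cell:/path/to/file.ipynb#abc").unwrap();
    assert_eq!(
        file_like_to_file_specifier(&cell).unwrap().as_str(),
        "file:///path/to/file.ipynb?scheme=vscode-notebook-cell#abc"
    );
}
```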
View file
@ -4,7 +4,6 @@ mod args;
mod cache;
mod cdp;
mod emit;
mod errors;
mod factory;
mod file_fetcher;
mod graph_container;
@ -38,10 +37,9 @@ use std::sync::Arc;
use args::TaskFlags;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::error::JsError;
use deno_core::error::CoreError;
use deno_core::futures::FutureExt;
use deno_core::unsync::JoinHandle;
use deno_npm::resolution::SnapshotFromLockfileError;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::fmt_errors::format_js_error;
@ -53,6 +51,7 @@ use factory::CliFactory;
use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;
use self::npm::ResolveSnapshotError;
use crate::args::flags_from_vec;
use crate::args::DenoSubcommand;
use crate::args::Flags;
@ -202,7 +201,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
match result {
Ok(v) => Ok(v),
Err(script_err) => {
if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = script_err.downcast_ref::<ResolvePkgFolderFromDenoReqError>() {
if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = util::result::any_and_jserrorbox_downcast_ref::<ResolvePkgFolderFromDenoReqError>(&script_err) {
if flags.node_modules_dir.is_none() {
let mut flags = flags.deref().clone();
let watch = match &flags.subcommand {
@ -373,13 +372,19 @@ fn exit_for_error(error: AnyError) -> ! {
let mut error_string = format!("{error:?}");
let mut error_code = 1;
if let Some(e) = error.downcast_ref::<JsError>() {
error_string = format_js_error(e);
} else if let Some(SnapshotFromLockfileError::IntegrityCheckFailed(e)) =
error.downcast_ref::<SnapshotFromLockfileError>()
if let Some(CoreError::Js(e)) =
util::result::any_and_jserrorbox_downcast_ref::<CoreError>(&error)
{
error_string = e.to_string();
error_code = 10;
error_string = format_js_error(e);
} else if let Some(e @ ResolveSnapshotError { .. }) =
util::result::any_and_jserrorbox_downcast_ref::<ResolveSnapshotError>(
&error,
)
{
if let Some(e) = e.maybe_integrity_check_error() {
error_string = e.to_string();
error_code = 10;
}
}
exit_with_message(&error_string, error_code);
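`exit_for_error` above picks the exit code by downcasting the dynamic error to known types: a JS error gets pretty-printed, and an npm lockfile integrity failure maps to exit code 10. A rough sketch of that classification using plain `anyhow` downcasts and a hypothetical error type (the real code goes through `any_and_jserrorbox_downcast_ref` so it can also look inside a `JsErrorBox`):

```rust
use anyhow::anyhow;
use thiserror::Error;

// Hypothetical stand-in for the lockfile integrity-check failure.
#[derive(Debug, Error)]
#[error("integrity check failed")]
struct IntegrityCheckError;

// Map an opaque error to a process exit code: 10 for integrity failures,
// 1 for everything else.
fn exit_code_for(error: &anyhow::Error) -> i32 {
    if error.downcast_ref::<IntegrityCheckError>().is_some() {
        10
    } else {
        1
    }
}

fn main() {
    assert_eq!(exit_code_for(&anyhow!("some js error")), 1);
    assert_eq!(exit_code_for(&anyhow::Error::new(IntegrityCheckError)), 10);
    println!("exit code mapping checks passed");
}
```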
View file
@ -10,7 +10,6 @@ mod standalone;
mod args;
mod cache;
mod emit;
mod errors;
mod file_fetcher;
mod http_util;
mod js;
@ -30,8 +29,8 @@ use std::env;
use std::env::current_exe;
use std::sync::Arc;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::error::JsError;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
@ -41,6 +40,7 @@ use indexmap::IndexMap;
use standalone::DenoCompileFileSystem;
use crate::args::Flags;
use crate::util::result::any_and_jserrorbox_downcast_ref;
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
log::error!(
@ -65,8 +65,10 @@ fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T {
Err(error) => {
let mut error_string = format!("{:?}", error);
if let Some(e) = error.downcast_ref::<JsError>() {
error_string = format_js_error(e);
if let Some(CoreError::Js(js_error)) =
any_and_jserrorbox_downcast_ref::<CoreError>(&error)
{
error_string = format_js_error(js_error);
}
exit_with_message(&error_string, 1);
View file
@ -14,11 +14,9 @@ use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::ModuleLoaderError;
use deno_core::futures::future::FutureExt;
use deno_core::futures::Future;
use deno_core::parking_lot::Mutex;
@ -31,6 +29,8 @@ use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::SourceCodeCacheInfo;
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;
use deno_graph::GraphKind;
use deno_graph::JsModule;
use deno_graph::JsonModule;
@ -59,7 +59,6 @@ use crate::cache::CodeCache;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::errors::get_module_error_class;
use crate::graph_container::MainModuleGraphContainer;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
@ -79,7 +78,7 @@ use crate::resolver::NotSupportedKindInNpmError;
use crate::resolver::NpmModuleLoader;
use crate::sys::CliSys;
use crate::tools::check;
use crate::tools::check::MaybeDiagnostics;
use crate::tools::check::CheckError;
use crate::tools::check::TypeChecker;
use crate::util::progress_bar::ProgressBar;
use crate::util::text_encoding::code_without_source_map;
@ -87,6 +86,21 @@ use crate::util::text_encoding::source_map_from_code;
use crate::worker::CreateModuleLoaderResult;
use crate::worker::ModuleLoaderFactory;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum PrepareModuleLoadError {
#[class(inherit)]
#[error(transparent)]
BuildGraphWithNpmResolution(
#[from] crate::graph_util::BuildGraphWithNpmResolutionError,
),
#[class(inherit)]
#[error(transparent)]
Check(#[from] CheckError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
pub struct ModuleLoadPreparer {
options: Arc<CliOptions>,
lockfile: Option<Arc<CliLockfile>>,
@ -126,7 +140,7 @@ impl ModuleLoadPreparer {
lib: TsTypeLib,
permissions: PermissionsContainer,
ext_overwrite: Option<&String>,
) -> Result<(), MaybeDiagnostics> {
) -> Result<(), PrepareModuleLoadError> {
log::debug!("Preparing module load.");
let _pb_clear_guard = self.progress_bar.clear_guard();
@ -207,7 +221,7 @@ impl ModuleLoadPreparer {
&self,
graph: &ModuleGraph,
roots: &[ModuleSpecifier],
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
self.module_graph_builder.graph_roots_valid(graph, roots)
}
}
@ -424,7 +438,7 @@ impl<TGraphContainer: ModuleGraphContainer>
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
requested_module_type: RequestedModuleType,
) -> Result<ModuleSource, AnyError> {
) -> Result<ModuleSource, ModuleLoaderError> {
let code_source = self.load_code_source(specifier, maybe_referrer).await?;
let code = if self.shared.is_inspecting
|| code_source.media_type == MediaType::Wasm
@ -447,7 +461,7 @@ impl<TGraphContainer: ModuleGraphContainer>
if module_type == ModuleType::Json
&& requested_module_type != RequestedModuleType::Json
{
return Err(generic_error("Attempted to load JSON module without specifying \"type\": \"json\" attribute in the import statement."));
return Err(JsErrorBox::generic("Attempted to load JSON module without specifying \"type\": \"json\" attribute in the import statement.").into());
}
let code_cache = if module_type == ModuleType::JavaScript {
@ -508,7 +522,7 @@ impl<TGraphContainer: ModuleGraphContainer>
fn resolve_referrer(
&self,
referrer: &str,
) -> Result<ModuleSpecifier, AnyError> {
) -> Result<ModuleSpecifier, ModuleLoaderError> {
let referrer = if referrer.is_empty() && self.shared.is_repl {
// FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL
// and `Deno.core.evalContext` API. Ideally we should always have a referrer filled
@ -534,7 +548,7 @@ impl<TGraphContainer: ModuleGraphContainer>
&self,
raw_specifier: &str,
referrer: &ModuleSpecifier,
) -> Result<ModuleSpecifier, AnyError> {
) -> Result<ModuleSpecifier, ModuleLoaderError> {
let graph = self.graph_container.graph();
let resolution = match graph.get(referrer) {
Some(Module::Js(module)) => module
@ -548,19 +562,25 @@ impl<TGraphContainer: ModuleGraphContainer>
let specifier = match resolution {
Resolution::Ok(resolved) => Cow::Borrowed(&resolved.specifier),
Resolution::Err(err) => {
return Err(custom_error(
"TypeError",
format!("{}\n", err.to_string_with_range()),
));
return Err(
JsErrorBox::type_error(format!("{}\n", err.to_string_with_range()))
.into(),
);
}
Resolution::None => Cow::Owned(self.shared.resolver.resolve(
raw_specifier,
referrer,
deno_graph::Position::zeroed(),
// if we're here, that means it's resolving a dynamic import
ResolutionMode::Import,
NodeResolutionKind::Execution,
)?),
Resolution::None => Cow::Owned(
self
.shared
.resolver
.resolve(
raw_specifier,
referrer,
deno_graph::Position::zeroed(),
// if we're here, that means it's resolving a dynamic import
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.map_err(JsErrorBox::from_err)?,
),
};
if self.shared.is_repl {
@ -575,7 +595,7 @@ impl<TGraphContainer: ModuleGraphContainer>
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.map_err(AnyError::from);
.map_err(|e| JsErrorBox::from_err(e).into());
}
}
@ -586,7 +606,8 @@ impl<TGraphContainer: ModuleGraphContainer>
.npm_resolver
.as_managed()
.unwrap() // byonm won't create a Module::Npm
.resolve_pkg_folder_from_deno_module(module.nv_reference.nv())?;
.resolve_pkg_folder_from_deno_module(module.nv_reference.nv())
.map_err(JsErrorBox::from_err)?;
self
.shared
.node_resolver
@ -702,7 +723,7 @@ impl<TGraphContainer: ModuleGraphContainer>
&self,
graph: &'graph ModuleGraph,
specifier: &ModuleSpecifier,
) -> Result<Option<CodeOrDeferredEmit<'graph>>, AnyError> {
) -> Result<Option<CodeOrDeferredEmit<'graph>>, JsErrorBox> {
if specifier.scheme() == "node" {
// Node built-in modules should be handled internally.
unreachable!("Deno bug. {} was misconfigured internally.", specifier);
@ -711,8 +732,8 @@ impl<TGraphContainer: ModuleGraphContainer>
let maybe_module = match graph.try_get(specifier) {
Ok(module) => module,
Err(err) => {
return Err(custom_error(
get_module_error_class(err),
return Err(JsErrorBox::new(
err.get_class(),
enhance_graph_error(
&self.shared.sys,
&ModuleGraphError::ModuleError(err.clone()),
@ -740,11 +761,12 @@ impl<TGraphContainer: ModuleGraphContainer>
is_script,
..
})) => {
if self.shared.cjs_tracker.is_cjs_with_known_is_script(
specifier,
*media_type,
*is_script,
)? {
if self
.shared
.cjs_tracker
.is_cjs_with_known_is_script(specifier, *media_type, *is_script)
.map_err(JsErrorBox::from_err)?
{
return Ok(Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type: *media_type,
@ -876,16 +898,16 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
specifier: &str,
referrer: &str,
_kind: deno_core::ResolutionKind,
) -> Result<ModuleSpecifier, AnyError> {
) -> Result<ModuleSpecifier, ModuleLoaderError> {
fn ensure_not_jsr_non_jsr_remote_import(
specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
if referrer.as_str().starts_with(jsr_url().as_str())
&& !specifier.as_str().starts_with(jsr_url().as_str())
&& matches!(specifier.scheme(), "http" | "https")
{
bail!("Importing {} blocked. JSR packages cannot import non-JSR remote modules for security reasons.", specifier);
return Err(JsErrorBox::generic(format!("Importing {} blocked. JSR packages cannot import non-JSR remote modules for security reasons.", specifier)));
}
Ok(())
}
@ -938,7 +960,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
specifier: &ModuleSpecifier,
_maybe_referrer: Option<String>,
is_dynamic: bool,
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
) -> Pin<Box<dyn Future<Output = Result<(), ModuleLoaderError>>>> {
self.0.shared.in_flight_loads_tracker.increase();
if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) {
return Box::pin(deno_core::futures::future::ready(Ok(())));
@ -987,7 +1009,8 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
permissions,
None,
)
.await?;
.await
.map_err(JsErrorBox::from_err)?;
update_permit.commit();
Ok(())
}
@ -1131,35 +1154,37 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
&self,
permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
path: &'a Path,
) -> Result<std::borrow::Cow<'a, Path>, AnyError> {
) -> Result<Cow<'a, Path>, JsErrorBox> {
if let Ok(url) = deno_path_util::url_from_file_path(path) {
// allow reading if it's in the module graph
if self.graph_container.graph().get(&url).is_some() {
return Ok(std::borrow::Cow::Borrowed(path));
return Ok(Cow::Borrowed(path));
}
}
self
.npm_registry_permission_checker
.ensure_read_permission(permissions, path)
.map_err(JsErrorBox::from_err)
}
fn load_text_file_lossy(
&self,
path: &Path,
) -> Result<Cow<'static, str>, AnyError> {
) -> Result<Cow<'static, str>, JsErrorBox> {
// todo(dsherret): use the preloaded module from the graph if available?
let media_type = MediaType::from_path(path);
let text = self.sys.fs_read_to_string_lossy(path)?;
let text = self
.sys
.fs_read_to_string_lossy(path)
.map_err(JsErrorBox::from_err)?;
if media_type.is_emittable() {
let specifier = deno_path_util::url_from_file_path(path)?;
let specifier = deno_path_util::url_from_file_path(path)
.map_err(JsErrorBox::from_err)?;
if self.in_npm_pkg_checker.in_npm_package(&specifier) {
return Err(
NotSupportedKindInNpmError {
media_type,
specifier,
}
.into(),
);
return Err(JsErrorBox::from_err(NotSupportedKindInNpmError {
media_type,
specifier,
}));
}
self
.emitter
@ -1173,6 +1198,7 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
&text.into(),
)
.map(Cow::Owned)
.map_err(JsErrorBox::from_err)
} else {
Ok(text)
}

View file

@ -1,13 +1,17 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::CliNpmReqResolver;
use deno_resolver::npm::ByonmOrManagedNpmResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageReq;
use node_resolver::NpmPackageFolderResolver;
use super::CliNpmResolver;
@ -44,18 +48,16 @@ impl CliNpmResolver for CliByonmNpmResolver {
self
}
fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> {
self
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
Arc::new(CliByonmWrapper(self))
}
fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> {
Some(self)
fn into_byonm_or_managed(
self: Arc<Self>,
) -> ByonmOrManagedNpmResolver<CliSys> {
ByonmOrManagedNpmResolver::Byonm(self)
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
@ -75,4 +77,14 @@ impl CliNpmResolver for CliByonmNpmResolver {
// so we just return None to signify check caching is not supported
None
}
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
self
.resolve_pkg_folder_from_deno_module_req(req, referrer)
.map_err(ResolvePkgFolderFromDenoReqError::Byonm)
}
}

View file

@ -6,12 +6,9 @@ use std::collections::VecDeque;
use std::path::Path;
use std::path::PathBuf;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use crate::npm::managed::NpmResolutionPackage;
use deno_npm::NpmResolutionPackage;
#[derive(Default)]
pub struct BinEntries<'a> {
@ -50,6 +47,48 @@ pub fn warn_missing_entrypoint(
);
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum BinEntriesError {
#[class(inherit)]
#[error("Creating '{path}'")]
Creating {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[cfg(unix)]
#[class(inherit)]
#[error("Setting permissions on '{path}'")]
Permissions {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error("Can't set up '{name}' bin at {path}")]
SetUpBin {
name: String,
path: PathBuf,
#[source]
#[inherit]
source: Box<Self>,
},
#[cfg(unix)]
#[class(inherit)]
#[error("Setting permissions on '{path}'")]
RemoveBinSymlink {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
}
impl<'a> BinEntries<'a> {
pub fn new() -> Self {
Self::default()
@ -92,15 +131,15 @@ impl<'a> BinEntries<'a> {
mut already_seen: impl FnMut(
&Path,
&str, // bin script
) -> Result<(), AnyError>,
) -> Result<(), BinEntriesError>,
mut new: impl FnMut(
&NpmResolutionPackage,
&Path,
&str, // bin name
&str, // bin script
) -> Result<(), AnyError>,
) -> Result<(), BinEntriesError>,
mut filter: impl FnMut(&NpmResolutionPackage) -> bool,
) -> Result<(), AnyError> {
) -> Result<(), BinEntriesError> {
if !self.collisions.is_empty() && !self.sorted {
// walking the dependency tree to find out the depth of each package
// is sort of expensive, so we only do it if there's a collision
@ -168,11 +207,14 @@ impl<'a> BinEntries<'a> {
bin_node_modules_dir_path: &Path,
filter: impl FnMut(&NpmResolutionPackage) -> bool,
mut handler: impl FnMut(&EntrySetupOutcome<'_>),
) -> Result<(), AnyError> {
) -> Result<(), BinEntriesError> {
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
|| format!("Creating '{}'", bin_node_modules_dir_path.display()),
)?;
std::fs::create_dir_all(bin_node_modules_dir_path).map_err(|source| {
BinEntriesError::Creating {
path: bin_node_modules_dir_path.to_path_buf(),
source,
}
})?;
}
self.for_each_entry(
@ -209,7 +251,7 @@ impl<'a> BinEntries<'a> {
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
handler: impl FnMut(&EntrySetupOutcome<'_>),
) -> Result<(), AnyError> {
) -> Result<(), BinEntriesError> {
self.set_up_entries_filtered(
snapshot,
bin_node_modules_dir_path,
@ -226,7 +268,7 @@ impl<'a> BinEntries<'a> {
bin_node_modules_dir_path: &Path,
handler: impl FnMut(&EntrySetupOutcome<'_>),
only: &HashSet<&NpmPackageId>,
) -> Result<(), AnyError> {
) -> Result<(), BinEntriesError> {
self.set_up_entries_filtered(
snapshot,
bin_node_modules_dir_path,
@ -301,7 +343,7 @@ pub fn set_up_bin_entry<'a>(
#[allow(unused_variables)] bin_script: &str,
#[allow(unused_variables)] package_path: &'a Path,
bin_node_modules_dir_path: &Path,
) -> Result<EntrySetupOutcome<'a>, AnyError> {
) -> Result<EntrySetupOutcome<'a>, BinEntriesError> {
#[cfg(windows)]
{
set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?;
@ -324,14 +366,16 @@ fn set_up_bin_shim(
package: &NpmResolutionPackage,
bin_name: &str,
bin_node_modules_dir_path: &Path,
) -> Result<(), AnyError> {
) -> Result<(), BinEntriesError> {
use std::fs;
let mut cmd_shim = bin_node_modules_dir_path.join(bin_name);
cmd_shim.set_extension("cmd");
let shim = format!("@deno run -A npm:{}/{bin_name} %*", package.id.nv);
fs::write(&cmd_shim, shim).with_context(|| {
format!("Can't set up '{}' bin at {}", bin_name, cmd_shim.display())
fs::write(&cmd_shim, shim).map_err(|err| BinEntriesError::SetUpBin {
name: bin_name.to_string(),
path: cmd_shim.clone(),
source: Box::new(err.into()),
})?;
Ok(())
@ -340,7 +384,7 @@ fn set_up_bin_shim(
#[cfg(unix)]
/// Make the file at `path` executable if it exists.
/// Returns `true` if the file exists, `false` otherwise.
fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
fn make_executable_if_exists(path: &Path) -> Result<bool, BinEntriesError> {
use std::io;
use std::os::unix::fs::PermissionsExt;
let mut perms = match std::fs::metadata(path) {
@ -355,8 +399,11 @@ fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
if perms.mode() & 0o111 == 0 {
// if the original file is not executable, make it executable
perms.set_mode(perms.mode() | 0o111);
std::fs::set_permissions(path, perms).with_context(|| {
format!("Setting permissions on '{}'", path.display())
std::fs::set_permissions(path, perms).map_err(|source| {
BinEntriesError::Permissions {
path: path.to_path_buf(),
source,
}
})?;
}
@ -395,14 +442,18 @@ fn symlink_bin_entry<'a>(
bin_script: &str,
package_path: &'a Path,
bin_node_modules_dir_path: &Path,
) -> Result<EntrySetupOutcome<'a>, AnyError> {
) -> Result<EntrySetupOutcome<'a>, BinEntriesError> {
use std::io;
use std::os::unix::fs::symlink;
let link = bin_node_modules_dir_path.join(bin_name);
let original = package_path.join(bin_script);
let found = make_executable_if_exists(&original).with_context(|| {
format!("Can't set up '{}' bin at {}", bin_name, original.display())
let found = make_executable_if_exists(&original).map_err(|source| {
BinEntriesError::SetUpBin {
name: bin_name.to_string(),
path: original.to_path_buf(),
source: Box::new(source),
}
})?;
if !found {
return Ok(EntrySetupOutcome::MissingEntrypoint {
@ -420,27 +471,25 @@ fn symlink_bin_entry<'a>(
if let Err(err) = symlink(&original_relative, &link) {
if err.kind() == io::ErrorKind::AlreadyExists {
// remove and retry
std::fs::remove_file(&link).with_context(|| {
format!(
"Failed to remove existing bin symlink at {}",
link.display()
)
std::fs::remove_file(&link).map_err(|source| {
BinEntriesError::RemoveBinSymlink {
path: link.clone(),
source,
}
})?;
symlink(&original_relative, &link).with_context(|| {
format!(
"Can't set up '{}' bin at {}",
bin_name,
original_relative.display()
)
symlink(&original_relative, &link).map_err(|source| {
BinEntriesError::SetUpBin {
name: bin_name.to_string(),
path: original_relative.to_path_buf(),
source: Box::new(source.into()),
}
})?;
return Ok(EntrySetupOutcome::Success);
}
return Err(err).with_context(|| {
format!(
"Can't set up '{}' bin at {}",
bin_name,
original_relative.display()
)
return Err(BinEntriesError::SetUpBin {
name: bin_name.to_string(),
path: original_relative.to_path_buf(),
source: Box::new(err.into()),
});
}
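
`BinEntriesError` above (like `PrepareModuleLoadError` and the other enums added in this commit) trades `with_context` strings for structured variants. A minimal compilable sketch of that shape, assuming only `thiserror`, spelling out `.display()` for the path, and leaving out the `deno_error::JsError` derive and `#[class(...)]` attributes:

// Sketch of a structured error enum in the style used by this commit.
// Only `thiserror` is assumed; the `deno_error` derive is left out.
use std::path::{Path, PathBuf};

#[derive(Debug, thiserror::Error)]
enum InstallError {
  // Structured variant: the failing path is a field, the io::Error is its source.
  #[error("Creating '{}'", path.display())]
  Creating {
    path: PathBuf,
    #[source]
    source: std::io::Error,
  },
  // Transparent variant: forwards another error's message unchanged.
  #[error(transparent)]
  Io(#[from] std::io::Error),
}

fn create_bin_dir(path: &Path) -> Result<(), InstallError> {
  std::fs::create_dir_all(path).map_err(|source| InstallError::Creating {
    path: path.to_path_buf(),
    source,
  })
}

fn main() {
  if let Err(err) = create_bin_dir(Path::new("/definitely/not/writable/.bin")) {
    // Display shows the outer message; the io::Error stays reachable via source().
    eprintln!("{err}");
    if let Some(source) = std::error::Error::source(&err) {
      eprintln!("caused by: {source}");
    }
  }
}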

View file

@ -6,7 +6,6 @@ use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmResolutionPackage;
@ -29,7 +28,7 @@ pub trait LifecycleScriptsStrategy {
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, PathBuf)],
) -> Result<(), AnyError>;
) -> Result<(), std::io::Error>;
fn has_warned(&self, package: &NpmResolutionPackage) -> bool;
@ -38,7 +37,7 @@ pub trait LifecycleScriptsStrategy {
fn did_run_scripts(
&self,
package: &NpmResolutionPackage,
) -> Result<(), AnyError>;
) -> Result<(), std::io::Error>;
}
pub struct LifecycleScripts<'a> {
@ -84,6 +83,27 @@ fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LifecycleScriptsError {
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
#[class(inherit)]
#[error(transparent)]
BinEntries(#[from] super::bin_entries::BinEntriesError),
#[class(inherit)]
#[error(
"failed to create npm process state tempfile for running lifecycle scripts"
)]
CreateNpmProcessState(#[source] std::io::Error),
#[class(generic)]
#[error(transparent)]
Task(AnyError),
#[class(generic)]
#[error("failed to run scripts for packages: {}", .0.join(", "))]
RunScripts(Vec<String>),
}
impl<'a> LifecycleScripts<'a> {
pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
if !self.strategy.can_run_scripts() {
@ -141,7 +161,7 @@ impl<'a> LifecycleScripts<'a> {
}
}
pub fn warn_not_run_scripts(&self) -> Result<(), AnyError> {
pub fn warn_not_run_scripts(&self) -> Result<(), std::io::Error> {
if !self.packages_with_scripts_not_run.is_empty() {
self
.strategy
@ -156,7 +176,7 @@ impl<'a> LifecycleScripts<'a> {
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: &Path,
progress_bar: &ProgressBar,
) -> Result<(), AnyError> {
) -> Result<(), LifecycleScriptsError> {
let kill_signal = KillSignal::default();
let _drop_signal = kill_signal.clone().drop_guard();
// we don't run with signals forwarded because once signals
@ -179,7 +199,7 @@ impl<'a> LifecycleScripts<'a> {
root_node_modules_dir_path: &Path,
progress_bar: &ProgressBar,
kill_signal: KillSignal,
) -> Result<(), AnyError> {
) -> Result<(), LifecycleScriptsError> {
self.warn_not_run_scripts()?;
let get_package_path =
|p: &NpmResolutionPackage| self.strategy.package_path(p);
@ -198,7 +218,7 @@ impl<'a> LifecycleScripts<'a> {
snapshot,
packages,
get_package_path,
)?;
);
let init_cwd = &self.config.initial_cwd;
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
@ -222,7 +242,8 @@ impl<'a> LifecycleScripts<'a> {
let temp_file_fd =
deno_runtime::ops::process::npm_process_state_tempfile(
process_state.as_bytes(),
).context("failed to create npm process state tempfile for running lifecycle scripts")?;
)
.map_err(LifecycleScriptsError::CreateNpmProcessState)?;
// SAFETY: fd/handle is valid
let _temp_file =
unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
@ -240,7 +261,7 @@ impl<'a> LifecycleScripts<'a> {
package,
snapshot,
get_package_path,
)?;
);
for script_name in ["preinstall", "install", "postinstall"] {
if let Some(script) = package.scripts.get(script_name) {
if script_name == "install"
@ -273,7 +294,8 @@ impl<'a> LifecycleScripts<'a> {
kill_signal: kill_signal.clone(),
},
)
.await?;
.await
.map_err(LifecycleScriptsError::Task)?;
let stdout = stdout.unwrap();
let stderr = stderr.unwrap();
if exit_code != 0 {
@ -322,14 +344,12 @@ impl<'a> LifecycleScripts<'a> {
if failed_packages.is_empty() {
Ok(())
} else {
Err(AnyError::msg(format!(
"failed to run scripts for packages: {}",
Err(LifecycleScriptsError::RunScripts(
failed_packages
.iter()
.map(|p| p.to_string())
.collect::<Vec<_>>()
.join(", ")
)))
.collect::<Vec<_>>(),
))
}
}
}
@ -349,7 +369,7 @@ fn resolve_baseline_custom_commands<'a>(
snapshot: &'a NpmResolutionSnapshot,
packages: &'a [NpmResolutionPackage],
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
) -> crate::task_runner::TaskCustomCommands {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
custom_commands
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
@ -390,7 +410,7 @@ fn resolve_custom_commands_from_packages<
snapshot: &'a NpmResolutionSnapshot,
packages: P,
get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
) -> crate::task_runner::TaskCustomCommands {
for package in packages {
let package_path = get_package_path(package);
@ -409,7 +429,7 @@ fn resolve_custom_commands_from_packages<
);
}
Ok(commands)
commands
}
// resolves the custom commands from the dependencies of a package
@ -420,7 +440,7 @@ fn resolve_custom_commands_from_deps(
package: &NpmResolutionPackage,
snapshot: &NpmResolutionSnapshot,
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
) -> crate::task_runner::TaskCustomCommands {
let mut bin_entries = BinEntries::new();
resolve_custom_commands_from_packages(
&mut bin_entries,

View file

@ -0,0 +1,18 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use async_trait::async_trait;
use deno_error::JsErrorBox;
use super::PackageCaching;
pub mod bin_entries;
pub mod lifecycle_scripts;
/// Part of the resolution that interacts with the file system.
#[async_trait(?Send)]
pub trait NpmPackageFsInstaller: std::fmt::Debug + Send + Sync {
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), JsErrorBox>;
}
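
The new `NpmPackageFsInstaller` trait above is an object-safe async trait via `#[async_trait(?Send)]`. A self-contained sketch of that mechanism with a made-up `FakeInstaller` (the real trait also requires `Send + Sync` and borrows `PackageCaching<'a>`):

// Sketch of an `async_trait(?Send)` trait object. `FakeInstaller` and its
// behavior are invented for illustration.
use async_trait::async_trait;

#[derive(Debug)]
enum PackageCaching {
  All,
  Only(Vec<String>),
}

#[async_trait(?Send)]
trait PackageFsInstaller: std::fmt::Debug {
  async fn cache_packages(&self, caching: PackageCaching) -> Result<(), String>;
}

#[derive(Debug)]
struct FakeInstaller;

#[async_trait(?Send)]
impl PackageFsInstaller for FakeInstaller {
  async fn cache_packages(&self, caching: PackageCaching) -> Result<(), String> {
    // A real installer would hit the tarball cache here; we just log.
    println!("caching: {caching:?}");
    Ok(())
  }
}

fn main() {
  let installer: Box<dyn PackageFsInstaller> = Box::new(FakeInstaller);
  futures::executor::block_on(async {
    installer.cache_packages(PackageCaching::All).await.unwrap();
    installer
      .cache_packages(PackageCaching::Only(vec!["chalk".to_string()]))
      .await
      .unwrap();
  });
}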

View file

@ -1,151 +1,61 @@
// Copyright 2018-2025 the Deno authors. MIT license.
//! Code for global npm cache resolution.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_error::JsErrorBox;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use deno_resolver::npm::managed::NpmResolutionCell;
use super::super::resolution::NpmResolution;
use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
use super::common::NpmPackageFsResolver;
use super::common::NpmPackageFsInstaller;
use super::PackageCaching;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;
use crate::colors;
use crate::npm::managed::PackageCaching;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
/// Resolves packages from the global npm cache.
#[derive(Debug)]
pub struct GlobalNpmPackageResolver {
pub struct GlobalNpmPackageInstaller {
cache: Arc<CliNpmCache>,
tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
resolution: Arc<NpmResolutionCell>,
lifecycle_scripts: LifecycleScriptsConfig,
system_info: NpmSystemInfo,
}
impl GlobalNpmPackageResolver {
impl GlobalNpmPackageInstaller {
pub fn new(
cache: Arc<CliNpmCache>,
tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
resolution: Arc<NpmResolutionCell>,
lifecycle_scripts: LifecycleScriptsConfig,
system_info: NpmSystemInfo,
) -> Self {
Self {
cache,
tarball_cache,
resolution,
system_info,
lifecycle_scripts,
system_info,
}
}
}
#[async_trait(?Send)]
impl NpmPackageFsResolver for GlobalNpmPackageResolver {
fn node_modules_path(&self) -> Option<&Path> {
None
}
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let folder_id = self
.resolution
.resolve_pkg_cache_folder_id_from_pkg_id(id)?;
Some(self.cache.package_folder_for_id(&folder_id))
}
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError> {
use deno_npm::resolution::PackageNotFoundFromReferrerError;
let Some(referrer_cache_folder_id) = self
.cache
.resolve_package_folder_id_from_specifier(referrer)
else {
return Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
);
};
let resolve_result = self
.resolution
.resolve_package_from_package(name, &referrer_cache_folder_id);
match resolve_result {
Ok(pkg) => match self.maybe_package_folder(&pkg.id) {
Some(folder) => Ok(folder),
None => Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: Some(format!(
"{} -> {}",
referrer_cache_folder_id,
pkg.id.as_serialized()
)),
}
.into(),
),
},
Err(err) => match *err {
PackageNotFoundFromReferrerError::Referrer(cache_folder_id) => Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: Some(cache_folder_id.to_string()),
}
.into(),
),
PackageNotFoundFromReferrerError::Package {
name,
referrer: cache_folder_id_referrer,
} => Err(
PackageNotFoundError {
package_name: name,
referrer: referrer.clone(),
referrer_extra: Some(cache_folder_id_referrer.to_string()),
}
.into(),
),
},
}
}
fn resolve_package_cache_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageCacheFolderId>, AnyError> {
Ok(
self
.cache
.resolve_package_folder_id_from_specifier(specifier),
)
}
impl NpmPackageFsInstaller for GlobalNpmPackageInstaller {
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
let package_partitions = match caching {
PackageCaching::All => self
.resolution
@ -155,13 +65,16 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
.subset(&reqs)
.all_system_packages_partitioned(&self.system_info),
};
cache_packages(&package_partitions.packages, &self.tarball_cache).await?;
cache_packages(&package_partitions.packages, &self.tarball_cache)
.await
.map_err(JsErrorBox::from_err)?;
// create the copy package folders
for copy in package_partitions.copy_packages {
self
.cache
.ensure_copy_package(&copy.get_package_cache_folder_id())?;
.ensure_copy_package(&copy.get_package_cache_folder_id())
.map_err(JsErrorBox::from_err)?;
}
let mut lifecycle_scripts =
@ -174,7 +87,9 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
lifecycle_scripts.add(package, Cow::Borrowed(&package_folder));
}
lifecycle_scripts.warn_not_run_scripts()?;
lifecycle_scripts
.warn_not_run_scripts()
.map_err(JsErrorBox::from_err)?;
Ok(())
}
@ -183,7 +98,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
async fn cache_packages(
packages: &[NpmResolutionPackage],
tarball_cache: &Arc<CliNpmTarballCache>,
) -> Result<(), AnyError> {
) -> Result<(), deno_npm_cache::EnsurePackageError> {
let mut futures_unordered = FuturesUnordered::new();
for package in packages {
futures_unordered.push(async move {
@ -200,17 +115,17 @@ async fn cache_packages(
}
struct GlobalLifecycleScripts<'a> {
resolver: &'a GlobalNpmPackageResolver,
installer: &'a GlobalNpmPackageInstaller,
path_hash: u64,
}
impl<'a> GlobalLifecycleScripts<'a> {
fn new(resolver: &'a GlobalNpmPackageResolver, root_dir: &Path) -> Self {
fn new(installer: &'a GlobalNpmPackageInstaller, root_dir: &Path) -> Self {
let mut hasher = FastInsecureHasher::new_without_deno_version();
hasher.write(root_dir.to_string_lossy().as_bytes());
let path_hash = hasher.finish();
Self {
resolver,
installer,
path_hash,
}
}
@ -229,13 +144,13 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
false
}
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf {
self.resolver.cache.package_folder_for_nv(&package.id.nv)
self.installer.cache.package_folder_for_nv(&package.id.nv)
}
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, PathBuf)],
) -> std::result::Result<(), deno_core::anyhow::Error> {
) -> std::result::Result<(), std::io::Error> {
log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
for (package, _) in packages {
log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv)));
@ -261,7 +176,7 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
fn did_run_scripts(
&self,
_package: &NpmResolutionPackage,
) -> std::result::Result<(), deno_core::anyhow::Error> {
) -> Result<(), std::io::Error> {
Ok(())
}
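
The global installer's `cache_packages` above drives all tarball downloads through a `FuturesUnordered`. A standalone sketch of that polling loop, with a dummy `ensure_package` standing in for the tarball cache:

// Sketch of the concurrent download loop shape used by `cache_packages`.
use futures::stream::FuturesUnordered;
use futures::StreamExt;

async fn ensure_package(name: &str) -> Result<String, String> {
  // Stand-in for `tarball_cache.ensure_package(...)`.
  Ok(format!("cached {name}"))
}

async fn cache_all(packages: &[&str]) -> Result<(), String> {
  let mut futures_unordered = FuturesUnordered::new();
  for package in packages.iter().copied() {
    futures_unordered.push(async move { ensure_package(package).await });
  }
  // Poll every download concurrently, bailing out on the first error.
  while let Some(result) = futures_unordered.next().await {
    println!("{}", result?);
  }
  Ok(())
}

fn main() {
  futures::executor::block_on(cache_all(&["chalk", "picocolors"])).unwrap();
}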

View file

@ -2,7 +2,6 @@
//! Code for local node_modules resolution.
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
@ -17,40 +16,28 @@ use std::rc::Rc;
use std::sync::Arc;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::mixed_case_package_name_decode;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_path_util::fs::canonicalize_path_maybe_not_exists;
use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder;
use deno_resolver::npm::get_package_folder_id_folder_name;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_semver::package::PackageNv;
use deno_semver::StackString;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use serde::Deserialize;
use serde::Serialize;
use sys_traits::FsMetadata;
use super::super::resolution::NpmResolution;
use super::common::bin_entries;
use super::common::NpmPackageFsResolver;
use super::common::NpmPackageFsInstaller;
use super::PackageCaching;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::cache::CACHE_PERM;
use crate::colors;
use crate::npm::managed::PackageCaching;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use crate::sys::CliSys;
@ -63,31 +50,30 @@ use crate::util::progress_bar::ProgressMessagePrompt;
/// Resolver that creates a local node_modules directory
/// and resolves packages from it.
#[derive(Debug)]
pub struct LocalNpmPackageResolver {
pub struct LocalNpmPackageInstaller {
cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
resolution: Arc<NpmResolutionCell>,
sys: CliSys,
tarball_cache: Arc<CliNpmTarballCache>,
root_node_modules_path: PathBuf,
root_node_modules_url: Url,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
root_node_modules_path: PathBuf,
system_info: NpmSystemInfo,
}
impl LocalNpmPackageResolver {
impl LocalNpmPackageInstaller {
#[allow(clippy::too_many_arguments)]
pub fn new(
cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
resolution: Arc<NpmResolutionCell>,
sys: CliSys,
tarball_cache: Arc<CliNpmTarballCache>,
node_modules_folder: PathBuf,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
system_info: NpmSystemInfo,
) -> Self {
Self {
cache,
@ -96,162 +82,19 @@ impl LocalNpmPackageResolver {
resolution,
tarball_cache,
sys,
root_node_modules_url: Url::from_directory_path(&node_modules_folder)
.unwrap(),
lifecycle_scripts,
root_node_modules_path: node_modules_folder,
system_info,
lifecycle_scripts,
}
}
fn resolve_package_root(&self, path: &Path) -> PathBuf {
let mut last_found = path;
loop {
let parent = last_found.parent().unwrap();
if parent.file_name().unwrap() == "node_modules" {
return last_found.to_path_buf();
} else {
last_found = parent;
}
}
}
fn resolve_folder_for_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<PathBuf>, std::io::Error> {
let Some(relative_url) =
self.root_node_modules_url.make_relative(specifier)
else {
return Ok(None);
};
if relative_url.starts_with("../") {
return Ok(None);
}
// it's within the directory, so use it
let Some(path) = specifier.to_file_path().ok() else {
return Ok(None);
};
// Canonicalize the path so it's not pointing to the symlinked directory
// in `node_modules` directory of the referrer.
canonicalize_path_maybe_not_exists(&self.sys, &path).map(Some)
}
fn resolve_package_folder_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<PathBuf>, AnyError> {
let Some(local_path) = self.resolve_folder_for_specifier(specifier)? else {
return Ok(None);
};
let package_root_path = self.resolve_package_root(&local_path);
Ok(Some(package_root_path))
}
}
#[async_trait(?Send)]
impl NpmPackageFsResolver for LocalNpmPackageResolver {
fn node_modules_path(&self) -> Option<&Path> {
Some(self.root_node_modules_path.as_ref())
}
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let cache_folder_id = self
.resolution
.resolve_pkg_cache_folder_id_from_pkg_id(id)?;
// package is stored at:
// node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
Some(
self
.root_node_modules_path
.join(".deno")
.join(get_package_folder_id_folder_name(&cache_folder_id))
.join("node_modules")
.join(&cache_folder_id.nv.name),
)
}
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError> {
let maybe_local_path = self
.resolve_folder_for_specifier(referrer)
.map_err(|err| PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err,
})?;
let Some(local_path) = maybe_local_path else {
return Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
);
};
let package_root_path = self.resolve_package_root(&local_path);
let mut current_folder = package_root_path.as_path();
while let Some(parent_folder) = current_folder.parent() {
current_folder = parent_folder;
let node_modules_folder = if current_folder.ends_with("node_modules") {
Cow::Borrowed(current_folder)
} else {
Cow::Owned(current_folder.join("node_modules"))
};
let sub_dir = join_package_name(&node_modules_folder, name);
if self.sys.fs_is_dir_no_err(&sub_dir) {
return Ok(sub_dir);
}
if current_folder == self.root_node_modules_path {
break;
}
}
Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
)
}
fn resolve_package_cache_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageCacheFolderId>, AnyError> {
let Some(folder_path) =
self.resolve_package_folder_from_specifier(specifier)?
else {
return Ok(None);
};
// ex. project/node_modules/.deno/preact@10.24.3/node_modules/preact/
let Some(node_modules_ancestor) = folder_path
.ancestors()
.find(|ancestor| ancestor.ends_with("node_modules"))
else {
return Ok(None);
};
let Some(folder_name) =
node_modules_ancestor.parent().and_then(|p| p.file_name())
else {
return Ok(None);
};
Ok(get_package_folder_id_from_folder_name(
&folder_name.to_string_lossy(),
))
}
impl NpmPackageFsInstaller for LocalNpmPackageInstaller {
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
let snapshot = match caching {
PackageCaching::All => self.resolution.snapshot(),
PackageCaching::Only(reqs) => self.resolution.subset(&reqs),
@ -263,10 +106,12 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
&self.progress_bar,
&self.tarball_cache,
&self.root_node_modules_path,
&self.sys,
&self.system_info,
&self.lifecycle_scripts,
)
.await
.map_err(JsErrorBox::from_err)
}
}
@ -285,6 +130,38 @@ fn local_node_modules_package_contents_path(
.join(&package.id.nv.name)
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum SyncResolutionWithFsError {
#[class(inherit)]
#[error("Creating '{path}'")]
Creating {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error(transparent)]
CopyDirRecursive(#[from] crate::util::fs::CopyDirRecursiveError),
#[class(inherit)]
#[error(transparent)]
SymlinkPackageDir(#[from] SymlinkPackageDirError),
#[class(inherit)]
#[error(transparent)]
BinEntries(#[from] bin_entries::BinEntriesError),
#[class(inherit)]
#[error(transparent)]
LifecycleScripts(
#[from] super::common::lifecycle_scripts::LifecycleScriptsError,
),
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
/// Creates a pnpm style folder structure.
#[allow(clippy::too_many_arguments)]
async fn sync_resolution_with_fs(
@ -294,9 +171,10 @@ async fn sync_resolution_with_fs(
progress_bar: &ProgressBar,
tarball_cache: &Arc<CliNpmTarballCache>,
root_node_modules_dir_path: &Path,
sys: &CliSys,
system_info: &NpmSystemInfo,
lifecycle_scripts: &LifecycleScriptsConfig,
) -> Result<(), AnyError> {
) -> Result<(), SyncResolutionWithFsError> {
if snapshot.is_empty()
&& npm_install_deps_provider.workspace_pkgs().is_empty()
{
@ -311,12 +189,18 @@ async fn sync_resolution_with_fs(
let deno_local_registry_dir = root_node_modules_dir_path.join(".deno");
let deno_node_modules_dir = deno_local_registry_dir.join("node_modules");
fs::create_dir_all(&deno_node_modules_dir).with_context(|| {
format!("Creating '{}'", deno_local_registry_dir.display())
fs::create_dir_all(&deno_node_modules_dir).map_err(|source| {
SyncResolutionWithFsError::Creating {
path: deno_local_registry_dir.to_path_buf(),
source,
}
})?;
let bin_node_modules_dir_path = root_node_modules_dir_path.join(".bin");
fs::create_dir_all(&bin_node_modules_dir_path).with_context(|| {
format!("Creating '{}'", bin_node_modules_dir_path.display())
fs::create_dir_all(&bin_node_modules_dir_path).map_err(|source| {
SyncResolutionWithFsError::Creating {
path: deno_local_registry_dir.to_path_buf(),
source,
}
})?;
let single_process_lock = LaxSingleProcessFsFlag::lock(
@ -420,7 +304,8 @@ async fn sync_resolution_with_fs(
cache_futures.push(async move {
tarball_cache
.ensure_package(&package.id.nv, &package.dist)
.await?;
.await
.map_err(JsErrorBox::from_err)?;
let pb_guard = progress_bar.update_with_prompt(
ProgressMessagePrompt::Initialize,
&package.id.nv.to_string(),
@ -432,19 +317,18 @@ async fn sync_resolution_with_fs(
deno_core::unsync::spawn_blocking({
let package_path = package_path.clone();
let sys = sys.clone();
move || {
clone_dir_recursive(
&crate::sys::CliSys::default(),
&cache_folder,
&package_path,
)?;
clone_dir_recursive(&sys, &cache_folder, &package_path)?;
// write out a file that indicates this folder has been initialized
fs::write(initialized_file, tags)?;
Ok::<_, AnyError>(())
Ok::<_, SyncResolutionWithFsError>(())
}
})
.await??;
.await
.map_err(JsErrorBox::from_err)?
.map_err(JsErrorBox::from_err)?;
if package.bin.is_some() {
bin_entries_to_setup.borrow_mut().add(package, package_path);
@ -458,7 +342,7 @@ async fn sync_resolution_with_fs(
// finally stop showing the progress bar
drop(pb_guard); // explicit for clarity
Ok::<_, AnyError>(())
Ok::<_, JsErrorBox>(())
});
} else if matches!(package_state, PackageFolderState::TagsOutdated) {
fs::write(initialized_file, tags)?;
@ -494,11 +378,7 @@ async fn sync_resolution_with_fs(
&package.id.nv.name,
);
clone_dir_recursive(
&crate::sys::CliSys::default(),
&source_path,
&package_path,
)?;
clone_dir_recursive(sys, &source_path, &package_path)?;
// write out a file that indicates this folder has been initialized
fs::write(initialized_file, "")?;
}
@ -597,8 +477,11 @@ async fn sync_resolution_with_fs(
// symlink the dep into the package's child node_modules folder
let dest_node_modules = remote.base_dir.join("node_modules");
if !existing_child_node_modules_dirs.contains(&dest_node_modules) {
fs::create_dir_all(&dest_node_modules).with_context(|| {
format!("Creating '{}'", dest_node_modules.display())
fs::create_dir_all(&dest_node_modules).map_err(|source| {
SyncResolutionWithFsError::Creating {
path: dest_node_modules.clone(),
source,
}
})?;
existing_child_node_modules_dirs.insert(dest_node_modules.clone());
}
@ -813,7 +696,7 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
fn did_run_scripts(
&self,
package: &NpmResolutionPackage,
) -> std::result::Result<(), deno_core::anyhow::Error> {
) -> std::result::Result<(), std::io::Error> {
std::fs::write(self.ran_scripts_file(package), "")?;
Ok(())
}
@ -821,7 +704,7 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, std::path::PathBuf)],
) -> Result<(), AnyError> {
) -> Result<(), std::io::Error> {
if !packages.is_empty() {
log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
@ -1004,52 +887,42 @@ impl SetupCache {
}
}
fn get_package_folder_id_folder_name(
folder_id: &NpmPackageCacheFolderId,
) -> String {
let copy_str = if folder_id.copy_index == 0 {
Cow::Borrowed("")
} else {
Cow::Owned(format!("_{}", folder_id.copy_index))
};
let nv = &folder_id.nv;
let name = normalize_pkg_name_for_node_modules_deno_folder(&nv.name);
format!("{}@{}{}", name, nv.version, copy_str)
}
fn get_package_folder_id_from_folder_name(
folder_name: &str,
) -> Option<NpmPackageCacheFolderId> {
let folder_name = folder_name.replace('+', "/");
let (name, ending) = folder_name.rsplit_once('@')?;
let name: StackString = if let Some(encoded_name) = name.strip_prefix('_') {
StackString::from_string(mixed_case_package_name_decode(encoded_name)?)
} else {
name.into()
};
let (raw_version, copy_index) = match ending.split_once('_') {
Some((raw_version, copy_index)) => {
let copy_index = copy_index.parse::<u8>().ok()?;
(raw_version, copy_index)
}
None => (ending, 0),
};
let version = deno_semver::Version::parse_from_npm(raw_version).ok()?;
Some(NpmPackageCacheFolderId {
nv: PackageNv { name, version },
copy_index,
})
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum SymlinkPackageDirError {
#[class(inherit)]
#[error("Creating '{parent}'")]
Creating {
parent: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error(transparent)]
Other(#[from] std::io::Error),
#[cfg(windows)]
#[class(inherit)]
#[error("Creating junction in node_modules folder")]
FailedCreatingJunction {
#[source]
#[inherit]
source: std::io::Error,
},
}
fn symlink_package_dir(
old_path: &Path,
new_path: &Path,
) -> Result<(), AnyError> {
) -> Result<(), SymlinkPackageDirError> {
let new_parent = new_path.parent().unwrap();
if new_parent.file_name().unwrap() != "node_modules" {
// create the parent folder that will contain the symlink
fs::create_dir_all(new_parent)
.with_context(|| format!("Creating '{}'", new_parent.display()))?;
fs::create_dir_all(new_parent).map_err(|source| {
SymlinkPackageDirError::Creating {
parent: new_parent.to_path_buf(),
source,
}
})?;
}
// need to delete the previous symlink before creating a new one
@ -1075,7 +948,7 @@ fn junction_or_symlink_dir(
old_path_relative: &Path,
old_path: &Path,
new_path: &Path,
) -> Result<(), AnyError> {
) -> Result<(), SymlinkPackageDirError> {
static USE_JUNCTIONS: std::sync::atomic::AtomicBool =
std::sync::atomic::AtomicBool::new(false);
@ -1084,8 +957,9 @@ fn junction_or_symlink_dir(
// needing to elevate privileges on Windows.
// Note: junctions don't support relative paths, so we need to use the
// absolute path here.
return junction::create(old_path, new_path)
.context("Failed creating junction in node_modules folder");
return junction::create(old_path, new_path).map_err(|source| {
SymlinkPackageDirError::FailedCreatingJunction { source }
});
}
match symlink_dir(&crate::sys::CliSys::default(), old_path_relative, new_path)
@ -1095,8 +969,9 @@ fn junction_or_symlink_dir(
if symlink_err.kind() == std::io::ErrorKind::PermissionDenied =>
{
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
junction::create(old_path, new_path)
.context("Failed creating junction in node_modules folder")
junction::create(old_path, new_path).map_err(|source| {
SymlinkPackageDirError::FailedCreatingJunction { source }
})
}
Err(symlink_err) => {
log::warn!(
@ -1104,8 +979,9 @@ fn junction_or_symlink_dir(
colors::yellow("Warning")
);
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
junction::create(old_path, new_path)
.context("Failed creating junction in node_modules folder")
junction::create(old_path, new_path).map_err(|source| {
SymlinkPackageDirError::FailedCreatingJunction { source }
})
}
}
}
@ -1121,37 +997,10 @@ fn join_package_name(path: &Path, package_name: &str) -> PathBuf {
#[cfg(test)]
mod test {
use deno_npm::NpmPackageCacheFolderId;
use deno_semver::package::PackageNv;
use test_util::TempDir;
use super::*;
#[test]
fn test_get_package_folder_id_folder_name() {
let cases = vec![
(
NpmPackageCacheFolderId {
nv: PackageNv::from_str("@types/foo@1.2.3").unwrap(),
copy_index: 1,
},
"@types+foo@1.2.3_1".to_string(),
),
(
NpmPackageCacheFolderId {
nv: PackageNv::from_str("JSON@3.2.1").unwrap(),
copy_index: 0,
},
"_jjju6tq@3.2.1".to_string(),
),
];
for (input, output) in cases {
assert_eq!(get_package_folder_id_folder_name(&input), output);
let folder_id = get_package_folder_id_from_folder_name(&output).unwrap();
assert_eq!(folder_id, input);
}
}
#[test]
fn test_setup_cache() {
let temp_dir = TempDir::new();

283
cli/npm/installer/mod.rs Normal file
View file

@ -0,0 +1,283 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::unsync::sync::AtomicFlag;
use deno_error::JsErrorBox;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_runtime::colors;
use deno_semver::package::PackageReq;
pub use self::common::NpmPackageFsInstaller;
use self::global::GlobalNpmPackageInstaller;
use self::local::LocalNpmPackageInstaller;
pub use self::resolution::AddPkgReqsResult;
pub use self::resolution::NpmResolutionInstaller;
use super::NpmResolutionInitializer;
use crate::args::CliLockfile;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::args::PackageJsonDepValueParseWithLocationError;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
mod common;
mod global;
mod local;
mod resolution;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PackageCaching<'a> {
Only(Cow<'a, [PackageReq]>),
All,
}
#[derive(Debug)]
pub struct NpmInstaller {
fs_installer: Arc<dyn NpmPackageFsInstaller>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
npm_resolution_initializer: Arc<NpmResolutionInitializer>,
npm_resolution_installer: Arc<NpmResolutionInstaller>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_resolution: Arc<NpmResolutionCell>,
top_level_install_flag: AtomicFlag,
}
impl NpmInstaller {
#[allow(clippy::too_many_arguments)]
pub fn new(
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
npm_resolution: Arc<NpmResolutionCell>,
npm_resolution_initializer: Arc<NpmResolutionInitializer>,
npm_resolution_installer: Arc<NpmResolutionInstaller>,
progress_bar: &ProgressBar,
sys: CliSys,
tarball_cache: Arc<CliNpmTarballCache>,
maybe_lockfile: Option<Arc<CliLockfile>>,
maybe_node_modules_path: Option<PathBuf>,
lifecycle_scripts: LifecycleScriptsConfig,
system_info: NpmSystemInfo,
) -> Self {
let fs_installer: Arc<dyn NpmPackageFsInstaller> =
match maybe_node_modules_path {
Some(node_modules_folder) => Arc::new(LocalNpmPackageInstaller::new(
npm_cache,
npm_install_deps_provider.clone(),
progress_bar.clone(),
npm_resolution.clone(),
sys,
tarball_cache,
node_modules_folder,
lifecycle_scripts,
system_info,
)),
None => Arc::new(GlobalNpmPackageInstaller::new(
npm_cache,
tarball_cache,
npm_resolution.clone(),
lifecycle_scripts,
system_info,
)),
};
Self {
fs_installer,
npm_install_deps_provider,
npm_resolution,
npm_resolution_initializer,
npm_resolution_installer,
maybe_lockfile,
top_level_install_flag: Default::default(),
}
}
/// Adds package requirements to the resolver and ensures everything is setup.
/// This includes setting up the `node_modules` directory, if applicable.
pub async fn add_and_cache_package_reqs(
&self,
packages: &[PackageReq],
) -> Result<(), JsErrorBox> {
self.npm_resolution_initializer.ensure_initialized().await?;
self
.add_package_reqs_raw(
packages,
Some(PackageCaching::Only(packages.into())),
)
.await
.dependencies_result
}
pub async fn add_package_reqs_no_cache(
&self,
packages: &[PackageReq],
) -> Result<(), JsErrorBox> {
self.npm_resolution_initializer.ensure_initialized().await?;
self
.add_package_reqs_raw(packages, None)
.await
.dependencies_result
}
pub async fn add_package_reqs(
&self,
packages: &[PackageReq],
caching: PackageCaching<'_>,
) -> Result<(), JsErrorBox> {
self
.add_package_reqs_raw(packages, Some(caching))
.await
.dependencies_result
}
pub async fn add_package_reqs_raw<'a>(
&self,
packages: &[PackageReq],
caching: Option<PackageCaching<'a>>,
) -> AddPkgReqsResult {
if packages.is_empty() {
return AddPkgReqsResult {
dependencies_result: Ok(()),
results: vec![],
};
}
#[cfg(debug_assertions)]
self.npm_resolution_initializer.debug_assert_initialized();
let mut result = self
.npm_resolution_installer
.add_package_reqs(packages)
.await;
if result.dependencies_result.is_ok() {
if let Some(lockfile) = self.maybe_lockfile.as_ref() {
result.dependencies_result = lockfile.error_if_changed();
}
}
if result.dependencies_result.is_ok() {
if let Some(caching) = caching {
result.dependencies_result = self.cache_packages(caching).await;
}
}
result
}
/// Sets package requirements to the resolver, removing old requirements and adding new ones.
///
/// This will retrieve and resolve package information, but not cache any package files.
pub async fn set_package_reqs(
&self,
packages: &[PackageReq],
) -> Result<(), AnyError> {
self
.npm_resolution_installer
.set_package_reqs(packages)
.await
}
pub async fn inject_synthetic_types_node_package(
&self,
) -> Result<(), JsErrorBox> {
self.npm_resolution_initializer.ensure_initialized().await?;
let reqs = &[PackageReq::from_str("@types/node").unwrap()];
// add and ensure this isn't added to the lockfile
self
.add_package_reqs(reqs, PackageCaching::Only(reqs.into()))
.await?;
Ok(())
}
pub async fn cache_package_info(
&self,
package_name: &str,
) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
self
.npm_resolution_installer
.cache_package_info(package_name)
.await
}
pub async fn cache_packages(
&self,
caching: PackageCaching<'_>,
) -> Result<(), JsErrorBox> {
self.npm_resolution_initializer.ensure_initialized().await?;
self.fs_installer.cache_packages(caching).await
}
pub fn ensure_no_pkg_json_dep_errors(
&self,
) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> {
for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
match err.source.as_kind() {
deno_package_json::PackageJsonDepValueParseErrorKind::VersionReq(_) => {
return Err(Box::new(err.clone()));
}
deno_package_json::PackageJsonDepValueParseErrorKind::Unsupported {
..
} => {
// only warn for this one
log::warn!(
"{} {}\n at {}",
colors::yellow("Warning"),
err.source,
err.location,
)
}
}
}
Ok(())
}
/// Ensures that the top level `package.json` dependencies are installed.
/// This may set up the `node_modules` directory.
///
/// Returns `true` if the top level packages are already installed. A
/// return value of `false` means that new packages were added to the NPM resolution.
pub async fn ensure_top_level_package_json_install(
&self,
) -> Result<bool, JsErrorBox> {
if !self.top_level_install_flag.raise() {
return Ok(true); // already did this
}
self.npm_resolution_initializer.ensure_initialized().await?;
let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
if pkg_json_remote_pkgs.is_empty() {
return Ok(true);
}
// check if something needs resolving before bothering to load all
// the package information (which is slow)
if pkg_json_remote_pkgs.iter().all(|pkg| {
self
.npm_resolution
.resolve_pkg_id_from_pkg_req(&pkg.req)
.is_ok()
}) {
log::debug!(
"All package.json deps resolvable. Skipping top level install."
);
return Ok(true); // everything is already resolvable
}
let pkg_reqs = pkg_json_remote_pkgs
.iter()
.map(|pkg| pkg.req.clone())
.collect::<Vec<_>>();
self.add_package_reqs_no_cache(&pkg_reqs).await?;
Ok(false)
}
}
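
`ensure_top_level_package_json_install` above guards against repeat work with `top_level_install_flag.raise()`. A sketch of the same run-once guard built on std's `AtomicBool` instead of `deno_core`'s `AtomicFlag`:

// Sketch of a run-once install guard; `Installer` here is a toy struct.
use std::sync::atomic::{AtomicBool, Ordering};

struct Installer {
  top_level_install_flag: AtomicBool,
}

impl Installer {
  fn new() -> Self {
    Self { top_level_install_flag: AtomicBool::new(false) }
  }

  /// Returns `true` if the install was already done (or claimed by another caller).
  fn ensure_top_level_install(&self) -> bool {
    // compare_exchange flips false -> true exactly once; later callers see Err.
    if self
      .top_level_install_flag
      .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
      .is_err()
    {
      return true; // already did this
    }
    println!("running top level package.json install");
    false
  }
}

fn main() {
  let installer = Installer::new();
  assert!(!installer.ensure_top_level_install()); // first call does the work
  assert!(installer.ensure_top_level_install()); // subsequent calls are no-ops
}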

View file

@ -1,27 +1,21 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use capacity_builder::StringBuilder;
use deno_core::error::AnyError;
use deno_error::JsErrorBox;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use deno_npm::resolution::AddPkgReqsOptions;
use deno_npm::resolution::NpmPackagesPartitioned;
use deno_npm::resolution::NpmResolutionError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::PackageCacheFolderIdNotFoundError;
use deno_npm::resolution::PackageNotFoundFromReferrerError;
use deno_npm::resolution::PackageNvNotFoundError;
use deno_npm::resolution::PackageReqNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
@ -30,7 +24,7 @@ use deno_semver::VersionReq;
use crate::args::CliLockfile;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::util::sync::SyncReadAsyncWriteLock;
use crate::util::sync::TaskQueue;
pub struct AddPkgReqsResult {
/// Results from adding the individual packages.
@ -39,63 +33,51 @@ pub struct AddPkgReqsResult {
/// package requirements.
pub results: Vec<Result<PackageNv, NpmResolutionError>>,
/// The final result of resolving and caching all the package requirements.
pub dependencies_result: Result<(), AnyError>,
pub dependencies_result: Result<(), JsErrorBox>,
}
/// Handles updating and storing npm resolution in memory where the underlying
/// snapshot can be updated concurrently. Additionally handles updating the lockfile
/// based on changes to the resolution.
///
/// This does not interact with the file system.
pub struct NpmResolution {
/// Updates the npm resolution with the provided package requirements.
#[derive(Debug)]
pub struct NpmResolutionInstaller {
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
snapshot: SyncReadAsyncWriteLock<NpmResolutionSnapshot>,
resolution: Arc<NpmResolutionCell>,
maybe_lockfile: Option<Arc<CliLockfile>>,
update_queue: TaskQueue,
}
impl std::fmt::Debug for NpmResolution {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let snapshot = self.snapshot.read();
f.debug_struct("NpmResolution")
.field("snapshot", &snapshot.as_valid_serialized().as_serialized())
.finish()
}
}
impl NpmResolution {
pub fn from_serialized(
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
initial_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
maybe_lockfile: Option<Arc<CliLockfile>>,
) -> Self {
let snapshot =
NpmResolutionSnapshot::new(initial_snapshot.unwrap_or_default());
Self::new(registry_info_provider, snapshot, maybe_lockfile)
}
impl NpmResolutionInstaller {
pub fn new(
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
initial_snapshot: NpmResolutionSnapshot,
resolution: Arc<NpmResolutionCell>,
maybe_lockfile: Option<Arc<CliLockfile>>,
) -> Self {
Self {
registry_info_provider,
snapshot: SyncReadAsyncWriteLock::new(initial_snapshot),
resolution,
maybe_lockfile,
update_queue: Default::default(),
}
}
pub async fn cache_package_info(
&self,
package_name: &str,
) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
// this will internally cache the package information
self.registry_info_provider.package_info(package_name).await
}
pub async fn add_package_reqs(
&self,
package_reqs: &[PackageReq],
) -> AddPkgReqsResult {
// only allow one thread in here at a time
let snapshot_lock = self.snapshot.acquire().await;
let _snapshot_lock = self.update_queue.acquire().await;
let result = add_package_reqs_to_snapshot(
&self.registry_info_provider,
package_reqs,
self.maybe_lockfile.clone(),
|| snapshot_lock.read().clone(),
|| self.resolution.snapshot(),
)
.await;
@ -103,10 +85,10 @@ impl NpmResolution {
results: result.results,
dependencies_result: match result.dep_graph_result {
Ok(snapshot) => {
*snapshot_lock.write() = snapshot;
self.resolution.set_snapshot(snapshot);
Ok(())
}
Err(err) => Err(err.into()),
Err(err) => Err(JsErrorBox::from_err(err)),
},
}
}
@ -116,7 +98,7 @@ impl NpmResolution {
package_reqs: &[PackageReq],
) -> Result<(), AnyError> {
// only allow one thread in here at a time
let snapshot_lock = self.snapshot.acquire().await;
let _snapshot_lock = self.update_queue.acquire().await;
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
let snapshot = add_package_reqs_to_snapshot(
@ -124,7 +106,7 @@ impl NpmResolution {
package_reqs,
self.maybe_lockfile.clone(),
|| {
let snapshot = snapshot_lock.read().clone();
let snapshot = self.resolution.snapshot();
let has_removed_package = !snapshot
.package_reqs()
.keys()
@ -140,127 +122,10 @@ impl NpmResolution {
.await
.into_result()?;
*snapshot_lock.write() = snapshot;
self.resolution.set_snapshot(snapshot);
Ok(())
}
pub fn resolve_pkg_cache_folder_id_from_pkg_id(
&self,
id: &NpmPackageId,
) -> Option<NpmPackageCacheFolderId> {
self
.snapshot
.read()
.package_from_id(id)
.map(|p| p.get_package_cache_folder_id())
}
pub fn resolve_pkg_id_from_pkg_cache_folder_id(
&self,
id: &NpmPackageCacheFolderId,
) -> Result<NpmPackageId, PackageCacheFolderIdNotFoundError> {
self
.snapshot
.read()
.resolve_pkg_from_pkg_cache_folder_id(id)
.map(|pkg| pkg.id.clone())
}
pub fn resolve_package_from_package(
&self,
name: &str,
referrer: &NpmPackageCacheFolderId,
) -> Result<NpmResolutionPackage, Box<PackageNotFoundFromReferrerError>> {
self
.snapshot
.read()
.resolve_package_from_package(name, referrer)
.cloned()
}
/// Resolve a node package from a deno module.
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> {
self
.snapshot
.read()
.resolve_pkg_from_pkg_req(req)
.map(|pkg| pkg.id.clone())
}
pub fn resolve_pkg_reqs_from_pkg_id(
&self,
id: &NpmPackageId,
) -> Vec<PackageReq> {
let snapshot = self.snapshot.read();
let mut pkg_reqs = snapshot
.package_reqs()
.iter()
.filter(|(_, nv)| *nv == &id.nv)
.map(|(req, _)| req.clone())
.collect::<Vec<_>>();
pkg_reqs.sort(); // be deterministic
pkg_reqs
}
pub fn resolve_pkg_id_from_deno_module(
&self,
id: &PackageNv,
) -> Result<NpmPackageId, PackageNvNotFoundError> {
self
.snapshot
.read()
.resolve_package_from_deno_module(id)
.map(|pkg| pkg.id.clone())
}
pub fn package_reqs(&self) -> HashMap<PackageReq, PackageNv> {
self.snapshot.read().package_reqs().clone()
}
pub fn all_system_packages(
&self,
system_info: &NpmSystemInfo,
) -> Vec<NpmResolutionPackage> {
self.snapshot.read().all_system_packages(system_info)
}
pub fn all_system_packages_partitioned(
&self,
system_info: &NpmSystemInfo,
) -> NpmPackagesPartitioned {
self
.snapshot
.read()
.all_system_packages_partitioned(system_info)
}
pub fn snapshot(&self) -> NpmResolutionSnapshot {
self.snapshot.read().clone()
}
pub fn serialized_valid_snapshot(
&self,
) -> ValidSerializedNpmResolutionSnapshot {
self.snapshot.read().as_valid_serialized()
}
pub fn serialized_valid_snapshot_for_system(
&self,
system_info: &NpmSystemInfo,
) -> ValidSerializedNpmResolutionSnapshot {
self
.snapshot
.read()
.as_valid_serialized_for_system(system_info)
}
pub fn subset(&self, package_reqs: &[PackageReq]) -> NpmResolutionSnapshot {
self.snapshot.read().subset(package_reqs)
}
}
async fn add_package_reqs_to_snapshot(
@ -333,6 +198,25 @@ fn populate_lockfile_from_snapshot(
lockfile: &CliLockfile,
snapshot: &NpmResolutionSnapshot,
) {
fn npm_package_to_lockfile_info(
pkg: &NpmResolutionPackage,
) -> NpmPackageLockfileInfo {
let dependencies = pkg
.dependencies
.iter()
.map(|(name, id)| NpmPackageDependencyLockfileInfo {
name: name.clone(),
id: id.as_serialized(),
})
.collect();
NpmPackageLockfileInfo {
serialized_id: pkg.id.as_serialized(),
integrity: pkg.dist.integrity().for_lockfile(),
dependencies,
}
}
let mut lockfile = lockfile.lock();
for (package_req, nv) in snapshot.package_reqs() {
let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
@ -351,22 +235,3 @@ fn populate_lockfile_from_snapshot(
lockfile.insert_npm_package(npm_package_to_lockfile_info(package));
}
}
fn npm_package_to_lockfile_info(
pkg: &NpmResolutionPackage,
) -> NpmPackageLockfileInfo {
let dependencies = pkg
.dependencies
.iter()
.map(|(name, id)| NpmPackageDependencyLockfileInfo {
name: name.clone(),
id: id.as_serialized(),
})
.collect();
NpmPackageLockfileInfo {
serialized_id: pkg.id.as_serialized(),
integrity: pkg.dist.integrity().for_lockfile(),
dependencies,
}
}
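As a usage note for the `AddPkgReqsResult` struct near the top of this file: the per-requirement `results` report individual resolution failures, while `dependencies_result` is the aggregate outcome of resolving and caching the whole set. A hedged sketch of a caller (crate context of this file assumed; the logging is illustrative):

fn report_add_pkg_reqs(result: AddPkgReqsResult) -> Result<(), JsErrorBox> {
  // surface individual resolution failures without aborting the loop
  for entry in &result.results {
    if let Err(err) = entry {
      log::warn!("failed resolving a package requirement: {}", err);
    }
  }
  // then propagate the aggregate resolve-and-cache outcome
  result.dependencies_result
}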

496
cli/npm/managed.rs Normal file
View file

@ -0,0 +1,496 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::PackageReqNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::managed::ResolvePkgFolderFromDenoModuleError;
use deno_resolver::npm::managed::ResolvePkgFolderFromPkgIdError;
use deno_resolver::npm::managed::ResolvePkgIdFromSpecifierError;
use deno_resolver::npm::ByonmOrManagedNpmResolver;
use deno_resolver::npm::ManagedNpmResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::NpmPackageFolderResolver;
use sys_traits::FsMetadata;
use thiserror::Error;
use super::CliNpmRegistryInfoProvider;
use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
use crate::args::CliLockfile;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::cache::FastInsecureHasher;
use crate::sys::CliSys;
#[derive(Debug, Clone)]
pub enum CliNpmResolverManagedSnapshotOption {
ResolveFromLockfile(Arc<CliLockfile>),
Specified(Option<ValidSerializedNpmResolutionSnapshot>),
}
#[derive(Debug)]
enum SyncState {
Pending(Option<CliNpmResolverManagedSnapshotOption>),
Err(ResolveSnapshotError),
Success,
}
#[derive(Debug)]
pub struct NpmResolutionInitializer {
npm_registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
npm_resolution: Arc<NpmResolutionCell>,
queue: tokio::sync::Mutex<()>,
sync_state: Mutex<SyncState>,
}
impl NpmResolutionInitializer {
pub fn new(
npm_registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
npm_resolution: Arc<NpmResolutionCell>,
snapshot_option: CliNpmResolverManagedSnapshotOption,
) -> Self {
Self {
npm_registry_info_provider,
npm_resolution,
queue: tokio::sync::Mutex::new(()),
sync_state: Mutex::new(SyncState::Pending(Some(snapshot_option))),
}
}
#[cfg(debug_assertions)]
pub fn debug_assert_initialized(&self) {
if !matches!(*self.sync_state.lock(), SyncState::Success) {
panic!("debug assert: npm resolution must be initialized before calling this code");
}
}
pub async fn ensure_initialized(&self) -> Result<(), JsErrorBox> {
// fast exit if not pending
{
match &*self.sync_state.lock() {
SyncState::Pending(_) => {}
SyncState::Err(err) => return Err(JsErrorBox::from_err(err.clone())),
SyncState::Success => return Ok(()),
}
}
// only allow one task in here at a time
let _guard = self.queue.lock().await;
let snapshot_option = {
let mut sync_state = self.sync_state.lock();
match &mut *sync_state {
SyncState::Pending(snapshot_option) => {
// this should never panic, but if it does it means that a
// previous future was dropped while initialization occurred...
// that should never happen because this is initialized during
// startup
snapshot_option.take().unwrap()
}
// another thread updated the state while we were waiting
SyncState::Err(resolve_snapshot_error) => {
return Err(JsErrorBox::from_err(resolve_snapshot_error.clone()));
}
SyncState::Success => {
return Ok(());
}
}
};
match resolve_snapshot(&self.npm_registry_info_provider, snapshot_option)
.await
{
Ok(maybe_snapshot) => {
if let Some(snapshot) = maybe_snapshot {
self
.npm_resolution
.set_snapshot(NpmResolutionSnapshot::new(snapshot));
}
let mut sync_state = self.sync_state.lock();
*sync_state = SyncState::Success;
Ok(())
}
Err(err) => {
let mut sync_state = self.sync_state.lock();
*sync_state = SyncState::Err(err.clone());
Err(JsErrorBox::from_err(err))
}
}
}
}
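`ensure_initialized` above is a double-checked initialization: a fast path inspects the state without waiting, a tokio mutex serializes the slow path, and the state is checked again under that lock before doing the work exactly once and recording success or failure. A simplified, self-contained sketch of the same shape; the state payloads and `load_snapshot` are placeholders, not the CLI's types.

use std::sync::Mutex;

enum InitState {
  Pending,
  Failed(String),
  Ready,
}

struct Initializer {
  queue: tokio::sync::Mutex<()>,
  state: Mutex<InitState>,
}

impl Initializer {
  async fn ensure_initialized(&self) -> Result<(), String> {
    // fast exit if another task already finished (or failed)
    match &*self.state.lock().unwrap() {
      InitState::Pending => {}
      InitState::Failed(err) => return Err(err.clone()),
      InitState::Ready => return Ok(()),
    }
    // only one task may perform the initialization at a time
    let _guard = self.queue.lock().await;
    // re-check: the state may have changed while waiting for the queue
    match &*self.state.lock().unwrap() {
      InitState::Pending => {}
      InitState::Failed(err) => return Err(err.clone()),
      InitState::Ready => return Ok(()),
    }
    match load_snapshot().await {
      Ok(()) => {
        *self.state.lock().unwrap() = InitState::Ready;
        Ok(())
      }
      Err(err) => {
        *self.state.lock().unwrap() = InitState::Failed(err.clone());
        Err(err)
      }
    }
  }
}

// placeholder for resolving the snapshot from a lockfile or registry
async fn load_snapshot() -> Result<(), String> {
  Ok(())
}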
pub struct CliManagedNpmResolverCreateOptions {
pub npm_cache_dir: Arc<NpmCacheDir>,
pub sys: CliSys,
pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo,
pub npmrc: Arc<ResolvedNpmRc>,
pub npm_resolution: Arc<NpmResolutionCell>,
}
pub fn create_managed_npm_resolver(
options: CliManagedNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> {
let managed_npm_resolver =
Arc::new(ManagedNpmResolver::<CliSys>::new::<CliSys>(
&options.npm_cache_dir,
&options.npmrc,
options.npm_resolution.clone(),
options.sys.clone(),
options.maybe_node_modules_path,
));
Arc::new(ManagedCliNpmResolver::new(
managed_npm_resolver,
options.npm_cache_dir,
options.npmrc,
options.npm_resolution,
options.sys,
options.npm_system_info,
))
}
#[derive(Debug, Error, Clone, JsError)]
#[error("failed reading lockfile '{}'", lockfile_path.display())]
#[class(inherit)]
pub struct ResolveSnapshotError {
lockfile_path: PathBuf,
#[inherit]
#[source]
source: SnapshotFromLockfileError,
}
impl ResolveSnapshotError {
pub fn maybe_integrity_check_error(
&self,
) -> Option<&deno_npm::resolution::IntegrityCheckFailedError> {
match &self.source {
SnapshotFromLockfileError::SnapshotFromLockfile(
deno_npm::resolution::SnapshotFromLockfileError::IntegrityCheckFailed(
err,
),
) => Some(err),
_ => None,
}
}
}
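A hedged usage sketch for the accessor above (crate context of this file assumed): callers can special-case a lockfile integrity failure, for example to suggest regenerating the lockfile, while other snapshot errors are reported as-is.

fn describe_snapshot_error(err: &ResolveSnapshotError) -> String {
  if let Some(integrity_err) = err.maybe_integrity_check_error() {
    // e.g. the lockfile entries no longer match what the registry serves
    format!("integrity check failed: {}", integrity_err)
  } else {
    err.to_string()
  }
}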
async fn resolve_snapshot(
registry_info_provider: &Arc<CliNpmRegistryInfoProvider>,
snapshot: CliNpmResolverManagedSnapshotOption,
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, ResolveSnapshotError>
{
match snapshot {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
if !lockfile.overwrite() {
let snapshot = snapshot_from_lockfile(
lockfile.clone(),
&registry_info_provider.as_npm_registry_api(),
)
.await
.map_err(|source| ResolveSnapshotError {
lockfile_path: lockfile.filename.clone(),
source,
})?;
Ok(Some(snapshot))
} else {
Ok(None)
}
}
CliNpmResolverManagedSnapshotOption::Specified(snapshot) => Ok(snapshot),
}
}
#[derive(Debug, Error, Clone, JsError)]
pub enum SnapshotFromLockfileError {
#[error(transparent)]
#[class(inherit)]
IncompleteError(
#[from] deno_npm::resolution::IncompleteSnapshotFromLockfileError,
),
#[error(transparent)]
#[class(inherit)]
SnapshotFromLockfile(#[from] deno_npm::resolution::SnapshotFromLockfileError),
}
async fn snapshot_from_lockfile(
lockfile: Arc<CliLockfile>,
api: &dyn NpmRegistryApi,
) -> Result<ValidSerializedNpmResolutionSnapshot, SnapshotFromLockfileError> {
let (incomplete_snapshot, skip_integrity_check) = {
let lock = lockfile.lock();
(
deno_npm::resolution::incomplete_snapshot_from_lockfile(&lock)?,
lock.overwrite,
)
};
let snapshot = deno_npm::resolution::snapshot_from_lockfile(
deno_npm::resolution::SnapshotFromLockfileParams {
incomplete_snapshot,
api,
skip_integrity_check,
},
)
.await?;
Ok(snapshot)
}
/// An npm resolver where the resolution is managed by Deno rather than
/// the user bringing their own node_modules (BYONM) on the file system.
pub struct ManagedCliNpmResolver {
managed_npm_resolver: Arc<ManagedNpmResolver<CliSys>>,
npm_cache_dir: Arc<NpmCacheDir>,
npm_rc: Arc<ResolvedNpmRc>,
sys: CliSys,
resolution: Arc<NpmResolutionCell>,
system_info: NpmSystemInfo,
}
impl std::fmt::Debug for ManagedCliNpmResolver {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ManagedCliNpmResolver")
.field("<omitted>", &"<omitted>")
.finish()
}
}
impl ManagedCliNpmResolver {
#[allow(clippy::too_many_arguments)]
pub fn new(
managed_npm_resolver: Arc<ManagedNpmResolver<CliSys>>,
npm_cache_dir: Arc<NpmCacheDir>,
npm_rc: Arc<ResolvedNpmRc>,
resolution: Arc<NpmResolutionCell>,
sys: CliSys,
system_info: NpmSystemInfo,
) -> Self {
Self {
managed_npm_resolver,
npm_cache_dir,
npm_rc,
resolution,
sys,
system_info,
}
}
pub fn resolve_pkg_folder_from_pkg_id(
&self,
pkg_id: &NpmPackageId,
) -> Result<PathBuf, ResolvePkgFolderFromPkgIdError> {
self
.managed_npm_resolver
.resolve_pkg_folder_from_pkg_id(pkg_id)
}
/// Resolves the package id from the provided specifier.
pub fn resolve_pkg_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageId>, ResolvePkgIdFromSpecifierError> {
self
.managed_npm_resolver
.resolve_pkg_id_from_specifier(specifier)
}
pub fn resolve_pkg_reqs_from_pkg_id(
&self,
id: &NpmPackageId,
) -> Vec<PackageReq> {
self.resolution.resolve_pkg_reqs_from_pkg_id(id)
}
pub fn all_system_packages(
&self,
system_info: &NpmSystemInfo,
) -> Vec<NpmResolutionPackage> {
self.resolution.all_system_packages(system_info)
}
/// Checks if the provided package req's folder is cached.
pub fn is_pkg_req_folder_cached(&self, req: &PackageReq) -> bool {
self
.resolve_pkg_id_from_pkg_req(req)
.ok()
.and_then(|id| {
self
.managed_npm_resolver
.resolve_pkg_folder_from_pkg_id(&id)
.ok()
})
.map(|folder| self.sys.fs_exists_no_err(folder))
.unwrap_or(false)
}
pub fn snapshot(&self) -> NpmResolutionSnapshot {
self.resolution.snapshot()
}
pub fn top_package_req_for_name(&self, name: &str) -> Option<PackageReq> {
let package_reqs = self.resolution.package_reqs();
let mut entries = package_reqs
.iter()
.filter(|(_, nv)| nv.name == name)
.collect::<Vec<_>>();
entries.sort_by_key(|(_, nv)| &nv.version);
Some(entries.last()?.0.clone())
}
pub fn serialized_valid_snapshot_for_system(
&self,
system_info: &NpmSystemInfo,
) -> ValidSerializedNpmResolutionSnapshot {
self
.resolution
.serialized_valid_snapshot_for_system(system_info)
}
pub fn resolve_pkg_folder_from_deno_module(
&self,
nv: &PackageNv,
) -> Result<PathBuf, ResolvePkgFolderFromDenoModuleError> {
self
.managed_npm_resolver
.resolve_pkg_folder_from_deno_module(nv)
}
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> {
self.resolution.resolve_pkg_id_from_pkg_req(req)
}
pub fn maybe_node_modules_path(&self) -> Option<&Path> {
self.managed_npm_resolver.node_modules_path()
}
pub fn global_cache_root_path(&self) -> &Path {
self.npm_cache_dir.root_dir()
}
pub fn global_cache_root_url(&self) -> &Url {
self.npm_cache_dir.root_dir_url()
}
}
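A hedged usage sketch of the resolver above (crate context assumed; error handling collapsed into `Option` for brevity): map an npm requirement that has already been added to the resolution to the folder it resolves to on disk.

fn folder_for_req(
  resolver: &ManagedCliNpmResolver,
  req: &PackageReq,
) -> Option<PathBuf> {
  // fails if the requirement was never added to the npm resolution
  let pkg_id = resolver.resolve_pkg_id_from_pkg_req(req).ok()?;
  resolver.resolve_pkg_folder_from_pkg_id(&pkg_id).ok()
}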
pub fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
impl NpmProcessStateProvider for ManagedCliNpmResolver {
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.resolution.serialized_valid_snapshot(),
self.managed_npm_resolver.node_modules_path(),
)
}
}
impl CliNpmResolver for ManagedCliNpmResolver {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self.managed_npm_resolver.clone()
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
self
}
fn into_byonm_or_managed(
self: Arc<Self>,
) -> ByonmOrManagedNpmResolver<CliSys> {
ByonmOrManagedNpmResolver::Managed(self.managed_npm_resolver.clone())
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
// create a new snapshotted npm resolution and resolver
let npm_resolution =
Arc::new(NpmResolutionCell::new(self.resolution.snapshot()));
Arc::new(ManagedCliNpmResolver::new(
Arc::new(ManagedNpmResolver::<CliSys>::new::<CliSys>(
&self.npm_cache_dir,
&self.npm_rc,
npm_resolution.clone(),
self.sys.clone(),
self.root_node_modules_path().map(ToOwned::to_owned),
)),
self.npm_cache_dir.clone(),
self.npm_rc.clone(),
npm_resolution,
self.sys.clone(),
self.system_info.clone(),
))
}
fn as_inner(&self) -> InnerCliNpmResolverRef {
InnerCliNpmResolverRef::Managed(self)
}
fn root_node_modules_path(&self) -> Option<&Path> {
self.managed_npm_resolver.node_modules_path()
}
fn check_state_hash(&self) -> Option<u64> {
// We could go further and check all the individual
// npm packages, but that's probably overkill.
let mut package_reqs = self
.resolution
.package_reqs()
.into_iter()
.collect::<Vec<_>>();
package_reqs.sort_by(|a, b| a.0.cmp(&b.0)); // determinism
let mut hasher = FastInsecureHasher::new_without_deno_version();
// ensure the cache gets busted when turning nodeModulesDir on or off
// as this could cause changes in resolution
hasher
.write_hashable(self.managed_npm_resolver.node_modules_path().is_some());
for (pkg_req, pkg_nv) in package_reqs {
hasher.write_hashable(&pkg_req);
hasher.write_hashable(&pkg_nv);
}
Some(hasher.finish())
}
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
self
.managed_npm_resolver
.resolve_pkg_folder_from_deno_module_req(req, referrer)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
}
}
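`check_state_hash` above sorts before hashing so the result does not depend on map iteration order, and mixes in whether a local node_modules directory is in use so toggling that setting busts downstream caches. A self-contained sketch of the same idea using only standard-library types:

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

fn state_hash(
  reqs: &HashMap<String, String>,
  has_node_modules_dir: bool,
) -> u64 {
  let mut entries: Vec<_> = reqs.iter().collect();
  entries.sort(); // determinism: HashMap iteration order is arbitrary
  let mut hasher = DefaultHasher::new();
  // bust the hash when switching between a global cache and local node_modules
  has_node_modules_dir.hash(&mut hasher);
  for (req, resolved_version) in entries {
    req.hash(&mut hasher);
    resolved_version.hash(&mut hasher);
  }
  hasher.finish()
}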

View file

@ -1,776 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::PackageReqNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::NpmCacheSetting;
use deno_path_util::fs::canonicalize_path_maybe_not_exists;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::colors;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
use resolution::AddPkgReqsResult;
use self::resolution::NpmResolution;
use self::resolvers::create_npm_fs_resolver;
use self::resolvers::NpmPackageFsResolver;
use super::CliNpmCache;
use super::CliNpmCacheHttpClient;
use super::CliNpmRegistryInfoProvider;
use super::CliNpmResolver;
use super::CliNpmTarballCache;
use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError;
use crate::args::CliLockfile;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::args::PackageJsonDepValueParseWithLocationError;
use crate::cache::FastInsecureHasher;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
use crate::util::sync::AtomicFlag;
mod resolution;
mod resolvers;
pub enum CliNpmResolverManagedSnapshotOption {
ResolveFromLockfile(Arc<CliLockfile>),
Specified(Option<ValidSerializedNpmResolutionSnapshot>),
}
pub struct CliManagedNpmResolverCreateOptions {
pub snapshot: CliNpmResolverManagedSnapshotOption,
pub maybe_lockfile: Option<Arc<CliLockfile>>,
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
pub npm_cache_dir: Arc<NpmCacheDir>,
pub sys: CliSys,
pub cache_setting: deno_cache_dir::file_fetcher::CacheSetting,
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo,
pub npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
pub npmrc: Arc<ResolvedNpmRc>,
pub lifecycle_scripts: LifecycleScriptsConfig,
}
pub async fn create_managed_npm_resolver_for_lsp(
options: CliManagedNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> {
let npm_cache = create_cache(&options);
let http_client = Arc::new(CliNpmCacheHttpClient::new(
options.http_client_provider.clone(),
options.text_only_progress_bar.clone(),
));
let npm_api = create_api(npm_cache.clone(), http_client.clone(), &options);
// spawn due to the lsp's `Send` requirement
deno_core::unsync::spawn(async move {
let snapshot = match resolve_snapshot(&npm_api, options.snapshot).await {
Ok(snapshot) => snapshot,
Err(err) => {
log::warn!("failed to resolve snapshot: {}", err);
None
}
};
create_inner(
http_client,
npm_cache,
options.npm_install_deps_provider,
npm_api,
options.sys,
options.text_only_progress_bar,
options.maybe_lockfile,
options.npmrc,
options.maybe_node_modules_path,
options.npm_system_info,
snapshot,
options.lifecycle_scripts,
)
})
.await
.unwrap()
}
pub async fn create_managed_npm_resolver(
options: CliManagedNpmResolverCreateOptions,
) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
let npm_cache = create_cache(&options);
let http_client = Arc::new(CliNpmCacheHttpClient::new(
options.http_client_provider.clone(),
options.text_only_progress_bar.clone(),
));
let api = create_api(npm_cache.clone(), http_client.clone(), &options);
let snapshot = resolve_snapshot(&api, options.snapshot).await?;
Ok(create_inner(
http_client,
npm_cache,
options.npm_install_deps_provider,
api,
options.sys,
options.text_only_progress_bar,
options.maybe_lockfile,
options.npmrc,
options.maybe_node_modules_path,
options.npm_system_info,
snapshot,
options.lifecycle_scripts,
))
}
#[allow(clippy::too_many_arguments)]
fn create_inner(
http_client: Arc<CliNpmCacheHttpClient>,
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
sys: CliSys,
text_only_progress_bar: crate::util::progress_bar::ProgressBar,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_rc: Arc<ResolvedNpmRc>,
node_modules_dir_path: Option<PathBuf>,
npm_system_info: NpmSystemInfo,
snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Arc<dyn CliNpmResolver> {
let resolution = Arc::new(NpmResolution::from_serialized(
registry_info_provider.clone(),
snapshot,
maybe_lockfile.clone(),
));
let tarball_cache = Arc::new(CliNpmTarballCache::new(
npm_cache.clone(),
http_client,
sys.clone(),
npm_rc.clone(),
));
let fs_resolver = create_npm_fs_resolver(
npm_cache.clone(),
&npm_install_deps_provider,
&text_only_progress_bar,
resolution.clone(),
sys.clone(),
tarball_cache.clone(),
node_modules_dir_path,
npm_system_info.clone(),
lifecycle_scripts.clone(),
);
Arc::new(ManagedCliNpmResolver::new(
fs_resolver,
maybe_lockfile,
registry_info_provider,
npm_cache,
npm_install_deps_provider,
resolution,
sys,
tarball_cache,
text_only_progress_bar,
npm_system_info,
lifecycle_scripts,
))
}
fn create_cache(
options: &CliManagedNpmResolverCreateOptions,
) -> Arc<CliNpmCache> {
Arc::new(CliNpmCache::new(
options.npm_cache_dir.clone(),
options.sys.clone(),
NpmCacheSetting::from_cache_setting(&options.cache_setting),
options.npmrc.clone(),
))
}
fn create_api(
cache: Arc<CliNpmCache>,
http_client: Arc<CliNpmCacheHttpClient>,
options: &CliManagedNpmResolverCreateOptions,
) -> Arc<CliNpmRegistryInfoProvider> {
Arc::new(CliNpmRegistryInfoProvider::new(
cache,
http_client,
options.npmrc.clone(),
))
}
async fn resolve_snapshot(
registry_info_provider: &Arc<CliNpmRegistryInfoProvider>,
snapshot: CliNpmResolverManagedSnapshotOption,
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, AnyError> {
match snapshot {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
if !lockfile.overwrite() {
let snapshot = snapshot_from_lockfile(
lockfile.clone(),
&registry_info_provider.as_npm_registry_api(),
)
.await
.with_context(|| {
format!("failed reading lockfile '{}'", lockfile.filename.display())
})?;
Ok(Some(snapshot))
} else {
Ok(None)
}
}
CliNpmResolverManagedSnapshotOption::Specified(snapshot) => Ok(snapshot),
}
}
async fn snapshot_from_lockfile(
lockfile: Arc<CliLockfile>,
api: &dyn NpmRegistryApi,
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
let (incomplete_snapshot, skip_integrity_check) = {
let lock = lockfile.lock();
(
deno_npm::resolution::incomplete_snapshot_from_lockfile(&lock)?,
lock.overwrite,
)
};
let snapshot = deno_npm::resolution::snapshot_from_lockfile(
deno_npm::resolution::SnapshotFromLockfileParams {
incomplete_snapshot,
api,
skip_integrity_check,
},
)
.await?;
Ok(snapshot)
}
#[derive(Debug)]
struct ManagedInNpmPackageChecker {
root_dir: Url,
}
impl InNpmPackageChecker for ManagedInNpmPackageChecker {
fn in_npm_package(&self, specifier: &Url) -> bool {
specifier.as_ref().starts_with(self.root_dir.as_str())
}
}
pub struct CliManagedInNpmPkgCheckerCreateOptions<'a> {
pub root_cache_dir_url: &'a Url,
pub maybe_node_modules_path: Option<&'a Path>,
}
pub fn create_managed_in_npm_pkg_checker(
options: CliManagedInNpmPkgCheckerCreateOptions,
) -> Arc<dyn InNpmPackageChecker> {
let root_dir = match options.maybe_node_modules_path {
Some(node_modules_folder) => {
deno_path_util::url_from_directory_path(node_modules_folder).unwrap()
}
None => options.root_cache_dir_url.clone(),
};
debug_assert!(root_dir.as_str().ends_with('/'));
Arc::new(ManagedInNpmPackageChecker { root_dir })
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PackageCaching<'a> {
Only(Cow<'a, [PackageReq]>),
All,
}
/// An npm resolver where the resolution is managed by Deno rather than
/// the user bringing their own node_modules (BYONM) on the file system.
pub struct ManagedCliNpmResolver {
fs_resolver: Arc<dyn NpmPackageFsResolver>,
maybe_lockfile: Option<Arc<CliLockfile>>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
sys: CliSys,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<CliNpmTarballCache>,
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
top_level_install_flag: AtomicFlag,
lifecycle_scripts: LifecycleScriptsConfig,
}
impl std::fmt::Debug for ManagedCliNpmResolver {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ManagedNpmResolver")
.field("<omitted>", &"<omitted>")
.finish()
}
}
impl ManagedCliNpmResolver {
#[allow(clippy::too_many_arguments)]
pub fn new(
fs_resolver: Arc<dyn NpmPackageFsResolver>,
maybe_lockfile: Option<Arc<CliLockfile>>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
resolution: Arc<NpmResolution>,
sys: CliSys,
tarball_cache: Arc<CliNpmTarballCache>,
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self {
Self {
fs_resolver,
maybe_lockfile,
registry_info_provider,
npm_cache,
npm_install_deps_provider,
text_only_progress_bar,
resolution,
sys,
tarball_cache,
npm_system_info,
top_level_install_flag: Default::default(),
lifecycle_scripts,
}
}
pub fn resolve_pkg_folder_from_pkg_id(
&self,
pkg_id: &NpmPackageId,
) -> Result<PathBuf, AnyError> {
let path = self.fs_resolver.package_folder(pkg_id)?;
let path = canonicalize_path_maybe_not_exists(&self.sys, &path)?;
log::debug!(
"Resolved package folder of {} to {}",
pkg_id.as_serialized(),
path.display()
);
Ok(path)
}
/// Resolves the package id from the provided specifier.
pub fn resolve_pkg_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageId>, AnyError> {
let Some(cache_folder_id) = self
.fs_resolver
.resolve_package_cache_folder_id_from_specifier(specifier)?
else {
return Ok(None);
};
Ok(Some(
self
.resolution
.resolve_pkg_id_from_pkg_cache_folder_id(&cache_folder_id)?,
))
}
pub fn resolve_pkg_reqs_from_pkg_id(
&self,
id: &NpmPackageId,
) -> Vec<PackageReq> {
self.resolution.resolve_pkg_reqs_from_pkg_id(id)
}
/// Attempts to get the package size in bytes.
pub fn package_size(
&self,
package_id: &NpmPackageId,
) -> Result<u64, AnyError> {
let package_folder = self.fs_resolver.package_folder(package_id)?;
Ok(crate::util::fs::dir_size(&package_folder)?)
}
pub fn all_system_packages(
&self,
system_info: &NpmSystemInfo,
) -> Vec<NpmResolutionPackage> {
self.resolution.all_system_packages(system_info)
}
/// Checks if the provided package req's folder is cached.
pub fn is_pkg_req_folder_cached(&self, req: &PackageReq) -> bool {
self
.resolve_pkg_id_from_pkg_req(req)
.ok()
.and_then(|id| self.fs_resolver.package_folder(&id).ok())
.map(|folder| folder.exists())
.unwrap_or(false)
}
/// Adds package requirements to the resolver and ensures everything is set up.
/// This includes setting up the `node_modules` directory, if applicable.
pub async fn add_and_cache_package_reqs(
&self,
packages: &[PackageReq],
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(
packages,
Some(PackageCaching::Only(packages.into())),
)
.await
.dependencies_result
}
pub async fn add_package_reqs_no_cache(
&self,
packages: &[PackageReq],
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(packages, None)
.await
.dependencies_result
}
pub async fn add_package_reqs(
&self,
packages: &[PackageReq],
caching: PackageCaching<'_>,
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(packages, Some(caching))
.await
.dependencies_result
}
pub async fn add_package_reqs_raw<'a>(
&self,
packages: &[PackageReq],
caching: Option<PackageCaching<'a>>,
) -> AddPkgReqsResult {
if packages.is_empty() {
return AddPkgReqsResult {
dependencies_result: Ok(()),
results: vec![],
};
}
let mut result = self.resolution.add_package_reqs(packages).await;
if result.dependencies_result.is_ok() {
if let Some(lockfile) = self.maybe_lockfile.as_ref() {
result.dependencies_result = lockfile.error_if_changed();
}
}
if result.dependencies_result.is_ok() {
if let Some(caching) = caching {
result.dependencies_result = self.cache_packages(caching).await;
}
}
result
}
/// Sets the package requirements on the resolver, removing old requirements and adding new ones.
///
/// This will retrieve and resolve package information, but not cache any package files.
pub async fn set_package_reqs(
&self,
packages: &[PackageReq],
) -> Result<(), AnyError> {
self.resolution.set_package_reqs(packages).await
}
pub fn snapshot(&self) -> NpmResolutionSnapshot {
self.resolution.snapshot()
}
pub fn top_package_req_for_name(&self, name: &str) -> Option<PackageReq> {
let package_reqs = self.resolution.package_reqs();
let mut entries = package_reqs
.iter()
.filter(|(_, nv)| nv.name == name)
.collect::<Vec<_>>();
entries.sort_by_key(|(_, nv)| &nv.version);
Some(entries.last()?.0.clone())
}
pub fn serialized_valid_snapshot_for_system(
&self,
system_info: &NpmSystemInfo,
) -> ValidSerializedNpmResolutionSnapshot {
self
.resolution
.serialized_valid_snapshot_for_system(system_info)
}
pub async fn inject_synthetic_types_node_package(
&self,
) -> Result<(), AnyError> {
let reqs = &[PackageReq::from_str("@types/node").unwrap()];
// add and ensure this isn't added to the lockfile
self
.add_package_reqs(reqs, PackageCaching::Only(reqs.into()))
.await?;
Ok(())
}
pub async fn cache_packages(
&self,
caching: PackageCaching<'_>,
) -> Result<(), AnyError> {
self.fs_resolver.cache_packages(caching).await
}
pub fn resolve_pkg_folder_from_deno_module(
&self,
nv: &PackageNv,
) -> Result<PathBuf, AnyError> {
let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(nv)?;
self.resolve_pkg_folder_from_pkg_id(&pkg_id)
}
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> {
self.resolution.resolve_pkg_id_from_pkg_req(req)
}
pub fn ensure_no_pkg_json_dep_errors(
&self,
) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> {
for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
match err.source.as_kind() {
deno_package_json::PackageJsonDepValueParseErrorKind::VersionReq(_) => {
return Err(Box::new(err.clone()));
}
deno_package_json::PackageJsonDepValueParseErrorKind::Unsupported {
..
} => {
// only warn for this one
log::warn!(
"{} {}\n at {}",
colors::yellow("Warning"),
err.source,
err.location,
)
}
}
}
Ok(())
}
/// Ensures that the top level `package.json` dependencies are installed.
/// This may set up the `node_modules` directory.
///
/// Returns `true` if the top level packages are already installed. A
/// return value of `false` means that new packages were added to the NPM resolution.
pub async fn ensure_top_level_package_json_install(
&self,
) -> Result<bool, AnyError> {
if !self.top_level_install_flag.raise() {
return Ok(true); // already did this
}
let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
if pkg_json_remote_pkgs.is_empty() {
return Ok(true);
}
// check if something needs resolving before bothering to load all
// the package information (which is slow)
if pkg_json_remote_pkgs.iter().all(|pkg| {
self
.resolution
.resolve_pkg_id_from_pkg_req(&pkg.req)
.is_ok()
}) {
log::debug!(
"All package.json deps resolvable. Skipping top level install."
);
return Ok(true); // everything is already resolvable
}
let pkg_reqs = pkg_json_remote_pkgs
.iter()
.map(|pkg| pkg.req.clone())
.collect::<Vec<_>>();
self.add_package_reqs_no_cache(&pkg_reqs).await?;
Ok(false)
}
pub async fn cache_package_info(
&self,
package_name: &str,
) -> Result<Arc<NpmPackageInfo>, AnyError> {
// this will internally cache the package information
self
.registry_info_provider
.package_info(package_name)
.await
.map_err(|err| err.into())
}
pub fn maybe_node_modules_path(&self) -> Option<&Path> {
self.fs_resolver.node_modules_path()
}
pub fn global_cache_root_path(&self) -> &Path {
self.npm_cache.root_dir_path()
}
pub fn global_cache_root_url(&self) -> &Url {
self.npm_cache.root_dir_url()
}
}
fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
impl NpmPackageFolderResolver for ManagedCliNpmResolver {
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError> {
let path = self
.fs_resolver
.resolve_package_folder_from_package(name, referrer)?;
let path =
canonicalize_path_maybe_not_exists(&self.sys, &path).map_err(|err| {
PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err,
}
})?;
log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
Ok(path)
}
}
impl NpmProcessStateProvider for ManagedCliNpmResolver {
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.resolution.serialized_valid_snapshot(),
self.fs_resolver.node_modules_path(),
)
}
}
impl CliNpmReqResolver for ManagedCliNpmResolver {
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
_referrer: &ModuleSpecifier,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
let pkg_id = self
.resolve_pkg_id_from_pkg_req(req)
.map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?;
self
.resolve_pkg_folder_from_pkg_id(&pkg_id)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
}
}
impl CliNpmResolver for ManagedCliNpmResolver {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self
}
fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> {
self
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
self
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
// create a new snapshotted npm resolution and resolver
let npm_resolution = Arc::new(NpmResolution::new(
self.registry_info_provider.clone(),
self.resolution.snapshot(),
self.maybe_lockfile.clone(),
));
Arc::new(ManagedCliNpmResolver::new(
create_npm_fs_resolver(
self.npm_cache.clone(),
&self.npm_install_deps_provider,
&self.text_only_progress_bar,
npm_resolution.clone(),
self.sys.clone(),
self.tarball_cache.clone(),
self.root_node_modules_path().map(ToOwned::to_owned),
self.npm_system_info.clone(),
self.lifecycle_scripts.clone(),
),
self.maybe_lockfile.clone(),
self.registry_info_provider.clone(),
self.npm_cache.clone(),
self.npm_install_deps_provider.clone(),
npm_resolution,
self.sys.clone(),
self.tarball_cache.clone(),
self.text_only_progress_bar.clone(),
self.npm_system_info.clone(),
self.lifecycle_scripts.clone(),
))
}
fn as_inner(&self) -> InnerCliNpmResolverRef {
InnerCliNpmResolverRef::Managed(self)
}
fn root_node_modules_path(&self) -> Option<&Path> {
self.fs_resolver.node_modules_path()
}
fn check_state_hash(&self) -> Option<u64> {
// We could go further and check all the individual
// npm packages, but that's probably overkill.
let mut package_reqs = self
.resolution
.package_reqs()
.into_iter()
.collect::<Vec<_>>();
package_reqs.sort_by(|a, b| a.0.cmp(&b.0)); // determinism
let mut hasher = FastInsecureHasher::new_without_deno_version();
// ensure the cache gets busted when turning nodeModulesDir on or off
// as this could cause changes in resolution
hasher.write_hashable(self.fs_resolver.node_modules_path().is_some());
for (pkg_req, pkg_nv) in package_reqs {
hasher.write_hashable(&pkg_req);
hasher.write_hashable(&pkg_nv);
}
Some(hasher.finish())
}
}

View file

@ -1,53 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod bin_entries;
pub mod lifecycle_scripts;
use std::path::Path;
use std::path::PathBuf;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use node_resolver::errors::PackageFolderResolveError;
use super::super::PackageCaching;
/// Part of the resolution that interacts with the file system.
#[async_trait(?Send)]
pub trait NpmPackageFsResolver: Send + Sync {
/// The local node_modules folder if it is applicable to the implementation.
fn node_modules_path(&self) -> Option<&Path>;
fn maybe_package_folder(&self, package_id: &NpmPackageId) -> Option<PathBuf>;
fn package_folder(
&self,
package_id: &NpmPackageId,
) -> Result<PathBuf, AnyError> {
self.maybe_package_folder(package_id).ok_or_else(|| {
deno_core::anyhow::anyhow!(
"Package folder not found for '{}'",
package_id.as_serialized()
)
})
}
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, PackageFolderResolveError>;
fn resolve_package_cache_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageCacheFolderId>, AnyError>;
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError>;
}

View file

@ -1,55 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod common;
mod global;
mod local;
use std::path::PathBuf;
use std::sync::Arc;
use deno_npm::NpmSystemInfo;
pub use self::common::NpmPackageFsResolver;
use self::global::GlobalNpmPackageResolver;
use self::local::LocalNpmPackageResolver;
use super::resolution::NpmResolution;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
#[allow(clippy::too_many_arguments)]
pub fn create_npm_fs_resolver(
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: &Arc<NpmInstallDepsProvider>,
progress_bar: &ProgressBar,
resolution: Arc<NpmResolution>,
sys: CliSys,
tarball_cache: Arc<CliNpmTarballCache>,
maybe_node_modules_path: Option<PathBuf>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Arc<dyn NpmPackageFsResolver> {
match maybe_node_modules_path {
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
npm_cache,
npm_install_deps_provider.clone(),
progress_bar.clone(),
resolution,
sys,
tarball_cache,
node_modules_folder,
system_info,
lifecycle_scripts,
)),
None => Arc::new(GlobalNpmPackageResolver::new(
npm_cache,
tarball_cache,
resolution,
system_info,
lifecycle_scripts,
)),
}
}

View file

@ -1,38 +1,37 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod byonm;
pub mod installer;
mod managed;
mod permission_checker;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use dashmap::DashMap;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmInNpmPackageChecker;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::CliNpmReqResolver;
use deno_resolver::npm::ByonmOrManagedNpmResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use http::HeaderName;
use http::HeaderValue;
use managed::create_managed_in_npm_pkg_checker;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
pub use self::byonm::CliByonmNpmResolver;
pub use self::byonm::CliByonmNpmResolverCreateOptions;
pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver;
pub use self::managed::PackageCaching;
pub use self::managed::NpmResolutionInitializer;
pub use self::managed::ResolveSnapshotError;
pub use self::permission_checker::NpmRegistryReadPermissionChecker;
pub use self::permission_checker::NpmRegistryReadPermissionCheckerMode;
use crate::file_fetcher::CliFileFetcher;
@ -90,14 +89,16 @@ impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient {
| Json { .. }
| ToStr { .. }
| RedirectHeaderParse { .. }
| TooManyRedirects => None,
| TooManyRedirects
| NotFound
| Other(_) => None,
BadResponse(bad_response_error) => {
Some(bad_response_error.status_code)
}
};
deno_npm_cache::DownloadError {
status_code,
error: err.into(),
error: JsErrorBox::from_err(err),
}
})
}
@ -108,61 +109,33 @@ pub enum CliNpmResolverCreateOptions {
Byonm(CliByonmNpmResolverCreateOptions),
}
pub async fn create_cli_npm_resolver_for_lsp(
pub fn create_cli_npm_resolver(
options: CliNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> {
use CliNpmResolverCreateOptions::*;
match options {
Managed(options) => {
managed::create_managed_npm_resolver_for_lsp(options).await
}
Managed(options) => managed::create_managed_npm_resolver(options),
Byonm(options) => Arc::new(ByonmNpmResolver::new(options)),
}
}
pub async fn create_cli_npm_resolver(
options: CliNpmResolverCreateOptions,
) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
use CliNpmResolverCreateOptions::*;
match options {
Managed(options) => managed::create_managed_npm_resolver(options).await,
Byonm(options) => Ok(Arc::new(ByonmNpmResolver::new(options))),
}
}
pub enum CreateInNpmPkgCheckerOptions<'a> {
Managed(CliManagedInNpmPkgCheckerCreateOptions<'a>),
Byonm,
}
pub fn create_in_npm_pkg_checker(
options: CreateInNpmPkgCheckerOptions,
) -> Arc<dyn InNpmPackageChecker> {
match options {
CreateInNpmPkgCheckerOptions::Managed(options) => {
create_managed_in_npm_pkg_checker(options)
}
CreateInNpmPkgCheckerOptions::Byonm => Arc::new(ByonmInNpmPackageChecker),
}
}
pub enum InnerCliNpmResolverRef<'a> {
Managed(&'a ManagedCliNpmResolver),
#[allow(dead_code)]
Byonm(&'a CliByonmNpmResolver),
}
pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
// todo(dsherret): replace with an enum
pub trait CliNpmResolver: Send + Sync + std::fmt::Debug {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver>;
fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver>;
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider>;
fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> {
None
}
fn into_byonm_or_managed(
self: Arc<Self>,
) -> ByonmOrManagedNpmResolver<CliSys>;
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>;
@ -182,6 +155,12 @@ pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
}
}
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>;
fn root_node_modules_path(&self) -> Option<&Path>;
/// Returns a hash representing the state of the npm resolver

View file

@ -6,9 +6,8 @@ use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_error::JsErrorBox;
use deno_runtime::deno_node::NodePermissions;
use sys_traits::FsCanonicalize;
@ -28,6 +27,16 @@ pub struct NpmRegistryReadPermissionChecker {
mode: NpmRegistryReadPermissionCheckerMode,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(inherit)]
#[error("failed canonicalizing '{path}'")]
struct EnsureRegistryReadPermissionError {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
}
impl NpmRegistryReadPermissionChecker {
pub fn new(sys: CliSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
Self {
@ -42,7 +51,7 @@ impl NpmRegistryReadPermissionChecker {
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
) -> Result<Cow<'a, Path>, JsErrorBox> {
if permissions.query_read_all() {
return Ok(Cow::Borrowed(path)); // skip permissions checks below
}
@ -52,7 +61,9 @@ impl NpmRegistryReadPermissionChecker {
if path.components().any(|c| c.as_os_str() == "node_modules") {
Ok(Cow::Borrowed(path))
} else {
permissions.check_read_path(path).map_err(Into::into)
permissions
.check_read_path(path)
.map_err(JsErrorBox::from_err)
}
}
NpmRegistryReadPermissionCheckerMode::Global(registry_path)
@ -66,7 +77,7 @@ impl NpmRegistryReadPermissionChecker {
if is_path_in_node_modules {
let mut cache = self.cache.lock();
let mut canonicalize =
|path: &Path| -> Result<Option<PathBuf>, AnyError> {
|path: &Path| -> Result<Option<PathBuf>, JsErrorBox> {
match cache.get(path) {
Some(canon) => Ok(Some(canon.clone())),
None => match self.sys.fs_canonicalize(path) {
@ -78,9 +89,12 @@ impl NpmRegistryReadPermissionChecker {
if e.kind() == ErrorKind::NotFound {
return Ok(None);
}
Err(AnyError::from(e)).with_context(|| {
format!("failed canonicalizing '{}'", path.display())
})
Err(JsErrorBox::from_err(
EnsureRegistryReadPermissionError {
path: path.to_path_buf(),
source: e,
},
))
}
},
}
@ -98,7 +112,9 @@ impl NpmRegistryReadPermissionChecker {
}
}
permissions.check_read_path(path).map_err(Into::into)
permissions
.check_read_path(path)
.map_err(JsErrorBox::from_err)
}
}
}
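The permission checker above memoizes canonicalization results and treats a missing file as "no canonical path" rather than an error. A minimal, self-contained sketch of that pattern, using `std::fs` in place of the CLI's injected file-system trait:

use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};

fn canonicalize_cached(
  cache: &mut HashMap<PathBuf, PathBuf>,
  path: &Path,
) -> std::io::Result<Option<PathBuf>> {
  if let Some(canon) = cache.get(path) {
    return Ok(Some(canon.clone()));
  }
  match std::fs::canonicalize(path) {
    Ok(canon) => {
      cache.insert(path.to_path_buf(), canon.clone());
      Ok(Some(canon))
    }
    // a missing file or dangling symlink is simply "not canonicalizable"
    Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
    Err(e) => Err(e),
  }
}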

View file

@ -3,13 +3,11 @@
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::v8;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_error::JsErrorBox;
use deno_runtime::deno_permissions::ChildPermissionsArg;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::StartTime;
@ -78,7 +76,7 @@ pub fn op_pledge_test_permissions(
pub fn op_restore_test_permissions(
state: &mut OpState,
#[serde] token: Uuid,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
if let Some(permissions_holder) = state.try_take::<PermissionsHolder>() {
if token != permissions_holder.0 {
panic!("restore test permissions token does not match the stored token");
@ -88,7 +86,7 @@ pub fn op_restore_test_permissions(
state.put::<PermissionsContainer>(permissions);
Ok(())
} else {
Err(generic_error("no permissions to restore"))
Err(JsErrorBox::generic("no permissions to restore"))
}
}
@ -106,9 +104,9 @@ fn op_register_bench(
only: bool,
warmup: bool,
#[buffer] ret_buf: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
if ret_buf.len() != 4 {
return Err(type_error(format!(
return Err(JsErrorBox::type_error(format!(
"Invalid ret_buf length: {}",
ret_buf.len()
)));

View file

@ -94,10 +94,12 @@ pub fn op_jupyter_input(
None
}
#[derive(Debug, thiserror::Error)]
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum JupyterBroadcastError {
#[class(inherit)]
#[error(transparent)]
SerdeJson(serde_json::Error),
#[class(generic)]
#[error(transparent)]
ZeroMq(AnyError),
}

View file

@ -2,25 +2,36 @@
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_ast::ParseDiagnostic;
use deno_core::op2;
use crate::tools::lint;
deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],);
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LintError {
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
#[class(inherit)]
#[error(transparent)]
ParseDiagnostic(#[from] ParseDiagnostic),
#[class(type)]
#[error("Failed to parse path as URL: {0}")]
PathParse(std::path::PathBuf),
}
#[op2]
#[buffer]
fn op_lint_create_serialized_ast(
#[string] file_name: &str,
#[string] source: String,
) -> Result<Vec<u8>, AnyError> {
) -> Result<Vec<u8>, LintError> {
let file_text = deno_ast::strip_bom(source);
let path = std::env::current_dir()?.join(file_name);
let specifier = ModuleSpecifier::from_file_path(&path).map_err(|_| {
generic_error(format!("Failed to parse path as URL: {}", path.display()))
})?;
let specifier = ModuleSpecifier::from_file_path(&path)
.map_err(|_| LintError::PathParse(path))?;
let media_type = MediaType::from_specifier(&specifier);
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
specifier,

View file

@ -3,13 +3,11 @@
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::v8;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_error::JsErrorBox;
use deno_runtime::deno_permissions::ChildPermissionsArg;
use deno_runtime::deno_permissions::PermissionsContainer;
use uuid::Uuid;
@ -73,7 +71,7 @@ pub fn op_pledge_test_permissions(
pub fn op_restore_test_permissions(
state: &mut OpState,
#[serde] token: Uuid,
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
if let Some(permissions_holder) = state.try_take::<PermissionsHolder>() {
if token != permissions_holder.0 {
panic!("restore test permissions token does not match the stored token");
@ -83,7 +81,7 @@ pub fn op_restore_test_permissions(
state.put::<PermissionsContainer>(permissions);
Ok(())
} else {
Err(generic_error("no permissions to restore"))
Err(JsErrorBox::generic("no permissions to restore"))
}
}
@ -103,9 +101,9 @@ fn op_register_test(
#[smi] line_number: u32,
#[smi] column_number: u32,
#[buffer] ret_buf: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), JsErrorBox> {
if ret_buf.len() != 4 {
return Err(type_error(format!(
return Err(JsErrorBox::type_error(format!(
"Invalid ret_buf length: {}",
ret_buf.len()
)));

View file

@ -1,27 +1,25 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use async_trait::async_trait;
use dashmap::DashMap;
use dashmap::DashSet;
use deno_ast::MediaType;
use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::source::ResolveError;
use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::sloppy_imports::SloppyImportsResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs;
@ -30,26 +28,25 @@ use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use deno_semver::package::PackageReq;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use sys_traits::FsMetadata;
use sys_traits::FsMetadataValue;
use thiserror::Error;
use crate::args::NpmCachingStrategy;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::sys::CliSys;
use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_cow;
pub type CjsTracker = deno_resolver::cjs::CjsTracker<CliSys>;
pub type IsCjsResolver = deno_resolver::cjs::IsCjsResolver<CliSys>;
pub type CliSloppyImportsCachedFs = SloppyImportsCachedFs<CliSys>;
pub type CliSloppyImportsResolver =
SloppyImportsResolver<SloppyImportsCachedFs>;
SloppyImportsResolver<CliSloppyImportsCachedFs>;
pub type CliDenoResolver = deno_resolver::DenoResolver<
RealIsBuiltInNodeModuleChecker,
SloppyImportsCachedFs,
CliSloppyImportsCachedFs,
CliSys,
>;
pub type CliNpmReqResolver =
@ -61,7 +58,8 @@ pub struct ModuleCodeStringSource {
pub media_type: MediaType,
}
#[derive(Debug, Error)]
#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("{media_type} files are not supported in npm packages: {specifier}")]
pub struct NotSupportedKindInNpmError {
pub media_type: MediaType,
@ -166,48 +164,30 @@ impl NpmModuleLoader {
}
}
pub struct CliResolverOptions {
pub deno_resolver: Arc<CliDenoResolver>,
pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
pub bare_node_builtins_enabled: bool,
}
#[derive(Debug, Default)]
pub struct FoundPackageJsonDepFlag(AtomicFlag);
/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug)]
pub struct CliResolver {
deno_resolver: Arc<CliDenoResolver>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
found_package_json_dep_flag: AtomicFlag,
bare_node_builtins_enabled: bool,
found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
warned_pkgs: DashSet<PackageReq>,
}
impl CliResolver {
pub fn new(options: CliResolverOptions) -> Self {
pub fn new(
deno_resolver: Arc<CliDenoResolver>,
found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
) -> Self {
Self {
deno_resolver: options.deno_resolver,
npm_resolver: options.npm_resolver,
found_package_json_dep_flag: Default::default(),
bare_node_builtins_enabled: options.bare_node_builtins_enabled,
deno_resolver,
found_package_json_dep_flag,
warned_pkgs: Default::default(),
}
}
// todo(dsherret): move this off CliResolver as CliResolver is acting
// like a factory by doing this (it's beyond its responsibility)
pub fn create_graph_npm_resolver(
&self,
npm_caching: NpmCachingStrategy,
) -> WorkerCliNpmGraphResolver {
WorkerCliNpmGraphResolver {
npm_resolver: self.npm_resolver.as_ref(),
found_package_json_dep_flag: &self.found_package_json_dep_flag,
bare_node_builtins_enabled: self.bare_node_builtins_enabled,
npm_caching,
}
}
pub fn resolve(
&self,
raw_specifier: &str,
@ -225,15 +205,17 @@ impl CliResolver {
) => match mapped_resolution_error {
MappedResolutionError::Specifier(e) => ResolveError::Specifier(e),
// deno_graph checks specifically for an ImportMapError
MappedResolutionError::ImportMap(e) => ResolveError::Other(e.into()),
err => ResolveError::Other(err.into()),
MappedResolutionError::ImportMap(e) => ResolveError::ImportMap(e),
MappedResolutionError::Workspace(e) => {
ResolveError::Other(JsErrorBox::from_err(e))
}
},
err => ResolveError::Other(err.into()),
err => ResolveError::Other(JsErrorBox::from_err(err)),
})?;
if resolution.found_package_json_dep {
// mark that we need to do an "npm install" later
self.found_package_json_dep_flag.raise();
self.found_package_json_dep_flag.0.raise();
}
if let Some(diagnostic) = resolution.maybe_diagnostic {
@ -260,15 +242,31 @@ impl CliResolver {
}
#[derive(Debug)]
pub struct WorkerCliNpmGraphResolver<'a> {
npm_resolver: Option<&'a Arc<dyn CliNpmResolver>>,
found_package_json_dep_flag: &'a AtomicFlag,
pub struct CliNpmGraphResolver {
npm_installer: Option<Arc<NpmInstaller>>,
found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
bare_node_builtins_enabled: bool,
npm_caching: NpmCachingStrategy,
}
impl CliNpmGraphResolver {
pub fn new(
npm_installer: Option<Arc<NpmInstaller>>,
found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
bare_node_builtins_enabled: bool,
npm_caching: NpmCachingStrategy,
) -> Self {
Self {
npm_installer,
found_package_json_dep_flag,
bare_node_builtins_enabled,
npm_caching,
}
}
}
#[async_trait(?Send)]
impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
impl deno_graph::source::NpmResolver for CliNpmGraphResolver {
fn resolve_builtin_node_module(
&self,
specifier: &ModuleSpecifier,
@ -298,17 +296,12 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
}
fn load_and_cache_npm_package_info(&self, package_name: &str) {
match self.npm_resolver {
Some(npm_resolver) if npm_resolver.as_managed().is_some() => {
let npm_resolver = npm_resolver.clone();
let package_name = package_name.to_string();
deno_core::unsync::spawn(async move {
if let Some(managed) = npm_resolver.as_managed() {
let _ignore = managed.cache_package_info(&package_name).await;
}
});
}
_ => {}
if let Some(npm_installer) = &self.npm_installer {
let npm_installer = npm_installer.clone();
let package_name = package_name.to_string();
deno_core::unsync::spawn(async move {
let _ignore = npm_installer.cache_package_info(&package_name).await;
});
}
}
@ -316,17 +309,11 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
&self,
package_reqs: &[PackageReq],
) -> NpmResolvePkgReqsResult {
match &self.npm_resolver {
Some(npm_resolver) => {
let npm_resolver = match npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(npm_resolver) => npm_resolver,
// if we are using byonm, then this should never be called because
// we don't use deno_graph's npm resolution in this case
InnerCliNpmResolverRef::Byonm(_) => unreachable!(),
};
let top_level_result = if self.found_package_json_dep_flag.is_raised() {
npm_resolver
match &self.npm_installer {
Some(npm_installer) => {
let top_level_result = if self.found_package_json_dep_flag.0.is_raised()
{
npm_installer
.ensure_top_level_package_json_install()
.await
.map(|_| ())
@ -334,15 +321,13 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
Ok(())
};
let result = npm_resolver
let result = npm_installer
.add_package_reqs_raw(
package_reqs,
match self.npm_caching {
NpmCachingStrategy::Eager => {
Some(crate::npm::PackageCaching::All)
}
NpmCachingStrategy::Eager => Some(PackageCaching::All),
NpmCachingStrategy::Lazy => {
Some(crate::npm::PackageCaching::Only(package_reqs.into()))
Some(PackageCaching::Only(package_reqs.into()))
}
NpmCachingStrategy::Manual => None,
},
@ -356,26 +341,28 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
.map(|r| {
r.map_err(|err| match err {
NpmResolutionError::Registry(e) => {
NpmLoadError::RegistryInfo(Arc::new(e.into()))
NpmLoadError::RegistryInfo(Arc::new(e))
}
NpmResolutionError::Resolution(e) => {
NpmLoadError::PackageReqResolution(Arc::new(e.into()))
NpmLoadError::PackageReqResolution(Arc::new(e))
}
NpmResolutionError::DependencyEntry(e) => {
NpmLoadError::PackageReqResolution(Arc::new(e.into()))
NpmLoadError::PackageReqResolution(Arc::new(e))
}
})
})
.collect(),
dep_graph_result: match top_level_result {
Ok(()) => result.dependencies_result.map_err(Arc::new),
Ok(()) => result
.dependencies_result
.map_err(|e| Arc::new(e) as Arc<dyn deno_error::JsErrorClass>),
Err(err) => Err(Arc::new(err)),
},
}
}
None => {
let err = Arc::new(anyhow!(
"npm specifiers were requested; but --no-npm is specified"
let err = Arc::new(JsErrorBox::generic(
"npm specifiers were requested; but --no-npm is specified",
));
NpmResolvePkgReqsResult {
results: package_reqs
@ -392,60 +379,3 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
self.bare_node_builtins_enabled
}
}
#[derive(Debug)]
pub struct SloppyImportsCachedFs {
sys: CliSys,
cache: Option<
DashMap<
PathBuf,
Option<deno_resolver::sloppy_imports::SloppyImportsFsEntry>,
>,
>,
}
impl SloppyImportsCachedFs {
pub fn new(sys: CliSys) -> Self {
Self {
sys,
cache: Some(Default::default()),
}
}
pub fn new_without_stat_cache(fs: CliSys) -> Self {
Self {
sys: fs,
cache: None,
}
}
}
impl deno_resolver::sloppy_imports::SloppyImportResolverFs
for SloppyImportsCachedFs
{
fn stat_sync(
&self,
path: &Path,
) -> Option<deno_resolver::sloppy_imports::SloppyImportsFsEntry> {
if let Some(cache) = &self.cache {
if let Some(entry) = cache.get(path) {
return *entry;
}
}
let entry = self.sys.fs_metadata(path).ok().and_then(|stat| {
if stat.file_type().is_file() {
Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::File)
} else if stat.file_type().is_dir() {
Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::Dir)
} else {
None
}
});
if let Some(cache) = &self.cache {
cache.insert(path.to_owned(), entry);
}
entry
}
}
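
The SloppyImportsCachedFs removed above now lives in the deno_resolver crate (hence the new deno_resolver::sloppy_imports imports earlier in this file); its job is to memoize filesystem metadata lookups per path. A rough std-only sketch of that stat-cache idea, with Mutex<HashMap> standing in for DashMap:

use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Mutex;

#[derive(Debug, Clone, Copy, PartialEq)]
enum FsEntry {
    File,
    Dir,
}

#[derive(Default)]
struct CachedStat {
    // `None` for a path means "looked up before, but neither file nor dir".
    cache: Mutex<HashMap<PathBuf, Option<FsEntry>>>,
}

impl CachedStat {
    fn stat(&self, path: &Path) -> Option<FsEntry> {
        if let Some(hit) = self.cache.lock().unwrap().get(path) {
            return *hit;
        }
        let entry = std::fs::metadata(path).ok().and_then(|m| {
            if m.is_file() {
                Some(FsEntry::File)
            } else if m.is_dir() {
                Some(FsEntry::Dir)
            } else {
                None
            }
        });
        self.cache.lock().unwrap().insert(path.to_path_buf(), entry);
        entry
    }
}

fn main() {
    let stat = CachedStat::default();
    // Second call is served from the cache without touching the fs again.
    let first = stat.stat(Path::new("Cargo.toml"));
    assert_eq!(first, stat.stat(Path::new("Cargo.toml")));
}
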

View file

@ -73,6 +73,7 @@ use super::serialization::SourceMapStore;
use super::virtual_fs::output_vfs;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::FileSystemCaseSensitivity;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsFileSubDataKind;
use super::virtual_fs::VfsRoot;
@ -202,6 +203,7 @@ pub struct Metadata {
pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig,
pub otel_config: OtelConfig,
pub vfs_case_sensitivity: FileSystemCaseSensitivity,
}
#[allow(clippy::too_many_arguments)]
@ -379,7 +381,11 @@ pub fn extract_standalone(
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(Cow::Borrowed(vfs_files_data), fs_root))
Arc::new(FileBackedVfs::new(
Cow::Borrowed(vfs_files_data),
fs_root,
metadata.vfs_case_sensitivity,
))
};
Ok(Some(StandaloneData {
metadata,
@ -851,6 +857,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
npm_lazy_caching: self.cli_options.unstable_npm_lazy_caching(),
},
otel_config: self.cli_options.otel_config(),
vfs_case_sensitivity: vfs.case_sensitivity,
};
write_binary_bytes(
@ -984,11 +991,15 @@ impl<'a> DenoCompileBinaryWriter<'a> {
// it's better to not expose the user's cache directory, so take it out
// of there
let case_sensitivity = vfs.case_sensitivity();
let parent = global_cache_root_path.parent().unwrap();
let parent_dir = vfs.get_dir_mut(parent).unwrap();
let index = parent_dir
.entries
.binary_search(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME)
.binary_search(
DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME,
case_sensitivity,
)
.unwrap();
let npm_global_cache_dir_entry = parent_dir.entries.remove(index);
@ -998,7 +1009,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
Cow::Borrowed(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME);
for ancestor in parent.ancestors() {
let dir = vfs.get_dir_mut(ancestor).unwrap();
if let Ok(index) = dir.entries.binary_search(&last_name) {
if let Ok(index) =
dir.entries.binary_search(&last_name, case_sensitivity)
{
dir.entries.remove(index);
}
last_name = Cow::Owned(dir.name.clone());
@ -1009,7 +1022,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
// now build the vfs and add the global cache dir entry there
let mut built_vfs = vfs.build();
built_vfs.entries.insert(npm_global_cache_dir_entry);
built_vfs
.entries
.insert(npm_global_cache_dir_entry, case_sensitivity);
built_vfs
}
InnerCliNpmResolverRef::Byonm(_) => vfs.build(),
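
The writer above relocates the global npm cache directory by binary-searching the sorted entries of its parent, removing it, and re-inserting it at the VFS root, now passing along the case sensitivity the VFS was built with. A std-only sketch of that sorted-Vec maintenance (insert-or-replace keyed by binary_search), ignoring the case-sensitivity parameter the real code threads through:

// Keep a Vec<String> sorted and insert-or-replace entries by name,
// mirroring VirtualDirectoryEntries::insert above.
fn insert_sorted(entries: &mut Vec<String>, entry: String) {
    match entries.binary_search(&entry) {
        Ok(index) => entries[index] = entry, // already present: replace
        Err(insert_index) => entries.insert(insert_index, entry),
    }
}

fn main() {
    let mut entries = vec!["bin".to_string(), "lib".to_string()];

    // Remove an entry from one directory...
    let index = entries.binary_search(&"lib".to_string()).unwrap();
    let moved = entries.remove(index);

    // ...and re-insert it elsewhere, keeping the destination sorted.
    let mut root = vec!["README.md".to_string(), "src".to_string()];
    insert_sorted(&mut root, moved);
    assert_eq!(root, ["README.md", "lib", "src"]);
}
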

View file

@ -21,9 +21,8 @@ use deno_config::workspace::MappedResolutionError;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::error::ModuleLoaderError;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::FutureExt;
use deno_core::v8_set_flags;
@ -36,9 +35,15 @@ use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo;
use deno_error::JsErrorBox;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::create_in_npm_pkg_checker;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
@ -81,16 +86,14 @@ use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::create_cli_npm_resolver;
use crate::npm::create_in_npm_pkg_checker;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::NpmRegistryReadPermissionChecker;
use crate::npm::NpmRegistryReadPermissionCheckerMode;
use crate::npm::NpmResolutionInitializer;
use crate::resolver::CjsTracker;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::NpmModuleLoader;
@ -182,25 +185,32 @@ impl ModuleLoader for EmbeddedModuleLoader {
raw_specifier: &str,
referrer: &str,
kind: ResolutionKind,
) -> Result<ModuleSpecifier, AnyError> {
) -> Result<ModuleSpecifier, ModuleLoaderError> {
let referrer = if referrer == "." {
if kind != ResolutionKind::MainModule {
return Err(generic_error(format!(
"Expected to resolve main module, got {:?} instead.",
kind
)));
return Err(
JsErrorBox::generic(format!(
"Expected to resolve main module, got {:?} instead.",
kind
))
.into(),
);
}
let current_dir = std::env::current_dir().unwrap();
deno_core::resolve_path(".", &current_dir)?
} else {
ModuleSpecifier::parse(referrer).map_err(|err| {
type_error(format!("Referrer uses invalid specifier: {}", err))
JsErrorBox::type_error(format!(
"Referrer uses invalid specifier: {}",
err
))
})?
};
let referrer_kind = if self
.shared
.cjs_tracker
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))?
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))
.map_err(JsErrorBox::from_err)?
{
ResolutionMode::Require
} else {
@ -217,7 +227,8 @@ impl ModuleLoader for EmbeddedModuleLoader {
&referrer,
referrer_kind,
NodeResolutionKind::Execution,
)?
)
.map_err(JsErrorBox::from_err)?
.into_url(),
);
}
@ -245,14 +256,18 @@ impl ModuleLoader for EmbeddedModuleLoader {
Some(&referrer),
referrer_kind,
NodeResolutionKind::Execution,
)?,
)
.map_err(JsErrorBox::from_err)?,
),
Ok(MappedResolution::PackageJson {
dep_result,
sub_path,
alias,
..
}) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? {
}) => match dep_result
.as_ref()
.map_err(|e| JsErrorBox::from_err(e.clone()))?
{
PackageJsonDepValue::Req(req) => self
.shared
.npm_req_resolver
@ -263,7 +278,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
referrer_kind,
NodeResolutionKind::Execution,
)
.map_err(AnyError::from),
.map_err(|e| JsErrorBox::from_err(e).into()),
PackageJsonDepValue::Workspace(version_req) => {
let pkg_folder = self
.shared
@ -271,7 +286,8 @@ impl ModuleLoader for EmbeddedModuleLoader {
.resolve_workspace_pkg_json_folder_for_pkg_json_dep(
alias,
version_req,
)?;
)
.map_err(JsErrorBox::from_err)?;
Ok(
self
.shared
@ -282,7 +298,8 @@ impl ModuleLoader for EmbeddedModuleLoader {
Some(&referrer),
referrer_kind,
NodeResolutionKind::Execution,
)?,
)
.map_err(JsErrorBox::from_err)?,
)
}
},
@ -291,12 +308,18 @@ impl ModuleLoader for EmbeddedModuleLoader {
if let Ok(reference) =
NpmPackageReqReference::from_specifier(&specifier)
{
return Ok(self.shared.npm_req_resolver.resolve_req_reference(
&reference,
&referrer,
referrer_kind,
NodeResolutionKind::Execution,
)?);
return Ok(
self
.shared
.npm_req_resolver
.resolve_req_reference(
&reference,
&referrer,
referrer_kind,
NodeResolutionKind::Execution,
)
.map_err(JsErrorBox::from_err)?,
);
}
if specifier.scheme() == "jsr" {
@ -318,18 +341,22 @@ impl ModuleLoader for EmbeddedModuleLoader {
Err(err)
if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
{
let maybe_res = self.shared.npm_req_resolver.resolve_if_for_npm_pkg(
raw_specifier,
&referrer,
referrer_kind,
NodeResolutionKind::Execution,
)?;
let maybe_res = self
.shared
.npm_req_resolver
.resolve_if_for_npm_pkg(
raw_specifier,
&referrer,
referrer_kind,
NodeResolutionKind::Execution,
)
.map_err(JsErrorBox::from_err)?;
if let Some(res) = maybe_res {
return Ok(res.into_url());
}
Err(err.into())
Err(JsErrorBox::from_err(err).into())
}
Err(err) => Err(err.into()),
Err(err) => Err(JsErrorBox::from_err(err).into()),
}
}
@ -360,9 +387,9 @@ impl ModuleLoader for EmbeddedModuleLoader {
{
Ok(response) => response,
Err(err) => {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{:#}", err),
)));
return deno_core::ModuleLoadResponse::Sync(Err(
JsErrorBox::type_error(format!("{:#}", err)).into(),
));
}
};
return deno_core::ModuleLoadResponse::Sync(Ok(
@ -420,9 +447,9 @@ impl ModuleLoader for EmbeddedModuleLoader {
{
Ok(is_maybe_cjs) => is_maybe_cjs,
Err(err) => {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{:?}", err),
)));
return deno_core::ModuleLoadResponse::Sync(Err(
JsErrorBox::type_error(format!("{:?}", err)).into(),
));
}
};
if is_maybe_cjs {
@ -482,12 +509,16 @@ impl ModuleLoader for EmbeddedModuleLoader {
))
}
}
Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{MODULE_NOT_FOUND}: {}", original_specifier),
))),
Err(err) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{:?}", err),
))),
Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(
JsErrorBox::type_error(format!(
"{MODULE_NOT_FOUND}: {}",
original_specifier
))
.into(),
)),
Err(err) => deno_core::ModuleLoadResponse::Sync(Err(
JsErrorBox::type_error(format!("{:?}", err)).into(),
)),
}
}
@ -553,7 +584,7 @@ impl NodeRequireLoader for EmbeddedModuleLoader {
&self,
permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
path: &'a std::path::Path,
) -> Result<Cow<'a, std::path::Path>, AnyError> {
) -> Result<Cow<'a, std::path::Path>, JsErrorBox> {
if self.shared.modules.has_file(path) {
// allow reading if the file is in the snapshot
return Ok(Cow::Borrowed(path));
@ -563,17 +594,23 @@ impl NodeRequireLoader for EmbeddedModuleLoader {
.shared
.npm_registry_permission_checker
.ensure_read_permission(permissions, path)
.map_err(JsErrorBox::from_err)
}
fn load_text_file_lossy(
&self,
path: &std::path::Path,
) -> Result<Cow<'static, str>, AnyError> {
let file_entry = self.shared.vfs.file_entry(path)?;
) -> Result<Cow<'static, str>, JsErrorBox> {
let file_entry = self
.shared
.vfs
.file_entry(path)
.map_err(JsErrorBox::from_err)?;
let file_bytes = self
.shared
.vfs
.read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph)?;
.read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph)
.map_err(JsErrorBox::from_err)?;
Ok(from_utf8_lossy_cow(file_bytes))
}
@ -626,10 +663,10 @@ struct StandaloneRootCertStoreProvider {
}
impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> {
self.cell.get_or_try_init(|| {
get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone())
.map_err(|err| err.into())
.map_err(JsErrorBox::from_err)
})
}
}
@ -653,18 +690,12 @@ pub async fn run(
ca_data: metadata.ca_data.map(CaData::Bytes),
cell: Default::default(),
});
let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
let http_client_provider = Arc::new(HttpClientProvider::new(
Some(root_cert_store_provider.clone()),
metadata.unsafely_ignore_certificate_errors.clone(),
));
// use a dummy npm registry url
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
let root_dir_url =
Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap());
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
let cache_setting = CacheSetting::Only;
let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new(sys.clone()));
let npm_registry_permission_checker = {
let mode = match &metadata.node_modules {
@ -704,34 +735,24 @@ pub async fn run(
.map(|node_modules_dir| root_path.join(node_modules_dir));
let in_npm_pkg_checker =
create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
ManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: npm_cache_dir.root_dir_url(),
maybe_node_modules_path: maybe_node_modules_path.as_deref(),
},
));
let npm_resolution =
Arc::new(NpmResolutionCell::new(NpmResolutionSnapshot::new(snapshot)));
let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliManagedNpmResolverCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
snapshot,
)),
maybe_lockfile: None,
http_client_provider: http_client_provider.clone(),
npm_resolution,
npm_cache_dir,
npm_install_deps_provider: Arc::new(
// this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(),
),
sys: sys.clone(),
text_only_progress_bar: progress_bar,
cache_setting,
maybe_node_modules_path,
npm_system_info: Default::default(),
npmrc,
lifecycle_scripts: Default::default(),
},
))
.await?;
));
(in_npm_pkg_checker, npm_resolver)
}
Some(binary::NodeModules::Byonm {
@ -747,8 +768,7 @@ pub async fn run(
pkg_json_resolver: pkg_json_resolver.clone(),
root_node_modules_dir,
}),
)
.await?;
);
(in_npm_pkg_checker, npm_resolver)
}
None => {
@ -762,32 +782,23 @@ pub async fn run(
));
let in_npm_pkg_checker =
create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
ManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: npm_cache_dir.root_dir_url(),
maybe_node_modules_path: None,
},
));
let npm_resolution = Arc::new(NpmResolutionCell::default());
let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliManagedNpmResolverCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
http_client_provider: http_client_provider.clone(),
npm_install_deps_provider: Arc::new(
// this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(),
),
npm_resolution,
sys: sys.clone(),
cache_setting,
text_only_progress_bar: progress_bar,
npm_cache_dir,
maybe_lockfile: None,
maybe_node_modules_path: None,
npm_system_info: Default::default(),
npmrc: create_default_npmrc(),
lifecycle_scripts: Default::default(),
},
))
.await?;
));
(in_npm_pkg_checker, npm_resolver)
}
};
@ -799,6 +810,7 @@ pub async fn run(
npm_resolver.clone().into_npm_pkg_folder_resolver(),
pkg_json_resolver.clone(),
sys.clone(),
node_resolver::ConditionsFromResolutionMode::default(),
));
let cjs_tracker = Arc::new(CjsTracker::new(
in_npm_pkg_checker.clone(),
@ -815,11 +827,10 @@ pub async fn run(
let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
let npm_req_resolver =
Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
sys: sys.clone(),
in_npm_pkg_checker: in_npm_pkg_checker.clone(),
node_resolver: node_resolver.clone(),
npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
npm_resolver: npm_resolver.clone().into_byonm_or_managed(),
}));
let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache,
@ -961,6 +972,7 @@ pub async fn run(
None,
Box::new(module_loader_factory),
node_resolver,
None,
npm_resolver,
pkg_json_resolver,
root_cert_store_provider,

View file

@ -27,6 +27,7 @@ use indexmap::IndexMap;
use super::binary::Metadata;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileSystemCaseSensitivity;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VirtualDirectoryEntries;
use crate::standalone::virtual_fs::VirtualDirectory;

View file

@ -2,6 +2,7 @@
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs::File;
@ -74,6 +75,7 @@ impl WindowsSystemRootablePath {
#[derive(Debug)]
pub struct BuiltVfs {
pub root_path: WindowsSystemRootablePath,
pub case_sensitivity: FileSystemCaseSensitivity,
pub entries: VirtualDirectoryEntries,
pub files: Vec<Vec<u8>>,
}
@ -95,6 +97,7 @@ pub struct VfsBuilder {
file_offsets: HashMap<String, u64>,
/// The minimum root directory that should be included in the VFS.
min_root_dir: Option<WindowsSystemRootablePath>,
case_sensitivity: FileSystemCaseSensitivity,
}
impl VfsBuilder {
@ -108,9 +111,23 @@ impl VfsBuilder {
current_offset: 0,
file_offsets: Default::default(),
min_root_dir: Default::default(),
// This is not exactly correct because file systems on these OSes
// may be case-sensitive or not based on the directory, but this
// is a good enough approximation and limitation. In the future,
// we may want to store this information per directory instead
// depending on the feedback we get.
case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") {
FileSystemCaseSensitivity::Insensitive
} else {
FileSystemCaseSensitivity::Sensitive
},
}
}
pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity {
self.case_sensitivity
}
/// Add a directory that might be the minimum root directory
/// of the VFS.
///
@ -215,7 +232,10 @@ impl VfsBuilder {
continue;
}
let name = component.as_os_str().to_string_lossy();
let index = match current_dir.entries.binary_search(&name) {
let index = match current_dir
.entries
.binary_search(&name, self.case_sensitivity)
{
Ok(index) => index,
Err(insert_index) => {
current_dir.entries.0.insert(
@ -252,7 +272,9 @@ impl VfsBuilder {
continue;
}
let name = component.as_os_str().to_string_lossy();
let entry = current_dir.entries.get_mut_by_name(&name)?;
let entry = current_dir
.entries
.get_mut_by_name(&name, self.case_sensitivity)?;
match entry {
VfsEntry::Dir(dir) => {
current_dir = dir;
@ -304,6 +326,7 @@ impl VfsBuilder {
) -> Result<(), AnyError> {
log::debug!("Adding file '{}'", path.display());
let checksum = util::checksum::gen(&[&data]);
let case_sensitivity = self.case_sensitivity;
let offset = if let Some(offset) = self.file_offsets.get(&checksum) {
// duplicate file, reuse an old offset
*offset
@ -318,7 +341,7 @@ impl VfsBuilder {
offset,
len: data.len() as u64,
};
match dir.entries.binary_search(&name) {
match dir.entries.binary_search(&name, case_sensitivity) {
Ok(index) => {
let entry = &mut dir.entries.0[index];
match entry {
@ -379,10 +402,11 @@ impl VfsBuilder {
std::fs::read_link(path)
.with_context(|| format!("Reading symlink '{}'", path.display()))?,
);
let case_sensitivity = self.case_sensitivity;
let target = normalize_path(path.parent().unwrap().join(&target));
let dir = self.add_dir_raw(path.parent().unwrap());
let name = path.file_name().unwrap().to_string_lossy();
match dir.entries.binary_search(&name) {
match dir.entries.binary_search(&name, case_sensitivity) {
Ok(_) => {} // previously inserted
Err(insert_index) => {
dir.entries.0.insert(
@ -478,6 +502,7 @@ impl VfsBuilder {
}
BuiltVfs {
root_path: current_path,
case_sensitivity: self.case_sensitivity,
entries: current_dir.entries,
files: self.files,
}
@ -906,6 +931,14 @@ impl VfsEntry {
}
}
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum FileSystemCaseSensitivity {
#[serde(rename = "s")]
Sensitive,
#[serde(rename = "i")]
Insensitive,
}
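
The single-letter serde renames above keep the case-sensitivity tag compact when it is embedded in the binary's Metadata. A small round-trip sketch (serde_json is used here only to make the encoding visible; it is an assumption for illustration, not necessarily the format the binary uses):

use serde::{Deserialize, Serialize};

#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
pub enum FileSystemCaseSensitivity {
    #[serde(rename = "s")]
    Sensitive,
    #[serde(rename = "i")]
    Insensitive,
}

fn main() {
    // The rename attributes shrink the tag to a single character.
    let json =
        serde_json::to_string(&FileSystemCaseSensitivity::Insensitive).unwrap();
    assert_eq!(json, "\"i\"");

    let back: FileSystemCaseSensitivity = serde_json::from_str(&json).unwrap();
    assert_eq!(back, FileSystemCaseSensitivity::Insensitive);
}
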
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct VirtualDirectoryEntries(Vec<VfsEntry>);
@ -928,23 +961,54 @@ impl VirtualDirectoryEntries {
self.0.len()
}
pub fn get_by_name(&self, name: &str) -> Option<&VfsEntry> {
self.binary_search(name).ok().map(|index| &self.0[index])
pub fn get_by_name(
&self,
name: &str,
case_sensitivity: FileSystemCaseSensitivity,
) -> Option<&VfsEntry> {
self
.binary_search(name, case_sensitivity)
.ok()
.map(|index| &self.0[index])
}
pub fn get_mut_by_name(&mut self, name: &str) -> Option<&mut VfsEntry> {
pub fn get_mut_by_name(
&mut self,
name: &str,
case_sensitivity: FileSystemCaseSensitivity,
) -> Option<&mut VfsEntry> {
self
.binary_search(name)
.binary_search(name, case_sensitivity)
.ok()
.map(|index| &mut self.0[index])
}
pub fn binary_search(&self, name: &str) -> Result<usize, usize> {
self.0.binary_search_by(|e| e.name().cmp(name))
pub fn binary_search(
&self,
name: &str,
case_sensitivity: FileSystemCaseSensitivity,
) -> Result<usize, usize> {
match case_sensitivity {
FileSystemCaseSensitivity::Sensitive => {
self.0.binary_search_by(|e| e.name().cmp(name))
}
FileSystemCaseSensitivity::Insensitive => self.0.binary_search_by(|e| {
e.name()
.chars()
.zip(name.chars())
.map(|(a, b)| a.to_ascii_lowercase().cmp(&b.to_ascii_lowercase()))
.find(|&ord| ord != Ordering::Equal)
.unwrap_or_else(|| e.name().len().cmp(&name.len()))
}),
}
}
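
The Insensitive branch above orders entries by comparing ASCII-lower-cased characters, falling back to a length comparison when one name is a prefix of the other. A std-only sketch of the same comparator applied to a pre-sorted list (illustrative names, not the VFS types):

use std::cmp::Ordering;

// Compare two names ASCII-case-insensitively, the same way the
// Insensitive branch above orders directory entries.
fn cmp_ignore_ascii_case(a: &str, b: &str) -> Ordering {
    a.chars()
        .zip(b.chars())
        .map(|(x, y)| x.to_ascii_lowercase().cmp(&y.to_ascii_lowercase()))
        .find(|&ord| ord != Ordering::Equal)
        .unwrap_or_else(|| a.len().cmp(&b.len()))
}

fn main() {
    // Entries must already be sorted with the same comparator for
    // binary_search_by to give meaningful results.
    let entries = vec!["Cargo.toml", "README.md", "src"];
    let idx = entries
        .binary_search_by(|e| cmp_ignore_ascii_case(e, "readme.MD"))
        .expect("entry found regardless of case");
    assert_eq!(idx, 1);
}
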
pub fn insert(&mut self, entry: VfsEntry) {
match self.binary_search(entry.name()) {
pub fn insert(
&mut self,
entry: VfsEntry,
case_sensitivity: FileSystemCaseSensitivity,
) {
match self.binary_search(entry.name(), case_sensitivity) {
Ok(index) => {
self.0[index] = entry;
}
@ -1039,19 +1103,21 @@ impl VfsRoot {
fn find_entry<'a>(
&'a self,
path: &Path,
case_sensitivity: FileSystemCaseSensitivity,
) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> {
self.find_entry_inner(path, &mut HashSet::new())
self.find_entry_inner(path, &mut HashSet::new(), case_sensitivity)
}
fn find_entry_inner<'a>(
&'a self,
path: &Path,
seen: &mut HashSet<PathBuf>,
case_sensitivity: FileSystemCaseSensitivity,
) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> {
let mut path = Cow::Borrowed(path);
loop {
let (resolved_path, entry) =
self.find_entry_no_follow_inner(&path, seen)?;
self.find_entry_no_follow_inner(&path, seen, case_sensitivity)?;
match entry {
VfsEntryRef::Symlink(symlink) => {
if !seen.insert(path.to_path_buf()) {
@ -1072,14 +1138,16 @@ impl VfsRoot {
fn find_entry_no_follow(
&self,
path: &Path,
case_sensitivity: FileSystemCaseSensitivity,
) -> std::io::Result<(PathBuf, VfsEntryRef)> {
self.find_entry_no_follow_inner(path, &mut HashSet::new())
self.find_entry_no_follow_inner(path, &mut HashSet::new(), case_sensitivity)
}
fn find_entry_no_follow_inner<'a>(
&'a self,
path: &Path,
seen: &mut HashSet<PathBuf>,
case_sensitivity: FileSystemCaseSensitivity,
) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> {
let relative_path = match path.strip_prefix(&self.root_path) {
Ok(p) => p,
@ -1101,7 +1169,8 @@ impl VfsRoot {
}
VfsEntryRef::Symlink(symlink) => {
let dest = symlink.resolve_dest_from_root(&self.root_path);
let (resolved_path, entry) = self.find_entry_inner(&dest, seen)?;
let (resolved_path, entry) =
self.find_entry_inner(&dest, seen, case_sensitivity)?;
final_path = resolved_path; // overwrite with the new resolved path
match entry {
VfsEntryRef::Dir(dir) => {
@ -1126,7 +1195,7 @@ impl VfsRoot {
let component = component.to_string_lossy();
current_entry = current_dir
.entries
.get_by_name(&component)
.get_by_name(&component, case_sensitivity)
.ok_or_else(|| {
std::io::Error::new(std::io::ErrorKind::NotFound, "path not found")
})?
@ -1392,13 +1461,19 @@ impl FileBackedVfsMetadata {
pub struct FileBackedVfs {
vfs_data: Cow<'static, [u8]>,
fs_root: VfsRoot,
case_sensitivity: FileSystemCaseSensitivity,
}
impl FileBackedVfs {
pub fn new(data: Cow<'static, [u8]>, fs_root: VfsRoot) -> Self {
pub fn new(
data: Cow<'static, [u8]>,
fs_root: VfsRoot,
case_sensitivity: FileSystemCaseSensitivity,
) -> Self {
Self {
vfs_data: data,
fs_root,
case_sensitivity,
}
}
@ -1451,7 +1526,9 @@ impl FileBackedVfs {
}
pub fn read_link(&self, path: &Path) -> std::io::Result<PathBuf> {
let (_, entry) = self.fs_root.find_entry_no_follow(path)?;
let (_, entry) = self
.fs_root
.find_entry_no_follow(path, self.case_sensitivity)?;
match entry {
VfsEntryRef::Symlink(symlink) => {
Ok(symlink.resolve_dest_from_root(&self.fs_root.root_path))
@ -1464,17 +1541,19 @@ impl FileBackedVfs {
}
pub fn lstat(&self, path: &Path) -> std::io::Result<FileBackedVfsMetadata> {
let (_, entry) = self.fs_root.find_entry_no_follow(path)?;
let (_, entry) = self
.fs_root
.find_entry_no_follow(path, self.case_sensitivity)?;
Ok(entry.as_metadata())
}
pub fn stat(&self, path: &Path) -> std::io::Result<FileBackedVfsMetadata> {
let (_, entry) = self.fs_root.find_entry(path)?;
let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?;
Ok(entry.as_metadata())
}
pub fn canonicalize(&self, path: &Path) -> std::io::Result<PathBuf> {
let (path, _) = self.fs_root.find_entry(path)?;
let (path, _) = self.fs_root.find_entry(path, self.case_sensitivity)?;
Ok(path)
}
@ -1536,7 +1615,7 @@ impl FileBackedVfs {
}
pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> {
let (_, entry) = self.fs_root.find_entry(path)?;
let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?;
match entry {
VfsEntryRef::Dir(dir) => Ok(dir),
VfsEntryRef::Symlink(_) => unreachable!(),
@ -1548,7 +1627,7 @@ impl FileBackedVfs {
}
pub fn file_entry(&self, path: &Path) -> std::io::Result<&VirtualFile> {
let (_, entry) = self.fs_root.find_entry(path)?;
let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?;
match entry {
VfsEntryRef::Dir(_) => Err(std::io::Error::new(
std::io::ErrorKind::Other,
@ -1754,6 +1833,7 @@ mod test {
root_path: dest_path.to_path_buf(),
start_file_offset: 0,
},
FileSystemCaseSensitivity::Sensitive,
),
)
}

View file

@ -7,8 +7,9 @@ use std::sync::Arc;
use std::time::Duration;
use deno_config::glob::WalkEntry;
use deno_core::error::generic_error;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::error::JsError;
use deno_core::futures::future;
use deno_core::futures::stream;
@ -20,6 +21,7 @@ use deno_core::unsync::spawn_blocking;
use deno_core::v8;
use deno_core::ModuleSpecifier;
use deno_core::PollEventLoopOptions;
use deno_error::JsErrorBox;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::tokio_util::create_and_run_current_thread;
@ -164,17 +166,14 @@ async fn bench_specifier(
.await
{
Ok(()) => Ok(()),
Err(error) => {
if error.is::<JsError>() {
sender.send(BenchEvent::UncaughtError(
specifier.to_string(),
Box::new(error.downcast::<JsError>().unwrap()),
))?;
Ok(())
} else {
Err(error)
}
Err(CoreError::Js(error)) => {
sender.send(BenchEvent::UncaughtError(
specifier.to_string(),
Box::new(error),
))?;
Ok(())
}
Err(e) => Err(e.into()),
}
}
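
bench_specifier now matches on CoreError variants directly instead of downcasting an AnyError: a JS error is forwarded to the reporter channel and swallowed, anything else propagates. A reduced sketch of that shape with stand-in types (these are hypothetical, not deno_core's definitions):

use std::sync::mpsc;

#[derive(Debug)]
enum CoreErrorSketch {
    Js(String),
    Io(std::io::Error),
}

#[derive(Debug)]
enum Event {
    UncaughtError(String),
}

// Forward uncaught JS errors to the reporter channel; let every other
// error bubble up to the caller.
fn forward_js_errors(
    result: Result<(), CoreErrorSketch>,
    sender: &mpsc::Sender<Event>,
) -> Result<(), CoreErrorSketch> {
    match result {
        Ok(()) => Ok(()),
        Err(CoreErrorSketch::Js(msg)) => {
            sender.send(Event::UncaughtError(msg)).expect("reporter alive");
            Ok(())
        }
        Err(other) => Err(other),
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    forward_js_errors(Err(CoreErrorSketch::Js("boom".into())), &tx).unwrap();
    assert!(matches!(rx.recv().unwrap(), Event::UncaughtError(_)));

    let io_err = CoreErrorSketch::Io(std::io::Error::other("disk"));
    assert!(forward_js_errors(Err(io_err), &tx).is_err());
}
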
@ -185,7 +184,7 @@ async fn bench_specifier_inner(
specifier: ModuleSpecifier,
sender: &UnboundedSender<BenchEvent>,
filter: TestFilter,
) -> Result<(), AnyError> {
) -> Result<(), CoreError> {
let mut worker = worker_factory
.create_custom_worker(
WorkerExecutionMode::Bench,
@ -232,14 +231,18 @@ async fn bench_specifier_inner(
.partial_cmp(&groups.get_index_of(&d2.group).unwrap())
.unwrap()
});
sender.send(BenchEvent::Plan(BenchPlan {
origin: specifier.to_string(),
total: benchmarks.len(),
used_only,
names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(),
}))?;
sender
.send(BenchEvent::Plan(BenchPlan {
origin: specifier.to_string(),
total: benchmarks.len(),
used_only,
names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(),
}))
.map_err(JsErrorBox::from_err)?;
for (desc, function) in benchmarks {
sender.send(BenchEvent::Wait(desc.id))?;
sender
.send(BenchEvent::Wait(desc.id))
.map_err(JsErrorBox::from_err)?;
let call = worker.js_runtime.call(&function);
let result = worker
.js_runtime
@ -247,8 +250,11 @@ async fn bench_specifier_inner(
.await?;
let scope = &mut worker.js_runtime.handle_scope();
let result = v8::Local::new(scope, result);
let result = serde_v8::from_v8::<BenchResult>(scope, result)?;
sender.send(BenchEvent::Result(desc.id, result))?;
let result = serde_v8::from_v8::<BenchResult>(scope, result)
.map_err(JsErrorBox::from_err)?;
sender
.send(BenchEvent::Result(desc.id, result))
.map_err(JsErrorBox::from_err)?;
}
// Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
@ -380,13 +386,13 @@ async fn bench_specifiers(
reporter.report_end(&report);
if used_only {
return Err(generic_error(
return Err(anyhow!(
"Bench failed because the \"only\" option was used",
));
}
if report.failed > 0 {
return Err(generic_error("Bench failed"));
return Err(anyhow!("Bench failed"));
}
Ok(())
@ -466,7 +472,7 @@ pub async fn run_benchmarks(
)
.await?;
if !workspace_files_factory.found_specifiers() {
return Err(generic_error("No test modules found"));
return Err(anyhow!("No test modules found"));
}
workspace_files_factory.check().await?;

View file

@ -2,14 +2,14 @@
use std::collections::HashSet;
use std::collections::VecDeque;
use std::error::Error;
use std::fmt;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_config::deno_json;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_error::JsErrorBox;
use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
@ -37,6 +37,7 @@ use crate::graph_util::maybe_additional_sloppy_imports_message;
use crate::graph_util::BuildFastCheckGraphOptions;
use crate::graph_util::ModuleGraphBuilder;
use crate::node::CliNodeResolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::CliNpmResolver;
use crate::sys::CliSys;
use crate::tsc;
@ -106,46 +107,47 @@ pub struct CheckOptions {
pub type_check_mode: TypeCheckMode,
}
#[derive(Debug)]
pub enum MaybeDiagnostics {
Diagnostics(Diagnostics),
Other(AnyError),
}
impl fmt::Display for MaybeDiagnostics {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
MaybeDiagnostics::Diagnostics(d) => d.fmt(f),
MaybeDiagnostics::Other(err) => err.fmt(f),
}
}
}
impl Error for MaybeDiagnostics {}
impl From<AnyError> for MaybeDiagnostics {
fn from(err: AnyError) -> Self {
MaybeDiagnostics::Other(err)
}
}
pub struct TypeChecker {
caches: Arc<Caches>,
cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>,
npm_installer: Option<Arc<NpmInstaller>>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
sys: CliSys,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CheckError {
#[class(inherit)]
#[error(transparent)]
Diagnostics(#[from] Diagnostics),
#[class(inherit)]
#[error(transparent)]
ConfigFile(#[from] deno_json::ConfigFileError),
#[class(inherit)]
#[error(transparent)]
ToMaybeJsxImportSourceConfig(
#[from] deno_json::ToMaybeJsxImportSourceConfigError,
),
#[class(inherit)]
#[error(transparent)]
TscExec(#[from] tsc::ExecError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
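
CheckError above gathers the type checker's failure sources into one thiserror enum; #[error(transparent)] forwards each inner error's message unchanged, while the #[class(...)] attributes come from deno_error and drive the JS-side error class. A reduced, compilable sketch of the same shape without the deno_error parts (error types chosen only for illustration):

use thiserror::Error;

// Reduced analogue of CheckError: one enum, each variant forwarding
// Display and source() to the wrapped error unchanged.
#[derive(Debug, Error)]
pub enum CheckSketchError {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    ParseInt(#[from] std::num::ParseIntError),
}

fn read_port(path: &str) -> Result<u16, CheckSketchError> {
    // `?` uses the derived From impls to wrap either concrete error.
    let raw = std::fs::read_to_string(path)?;
    Ok(raw.trim().parse::<u16>()?)
}

fn main() {
    match read_port("port.txt") {
        Ok(port) => println!("port: {port}"),
        Err(err) => eprintln!("check failed: {err}"),
    }
}
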
impl TypeChecker {
#[allow(clippy::too_many_arguments)]
pub fn new(
caches: Arc<Caches>,
cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<CliNodeResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: Arc<dyn CliNpmResolver>,
sys: CliSys,
) -> Self {
@ -155,6 +157,7 @@ impl TypeChecker {
cli_options,
module_graph_builder,
node_resolver,
npm_installer,
npm_resolver,
sys,
}
@ -168,14 +171,14 @@ impl TypeChecker {
&self,
graph: ModuleGraph,
options: CheckOptions,
) -> Result<Arc<ModuleGraph>, MaybeDiagnostics> {
) -> Result<Arc<ModuleGraph>, CheckError> {
let (graph, mut diagnostics) =
self.check_diagnostics(graph, options).await?;
diagnostics.emit_warnings();
if diagnostics.is_empty() {
Ok(graph)
} else {
Err(MaybeDiagnostics::Diagnostics(diagnostics))
Err(diagnostics.into())
}
}
@ -187,7 +190,7 @@ impl TypeChecker {
&self,
mut graph: ModuleGraph,
options: CheckOptions,
) -> Result<(Arc<ModuleGraph>, Diagnostics), AnyError> {
) -> Result<(Arc<ModuleGraph>, Diagnostics), CheckError> {
if !options.type_check_mode.is_true() || graph.roots.is_empty() {
return Ok((graph.into(), Default::default()));
}
@ -195,9 +198,9 @@ impl TypeChecker {
// node built-in specifiers use the @types/node package to determine
// types, so inject that now (the caller should do this after the lockfile
// has been written)
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
if let Some(npm_installer) = &self.npm_installer {
if graph.has_node_specifier {
npm_resolver.inject_synthetic_types_node_package().await?;
npm_installer.inject_synthetic_types_node_package().await?;
}
}

View file

@ -8,9 +8,9 @@ use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
@ -330,7 +330,7 @@ async fn resolve_compile_executable_output_path(
.map(PathBuf::from)
}
output_path.ok_or_else(|| generic_error(
output_path.ok_or_else(|| anyhow!(
"An executable name was not provided. One could not be inferred from the URL. Aborting.",
)).map(|output_path| {
get_os_specific_filepath(output_path, &compile_flags.target)

View file

@ -17,7 +17,6 @@ use deno_config::glob::PathOrPattern;
use deno_config::glob::PathOrPatternSet;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::sourcemap::SourceMap;
@ -430,7 +429,7 @@ fn collect_coverages(
.ignore_git_folder()
.ignore_node_modules()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&CliSys::default(), file_patterns)?;
.collect_file_patterns(&CliSys::default(), file_patterns);
let coverage_patterns = FilePatterns {
base: initial_cwd.to_path_buf(),
@ -505,7 +504,7 @@ pub fn cover_files(
coverage_flags: CoverageFlags,
) -> Result<(), AnyError> {
if coverage_flags.files.include.is_empty() {
return Err(generic_error("No matching coverage profiles found"));
return Err(anyhow!("No matching coverage profiles found"));
}
let factory = CliFactory::from_flags(flags);
@ -527,7 +526,7 @@ pub fn cover_files(
cli_options.initial_cwd(),
)?;
if script_coverages.is_empty() {
return Err(generic_error("No coverage files found"));
return Err(anyhow!("No coverage files found"));
}
let script_coverages = filter_coverages(
script_coverages,
@ -536,7 +535,7 @@ pub fn cover_files(
in_npm_pkg_checker.as_ref(),
);
if script_coverages.is_empty() {
return Err(generic_error("No covered files included in the report"));
return Err(anyhow!("No covered files included in the report"));
}
let proc_coverages: Vec<_> = script_coverages

View file

@ -62,10 +62,11 @@ async fn generate_doc_nodes_for_builtin_types(
)],
Vec::new(),
);
let roots = vec![source_file_specifier.clone()];
let mut graph = deno_graph::ModuleGraph::new(GraphKind::TypesOnly);
graph
.build(
vec![source_file_specifier.clone()],
roots.clone(),
&loader,
deno_graph::BuildOptions {
imports: Vec::new(),
@ -85,14 +86,13 @@ async fn generate_doc_nodes_for_builtin_types(
let doc_parser = doc::DocParser::new(
&graph,
parser,
&roots,
doc::DocParserOptions {
diagnostics: false,
private: doc_flags.private,
},
)?;
let nodes = doc_parser.parse_module(&source_file_specifier)?.definitions;
Ok(IndexMap::from([(source_file_specifier, nodes)]))
Ok(doc_parser.parse()?)
}
pub async fn doc(
@ -158,19 +158,13 @@ pub async fn doc(
let doc_parser = doc::DocParser::new(
&graph,
&capturing_parser,
&module_specifiers,
doc::DocParserOptions {
private: doc_flags.private,
diagnostics: doc_flags.lint,
},
)?;
let mut doc_nodes_by_url =
IndexMap::with_capacity(module_specifiers.len());
for module_specifier in module_specifiers {
let nodes = doc_parser.parse_with_reexports(&module_specifier)?;
doc_nodes_by_url.insert(module_specifier, nodes);
}
let doc_nodes_by_url = doc_parser.parse()?;
if doc_flags.lint {
let diagnostics = doc_parser.take_diagnostics();
@ -191,29 +185,9 @@ pub async fn doc(
.await?;
let (_, deno_ns) = deno_ns.into_iter().next().unwrap();
let short_path = Rc::new(ShortPath::new(
ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(),
None,
None,
None,
));
deno_doc::html::compute_namespaced_symbols(
&deno_ns
.into_iter()
.map(|node| deno_doc::html::DocNodeWithContext {
origin: short_path.clone(),
ns_qualifiers: Rc::new([]),
kind_with_drilldown:
deno_doc::html::DocNodeKindWithDrilldown::Other(node.kind()),
inner: Rc::new(node),
drilldown_name: None,
parent: None,
})
.collect::<Vec<_>>(),
)
Some(deno_ns)
} else {
Default::default()
None
};
let mut main_entrypoint = None;
@ -393,7 +367,7 @@ impl UsageComposer for DocComposer {
fn generate_docs_directory(
doc_nodes_by_url: IndexMap<ModuleSpecifier, Vec<doc::DocNode>>,
html_options: &DocHtmlFlag,
deno_ns: std::collections::HashMap<Vec<String>, Option<Rc<ShortPath>>>,
built_in_types: Option<Vec<doc::DocNode>>,
rewrite_map: Option<IndexMap<ModuleSpecifier, String>>,
main_entrypoint: Option<ModuleSpecifier>,
) -> Result<(), AnyError> {
@ -426,12 +400,12 @@ fn generate_docs_directory(
None
};
let options = deno_doc::html::GenerateOptions {
let mut options = deno_doc::html::GenerateOptions {
package_name: html_options.name.clone(),
main_entrypoint,
rewrite_map,
href_resolver: Rc::new(DocResolver {
deno_ns,
deno_ns: Default::default(),
strip_trailing_html: html_options.strip_trailing_html,
}),
usage_composer: Rc::new(DocComposer),
@ -451,7 +425,58 @@ fn generate_docs_directory(
})),
};
let mut files = deno_doc::html::generate(options, doc_nodes_by_url)
if let Some(built_in_types) = built_in_types {
let ctx = deno_doc::html::GenerateCtx::create_basic(
deno_doc::html::GenerateOptions {
package_name: None,
main_entrypoint: Some(
ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(),
),
href_resolver: Rc::new(DocResolver {
deno_ns: Default::default(),
strip_trailing_html: false,
}),
usage_composer: Rc::new(DocComposer),
rewrite_map: Default::default(),
category_docs: Default::default(),
disable_search: Default::default(),
symbol_redirect_map: Default::default(),
default_symbol_map: Default::default(),
markdown_renderer: deno_doc::html::comrak::create_renderer(
None, None, None,
),
markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
head_inject: None,
},
IndexMap::from([(
ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(),
built_in_types,
)]),
)?;
let deno_ns = deno_doc::html::compute_namespaced_symbols(
&ctx,
Box::new(
ctx
.doc_nodes
.values()
.next()
.unwrap()
.iter()
.map(std::borrow::Cow::Borrowed),
),
);
options.href_resolver = Rc::new(DocResolver {
deno_ns,
strip_trailing_html: html_options.strip_trailing_html,
});
}
let ctx =
deno_doc::html::GenerateCtx::create_basic(options, doc_nodes_by_url)?;
let mut files = deno_doc::html::generate(ctx)
.context("Failed to generate HTML documentation")?;
files.insert("prism.js".to_string(), PRISM_JS.to_string());

View file

@ -26,7 +26,6 @@ use deno_config::glob::FilePatterns;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
@ -167,7 +166,7 @@ fn resolve_paths_with_options_batches(
Vec::with_capacity(members_fmt_options.len());
for (_ctx, member_fmt_options) in members_fmt_options {
let files =
collect_fmt_files(cli_options, member_fmt_options.files.clone())?;
collect_fmt_files(cli_options, member_fmt_options.files.clone());
if !files.is_empty() {
paths_with_options_batches.push(PathsWithOptions {
base: member_fmt_options.files.base.clone(),
@ -177,7 +176,7 @@ fn resolve_paths_with_options_batches(
}
}
if paths_with_options_batches.is_empty() {
return Err(generic_error("No target files found."));
return Err(anyhow!("No target files found."));
}
Ok(paths_with_options_batches)
}
@ -224,7 +223,7 @@ async fn format_files(
fn collect_fmt_files(
cli_options: &CliOptions,
files: FilePatterns,
) -> Result<Vec<PathBuf>, AnyError> {
) -> Vec<PathBuf> {
FileCollector::new(|e| {
is_supported_ext_fmt(e.path)
|| (e.path.extension().is_none() && cli_options.ext_flag().is_some())
@ -484,7 +483,7 @@ pub fn format_html(
}
if let Some(error_msg) = inner(&error, file_path) {
AnyError::from(generic_error(error_msg))
AnyError::msg(error_msg)
} else {
AnyError::from(error)
}
@ -732,9 +731,9 @@ impl Formatter for CheckFormatter {
Ok(())
} else {
let not_formatted_files_str = files_str(not_formatted_files_count);
Err(generic_error(format!(
Err(anyhow!(
"Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
)))
))
}
}
}

View file

@ -11,6 +11,7 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
use deno_core::url;
use deno_error::JsErrorClass;
use deno_graph::Dependency;
use deno_graph::GraphKind;
use deno_graph::Module;
@ -385,8 +386,11 @@ impl NpmInfo {
npm_snapshot: &'a NpmResolutionSnapshot,
) {
self.packages.insert(package.id.clone(), package.clone());
if let Ok(size) = npm_resolver.package_size(&package.id) {
self.package_sizes.insert(package.id.clone(), size);
if let Ok(folder) = npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)
{
if let Ok(size) = crate::util::fs::dir_size(&folder) {
self.package_sizes.insert(package.id.clone(), size);
}
}
for id in package.dependencies.values() {
if !self.packages.contains_key(id) {
@ -664,9 +668,10 @@ impl<'a> GraphDisplayContext<'a> {
HttpsChecksumIntegrity(_) => "(checksum integrity error)",
Decode(_) => "(loading decode error)",
Loader(err) => {
match deno_runtime::errors::get_error_class_name(err) {
Some("NotCapable") => "(not capable, requires --allow-import)",
_ => "(loading error)",
if err.get_class() == "NotCapable" {
"(not capable, requires --allow-import)"
} else {
"(loading error)"
}
}
Jsr(_) => "(loading error)",
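
Earlier in this file, the package size now comes from resolving the package folder and walking it with the CLI's dir_size helper instead of asking the resolver directly. An illustrative recursive directory walk in plain std (a sketch, not the helper's actual implementation; symlinked entries are simply skipped):

use std::io;
use std::path::Path;

// Sum the byte size of every regular file under `dir`, recursively.
fn dir_size(dir: &Path) -> io::Result<u64> {
    let mut total = 0;
    for entry in std::fs::read_dir(dir)? {
        let entry = entry?;
        // DirEntry::metadata does not follow symlinks, so links are skipped.
        let metadata = entry.metadata()?;
        if metadata.is_dir() {
            total += dir_size(&entry.path())?;
        } else if metadata.is_file() {
            total += metadata.len();
        }
    }
    Ok(total)
}

fn main() -> io::Result<()> {
    println!("src/ is {} bytes", dir_size(Path::new("src"))?);
    Ok(())
}
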

View file

@ -12,9 +12,9 @@ use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
@ -54,9 +54,7 @@ fn validate_name(exec_name: &str) -> Result<(), AnyError> {
if EXEC_NAME_RE.is_match(exec_name) {
Ok(())
} else {
Err(generic_error(format!(
"Invalid executable name: {exec_name}"
)))
Err(anyhow!("Invalid executable name: {exec_name}"))
}
}
@ -223,7 +221,7 @@ pub async fn uninstall(
// ensure directory exists
if let Ok(metadata) = fs::metadata(&installation_dir) {
if !metadata.is_dir() {
return Err(generic_error("Installation path is not a directory"));
return Err(anyhow!("Installation path is not a directory"));
}
}
@ -247,10 +245,10 @@ pub async fn uninstall(
}
if !removed {
return Err(generic_error(format!(
return Err(anyhow!(
"No installation found for {}",
uninstall_flags.name
)));
));
}
// There might be some extra files to delete
@ -302,8 +300,8 @@ async fn install_local(
InstallFlagsLocal::TopLevel => {
let factory = CliFactory::from_flags(flags);
// surface any errors in the package.json
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
npm_resolver.ensure_no_pkg_json_dep_errors()?;
if let Some(npm_installer) = factory.npm_installer_if_managed()? {
npm_installer.ensure_no_pkg_json_dep_errors()?;
}
crate::tools::registry::cache_top_level_deps(&factory, None).await?;
@ -423,14 +421,14 @@ async fn create_install_shim(
// ensure directory exists
if let Ok(metadata) = fs::metadata(&shim_data.installation_dir) {
if !metadata.is_dir() {
return Err(generic_error("Installation path is not a directory"));
return Err(anyhow!("Installation path is not a directory"));
}
} else {
fs::create_dir_all(&shim_data.installation_dir)?;
};
if shim_data.file_path.exists() && !install_flags_global.force {
return Err(generic_error(
return Err(anyhow!(
"Existing installation found. Aborting (Use -f to overwrite).",
));
};
@ -492,7 +490,7 @@ async fn resolve_shim_data(
let name = match name {
Some(name) => name,
None => return Err(generic_error(
None => return Err(anyhow!(
"An executable name was not provided. One could not be inferred from the URL. Aborting.",
)),
};
@ -524,9 +522,7 @@ async fn resolve_shim_data(
let log_level = match log_level {
Level::Debug => "debug",
Level::Info => "info",
_ => {
return Err(generic_error(format!("invalid log level {log_level}")))
}
_ => return Err(anyhow!(format!("invalid log level {log_level}"))),
};
executable_args.push(log_level.to_string());
}

View file

@ -2,9 +2,9 @@
use std::sync::Arc;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::located_script_name;
@ -67,7 +67,7 @@ pub async fn kernel(
// TODO(bartlomieju): should we run with all permissions?
let permissions =
PermissionsContainer::allow_all(factory.permission_desc_parser()?.clone());
let npm_resolver = factory.npm_resolver().await?.clone();
let npm_installer = factory.npm_installer_if_managed()?.cloned();
let resolver = factory.resolver().await?.clone();
let worker_factory = factory.create_cli_main_worker_factory().await?;
let (stdio_tx, stdio_rx) = mpsc::unbounded_channel();
@ -115,7 +115,7 @@ pub async fn kernel(
let worker = worker.into_main_worker();
let mut repl_session = repl::ReplSession::initialize(
cli_options,
npm_resolver,
npm_installer,
resolver,
worker,
main_module,
@ -137,10 +137,10 @@ pub async fn kernel(
}
let cwd_url =
Url::from_directory_path(cli_options.initial_cwd()).map_err(|_| {
generic_error(format!(
anyhow!(
"Unable to construct URL from the path of cwd: {}",
cli_options.initial_cwd().to_string_lossy(),
))
)
})?;
repl_session.set_test_reporter_factory(Box::new(move || {
Box::new(

View file

@ -18,7 +18,6 @@ use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
use deno_config::workspace::WorkspaceDirectory;
use deno_core::anyhow::anyhow;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::FutureExt;
@ -77,9 +76,7 @@ pub async fn lint(
) -> Result<(), AnyError> {
if lint_flags.watch.is_some() {
if lint_flags.is_stdin() {
return Err(generic_error(
"Lint watch on standard input is not supported.",
));
return Err(anyhow!("Lint watch on standard input is not supported.",));
}
return lint_with_watch(flags, lint_flags).await;
@ -223,7 +220,7 @@ fn resolve_paths_with_options_batches(
let mut paths_with_options_batches =
Vec::with_capacity(members_lint_options.len());
for (dir, lint_options) in members_lint_options {
let files = collect_lint_files(cli_options, lint_options.files.clone())?;
let files = collect_lint_files(cli_options, lint_options.files.clone());
if !files.is_empty() {
paths_with_options_batches.push(PathsWithOptions {
dir,
@ -233,7 +230,7 @@ fn resolve_paths_with_options_batches(
}
}
if paths_with_options_batches.is_empty() {
return Err(generic_error("No target files found."));
return Err(anyhow!("No target files found."));
}
Ok(paths_with_options_batches)
}
@ -446,7 +443,7 @@ impl WorkspaceLinter {
fn collect_lint_files(
cli_options: &CliOptions,
files: FilePatterns,
) -> Result<Vec<PathBuf>, AnyError> {
) -> Vec<PathBuf> {
FileCollector::new(|e| {
is_script_ext(e.path)
|| (e.path.extension().is_none() && cli_options.ext_flag().is_some())
@ -534,7 +531,7 @@ fn lint_stdin(
}
let mut source_code = String::new();
if stdin().read_to_string(&mut source_code).is_err() {
return Err(generic_error("Failed to read from stdin"));
return Err(anyhow!("Failed to read from stdin"));
}
let linter = CliLinter::new(CliLinterOptions {

View file

@ -7,7 +7,7 @@ use std::sync::Arc;
use deno_ast::SourceRange;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_error::JsErrorBox;
use deno_graph::source::ResolutionKind;
use deno_graph::source::ResolveError;
use deno_graph::Range;
@ -187,7 +187,7 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
let resolution = self
.workspace_resolver
.resolve(specifier_text, &referrer_range.specifier)
.map_err(|err| ResolveError::Other(err.into()))?;
.map_err(|err| ResolveError::Other(JsErrorBox::from_err(err)))?;
match resolution {
deno_config::workspace::MappedResolution::Normal {
@ -220,7 +220,7 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
}
| deno_config::workspace::MappedResolution::PackageJson { .. } => {
// this error is ignored
Err(ResolveError::Other(anyhow!("")))
Err(ResolveError::Other(JsErrorBox::generic("")))
}
}
}

View file

@ -233,7 +233,7 @@ pub fn collect_publish_paths(
) -> Result<Vec<CollectedPublishPath>, AnyError> {
let diagnostics_collector = opts.diagnostics_collector;
let publish_paths =
collect_paths(opts.cli_options, diagnostics_collector, opts.file_patterns)?;
collect_paths(opts.cli_options, diagnostics_collector, opts.file_patterns);
let publish_paths_set = publish_paths.iter().cloned().collect::<HashSet<_>>();
let capacity = publish_paths.len() + opts.force_include_paths.len();
let mut paths = HashSet::with_capacity(capacity);
@ -321,7 +321,7 @@ fn collect_paths(
cli_options: &CliOptions,
diagnostics_collector: &PublishDiagnosticsCollector,
file_patterns: FilePatterns,
) -> Result<Vec<PathBuf>, AnyError> {
) -> Vec<PathBuf> {
FileCollector::new(|e| {
if !e.metadata.file_type().is_file() {
if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {

View file

@ -861,10 +861,8 @@ async fn npm_install_after_modification(
// make a new CliFactory to pick up the updated config file
let cli_factory = CliFactory::from_flags(flags);
// surface any errors in the package.json
let npm_resolver = cli_factory.npm_resolver().await?;
if let Some(npm_resolver) = npm_resolver.as_managed() {
npm_resolver.ensure_no_pkg_json_dep_errors()?;
}
let npm_installer = cli_factory.npm_installer()?;
npm_installer.ensure_no_pkg_json_dep_errors()?;
// npm install
cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?;

View file

@ -12,16 +12,19 @@ use crate::factory::CliFactory;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions;
use crate::npm::installer::PackageCaching;
pub async fn cache_top_level_deps(
// todo(dsherret): don't pass the factory into this function. Instead use ctor deps
factory: &CliFactory,
jsr_resolver: Option<Arc<crate::jsr::JsrFetchResolver>>,
) -> Result<(), AnyError> {
let npm_resolver = factory.npm_resolver().await?;
let npm_installer = factory.npm_installer_if_managed()?;
let cli_options = factory.cli_options()?;
if let Some(npm_resolver) = npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?;
if let Some(npm_installer) = &npm_installer {
npm_installer
.ensure_top_level_package_json_install()
.await?;
if let Some(lockfile) = cli_options.maybe_lockfile() {
lockfile.error_if_changed()?;
}
@ -138,10 +141,8 @@ pub async fn cache_top_level_deps(
maybe_graph_error = graph_builder.graph_roots_valid(graph, &roots);
}
if let Some(npm_resolver) = npm_resolver.as_managed() {
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
if let Some(npm_installer) = &npm_installer {
npm_installer.cache_packages(PackageCaching::All).await?;
}
maybe_graph_error?;

View file

@ -42,6 +42,7 @@ use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::jsr::JsrFetchResolver;
use crate::module_loader::ModuleLoadPreparer;
use crate::npm::installer::NpmInstaller;
use crate::npm::CliNpmResolver;
use crate::npm::NpmFetchResolver;
use crate::util::sync::AtomicFlag;
@ -451,6 +452,7 @@ pub struct DepManager {
pub(crate) jsr_fetch_resolver: Arc<JsrFetchResolver>,
pub(crate) npm_fetch_resolver: Arc<NpmFetchResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_installer: Arc<NpmInstaller>,
permissions_container: PermissionsContainer,
main_module_graph_container: Arc<MainModuleGraphContainer>,
lockfile: Option<Arc<CliLockfile>>,
@ -460,6 +462,7 @@ pub struct DepManagerArgs {
pub module_load_preparer: Arc<ModuleLoadPreparer>,
pub jsr_fetch_resolver: Arc<JsrFetchResolver>,
pub npm_fetch_resolver: Arc<NpmFetchResolver>,
pub npm_installer: Arc<NpmInstaller>,
pub npm_resolver: Arc<dyn CliNpmResolver>,
pub permissions_container: PermissionsContainer,
pub main_module_graph_container: Arc<MainModuleGraphContainer>,
@ -477,6 +480,7 @@ impl DepManager {
module_load_preparer,
jsr_fetch_resolver,
npm_fetch_resolver,
npm_installer,
npm_resolver,
permissions_container,
main_module_graph_container,
@ -490,6 +494,7 @@ impl DepManager {
dependencies_resolved: AtomicFlag::lowered(),
module_load_preparer,
npm_fetch_resolver,
npm_installer,
npm_resolver,
permissions_container,
main_module_graph_container,
@ -556,7 +561,10 @@ impl DepManager {
return Ok(());
}
npm_resolver.ensure_top_level_package_json_install().await?;
self
.npm_installer
.ensure_top_level_package_json_install()
.await?;
let mut roots = Vec::new();
let mut info_futures = FuturesUnordered::new();
for dep in &self.deps {

View file

@ -445,6 +445,7 @@ async fn dep_manager_args(
jsr_fetch_resolver,
npm_fetch_resolver,
npm_resolver: factory.npm_resolver().await?.clone(),
npm_installer: factory.npm_installer()?.clone(),
permissions_container: factory.root_permissions_container()?.clone(),
main_module_graph_container: factory
.main_module_graph_container()

View file

@ -660,6 +660,7 @@ mod tests {
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_runtime::deno_node::PackageJson;
use deno_semver::Version;
use import_map::ImportMapWithDiagnostics;
@ -668,7 +669,6 @@ mod tests {
use test_util::testdata_path;
use super::*;
use crate::resolver::SloppyImportsCachedFs;
use crate::sys::CliSys;
fn parse_ast(specifier: &Url, source_code: &str) -> ParsedSource {

View file

@ -4,8 +4,10 @@ use std::cell::RefCell;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_error::JsErrorBox;
use tokio::sync::mpsc::channel;
use tokio::sync::mpsc::unbounded_channel;
use tokio::sync::mpsc::Receiver;
@ -47,7 +49,7 @@ pub enum RustylineSyncMessage {
}
pub enum RustylineSyncResponse {
PostMessage(Result<Value, AnyError>),
PostMessage(Result<Value, CoreError>),
LspCompletions(Vec<ReplCompletionItem>),
}
@ -61,7 +63,7 @@ impl RustylineSyncMessageSender {
&self,
method: &str,
params: Option<T>,
) -> Result<Value, AnyError> {
) -> Result<Value, CoreError> {
if let Err(err) =
self
.message_tx
@ -69,10 +71,11 @@ impl RustylineSyncMessageSender {
method: method.to_string(),
params: params
.map(|params| serde_json::to_value(params))
.transpose()?,
.transpose()
.map_err(JsErrorBox::from_err)?,
})
{
Err(anyhow!("{}", err))
Err(JsErrorBox::from_err(err).into())
} else {
match self.response_rx.borrow_mut().blocking_recv().unwrap() {
RustylineSyncResponse::PostMessage(result) => result,

View file

@ -164,7 +164,7 @@ pub async fn run(
let cli_options = factory.cli_options()?;
let main_module = cli_options.resolve_main_module()?;
let permissions = factory.root_permissions_container()?;
let npm_resolver = factory.npm_resolver().await?.clone();
let npm_installer = factory.npm_installer_if_managed()?.cloned();
let resolver = factory.resolver().await?.clone();
let file_fetcher = factory.file_fetcher()?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
@ -187,7 +187,7 @@ pub async fn run(
let worker = worker.into_main_worker();
let session = ReplSession::initialize(
cli_options,
npm_resolver,
npm_installer,
resolver,
worker,
main_module.clone(),

View file

@ -16,8 +16,9 @@ use deno_ast::ParsedSource;
use deno_ast::SourcePos;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_core::error::generic_error;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::channel::mpsc::UnboundedReceiver;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
@ -27,6 +28,7 @@ use deno_core::unsync::spawn;
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_core::PollEventLoopOptions;
use deno_error::JsErrorBox;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
@ -43,7 +45,7 @@ use crate::args::CliOptions;
use crate::cdp;
use crate::colors;
use crate::lsp::ReplLanguageServer;
use crate::npm::CliNpmResolver;
use crate::npm::installer::NpmInstaller;
use crate::resolver::CliResolver;
use crate::tools::test::report_tests;
use crate::tools::test::reporters::PrettyTestReporter;
@ -179,7 +181,7 @@ struct ReplJsxState {
}
pub struct ReplSession {
npm_resolver: Arc<dyn CliNpmResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
resolver: Arc<CliResolver>,
pub worker: MainWorker,
session: LocalInspectorSession,
@ -198,7 +200,7 @@ pub struct ReplSession {
impl ReplSession {
pub async fn initialize(
cli_options: &CliOptions,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
resolver: Arc<CliResolver>,
mut worker: MainWorker,
main_module: ModuleSpecifier,
@ -250,10 +252,10 @@ impl ReplSession {
let cwd_url =
Url::from_directory_path(cli_options.initial_cwd()).map_err(|_| {
generic_error(format!(
anyhow!(
"Unable to construct URL from the path of cwd: {}",
cli_options.initial_cwd().to_string_lossy(),
))
)
})?;
let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
@ -263,7 +265,7 @@ impl ReplSession {
)?;
let experimental_decorators = transpile_options.use_ts_decorators;
let mut repl_session = ReplSession {
npm_resolver,
npm_installer,
resolver,
worker,
session,
@ -322,7 +324,7 @@ impl ReplSession {
&mut self,
method: &str,
params: Option<T>,
) -> Result<Value, AnyError> {
) -> Result<Value, CoreError> {
self
.worker
.js_runtime
@ -339,7 +341,7 @@ impl ReplSession {
.await
}
pub async fn run_event_loop(&mut self) -> Result<(), AnyError> {
pub async fn run_event_loop(&mut self) -> Result<(), CoreError> {
self.worker.run_event_loop(true).await
}
@ -400,21 +402,29 @@ impl ReplSession {
}
Err(err) => {
// handle a parsing diagnostic
match err.downcast_ref::<deno_ast::ParseDiagnostic>() {
match crate::util::result::any_and_jserrorbox_downcast_ref::<
deno_ast::ParseDiagnostic,
>(&err)
{
Some(diagnostic) => {
Ok(EvaluationOutput::Error(format_diagnostic(diagnostic)))
}
None => match err.downcast_ref::<ParseDiagnosticsError>() {
Some(diagnostics) => Ok(EvaluationOutput::Error(
diagnostics
.0
.iter()
.map(format_diagnostic)
.collect::<Vec<_>>()
.join("\n\n"),
)),
None => Err(err),
},
None => {
match crate::util::result::any_and_jserrorbox_downcast_ref::<
ParseDiagnosticsError,
>(&err)
{
Some(diagnostics) => Ok(EvaluationOutput::Error(
diagnostics
.0
.iter()
.map(format_diagnostic)
.collect::<Vec<_>>()
.join("\n\n"),
)),
None => Err(err),
}
}
}
}
}
@ -694,8 +704,8 @@ impl ReplSession {
&mut self,
program: &swc_ast::Program,
) -> Result<(), AnyError> {
let Some(npm_resolver) = self.npm_resolver.as_managed() else {
return Ok(()); // don't auto-install for byonm
let Some(npm_installer) = &self.npm_installer else {
return Ok(());
};
let mut collector = ImportCollector::new();
@ -727,13 +737,13 @@ impl ReplSession {
let has_node_specifier =
resolved_imports.iter().any(|url| url.scheme() == "node");
if !npm_imports.is_empty() || has_node_specifier {
npm_resolver
npm_installer
.add_and_cache_package_reqs(&npm_imports)
.await?;
// prevent messages in the repl about @types/node not being cached
if has_node_specifier {
npm_resolver.inject_synthetic_types_node_package().await?;
npm_installer.inject_synthetic_types_node_package().await?;
}
}
Ok(())
@ -742,7 +752,7 @@ impl ReplSession {
async fn evaluate_expression(
&mut self,
expression: &str,
) -> Result<cdp::EvaluateResponse, AnyError> {
) -> Result<cdp::EvaluateResponse, CoreError> {
self
.post_message_with_event_loop(
"Runtime.evaluate",
@ -765,7 +775,9 @@ impl ReplSession {
}),
)
.await
.and_then(|res| serde_json::from_value(res).map_err(|e| e.into()))
.and_then(|res| {
serde_json::from_value(res).map_err(|e| JsErrorBox::from_err(e).into())
})
}
}

View file

@ -4,13 +4,13 @@ use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::StreamExt;
use deno_core::serde_json::json;
use deno_core::serde_json::{self};
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_error::JsErrorBox;
use deno_terminal::colors;
use tokio::select;
@ -66,19 +66,19 @@ pub struct HmrRunner {
#[async_trait::async_trait(?Send)]
impl crate::worker::HmrRunner for HmrRunner {
// TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs`
async fn start(&mut self) -> Result<(), AnyError> {
async fn start(&mut self) -> Result<(), CoreError> {
self.enable_debugger().await
}
// TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs`
async fn stop(&mut self) -> Result<(), AnyError> {
async fn stop(&mut self) -> Result<(), CoreError> {
self
.watcher_communicator
.change_restart_mode(WatcherRestartMode::Automatic);
self.disable_debugger().await
}
async fn run(&mut self) -> Result<(), AnyError> {
async fn run(&mut self) -> Result<(), CoreError> {
self
.watcher_communicator
.change_restart_mode(WatcherRestartMode::Manual);
@ -87,13 +87,13 @@ impl crate::worker::HmrRunner for HmrRunner {
select! {
biased;
Some(notification) = session_rx.next() => {
let notification = serde_json::from_value::<cdp::Notification>(notification)?;
let notification = serde_json::from_value::<cdp::Notification>(notification).map_err(JsErrorBox::from_err)?;
if notification.method == "Runtime.exceptionThrown" {
let exception_thrown = serde_json::from_value::<cdp::ExceptionThrown>(notification.params)?;
let exception_thrown = serde_json::from_value::<cdp::ExceptionThrown>(notification.params).map_err(JsErrorBox::from_err)?;
let (message, description) = exception_thrown.exception_details.get_message_and_description();
break Err(generic_error(format!("{} {}", message, description)));
break Err(JsErrorBox::generic(format!("{} {}", message, description)).into());
} else if notification.method == "Debugger.scriptParsed" {
let params = serde_json::from_value::<cdp::ScriptParsed>(notification.params)?;
let params = serde_json::from_value::<cdp::ScriptParsed>(notification.params).map_err(JsErrorBox::from_err)?;
if params.url.starts_with("file://") {
let file_url = Url::parse(&params.url).unwrap();
let file_path = file_url.to_file_path().unwrap();
@ -105,7 +105,7 @@ impl crate::worker::HmrRunner for HmrRunner {
}
}
changed_paths = self.watcher_communicator.watch_for_changed_paths() => {
let changed_paths = changed_paths?;
let changed_paths = changed_paths.map_err(JsErrorBox::from_err)?;
let Some(changed_paths) = changed_paths else {
let _ = self.watcher_communicator.force_restart();
@ -187,7 +187,7 @@ impl HmrRunner {
}
// TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs`
async fn enable_debugger(&mut self) -> Result<(), AnyError> {
async fn enable_debugger(&mut self) -> Result<(), CoreError> {
self
.session
.post_message::<()>("Debugger.enable", None)
@ -200,7 +200,7 @@ impl HmrRunner {
}
// TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs`
async fn disable_debugger(&mut self) -> Result<(), AnyError> {
async fn disable_debugger(&mut self) -> Result<(), CoreError> {
self
.session
.post_message::<()>("Debugger.disable", None)
@ -216,7 +216,7 @@ impl HmrRunner {
&mut self,
script_id: &str,
source: &str,
) -> Result<cdp::SetScriptSourceResponse, AnyError> {
) -> Result<cdp::SetScriptSourceResponse, CoreError> {
let result = self
.session
.post_message(
@ -229,15 +229,16 @@ impl HmrRunner {
)
.await?;
Ok(serde_json::from_value::<cdp::SetScriptSourceResponse>(
result,
)?)
Ok(
serde_json::from_value::<cdp::SetScriptSourceResponse>(result)
.map_err(JsErrorBox::from_err)?,
)
}
async fn dispatch_hmr_event(
&mut self,
script_id: &str,
) -> Result<(), AnyError> {
) -> Result<(), CoreError> {
let expr = format!(
"dispatchEvent(new CustomEvent(\"hmr\", {{ detail: {{ path: \"{}\" }} }}));",
script_id

View file

@ -12,6 +12,7 @@ use crate::args::EvalFlags;
use crate::args::Flags;
use crate::args::WatchFlagsWithPaths;
use crate::factory::CliFactory;
use crate::npm::installer::PackageCaching;
use crate::util;
use crate::util::file_watcher::WatcherRestartMode;
@ -202,18 +203,17 @@ pub async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> {
// ensure an "npm install" is done if the user has explicitly
// opted into using a managed node_modules directory
if cli_options.node_modules_dir()? == Some(NodeModulesDirMode::Auto) {
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
let already_done =
npm_resolver.ensure_top_level_package_json_install().await?;
if let Some(npm_installer) = factory.npm_installer_if_managed()? {
let already_done = npm_installer
.ensure_top_level_package_json_install()
.await?;
if !already_done
&& matches!(
cli_options.default_npm_caching_strategy(),
crate::graph_util::NpmCachingStrategy::Eager
)
{
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
npm_installer.cache_packages(PackageCaching::All).await?;
}
}
}

View file

@ -73,7 +73,7 @@ async fn do_serve(
)
.await?;
let worker_count = match worker_count {
None | Some(1) => return worker.run().await,
None | Some(1) => return worker.run().await.map_err(Into::into),
Some(c) => c,
};
@ -133,7 +133,7 @@ async fn run_worker(
worker.run_for_watcher().await?;
Ok(0)
} else {
worker.run().await
worker.run().await.map_err(Into::into)
}
}

View file

@ -36,6 +36,8 @@ use crate::args::TaskFlags;
use crate::colors;
use crate::factory::CliFactory;
use crate::node::CliNodeResolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver;
use crate::task_runner;
use crate::task_runner::run_future_forwarding_signals;
@ -203,6 +205,7 @@ pub async fn execute_script(
}]
};
let npm_installer = factory.npm_installer_if_managed()?;
let npm_resolver = factory.npm_resolver().await?;
let node_resolver = factory.node_resolver().await?;
let env_vars = task_runner::real_env_vars();
@ -216,6 +219,7 @@ pub async fn execute_script(
let task_runner = TaskRunner {
task_flags: &task_flags,
npm_installer: npm_installer.map(|n| n.as_ref()),
npm_resolver: npm_resolver.as_ref(),
node_resolver: node_resolver.as_ref(),
env_vars,
@ -266,6 +270,7 @@ struct RunSingleOptions<'a> {
struct TaskRunner<'a> {
task_flags: &'a TaskFlags,
npm_installer: Option<&'a NpmInstaller>,
npm_resolver: &'a dyn CliNpmResolver,
node_resolver: &'a CliNodeResolver,
env_vars: HashMap<String, String>,
@ -458,11 +463,11 @@ impl<'a> TaskRunner<'a> {
return Ok(0);
};
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?;
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
if let Some(npm_installer) = self.npm_installer {
npm_installer
.ensure_top_level_package_json_install()
.await?;
npm_installer.cache_packages(PackageCaching::All).await?;
}
let cwd = match &self.task_flags.cwd {
@ -497,11 +502,11 @@ impl<'a> TaskRunner<'a> {
argv: &[String],
) -> Result<i32, deno_core::anyhow::Error> {
// ensure the npm packages are installed if using a managed resolver
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?;
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
if let Some(npm_installer) = self.npm_installer {
npm_installer
.ensure_top_level_package_json_install()
.await?;
npm_installer.cache_packages(PackageCaching::All).await?;
}
let cwd = match &self.task_flags.cwd {

View file

@ -37,7 +37,8 @@ const HALF_SYNC_MARKER: &[u8; 4] = &[226, 128, 139, 0];
const BUFFER_SIZE: usize = 4096;
/// The test channel has been closed and cannot be used to send further messages.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, deno_error::JsError)]
#[class(generic)]
pub struct ChannelClosedError;
impl std::error::Error for ChannelClosedError {}

View file

@ -25,10 +25,9 @@ use deno_ast::MediaType;
use deno_config::glob::FilePatterns;
use deno_config::glob::WalkEntry;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context as _;
use deno_core::error::generic_error;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::error::JsError;
use deno_core::futures::future;
use deno_core::futures::stream;
@ -49,6 +48,7 @@ use deno_core::v8;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_error::JsErrorBox;
use deno_runtime::deno_io::Stdio;
use deno_runtime::deno_io::StdioPipe;
use deno_runtime::deno_permissions::Permissions;
@ -105,6 +105,8 @@ use reporters::PrettyTestReporter;
use reporters::TapTestReporter;
use reporters::TestReporter;
use crate::tools::test::channel::ChannelClosedError;
/// How many times we're allowed to spin the event loop before considering something a leak.
const MAX_SANITIZER_LOOP_SPINS: usize = 16;
@ -585,7 +587,7 @@ async fn configure_main_worker(
permissions_container: PermissionsContainer,
worker_sender: TestEventWorkerSender,
options: &TestSpecifierOptions,
) -> Result<(Option<Box<dyn CoverageCollector>>, MainWorker), anyhow::Error> {
) -> Result<(Option<Box<dyn CoverageCollector>>, MainWorker), CoreError> {
let mut worker = worker_factory
.create_custom_worker(
WorkerExecutionMode::Test,
@ -613,21 +615,15 @@ async fn configure_main_worker(
let mut worker = worker.into_main_worker();
match res {
Ok(()) => Ok(()),
Err(error) => {
// TODO(mmastrac): It would be nice to avoid having this error pattern repeated
if error.is::<JsError>() {
send_test_event(
&worker.js_runtime.op_state(),
TestEvent::UncaughtError(
specifier.to_string(),
Box::new(error.downcast::<JsError>().unwrap()),
),
)?;
Ok(())
} else {
Err(error)
}
Err(CoreError::Js(err)) => {
send_test_event(
&worker.js_runtime.op_state(),
TestEvent::UncaughtError(specifier.to_string(), Box::new(err)),
)
.map_err(JsErrorBox::from_err)?;
Ok(())
}
Err(err) => Err(err),
}?;
Ok((coverage_collector, worker))
}
@ -664,21 +660,14 @@ pub async fn test_specifier(
.await
{
Ok(()) => Ok(()),
Err(error) => {
// TODO(mmastrac): It would be nice to avoid having this error pattern repeated
if error.is::<JsError>() {
send_test_event(
&worker.js_runtime.op_state(),
TestEvent::UncaughtError(
specifier.to_string(),
Box::new(error.downcast::<JsError>().unwrap()),
),
)?;
Ok(())
} else {
Err(error)
}
Err(CoreError::Js(err)) => {
send_test_event(
&worker.js_runtime.op_state(),
TestEvent::UncaughtError(specifier.to_string(), Box::new(err)),
)?;
Ok(())
}
Err(e) => Err(e.into()),
}
}
@ -691,7 +680,7 @@ async fn test_specifier_inner(
specifier: ModuleSpecifier,
fail_fast_tracker: FailFastTracker,
options: TestSpecifierOptions,
) -> Result<(), AnyError> {
) -> Result<(), CoreError> {
// Ensure that there are no pending exceptions before we start running tests
worker.run_up_to_duration(Duration::from_millis(0)).await?;
@ -738,7 +727,7 @@ pub fn worker_has_tests(worker: &mut MainWorker) -> bool {
/// Yields to tokio to allow async work to process, and then polls
/// the event loop once.
#[must_use = "The event loop result should be checked"]
pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), AnyError> {
pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), CoreError> {
// Allow any ops that do work in the tokio event loop to do so
tokio::task::yield_now().await;
// Spin the event loop once
@ -757,13 +746,11 @@ pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), AnyError> {
pub fn send_test_event(
op_state: &RefCell<OpState>,
event: TestEvent,
) -> Result<(), AnyError> {
Ok(
op_state
.borrow_mut()
.borrow_mut::<TestEventSender>()
.send(event)?,
)
) -> Result<(), ChannelClosedError> {
op_state
.borrow_mut()
.borrow_mut::<TestEventSender>()
.send(event)
}
pub async fn run_tests_for_worker(
@ -959,13 +946,10 @@ async fn run_tests_for_worker_inner(
let result = match result {
Ok(r) => r,
Err(error) => {
if error.is::<JsError>() {
if let CoreError::Js(js_error) = error {
send_test_event(
&state_rc,
TestEvent::UncaughtError(
specifier.to_string(),
Box::new(error.downcast::<JsError>().unwrap()),
),
TestEvent::UncaughtError(specifier.to_string(), Box::new(js_error)),
)?;
fail_fast_tracker.add_failure();
send_test_event(
@ -975,7 +959,7 @@ async fn run_tests_for_worker_inner(
had_uncaught_error = true;
continue;
} else {
return Err(error);
return Err(error.into());
}
}
};
@ -1365,25 +1349,20 @@ pub async fn report_tests(
reporter.report_summary(&elapsed, &tests, &test_steps);
if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) {
return (
Err(generic_error(format!(
"Test reporter failed to flush: {}",
err
))),
Err(anyhow!("Test reporter failed to flush: {}", err)),
receiver,
);
}
if used_only {
return (
Err(generic_error(
"Test failed because the \"only\" option was used",
)),
Err(anyhow!("Test failed because the \"only\" option was used",)),
receiver,
);
}
if failed {
return (Err(generic_error("Test failed")), receiver);
return (Err(anyhow!("Test failed")), receiver);
}
(Ok(()), receiver)
@ -1524,7 +1503,7 @@ pub async fn run_tests(
.await?;
if !test_flags.permit_no_files && !workspace_files_factory.found_specifiers()
{
return Err(generic_error("No test modules found"));
return Err(anyhow!("No test modules found"));
}
workspace_files_factory.check().await?;
@ -1539,10 +1518,10 @@ pub async fn run_tests(
None,
TestSpecifiersOptions {
cwd: Url::from_directory_path(initial_cwd).map_err(|_| {
generic_error(format!(
anyhow!(
"Unable to construct URL from the path of cwd: {}",
initial_cwd.to_string_lossy(),
))
cli_options.initial_cwd().to_string_lossy(),
)
})?,
concurrent_jobs: test_flags
.concurrent_jobs
@ -1635,10 +1614,10 @@ pub async fn run_tests_with_watch(
changed_paths.map(|p| p.into_iter().collect()).as_ref(),
TestSpecifiersOptions {
cwd: Url::from_directory_path(initial_cwd).map_err(|_| {
generic_error(format!(
anyhow!(
"Unable to construct URL from the path of cwd: {}",
initial_cwd.to_string_lossy(),
))
cli_options.initial_cwd().to_string_lossy(),
)
})?,
concurrent_jobs: test_flags
.concurrent_jobs

View file

@ -129,7 +129,7 @@ impl TestReporter for CompoundTestReporter {
if errors.is_empty() {
Ok(())
} else {
bail!(
anyhow::bail!(
"error in one or more wrapped reporters:\n{}",
errors
.iter()

View file

@ -3,6 +3,8 @@
use std::collections::VecDeque;
use std::path::PathBuf;
use deno_core::anyhow::Context;
use super::fmt::to_relative_path_or_remote_url;
use super::*;

View file

@ -723,7 +723,7 @@ delete Object.prototype.__proto__;
}
: arg;
if (fileReference.fileName.startsWith("npm:")) {
/** @type {[string, ts.Extension] | undefined} */
/** @type {[string, ts.Extension | null] | undefined} */
const resolved = ops.op_resolve(
containingFilePath,
[
@ -735,7 +735,7 @@ delete Object.prototype.__proto__;
],
],
)?.[0];
if (resolved) {
if (resolved && resolved[1]) {
return {
resolvedTypeReferenceDirective: {
primary: true,
@ -785,7 +785,7 @@ delete Object.prototype.__proto__;
debug(` base: ${base}`);
debug(` specifiers: ${specifiers.map((s) => s[1]).join(", ")}`);
}
/** @type {Array<[string, ts.Extension] | undefined>} */
/** @type {Array<[string, ts.Extension | null] | undefined>} */
const resolved = ops.op_resolve(
base,
specifiers,
@ -793,7 +793,7 @@ delete Object.prototype.__proto__;
if (resolved) {
/** @type {Array<ts.ResolvedModuleWithFailedLookupLocations>} */
const result = resolved.map((item) => {
if (item) {
if (item && item[1]) {
const [resolvedFileName, extension] = item;
return {
resolvedModule: {

View file

@ -320,7 +320,8 @@ impl fmt::Display for Diagnostic {
}
}
#[derive(Clone, Debug, Default, Eq, PartialEq)]
#[derive(Clone, Debug, Default, Eq, PartialEq, deno_error::JsError)]
#[class(generic)]
pub struct Diagnostics(pub Vec<Diagnostic>);
impl Diagnostics {

View file

@ -83,8 +83,30 @@ declare var window: Window & typeof globalThis;
declare var self: Window & typeof globalThis;
/** @category Platform */
declare var closed: boolean;
/** @category Platform */
/**
* Exits the current Deno process.
*
* This function terminates the process by signaling the runtime to exit.
* Similar to exit(0) in posix. Its behavior is similar to the `window.close()`
* method in the browser, but specific to the Deno runtime.
*
* Note: Use this function cautiously, as it will stop the execution of the
* entire Deno program immediately.
*
* @see https://developer.mozilla.org/en-US/docs/Web/API/Window/close
*
* @example
* ```ts
* console.log("About to close the Deno process.");
* close(); // The process will terminate here.
* console.log("This will not be logged."); // This line will never execute.
* ```
*
* @category Platform
*/
declare function close(): void;
/** @category Events */
declare var onerror: ((this: Window, ev: ErrorEvent) => any) | null;
/** @category Events */

View file

@ -3,12 +3,10 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::ascii_str;
use deno_core::error::AnyError;
@ -30,6 +28,7 @@ use deno_graph::GraphKind;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_graph::ResolutionResolved;
use deno_resolver::npm::managed::ResolvePkgFolderFromDenoModuleError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::NodeJsErrorCode;
@ -454,13 +453,6 @@ impl State {
}
}
fn normalize_specifier(
specifier: &str,
current_dir: &Path,
) -> Result<ModuleSpecifier, AnyError> {
resolve_url_or_path(specifier, current_dir).map_err(|err| err.into())
}
#[op2]
#[string]
fn op_create_hash(s: &mut OpState, #[string] text: &str) -> String {
@ -531,6 +523,21 @@ pub fn as_ts_script_kind(media_type: MediaType) -> i32 {
pub const MISSING_DEPENDENCY_SPECIFIER: &str =
"internal:///missing_dependency.d.ts";
#[derive(Debug, Error, deno_error::JsError)]
pub enum LoadError {
#[class(generic)]
#[error("Unable to load {path}: {error}")]
LoadFromNodeModule { path: String, error: std::io::Error },
#[class(inherit)]
#[error(
"Error converting a string module specifier for \"op_resolve\": {0}"
)]
ModuleResolution(#[from] deno_core::ModuleResolutionError),
#[class(inherit)]
#[error("{0}")]
ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct LoadResponse {
@ -545,24 +552,28 @@ struct LoadResponse {
fn op_load(
state: &mut OpState,
#[string] load_specifier: &str,
) -> Result<Option<LoadResponse>, AnyError> {
) -> Result<Option<LoadResponse>, LoadError> {
op_load_inner(state, load_specifier)
}
fn op_load_inner(
state: &mut OpState,
load_specifier: &str,
) -> Result<Option<LoadResponse>, AnyError> {
) -> Result<Option<LoadResponse>, LoadError> {
fn load_from_node_modules(
specifier: &ModuleSpecifier,
npm_state: Option<&RequestNpmState>,
media_type: &mut MediaType,
is_cjs: &mut bool,
) -> Result<String, AnyError> {
) -> Result<String, LoadError> {
*media_type = MediaType::from_specifier(specifier);
let file_path = specifier.to_file_path().unwrap();
let code = std::fs::read_to_string(&file_path)
.with_context(|| format!("Unable to load {}", file_path.display()))?;
let code = std::fs::read_to_string(&file_path).map_err(|err| {
LoadError::LoadFromNodeModule {
path: file_path.display().to_string(),
error: err,
}
})?;
let code: Arc<str> = code.into();
*is_cjs = npm_state
.map(|npm_state| {
@ -575,8 +586,7 @@ fn op_load_inner(
let state = state.borrow_mut::<State>();
let specifier = normalize_specifier(load_specifier, &state.current_dir)
.context("Error converting a string module specifier for \"op_load\".")?;
let specifier = resolve_url_or_path(load_specifier, &state.current_dir)?;
let mut hash: Option<String> = None;
let mut media_type = MediaType::Unknown;
@ -688,6 +698,24 @@ fn op_load_inner(
}))
}
#[derive(Debug, Error, deno_error::JsError)]
pub enum ResolveError {
#[class(inherit)]
#[error(
"Error converting a string module specifier for \"op_resolve\": {0}"
)]
ModuleResolution(#[from] deno_core::ModuleResolutionError),
#[class(inherit)]
#[error("{0}")]
PackageSubpathResolve(PackageSubpathResolveError),
#[class(inherit)]
#[error("{0}")]
ResolvePkgFolderFromDenoModule(#[from] ResolvePkgFolderFromDenoModuleError),
#[class(inherit)]
#[error("{0}")]
ResolveNonGraphSpecifierTypes(#[from] ResolveNonGraphSpecifierTypesError),
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ResolveArgs {
@ -717,7 +745,7 @@ fn op_resolve(
state: &mut OpState,
#[string] base: String,
#[serde] specifiers: Vec<(bool, String)>,
) -> Result<Vec<(String, &'static str)>, AnyError> {
) -> Result<Vec<(String, Option<&'static str>)>, ResolveError> {
op_resolve_inner(state, ResolveArgs { base, specifiers })
}
@ -725,32 +753,30 @@ fn op_resolve(
fn op_resolve_inner(
state: &mut OpState,
args: ResolveArgs,
) -> Result<Vec<(String, &'static str)>, AnyError> {
) -> Result<Vec<(String, Option<&'static str>)>, ResolveError> {
let state = state.borrow_mut::<State>();
let mut resolved: Vec<(String, &'static str)> =
let mut resolved: Vec<(String, Option<&'static str>)> =
Vec::with_capacity(args.specifiers.len());
let referrer = if let Some(remapped_specifier) =
state.maybe_remapped_specifier(&args.base)
{
remapped_specifier.clone()
} else {
normalize_specifier(&args.base, &state.current_dir).context(
"Error converting a string module specifier for \"op_resolve\".",
)?
resolve_url_or_path(&args.base, &state.current_dir)?
};
let referrer_module = state.graph.get(&referrer);
for (is_cjs, specifier) in args.specifiers {
if specifier.starts_with("node:") {
resolved.push((
MISSING_DEPENDENCY_SPECIFIER.to_string(),
MediaType::Dts.as_ts_extension(),
Some(MediaType::Dts.as_ts_extension()),
));
continue;
}
if specifier.starts_with("asset:///") {
let ext = MediaType::from_str(&specifier).as_ts_extension();
resolved.push((specifier, ext));
resolved.push((specifier, Some(ext)));
continue;
}
@ -830,14 +856,15 @@ fn op_resolve_inner(
(
specifier_str,
match media_type {
MediaType::Css => ".js", // surface these as .js for typescript
media_type => media_type.as_ts_extension(),
MediaType::Css => Some(".js"), // surface these as .js for typescript
MediaType::Unknown => None,
media_type => Some(media_type.as_ts_extension()),
},
)
}
None => (
MISSING_DEPENDENCY_SPECIFIER.to_string(),
MediaType::Dts.as_ts_extension(),
Some(MediaType::Dts.as_ts_extension()),
),
};
log::debug!("Resolved {} from {} to {:?}", specifier, referrer, result);
@ -852,7 +879,7 @@ fn resolve_graph_specifier_types(
referrer: &ModuleSpecifier,
resolution_mode: ResolutionMode,
state: &State,
) -> Result<Option<(ModuleSpecifier, MediaType)>, AnyError> {
) -> Result<Option<(ModuleSpecifier, MediaType)>, ResolveError> {
let graph = &state.graph;
let maybe_module = match graph.try_get(specifier) {
Ok(Some(module)) => Some(module),
@ -914,7 +941,7 @@ fn resolve_graph_specifier_types(
Err(err) => match err.code() {
NodeJsErrorCode::ERR_TYPES_NOT_FOUND
| NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None,
_ => return Err(err.into()),
_ => return Err(ResolveError::PackageSubpathResolve(err)),
},
};
Ok(Some(into_specifier_and_media_type(maybe_url)))
@ -936,10 +963,12 @@ fn resolve_graph_specifier_types(
}
}
#[derive(Debug, Error)]
enum ResolveNonGraphSpecifierTypesError {
#[derive(Debug, Error, deno_error::JsError)]
pub enum ResolveNonGraphSpecifierTypesError {
#[class(inherit)]
#[error(transparent)]
ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError),
#[class(inherit)]
#[error(transparent)]
PackageSubpathResolve(#[from] PackageSubpathResolveError),
}
@ -1036,10 +1065,20 @@ fn op_respond_inner(state: &mut OpState, args: RespondArgs) {
state.maybe_response = Some(args);
}
#[derive(Debug, Error, deno_error::JsError)]
pub enum ExecError {
#[class(generic)]
#[error("The response for the exec request was not set.")]
ResponseNotSet,
#[class(inherit)]
#[error(transparent)]
Core(deno_core::error::CoreError),
}
/// Execute a request on the supplied snapshot, returning a response which
/// contains information, like any emitted files, diagnostics, statistics and
/// optionally an updated TypeScript build info.
pub fn exec(request: Request) -> Result<Response, AnyError> {
pub fn exec(request: Request) -> Result<Response, ExecError> {
// tsc cannot handle root specifiers that don't have one of the "acceptable"
// extensions. Therefore, we have to check the root modules against their
// extensions and remap any that are unacceptable to tsc and add them to the
@ -1115,7 +1154,9 @@ pub fn exec(request: Request) -> Result<Response, AnyError> {
..Default::default()
});
runtime.execute_script(located_script_name!(), exec_source)?;
runtime
.execute_script(located_script_name!(), exec_source)
.map_err(ExecError::Core)?;
let op_state = runtime.op_state();
let mut op_state = op_state.borrow_mut();
@ -1132,7 +1173,7 @@ pub fn exec(request: Request) -> Result<Response, AnyError> {
stats,
})
} else {
Err(anyhow!("The response for the exec request was not set."))
Err(ExecError::ResponseNotSet)
}
}
@ -1141,6 +1182,7 @@ mod tests {
use deno_core::futures::future;
use deno_core::serde_json;
use deno_core::OpState;
use deno_error::JsErrorBox;
use deno_graph::GraphKind;
use deno_graph::ModuleGraph;
use test_util::PathRef;
@ -1167,13 +1209,20 @@ mod tests {
.replace("://", "_")
.replace('/', "-");
let source_path = self.fixtures.join(specifier_text);
let response = source_path.read_to_bytes_if_exists().map(|c| {
Some(deno_graph::source::LoadResponse::Module {
specifier: specifier.clone(),
maybe_headers: None,
content: c.into(),
let response = source_path
.read_to_bytes_if_exists()
.map(|c| {
Some(deno_graph::source::LoadResponse::Module {
specifier: specifier.clone(),
maybe_headers: None,
content: c.into(),
})
})
});
.map_err(|e| {
deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::generic(
e.to_string(),
)))
});
Box::pin(future::ready(response))
}
}
@ -1210,7 +1259,7 @@ mod tests {
async fn test_exec(
specifier: &ModuleSpecifier,
) -> Result<Response, AnyError> {
) -> Result<Response, ExecError> {
let hash_data = 123; // something random
let fixtures = test_util::testdata_path().join("tsc2");
let loader = MockLoader { fixtures };
@ -1392,7 +1441,10 @@ mod tests {
},
)
.expect("should have invoked op");
assert_eq!(actual, vec![("https://deno.land/x/b.ts".into(), ".ts")]);
assert_eq!(
actual,
vec![("https://deno.land/x/b.ts".into(), Some(".ts"))]
);
}
#[tokio::test]
@ -1411,7 +1463,10 @@ mod tests {
},
)
.expect("should have not errored");
assert_eq!(actual, vec![(MISSING_DEPENDENCY_SPECIFIER.into(), ".d.ts")]);
assert_eq!(
actual,
vec![(MISSING_DEPENDENCY_SPECIFIER.into(), Some(".d.ts"))]
);
}
#[tokio::test]

View file

@ -10,7 +10,7 @@ use std::time::Duration;
use deno_config::glob::PathOrPatternSet;
use deno_core::error::AnyError;
use deno_core::error::JsError;
use deno_core::error::CoreError;
use deno_core::futures::Future;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
@ -23,6 +23,7 @@ use notify::RecommendedWatcher;
use notify::RecursiveMode;
use notify::Watcher;
use tokio::select;
use tokio::sync::broadcast::error::RecvError;
use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::time::sleep;
@ -80,10 +81,13 @@ where
{
let result = watch_future.await;
if let Err(err) = result {
let error_string = match err.downcast_ref::<JsError>() {
Some(e) => format_js_error(e),
None => format!("{err:?}"),
};
let error_string =
match crate::util::result::any_and_jserrorbox_downcast_ref::<CoreError>(
&err,
) {
Some(CoreError::Js(e)) => format_js_error(e),
_ => format!("{err:?}"),
};
log::error!(
"{}: {}",
colors::red_bold("error"),
@ -171,9 +175,9 @@ impl WatcherCommunicator {
pub async fn watch_for_changed_paths(
&self,
) -> Result<Option<Vec<PathBuf>>, AnyError> {
) -> Result<Option<Vec<PathBuf>>, RecvError> {
let mut rx = self.changed_paths_rx.resubscribe();
rx.recv().await.map_err(AnyError::from)
rx.recv().await
}
pub fn change_restart_mode(&self, restart_mode: WatcherRestartMode) {

View file

@ -13,7 +13,6 @@ use deno_config::glob::PathOrPattern;
use deno_config::glob::PathOrPatternSet;
use deno_config::glob::WalkEntry;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::unsync::spawn_blocking;
use deno_core::ModuleSpecifier;
@ -129,7 +128,7 @@ pub fn collect_specifiers(
.ignore_git_folder()
.ignore_node_modules()
.set_vendor_folder(vendor_folder)
.collect_file_patterns(&CliSys::default(), files)?;
.collect_file_patterns(&CliSys::default(), files);
let mut collected_files_as_urls = collected_files
.iter()
.map(|f| specifier_from_file_path(f).unwrap())
@ -169,7 +168,7 @@ pub fn clone_dir_recursive<
sys: &TSys,
from: &Path,
to: &Path,
) -> Result<(), AnyError> {
) -> Result<(), CopyDirRecursiveError> {
if cfg!(target_vendor = "apple") {
if let Some(parent) = to.parent() {
sys.fs_create_dir_all(parent)?;
@ -200,6 +199,47 @@ pub fn clone_dir_recursive<
Ok(())
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CopyDirRecursiveError {
#[class(inherit)]
#[error("Creating {path}")]
Creating {
path: PathBuf,
#[source]
#[inherit]
source: Error,
},
#[class(inherit)]
#[error("Creating {path}")]
Reading {
path: PathBuf,
#[source]
#[inherit]
source: Error,
},
#[class(inherit)]
#[error("Dir {from} to {to}")]
Dir {
from: PathBuf,
to: PathBuf,
#[source]
#[inherit]
source: Box<Self>,
},
#[class(inherit)]
#[error("Copying {from} to {to}")]
Copying {
from: PathBuf,
to: PathBuf,
#[source]
#[inherit]
source: Error,
},
#[class(inherit)]
#[error(transparent)]
Other(#[from] Error),
}
/// Copies a directory to another directory.
///
/// Note: Does not handle symlinks.
@ -213,13 +253,20 @@ pub fn copy_dir_recursive<
sys: &TSys,
from: &Path,
to: &Path,
) -> Result<(), AnyError> {
sys
.fs_create_dir_all(to)
.with_context(|| format!("Creating {}", to.display()))?;
let read_dir = sys
.fs_read_dir(from)
.with_context(|| format!("Reading {}", from.display()))?;
) -> Result<(), CopyDirRecursiveError> {
sys.fs_create_dir_all(to).map_err(|source| {
CopyDirRecursiveError::Creating {
path: to.to_path_buf(),
source,
}
})?;
let read_dir =
sys
.fs_read_dir(from)
.map_err(|source| CopyDirRecursiveError::Reading {
path: from.to_path_buf(),
source,
})?;
for entry in read_dir {
let entry = entry?;
@ -228,12 +275,20 @@ pub fn copy_dir_recursive<
let new_to = to.join(entry.file_name());
if file_type.is_dir() {
copy_dir_recursive(sys, &new_from, &new_to).with_context(|| {
format!("Dir {} to {}", new_from.display(), new_to.display())
copy_dir_recursive(sys, &new_from, &new_to).map_err(|source| {
CopyDirRecursiveError::Dir {
from: new_from.to_path_buf(),
to: new_to.to_path_buf(),
source: Box::new(source),
}
})?;
} else if file_type.is_file() {
sys.fs_copy(&new_from, &new_to).with_context(|| {
format!("Copying {} to {}", new_from.display(), new_to.display())
sys.fs_copy(&new_from, &new_to).map_err(|source| {
CopyDirRecursiveError::Copying {
from: new_from.to_path_buf(),
to: new_to.to_path_buf(),
source,
}
})?;
}
}

View file

@ -1,6 +1,13 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::convert::Infallible;
use std::fmt::Debug;
use std::fmt::Display;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;
pub trait InfallibleResultExt<T> {
fn unwrap_infallible(self) -> T;
@ -14,3 +21,23 @@ impl<T> InfallibleResultExt<T> for Result<T, Infallible> {
}
}
}
pub fn any_and_jserrorbox_downcast_ref<
E: Display + Debug + Send + Sync + 'static,
>(
err: &AnyError,
) -> Option<&E> {
err
.downcast_ref::<E>()
.or_else(|| {
err
.downcast_ref::<JsErrorBox>()
.and_then(|e| e.as_any().downcast_ref::<E>())
})
.or_else(|| {
err.downcast_ref::<CoreError>().and_then(|e| match e {
CoreError::JsNative(e) => e.as_any().downcast_ref::<E>(),
_ => None,
})
})
}
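For context, a minimal sketch of how a call site might use this helper — illustrative only, not part of the commit. The `describe` function is hypothetical; the `CoreError::Js` match mirrors the updated `cli/util/file_watcher.rs` in this diff:
```rust
// Hypothetical call site: recover a typed CoreError from an untyped AnyError,
// the same way the watcher code does after this change.
use deno_core::error::{AnyError, CoreError};

fn describe(err: &AnyError) -> String {
  match crate::util::result::any_and_jserrorbox_downcast_ref::<CoreError>(err) {
    // A JS exception surfaced through the error chain.
    Some(CoreError::Js(js_error)) => format!("uncaught js error: {js_error}"),
    // Anything else falls back to the debug representation.
    _ => format!("{err:?}"),
  }
}
```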

View file

@ -1,11 +1,9 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod async_flag;
mod sync_read_async_write_lock;
mod task_queue;
pub use async_flag::AsyncFlag;
pub use deno_core::unsync::sync::AtomicFlag;
pub use sync_read_async_write_lock::SyncReadAsyncWriteLock;
pub use task_queue::TaskQueue;
pub use task_queue::TaskQueuePermit;

View file

@ -1,62 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::parking_lot::RwLock;
use deno_core::parking_lot::RwLockReadGuard;
use deno_core::parking_lot::RwLockWriteGuard;
use super::TaskQueue;
use super::TaskQueuePermit;
/// A lock that can be read synchronously at any time (including when
/// being written to), but must write asynchronously.
pub struct SyncReadAsyncWriteLockWriteGuard<'a, T: Send + Sync> {
_update_permit: TaskQueuePermit<'a>,
data: &'a RwLock<T>,
}
impl<'a, T: Send + Sync> SyncReadAsyncWriteLockWriteGuard<'a, T> {
pub fn read(&self) -> RwLockReadGuard<'_, T> {
self.data.read()
}
/// Warning: Only `write()` data that you created while holding this
/// `SyncReadAsyncWriteLockWriteGuard`.
///
/// ```rs
/// let mut data = lock.write().await;
///
/// let mut data = data.read().clone();
/// data.value = 2;
/// *data.write() = data;
/// ```
pub fn write(&self) -> RwLockWriteGuard<'_, T> {
self.data.write()
}
}
/// A lock that can be read synchronously at any time, but can only be written to asynchronously.
pub struct SyncReadAsyncWriteLock<T: Send + Sync> {
data: RwLock<T>,
update_queue: TaskQueue,
}
impl<T: Send + Sync> SyncReadAsyncWriteLock<T> {
pub fn new(data: T) -> Self {
Self {
data: RwLock::new(data),
update_queue: TaskQueue::default(),
}
}
pub fn read(&self) -> RwLockReadGuard<'_, T> {
self.data.read()
}
pub async fn acquire(&self) -> SyncReadAsyncWriteLockWriteGuard<'_, T> {
let update_permit = self.update_queue.acquire().await;
SyncReadAsyncWriteLockWriteGuard {
_update_permit: update_permit,
data: &self.data,
}
}
}

View file

@ -8,6 +8,7 @@ use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::FutureExt;
use deno_core::url::Url;
use deno_core::v8;
@ -17,6 +18,7 @@ use deno_core::FeatureChecker;
use deno_core::ModuleLoader;
use deno_core::PollEventLoopOptions;
use deno_core::SharedArrayBufferStore;
use deno_error::JsErrorBox;
use deno_runtime::code_cache;
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
use deno_runtime::deno_fs;
@ -50,9 +52,10 @@ use crate::args::CliLockfile;
use crate::args::DenoSubcommand;
use crate::args::NpmCachingStrategy;
use crate::args::StorageKeyResolver;
use crate::errors;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver;
use crate::sys::CliSys;
use crate::util::checksum;
@ -80,9 +83,9 @@ pub trait ModuleLoaderFactory: Send + Sync {
#[async_trait::async_trait(?Send)]
pub trait HmrRunner: Send + Sync {
async fn start(&mut self) -> Result<(), AnyError>;
async fn stop(&mut self) -> Result<(), AnyError>;
async fn run(&mut self) -> Result<(), AnyError>;
async fn start(&mut self) -> Result<(), CoreError>;
async fn stop(&mut self) -> Result<(), CoreError>;
async fn run(&mut self) -> Result<(), CoreError>;
}
pub trait CliCodeCache: code_cache::CodeCache {
@ -147,6 +150,7 @@ struct SharedWorkerState {
maybe_lockfile: Option<Arc<CliLockfile>>,
module_loader_factory: Box<dyn ModuleLoaderFactory>,
node_resolver: Arc<CliNodeResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: Arc<dyn CliNpmResolver>,
pkg_json_resolver: Arc<CliPackageJsonResolver>,
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
@ -195,7 +199,7 @@ impl CliMainWorker {
Ok(())
}
pub async fn run(&mut self) -> Result<i32, AnyError> {
pub async fn run(&mut self) -> Result<i32, CoreError> {
let mut maybe_coverage_collector =
self.maybe_setup_coverage_collector().await?;
let mut maybe_hmr_runner = self.maybe_setup_hmr_runner().await?;
@ -216,7 +220,7 @@ impl CliMainWorker {
let result;
select! {
hmr_result = hmr_future => {
result = hmr_result;
result = hmr_result.map_err(Into::into);
},
event_loop_result = event_loop_future => {
result = event_loop_result;
@ -331,12 +335,12 @@ impl CliMainWorker {
executor.execute().await
}
pub async fn execute_main_module(&mut self) -> Result<(), AnyError> {
pub async fn execute_main_module(&mut self) -> Result<(), CoreError> {
let id = self.worker.preload_main_module(&self.main_module).await?;
self.worker.evaluate_module(id).await
}
pub async fn execute_side_module(&mut self) -> Result<(), AnyError> {
pub async fn execute_side_module(&mut self) -> Result<(), CoreError> {
let id = self.worker.preload_side_module(&self.main_module).await?;
self.worker.evaluate_module(id).await
}
@ -393,7 +397,7 @@ impl CliMainWorker {
&mut self,
name: &'static str,
source_code: &'static str,
) -> Result<v8::Global<v8::Value>, AnyError> {
) -> Result<v8::Global<v8::Value>, CoreError> {
self.worker.js_runtime.execute_script(name, source_code)
}
}
@ -422,6 +426,7 @@ impl CliMainWorkerFactory {
maybe_lockfile: Option<Arc<CliLockfile>>,
module_loader_factory: Box<dyn ModuleLoaderFactory>,
node_resolver: Arc<CliNodeResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: Arc<dyn CliNpmResolver>,
pkg_json_resolver: Arc<CliPackageJsonResolver>,
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
@ -446,6 +451,7 @@ impl CliMainWorkerFactory {
maybe_lockfile,
module_loader_factory,
node_resolver,
npm_installer,
npm_resolver,
pkg_json_resolver,
root_cert_store_provider,
@ -465,7 +471,7 @@ impl CliMainWorkerFactory {
&self,
mode: WorkerExecutionMode,
main_module: ModuleSpecifier,
) -> Result<CliMainWorker, AnyError> {
) -> Result<CliMainWorker, CoreError> {
self
.create_custom_worker(
mode,
@ -484,7 +490,7 @@ impl CliMainWorkerFactory {
permissions: PermissionsContainer,
custom_extensions: Vec<Extension>,
stdio: deno_runtime::deno_io::Stdio,
) -> Result<CliMainWorker, AnyError> {
) -> Result<CliMainWorker, CoreError> {
let shared = &self.shared;
let CreateModuleLoaderResult {
module_loader,
@ -495,34 +501,33 @@ impl CliMainWorkerFactory {
let main_module = if let Ok(package_ref) =
NpmPackageReqReference::from_specifier(&main_module)
{
if let Some(npm_resolver) = shared.npm_resolver.as_managed() {
if let Some(npm_installer) = &shared.npm_installer {
let reqs = &[package_ref.req().clone()];
npm_resolver
npm_installer
.add_package_reqs(
reqs,
if matches!(
shared.default_npm_caching_strategy,
NpmCachingStrategy::Lazy
) {
crate::npm::PackageCaching::Only(reqs.into())
PackageCaching::Only(reqs.into())
} else {
crate::npm::PackageCaching::All
PackageCaching::All
},
)
.await?;
}
// use a fake referrer that can be used to discover the package.json if necessary
let referrer =
ModuleSpecifier::from_directory_path(self.shared.fs.cwd()?)
.unwrap()
.join("package.json")?;
let referrer = ModuleSpecifier::from_directory_path(
self.shared.fs.cwd().map_err(JsErrorBox::from_err)?,
)
.unwrap()
.join("package.json")?;
let package_folder = shared
.npm_resolver
.resolve_pkg_folder_from_deno_module_req(
package_ref.req(),
&referrer,
)?;
.resolve_pkg_folder_from_deno_module_req(package_ref.req(), &referrer)
.map_err(JsErrorBox::from_err)?;
let main_module = self
.resolve_binary_entrypoint(&package_folder, package_ref.sub_path())?;
@ -633,7 +638,6 @@ impl CliMainWorkerFactory {
should_break_on_first_statement: shared.options.inspect_brk,
should_wait_for_inspector_session: shared.options.inspect_wait,
strace_ops: shared.options.strace_ops.clone(),
get_error_class_fn: Some(&errors::get_error_class_name),
cache_storage_dir,
origin_storage_dir,
stdio,
@ -834,7 +838,6 @@ fn create_web_worker_callback(
create_web_worker_cb,
format_js_error_fn: Some(Arc::new(format_js_error)),
worker_type: args.worker_type,
get_error_class_fn: Some(&errors::get_error_class_name),
stdio: stdio.clone(),
cache_storage_dir,
strace_ops: shared.options.strace_ops.clone(),

View file

@ -2,7 +2,7 @@
[package]
name = "deno_broadcast_channel"
version = "0.178.0"
version = "0.179.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -16,6 +16,7 @@ path = "lib.rs"
[dependencies]
async-trait.workspace = true
deno_core.workspace = true
deno_error.workspace = true
thiserror.workspace = true
tokio.workspace = true
uuid.workspace = true

View file

@ -12,6 +12,7 @@ use deno_core::JsBuffer;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
use deno_error::JsErrorBox;
pub use in_memory_broadcast_channel::InMemoryBroadcastChannel;
pub use in_memory_broadcast_channel::InMemoryBroadcastChannelResource;
use tokio::sync::broadcast::error::SendError as BroadcastSendError;
@ -19,18 +20,26 @@ use tokio::sync::mpsc::error::SendError as MpscSendError;
pub const UNSTABLE_FEATURE_NAME: &str = "broadcast-channel";
#[derive(Debug, thiserror::Error)]
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum BroadcastChannelError {
#[class(inherit)]
#[error(transparent)]
Resource(deno_core::error::AnyError),
Resource(
#[from]
#[inherit]
deno_core::error::ResourceError,
),
#[class(generic)]
#[error(transparent)]
MPSCSendError(MpscSendError<Box<dyn std::fmt::Debug + Send + Sync>>),
#[class(generic)]
#[error(transparent)]
BroadcastSendError(
BroadcastSendError<Box<dyn std::fmt::Debug + Send + Sync>>,
),
#[class(inherit)]
#[error(transparent)]
Other(deno_core::error::AnyError),
Other(#[inherit] JsErrorBox),
}
impl<T: std::fmt::Debug + Send + Sync + 'static> From<MpscSendError<T>>
@ -100,10 +109,7 @@ pub fn op_broadcast_unsubscribe<BC>(
where
BC: BroadcastChannel + 'static,
{
let resource = state
.resource_table
.get::<BC::Resource>(rid)
.map_err(BroadcastChannelError::Resource)?;
let resource = state.resource_table.get::<BC::Resource>(rid)?;
let bc = state.borrow::<BC>();
bc.unsubscribe(&resource)
}
@ -118,11 +124,7 @@ pub async fn op_broadcast_send<BC>(
where
BC: BroadcastChannel + 'static,
{
let resource = state
.borrow()
.resource_table
.get::<BC::Resource>(rid)
.map_err(BroadcastChannelError::Resource)?;
let resource = state.borrow().resource_table.get::<BC::Resource>(rid)?;
let bc = state.borrow().borrow::<BC>().clone();
bc.send(&resource, name, buf.to_vec()).await
}
@ -136,11 +138,7 @@ pub async fn op_broadcast_recv<BC>(
where
BC: BroadcastChannel + 'static,
{
let resource = state
.borrow()
.resource_table
.get::<BC::Resource>(rid)
.map_err(BroadcastChannelError::Resource)?;
let resource = state.borrow().resource_table.get::<BC::Resource>(rid)?;
let bc = state.borrow().borrow::<BC>().clone();
bc.recv(&resource).await
}
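To summarize the conversion pattern applied throughout these extensions, here is a condensed, illustrative sketch — not taken from the commit, and `ExampleError` is a made-up name — of an error enum deriving `deno_error::JsError` with per-variant class annotations, as the `BroadcastChannelError` change above does:
```rust
// Hypothetical enum showing the per-variant class annotations used above.
use deno_error::JsErrorBox;

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ExampleError {
  // Inherits the class that deno_error assigns to std::io::Error.
  #[class(inherit)]
  #[error("{0}")]
  Io(#[from] std::io::Error),
  // Surfaces as a TypeError with a fixed message.
  #[class(type)]
  #[error("example resource is not available in this context")]
  Unsupported,
  // Inherits the JS error class of the wrapped boxed error.
  #[class(inherit)]
  #[error(transparent)]
  Other(#[inherit] JsErrorBox),
}
```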

View file

@ -2,7 +2,7 @@
[package]
name = "deno_cache"
version = "0.116.0"
version = "0.117.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -16,6 +16,7 @@ path = "lib.rs"
[dependencies]
async-trait.workspace = true
deno_core.workspace = true
deno_error.workspace = true
rusqlite.workspace = true
serde.workspace = true
sha2.workspace = true

ext/cache/lib.rs
View file

@ -6,7 +6,6 @@ use std::rc::Rc;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::error::type_error;
use deno_core::op2;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
@ -14,22 +13,38 @@ use deno_core::ByteString;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
use deno_error::JsErrorBox;
mod sqlite;
pub use sqlite::SqliteBackedCache;
#[derive(Debug, thiserror::Error)]
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CacheError {
#[class(type)]
#[error("CacheStorage is not available in this context")]
ContextUnsupported,
#[class(generic)]
#[error(transparent)]
Sqlite(#[from] rusqlite::Error),
#[class(generic)]
#[error(transparent)]
JoinError(#[from] tokio::task::JoinError),
#[class(inherit)]
#[error(transparent)]
Resource(deno_core::error::AnyError),
Resource(#[from] deno_core::error::ResourceError),
#[class(inherit)]
#[error(transparent)]
Other(deno_core::error::AnyError),
Other(JsErrorBox),
#[class(inherit)]
#[error("{0}")]
Io(#[from] std::io::Error),
#[class(generic)]
#[error("Failed to create cache storage directory {}", .dir.display())]
CacheStorageDirectory {
dir: PathBuf,
#[source]
source: std::io::Error,
},
}
#[derive(Clone)]
@ -237,9 +252,7 @@ where
state.put(cache);
Ok(state.borrow::<CA>().clone())
} else {
Err(CacheError::Other(type_error(
"CacheStorage is not available in this context",
)))
Err(CacheError::ContextUnsupported)
}
}

ext/cache/sqlite.rs
View file

@ -8,8 +8,6 @@ use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use async_trait::async_trait;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::future::poll_fn;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::spawn_blocking;
@ -45,14 +43,12 @@ pub struct SqliteBackedCache {
impl SqliteBackedCache {
pub fn new(cache_storage_dir: PathBuf) -> Result<Self, CacheError> {
{
std::fs::create_dir_all(&cache_storage_dir)
.with_context(|| {
format!(
"Failed to create cache storage directory {}",
cache_storage_dir.display()
)
})
.map_err(CacheError::Other)?;
std::fs::create_dir_all(&cache_storage_dir).map_err(|source| {
CacheError::CacheStorageDirectory {
dir: cache_storage_dir.clone(),
source,
}
})?;
let path = cache_storage_dir.join("cache_metadata.db");
let connection = rusqlite::Connection::open(&path).unwrap_or_else(|_| {
panic!("failed to open cache db at {}", path.display())
@ -385,7 +381,10 @@ impl CacheResponseResource {
}
}
async fn read(self: Rc<Self>, data: &mut [u8]) -> Result<usize, AnyError> {
async fn read(
self: Rc<Self>,
data: &mut [u8],
) -> Result<usize, std::io::Error> {
let resource = deno_core::RcRef::map(&self, |r| &r.file);
let mut file = resource.borrow_mut().await;
let nread = file.read(data).await?;

View file

@ -2,7 +2,7 @@
[package]
name = "deno_canvas"
version = "0.53.0"
version = "0.54.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -15,6 +15,7 @@ path = "lib.rs"
[dependencies]
deno_core.workspace = true
deno_error.workspace = true
deno_webgpu.workspace = true
image = { version = "0.24.7", default-features = false, features = ["png"] }
serde = { workspace = true, features = ["derive"] }

View file

@ -12,10 +12,12 @@ use image::RgbaImage;
use serde::Deserialize;
use serde::Serialize;
#[derive(Debug, thiserror::Error)]
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CanvasError {
#[class(type)]
#[error("Color type '{0:?}' not supported")]
UnsupportedColorType(ColorType),
#[class(generic)]
#[error(transparent)]
Image(#[from] image::ImageError),
}

View file

@ -2,7 +2,7 @@
[package]
name = "deno_console"
version = "0.184.0"
version = "0.185.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_cron"
version = "0.64.0"
version = "0.65.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -18,6 +18,7 @@ anyhow.workspace = true
async-trait.workspace = true
chrono = { workspace = true, features = ["now"] }
deno_core.workspace = true
deno_error.workspace = true
saffron.workspace = true
thiserror.workspace = true
tokio.workspace = true

Some files were not shown because too many files have changed in this diff.