0
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2025-02-12 16:59:32 -05:00

Compare commits

..

1 commit

Author SHA1 Message Date
Cre3per
5f08590716
Merge f3bf9ccef1 into 3fb8fc1ba7 2025-01-14 17:14:28 +03:00
255 changed files with 8547 additions and 11187 deletions

View file

@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 36;
const cacheVersion = 33;
const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-24.04-xl";
@ -14,7 +14,7 @@ const windowsX86Runner = "windows-2022";
const windowsX86XlRunner = "windows-2022-xl";
const macosX86Runner = "macos-13";
const macosArmRunner = "macos-14";
const selfHostedMacosArmRunner = "ghcr.io/cirruslabs/macos-runner:sonoma";
const selfHostedMacosArmRunner = "self-hosted";
const Runners = {
linuxX86: {
@ -41,14 +41,8 @@ const Runners = {
macosArm: {
os: "macos",
arch: "aarch64",
runner: macosArmRunner,
},
macosArmSelfHosted: {
os: "macos",
arch: "aarch64",
// Actually use self-hosted runner only in denoland/deno on `main` branch and for tags (release) builds.
runner:
`\${{ github.repository == 'denoland/deno' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
`\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
},
windowsX86: {
os: "windows",
@ -390,7 +384,7 @@ const ci = {
job: "test",
profile: "debug",
}, {
...Runners.macosArmSelfHosted,
...Runners.macosArm,
job: "test",
profile: "release",
skip_pr: true,
@ -492,7 +486,7 @@ const ci = {
},
{
name: "Cache Cargo home",
uses: "cirruslabs/cache@v4",
uses: "actions/cache@v4",
with: {
// See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
// Note that with the new sparse registry format, we no longer have to cache a `.git` dir

View file

@ -68,12 +68,12 @@ jobs:
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
- os: macos
arch: aarch64
runner: macos-14
runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test
profile: debug
- os: macos
arch: aarch64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && (github.ref == ''refs/heads/main'' || startsWith(github.ref, ''refs/tags/'')) && ''ghcr.io/cirruslabs/macos-runner:sonoma'' || ''macos-14'' }}'
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test
profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
@ -175,7 +175,7 @@ jobs:
tar --exclude=".git*" --exclude=target --exclude=third_party/prebuilt \
-czvf target/release/deno_src.tar.gz -C .. deno
- name: Cache Cargo home
uses: cirruslabs/cache@v4
uses: actions/cache@v4
with:
path: |-
~/.cargo/.crates.toml
@ -184,8 +184,8 @@ jobs:
~/.cargo/registry/index
~/.cargo/registry/cache
~/.cargo/git/db
key: '36-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '36-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
key: '33-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '33-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
if: '!(matrix.skip)'
- uses: dsherret/rust-toolchain-file@v1
if: '!(matrix.skip)'
@ -379,7 +379,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '36-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '33-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache
@ -689,7 +689,7 @@ jobs:
!./target/*/gn_root
!./target/*/*.zip
!./target/*/*.tar.gz
key: '36-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '33-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-24.04

263
Cargo.lock generated
View file

@ -844,16 +844,18 @@ checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
name = "cli_tests"
version = "0.0.0"
dependencies = [
"anyhow",
"bytes",
"chrono",
"deno_ast",
"deno_bench_util",
"deno_cache_dir",
"deno_core",
"deno_fetch",
"deno_lockfile",
"deno_semver",
"deno_terminal 0.2.0",
"deno_tls",
"deno_tower_lsp",
"deno_unsync",
"fastwebsockets",
"file_test_runner",
"flaky_test",
@ -870,11 +872,7 @@ dependencies = [
"pretty_assertions",
"regex",
"reqwest",
"rustls",
"rustls-pemfile",
"rustls-tokio-stream",
"serde",
"serde_json",
"sys_traits",
"test_server",
"tokio",
@ -1252,7 +1250,7 @@ dependencies = [
[[package]]
name = "deno"
version = "2.1.6"
version = "2.1.5"
dependencies = [
"anstream",
"async-trait",
@ -1278,7 +1276,6 @@ dependencies = [
"deno_doc",
"deno_error",
"deno_graph",
"deno_lib",
"deno_lint",
"deno_lockfile",
"deno_npm",
@ -1288,7 +1285,6 @@ dependencies = [
"deno_resolver",
"deno_runtime",
"deno_semver",
"deno_snapshots",
"deno_task_shell",
"deno_telemetry",
"deno_terminal 0.2.0",
@ -1300,6 +1296,7 @@ dependencies = [
"dprint-plugin-jupyter",
"dprint-plugin-markdown",
"dprint-plugin-typescript",
"env_logger",
"fancy-regex",
"faster-hex",
"flate2",
@ -1430,7 +1427,7 @@ dependencies = [
[[package]]
name = "deno_bench_util"
version = "0.180.0"
version = "0.179.0"
dependencies = [
"bencher",
"deno_core",
@ -1439,7 +1436,7 @@ dependencies = [
[[package]]
name = "deno_broadcast_channel"
version = "0.180.0"
version = "0.179.0"
dependencies = [
"async-trait",
"deno_core",
@ -1451,7 +1448,7 @@ dependencies = [
[[package]]
name = "deno_cache"
version = "0.118.0"
version = "0.117.0"
dependencies = [
"async-trait",
"deno_core",
@ -1494,7 +1491,7 @@ dependencies = [
[[package]]
name = "deno_canvas"
version = "0.55.0"
version = "0.54.0"
dependencies = [
"deno_core",
"deno_error",
@ -1533,16 +1530,16 @@ dependencies = [
[[package]]
name = "deno_console"
version = "0.186.0"
version = "0.185.0"
dependencies = [
"deno_core",
]
[[package]]
name = "deno_core"
version = "0.331.0"
version = "0.330.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce2d1779358cad2bc56d71176298767be628d707bb75585f6f8a4be2da8ccda1"
checksum = "fd38bbbd68ed873165ccb630322704b44140d3a8c8d50f898beac4d1a8a3358c"
dependencies = [
"anyhow",
"az",
@ -1584,7 +1581,7 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695"
[[package]]
name = "deno_cron"
version = "0.66.0"
version = "0.65.0"
dependencies = [
"anyhow",
"async-trait",
@ -1598,7 +1595,7 @@ dependencies = [
[[package]]
name = "deno_crypto"
version = "0.200.0"
version = "0.199.0"
dependencies = [
"aes",
"aes-gcm",
@ -1666,9 +1663,9 @@ dependencies = [
[[package]]
name = "deno_error"
version = "0.5.5"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c23dbc46d5804814b08b4675838f9884e3a52916987ec5105af36d42f9911b5"
checksum = "c4da6a58de6932a96f84e133c072fd3b525966ee122a71f3efd48bbff2eed5ac"
dependencies = [
"deno_error_macro",
"libc",
@ -1680,9 +1677,9 @@ dependencies = [
[[package]]
name = "deno_error_macro"
version = "0.5.5"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "babccedee31ce7e57c3e6dff2cb3ab8d68c49d0df8222fe0d11d628e65192790"
checksum = "46351dff93aed2039407c91e2ded2a5591e42d2795ab3d111288625bb710d3d2"
dependencies = [
"proc-macro2",
"quote",
@ -1691,7 +1688,7 @@ dependencies = [
[[package]]
name = "deno_fetch"
version = "0.210.0"
version = "0.209.0"
dependencies = [
"base64 0.21.7",
"bytes",
@ -1728,7 +1725,7 @@ dependencies = [
[[package]]
name = "deno_ffi"
version = "0.173.0"
version = "0.172.0"
dependencies = [
"deno_core",
"deno_error",
@ -1749,7 +1746,7 @@ dependencies = [
[[package]]
name = "deno_fs"
version = "0.96.0"
version = "0.95.0"
dependencies = [
"async-trait",
"base32",
@ -1807,7 +1804,7 @@ dependencies = [
[[package]]
name = "deno_http"
version = "0.184.0"
version = "0.183.0"
dependencies = [
"async-compression",
"async-trait",
@ -1847,7 +1844,7 @@ dependencies = [
[[package]]
name = "deno_io"
version = "0.96.0"
version = "0.95.0"
dependencies = [
"async-trait",
"deno_core",
@ -1869,7 +1866,7 @@ dependencies = [
[[package]]
name = "deno_kv"
version = "0.94.0"
version = "0.93.0"
dependencies = [
"anyhow",
"async-trait",
@ -1900,40 +1897,6 @@ dependencies = [
"url",
]
[[package]]
name = "deno_lib"
version = "0.2.0"
dependencies = [
"capacity_builder 0.5.0",
"deno_config",
"deno_error",
"deno_fs",
"deno_media_type",
"deno_node",
"deno_npm",
"deno_path_util",
"deno_resolver",
"deno_runtime",
"deno_semver",
"deno_terminal 0.2.0",
"env_logger",
"faster-hex",
"indexmap 2.3.0",
"libsui",
"log",
"node_resolver",
"parking_lot",
"ring",
"serde",
"serde_json",
"sys_traits",
"test_server",
"thiserror 2.0.3",
"tokio",
"twox-hash",
"url",
]
[[package]]
name = "deno_lint"
version = "0.68.2"
@ -1966,19 +1929,18 @@ dependencies = [
[[package]]
name = "deno_media_type"
version = "0.2.4"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "577fe2bbe04f3e9b1b7c6fac6a75101a9fbd611c50a6b68789e69f4d63dcb2b4"
checksum = "a417f8bd3f1074185c4c8ccb6ea6261ae173781596cc358e68ad07aaac11009d"
dependencies = [
"data-url",
"encoding_rs",
"serde",
"url",
]
[[package]]
name = "deno_napi"
version = "0.117.0"
version = "0.116.0"
dependencies = [
"deno_core",
"deno_error",
@ -2007,7 +1969,7 @@ dependencies = [
[[package]]
name = "deno_net"
version = "0.178.0"
version = "0.177.0"
dependencies = [
"deno_core",
"deno_error",
@ -2026,7 +1988,7 @@ dependencies = [
[[package]]
name = "deno_node"
version = "0.124.0"
version = "0.123.0"
dependencies = [
"aead-gcm-stream",
"aes",
@ -2044,11 +2006,11 @@ dependencies = [
"deno_fetch",
"deno_fs",
"deno_io",
"deno_media_type",
"deno_net",
"deno_package_json",
"deno_path_util",
"deno_permissions",
"deno_process",
"deno_whoami",
"der",
"digest",
@ -2057,7 +2019,7 @@ dependencies = [
"ecdsa",
"ed25519-dalek",
"elliptic-curve",
"errno",
"errno 0.2.8",
"faster-hex",
"h2 0.4.7",
"hkdf",
@ -2086,6 +2048,7 @@ dependencies = [
"p384",
"path-clean",
"pbkdf2",
"pin-project-lite",
"pkcs8",
"rand",
"regex",
@ -2138,7 +2101,7 @@ dependencies = [
[[package]]
name = "deno_npm_cache"
version = "0.5.0"
version = "0.4.0"
dependencies = [
"async-trait",
"base64 0.21.7",
@ -2168,9 +2131,9 @@ dependencies = [
[[package]]
name = "deno_ops"
version = "0.207.0"
version = "0.206.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96f000a21f6969b4c945bc8e9e785aa439f11ca4fd3fbddcd5bebc102167eb37"
checksum = "4c25ffa9d088ea00748dbef870bba110ac22ebf8cf7b2e9eb288409c5d852af3"
dependencies = [
"indexmap 2.3.0",
"proc-macro-rules",
@ -2183,27 +2146,6 @@ dependencies = [
"thiserror 2.0.3",
]
[[package]]
name = "deno_os"
version = "0.3.0"
dependencies = [
"deno_core",
"deno_error",
"deno_path_util",
"deno_permissions",
"deno_telemetry",
"libc",
"netif",
"ntapi",
"once_cell",
"serde",
"signal-hook",
"signal-hook-registry",
"thiserror 2.0.3",
"tokio",
"winapi",
]
[[package]]
name = "deno_package_json"
version = "0.4.0"
@ -2237,7 +2179,7 @@ dependencies = [
[[package]]
name = "deno_permissions"
version = "0.45.0"
version = "0.44.0"
dependencies = [
"capacity_builder 0.5.0",
"deno_core",
@ -2255,36 +2197,9 @@ dependencies = [
"winapi",
]
[[package]]
name = "deno_process"
version = "0.1.0"
dependencies = [
"deno_core",
"deno_error",
"deno_fs",
"deno_io",
"deno_os",
"deno_path_util",
"deno_permissions",
"libc",
"log",
"memchr",
"nix",
"pin-project-lite",
"rand",
"serde",
"simd-json",
"tempfile",
"thiserror 2.0.3",
"tokio",
"which",
"winapi",
"windows-sys 0.59.0",
]
[[package]]
name = "deno_resolver"
version = "0.17.0"
version = "0.16.0"
dependencies = [
"anyhow",
"async-trait",
@ -2310,7 +2225,7 @@ dependencies = [
[[package]]
name = "deno_runtime"
version = "0.194.0"
version = "0.193.0"
dependencies = [
"color-print",
"deno_ast",
@ -2331,11 +2246,8 @@ dependencies = [
"deno_napi",
"deno_net",
"deno_node",
"deno_os",
"deno_path_util",
"deno_permissions",
"deno_process",
"deno_resolver",
"deno_telemetry",
"deno_terminal 0.2.0",
"deno_tls",
@ -2348,6 +2260,7 @@ dependencies = [
"dlopen2",
"encoding_rs",
"fastwebsockets",
"flate2",
"http 1.1.0",
"http-body-util",
"hyper 0.14.28",
@ -2355,6 +2268,7 @@ dependencies = [
"hyper-util",
"libc",
"log",
"netif",
"nix",
"node_resolver",
"notify",
@ -2365,6 +2279,8 @@ dependencies = [
"rustyline",
"same-file",
"serde",
"signal-hook",
"signal-hook-registry",
"sys_traits",
"tempfile",
"test_server",
@ -2395,13 +2311,6 @@ dependencies = [
"url",
]
[[package]]
name = "deno_snapshots"
version = "0.1.0"
dependencies = [
"deno_runtime",
]
[[package]]
name = "deno_task_shell"
version = "0.20.2"
@ -2422,7 +2331,7 @@ dependencies = [
[[package]]
name = "deno_telemetry"
version = "0.8.0"
version = "0.7.0"
dependencies = [
"async-trait",
"deno_core",
@ -2465,7 +2374,7 @@ dependencies = [
[[package]]
name = "deno_tls"
version = "0.173.0"
version = "0.172.0"
dependencies = [
"deno_core",
"deno_error",
@ -2516,7 +2425,7 @@ dependencies = [
[[package]]
name = "deno_url"
version = "0.186.0"
version = "0.185.0"
dependencies = [
"deno_bench_util",
"deno_console",
@ -2529,7 +2438,7 @@ dependencies = [
[[package]]
name = "deno_web"
version = "0.217.0"
version = "0.216.0"
dependencies = [
"async-trait",
"base64-simd 0.8.0",
@ -2552,7 +2461,7 @@ dependencies = [
[[package]]
name = "deno_webgpu"
version = "0.153.0"
version = "0.152.0"
dependencies = [
"deno_core",
"deno_error",
@ -2566,7 +2475,7 @@ dependencies = [
[[package]]
name = "deno_webidl"
version = "0.186.0"
version = "0.185.0"
dependencies = [
"deno_bench_util",
"deno_core",
@ -2574,7 +2483,7 @@ dependencies = [
[[package]]
name = "deno_websocket"
version = "0.191.0"
version = "0.190.0"
dependencies = [
"bytes",
"deno_core",
@ -2597,7 +2506,7 @@ dependencies = [
[[package]]
name = "deno_webstorage"
version = "0.181.0"
version = "0.180.0"
dependencies = [
"deno_core",
"deno_error",
@ -2683,43 +2592,6 @@ dependencies = [
"v8_valueserializer",
]
[[package]]
name = "denort"
version = "2.1.5"
dependencies = [
"async-trait",
"bincode",
"deno_cache_dir",
"deno_config",
"deno_core",
"deno_error",
"deno_lib",
"deno_media_type",
"deno_npm",
"deno_package_json",
"deno_path_util",
"deno_resolver",
"deno_runtime",
"deno_semver",
"deno_snapshots",
"deno_terminal 0.2.0",
"import_map",
"indexmap 2.3.0",
"libsui",
"log",
"node_resolver",
"pretty_assertions",
"serde",
"serde_json",
"sys_traits",
"test_server",
"thiserror 2.0.3",
"tokio",
"tokio-util",
"twox-hash",
"url",
]
[[package]]
name = "der"
version = "0.7.9"
@ -3249,6 +3121,17 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "errno"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno"
version = "0.3.8"
@ -3259,6 +3142,16 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "error-code"
version = "3.2.0"
@ -5185,7 +5078,7 @@ dependencies = [
[[package]]
name = "napi_sym"
version = "0.116.0"
version = "0.115.0"
dependencies = [
"quote",
"serde",
@ -5240,12 +5133,13 @@ dependencies = [
[[package]]
name = "node_resolver"
version = "0.24.0"
version = "0.23.0"
dependencies = [
"anyhow",
"async-trait",
"boxed_error",
"deno_error",
"deno_media_type",
"deno_package_json",
"deno_path_util",
"futures",
@ -5253,7 +5147,6 @@ dependencies = [
"once_cell",
"path-clean",
"regex",
"serde",
"serde_json",
"sys_traits",
"thiserror 2.0.3",
@ -6651,7 +6544,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
dependencies = [
"bitflags 2.6.0",
"errno",
"errno 0.3.8",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
@ -6986,9 +6879,9 @@ dependencies = [
[[package]]
name = "serde_v8"
version = "0.240.0"
version = "0.239.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd0494d74c40ab94f53a19485de359ea6a55f05341b817b93440b673c1ce8ec6"
checksum = "3caa6d882827148e5d9052d9d8d6d1c9d6ad426ed00cab46cafb8c07a0e7126a"
dependencies = [
"deno_error",
"num-bigint",

View file

@ -5,9 +5,6 @@ resolver = "2"
members = [
"bench_util",
"cli",
"cli/lib",
"cli/rt",
"cli/snapshot",
"ext/broadcast_channel",
"ext/cache",
"ext/canvas",
@ -51,19 +48,19 @@ repository = "https://github.com/denoland/deno"
[workspace.dependencies]
deno_ast = { version = "=0.44.0", features = ["transpiling"] }
deno_core = { version = "0.331.0" }
deno_core = { version = "0.330.0" }
deno_bench_util = { version = "0.180.0", path = "./bench_util" }
deno_bench_util = { version = "0.179.0", path = "./bench_util" }
deno_config = { version = "=0.45.0", features = ["workspace", "sync"] }
deno_lockfile = "=0.24.0"
deno_media_type = { version = "0.2.4", features = ["module_specifier"] }
deno_media_type = { version = "0.2.3", features = ["module_specifier"] }
deno_npm = "=0.27.2"
deno_path_util = "=0.3.0"
deno_permissions = { version = "0.45.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.194.0", path = "./runtime" }
deno_permissions = { version = "0.44.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.193.0", path = "./runtime" }
deno_semver = "=0.7.1"
deno_terminal = "0.2.0"
napi_sym = { version = "0.116.0", path = "./ext/napi/sym" }
napi_sym = { version = "0.115.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.9.0"
@ -72,38 +69,34 @@ denokv_remote = "0.9.0"
denokv_sqlite = { default-features = false, version = "0.9.0" }
# exts
deno_broadcast_channel = { version = "0.180.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.118.0", path = "./ext/cache" }
deno_canvas = { version = "0.55.0", path = "./ext/canvas" }
deno_console = { version = "0.186.0", path = "./ext/console" }
deno_cron = { version = "0.66.0", path = "./ext/cron" }
deno_crypto = { version = "0.200.0", path = "./ext/crypto" }
deno_fetch = { version = "0.210.0", path = "./ext/fetch" }
deno_ffi = { version = "0.173.0", path = "./ext/ffi" }
deno_fs = { version = "0.96.0", path = "./ext/fs" }
deno_http = { version = "0.184.0", path = "./ext/http" }
deno_io = { version = "0.96.0", path = "./ext/io" }
deno_kv = { version = "0.94.0", path = "./ext/kv" }
deno_napi = { version = "0.117.0", path = "./ext/napi" }
deno_net = { version = "0.178.0", path = "./ext/net" }
deno_node = { version = "0.124.0", path = "./ext/node" }
deno_os = { version = "0.3.0", path = "./ext/os" }
deno_process = { version = "0.1.0", path = "./ext/process" }
deno_telemetry = { version = "0.8.0", path = "./ext/telemetry" }
deno_tls = { version = "0.173.0", path = "./ext/tls" }
deno_url = { version = "0.186.0", path = "./ext/url" }
deno_web = { version = "0.217.0", path = "./ext/web" }
deno_webgpu = { version = "0.153.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.186.0", path = "./ext/webidl" }
deno_websocket = { version = "0.191.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.181.0", path = "./ext/webstorage" }
deno_broadcast_channel = { version = "0.179.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.117.0", path = "./ext/cache" }
deno_canvas = { version = "0.54.0", path = "./ext/canvas" }
deno_console = { version = "0.185.0", path = "./ext/console" }
deno_cron = { version = "0.65.0", path = "./ext/cron" }
deno_crypto = { version = "0.199.0", path = "./ext/crypto" }
deno_fetch = { version = "0.209.0", path = "./ext/fetch" }
deno_ffi = { version = "0.172.0", path = "./ext/ffi" }
deno_fs = { version = "0.95.0", path = "./ext/fs" }
deno_http = { version = "0.183.0", path = "./ext/http" }
deno_io = { version = "0.95.0", path = "./ext/io" }
deno_kv = { version = "0.93.0", path = "./ext/kv" }
deno_napi = { version = "0.116.0", path = "./ext/napi" }
deno_net = { version = "0.177.0", path = "./ext/net" }
deno_node = { version = "0.123.0", path = "./ext/node" }
deno_telemetry = { version = "0.7.0", path = "./ext/telemetry" }
deno_tls = { version = "0.172.0", path = "./ext/tls" }
deno_url = { version = "0.185.0", path = "./ext/url" }
deno_web = { version = "0.216.0", path = "./ext/web" }
deno_webgpu = { version = "0.152.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.185.0", path = "./ext/webidl" }
deno_websocket = { version = "0.190.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.180.0", path = "./ext/webstorage" }
# workspace libraries
deno_lib = { version = "0.2.0", path = "./cli/lib" }
deno_npm_cache = { version = "0.5.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.17.0", path = "./resolvers/deno" }
deno_snapshots = { version = "0.1.0", path = "./cli/snapshot" }
node_resolver = { version = "0.24.0", path = "./resolvers/node" }
# resolvers
deno_npm_cache = { version = "0.4.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.16.0", path = "./resolvers/deno" }
node_resolver = { version = "0.23.0", path = "./resolvers/node" }
aes = "=0.8.3"
anyhow = "1.0.57"
@ -126,7 +119,7 @@ dashmap = "5.5.3"
data-encoding = "2.3.3"
data-url = "=0.3.1"
deno_cache_dir = "=0.16.0"
deno_error = "=0.5.5"
deno_error = "=0.5.3"
deno_package_json = { version = "0.4.0", default-features = false }
deno_unsync = "0.4.2"
dlopen2 = "0.6.1"
@ -157,7 +150,6 @@ ipnet = "2.3"
jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3"
libc = "0.2.168"
libsui = "0.5.0"
libz-sys = { version = "1.1.20", default-features = false }
log = { version = "0.4.20", features = ["kv"] }
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases

View file

@ -6,32 +6,6 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install
### 2.1.6 / 2025.01.16
- fix(check/lsp): correctly resolve compilerOptions.types (#27686)
- fix(check/lsp): fix bugs with tsc type resolution, allow npm packages to
augment `ImportMeta` (#27690)
- fix(compile): store embedded fs case sensitivity (#27653)
- fix(compile/windows): better handling of deno_dir on different drive letter
than code (#27654)
- fix(ext/console): change Temporal color (#27684)
- fix(ext/node): add `writev` method to `FileHandle` (#27563)
- fix(ext/node): add chown method to FileHandle class (#27638)
- fix(ext/node): apply `@npmcli/agent` workaround to `npm-check-updates`
(#27639)
- fix(ext/node): fix playwright http client (#27662)
- fix(ext/node): show bare-node-builtin hint when using an import map (#27632)
- fix(ext/node): use primordials in `ext/node/polyfills/_fs_common.ts` (#27589)
- fix(lsp): handle pathless untitled URIs (#27637)
- fix(lsp/check): don't resolve unknown media types to a `.js` extension
(#27631)
- fix(node): Prevent node:child_process from always inheriting the parent
environment (#27343) (#27340)
- fix(node/fs): add utimes method to the FileHandle class (#27582)
- fix(outdated): Use `latest` tag even when it's the same as the current version
(#27699)
- fix(outdated): retain strict semver specifier when updating (#27701)
### 2.1.5 / 2025.01.09
- feat(unstable): implement QUIC (#21942)

View file

@ -2,7 +2,7 @@
[package]
name = "deno_bench_util"
version = "0.180.0"
version = "0.179.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno"
version = "2.1.6"
version = "2.1.5"
authors.workspace = true
default-run = "deno"
edition.workspace = true
@ -16,6 +16,11 @@ name = "deno"
path = "main.rs"
doc = false
[[bin]]
name = "denort"
path = "mainrt.rs"
doc = false
[[test]]
name = "integration"
path = "integration_tests_runner.rs"
@ -44,7 +49,7 @@ dhat-heap = ["dhat"]
upgrade = []
# A dev feature to disable creations and loading of snapshots in favor of
# loading JS sources at runtime.
hmr = ["deno_runtime/hmr", "deno_snapshots/disable"]
hmr = ["deno_runtime/hmr"]
# Vendor zlib as zlib-ng
__vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"]
@ -55,12 +60,10 @@ lazy-regex.workspace = true
serde.workspace = true
serde_json.workspace = true
zstd.workspace = true
glibc_version = "0.1.2"
flate2 = { workspace = true, features = ["default"] }
deno_error.workspace = true
[target.'cfg(unix)'.build-dependencies]
glibc_version = "0.1.2"
[target.'cfg(windows)'.build-dependencies]
winapi.workspace = true
winres.workspace = true
@ -73,7 +76,6 @@ deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"]
deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] }
deno_error.workspace = true
deno_graph = { version = "=0.87.0" }
deno_lib.workspace = true
deno_lint = { version = "=0.68.2", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm.workspace = true
@ -83,11 +85,10 @@ deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_snapshots = { workspace = true }
deno_task_shell = "=0.20.2"
deno_telemetry.workspace = true
deno_terminal.workspace = true
libsui.workspace = true
libsui = "0.5.0"
node_resolver.workspace = true
anstream = "0.6.14"
@ -113,6 +114,7 @@ dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.3"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0"
faster-hex.workspace = true
# If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`.
@ -153,6 +155,7 @@ rustyline-derive = "=0.7.0"
serde.workspace = true
serde_repr.workspace = true
sha2.workspace = true
shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] }
sqlformat = "=0.3.2"
strsim = "0.11.1"
@ -181,7 +184,6 @@ winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "
[target.'cfg(unix)'.dependencies]
nix.workspace = true
shell-escape = "=0.1.5"
[dev-dependencies]
deno_bench_util.workspace = true

View file

@ -31,9 +31,6 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_graph::GraphKind;
use deno_lib::args::CaData;
use deno_lib::args::UnstableConfig;
use deno_lib::version::DENO_VERSION_INFO;
use deno_path_util::normalize_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::SysDescriptor;
@ -549,6 +546,15 @@ impl Default for TypeCheckMode {
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum CaData {
/// The string is a file path
File(String),
/// This variant is not exposed as an option in the CLI, it is used internally
/// for standalone binaries.
Bytes(Vec<u8>),
}
// Info needed to run NPM lifecycle scripts
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct LifecycleScriptsConfig {
@ -576,6 +582,19 @@ fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
}
}
#[derive(
Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize,
)]
pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub npm_lazy_caching: bool,
pub features: Vec<String>, // --unstabe-kv --unstable-cron
}
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct InternalFlags {
/// Used when the language server is configured with an
@ -1465,15 +1484,14 @@ fn handle_repl_flags(flags: &mut Flags, repl_flags: ReplFlags) {
}
pub fn clap_root() -> Command {
debug_assert_eq!(DENO_VERSION_INFO.typescript, deno_snapshots::TS_VERSION);
let long_version = format!(
"{} ({}, {}, {})\nv8 {}\ntypescript {}",
DENO_VERSION_INFO.deno,
DENO_VERSION_INFO.release_channel.name(),
crate::version::DENO_VERSION_INFO.deno,
crate::version::DENO_VERSION_INFO.release_channel.name(),
env!("PROFILE"),
env!("TARGET"),
deno_core::v8::VERSION_STRING,
DENO_VERSION_INFO.typescript
crate::version::DENO_VERSION_INFO.typescript
);
run_args(Command::new("deno"), true)
@ -1489,7 +1507,7 @@ pub fn clap_root() -> Command {
)
.color(ColorChoice::Auto)
.term_width(800)
.version(DENO_VERSION_INFO.deno)
.version(crate::version::DENO_VERSION_INFO.deno)
.long_version(long_version)
.disable_version_flag(true)
.disable_help_flag(true)

View file

@ -61,13 +61,11 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum AtomicWriteFileWithRetriesError {
#[class(inherit)]
#[error(transparent)]
Changed(JsErrorBox),
#[class(inherit)]
#[error("Failed writing lockfile")]
Io(#[source] std::io::Error),
#[error("Failed writing lockfile")]
#[class(inherit)]
struct AtomicWriteFileWithRetriesError {
#[source]
source: std::io::Error,
}
impl CliLockfile {
@ -89,16 +87,12 @@ impl CliLockfile {
self.lockfile.lock().overwrite
}
pub fn write_if_changed(
&self,
) -> Result<(), AtomicWriteFileWithRetriesError> {
pub fn write_if_changed(&self) -> Result<(), JsErrorBox> {
if self.skip_write {
return Ok(());
}
self
.error_if_changed()
.map_err(AtomicWriteFileWithRetriesError::Changed)?;
self.error_if_changed()?;
let mut lockfile = self.lockfile.lock();
let Some(bytes) = lockfile.resolve_write_bytes() else {
return Ok(()); // nothing to do
@ -111,7 +105,9 @@ impl CliLockfile {
&bytes,
cache::CACHE_PERM,
)
.map_err(AtomicWriteFileWithRetriesError::Io)?;
.map_err(|source| {
JsErrorBox::from_err(AtomicWriteFileWithRetriesError { source })
})?;
lockfile.has_content_changed = false;
Ok(())
}

View file

@ -10,6 +10,10 @@ mod package_json;
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::io::BufReader;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::net::SocketAddr;
use std::num::NonZeroUsize;
use std::path::Path;
@ -54,13 +58,6 @@ use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::GraphKind;
pub use deno_json::check_warn_tsconfig;
use deno_lib::args::has_flag_env_var;
use deno_lib::args::npm_pkg_req_ref_to_binary_command;
use deno_lib::args::CaData;
use deno_lib::args::NpmProcessStateKind;
use deno_lib::args::NPM_PROCESS_STATE;
use deno_lib::version::DENO_VERSION_INFO;
use deno_lib::worker::StorageKeyResolver;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::NpmRc;
use deno_npm::npm_rc::ResolvedNpmRc;
@ -68,20 +65,27 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
use deno_path_util::normalize_path;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
use deno_runtime::deno_tls::rustls;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::rustls_pemfile;
use deno_runtime::deno_tls::webpki_roots;
use deno_runtime::inspector_server::InspectorServer;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::StackString;
use deno_telemetry::OtelConfig;
use deno_telemetry::OtelRuntimeConfig;
use deno_terminal::colors;
use dotenvy::from_filename;
pub use flags::*;
use import_map::resolve_import_map_value_from_specifier;
pub use lockfile::AtomicWriteFileWithRetriesError;
pub use lockfile::CliLockfile;
pub use lockfile::CliLockfileReadFromPathOptions;
use once_cell::sync::Lazy;
pub use package_json::NpmInstallDepsProvider;
pub use package_json::PackageJsonDepValueParseWithLocationError;
use serde::Deserialize;
use serde::Serialize;
use sys_traits::EnvHomeDir;
use thiserror::Error;
@ -89,6 +93,7 @@ use crate::cache::DenoDirProvider;
use crate::file_fetcher::CliFileFetcher;
use crate::sys::CliSys;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::version;
pub fn npm_registry_url() -> &'static Url {
static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
@ -600,6 +605,147 @@ pub fn create_default_npmrc() -> Arc<ResolvedNpmRc> {
})
}
/// Errors that can occur while building the process root certificate store
/// in `get_root_cert_store`.
#[derive(Error, Debug, Clone, deno_error::JsError)]
#[class(generic)]
pub enum RootCertStoreLoadError {
// a store name other than "system" or "mozilla" was requested
#[error(
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
)]
UnknownStore(String),
// the supplied PEM certificate data could not be parsed
#[error("Unable to add pem file to certificate store: {0}")]
FailedAddPemFile(String),
// the CA file path could not be opened
#[error("Failed opening CA file: {0}")]
CaFileOpenError(String),
}
/// Create and populate a root cert store based on the passed options and
/// environment.
///
/// Store selection comes from `maybe_ca_stores`, falling back to the
/// comma-separated `DENO_TLS_CA_STORE` env var, defaulting to "mozilla".
/// Extra PEM certificates come from `maybe_ca_data`, falling back to the
/// `DENO_CERT` env var (treated as a file path).
///
/// # Errors
/// Returns [`RootCertStoreLoadError`] for an unknown store name, an
/// unopenable CA file, or unparsable PEM data.
pub fn get_root_cert_store(
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
) -> Result<RootCertStore, RootCertStoreLoadError> {
let mut root_cert_store = RootCertStore::empty();
// explicit stores win; otherwise parse DENO_TLS_CA_STORE; otherwise "mozilla"
let ca_stores: Vec<String> = maybe_ca_stores
.or_else(|| {
let env_ca_store = env::var("DENO_TLS_CA_STORE").ok()?;
Some(
env_ca_store
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect(),
)
})
.unwrap_or_else(|| vec!["mozilla".to_string()]);
for store in ca_stores.iter() {
match store.as_str() {
"mozilla" => {
// bundled Mozilla (webpki) roots
root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
}
"system" => {
// NOTE(review): panics if the platform store cannot be read at all;
// individually unparsable certs are only logged (hex-dumped) and skipped.
let roots = load_native_certs().expect("could not load platform certs");
for root in roots {
if let Err(err) = root_cert_store
.add(rustls::pki_types::CertificateDer::from(root.0.clone()))
{
log::error!(
"{}",
colors::yellow(&format!(
"Unable to add system certificate to certificate store: {:?}",
err
))
);
let hex_encoded_root = faster_hex::hex_string(&root.0);
log::error!("{}", colors::gray(&hex_encoded_root));
}
}
}
_ => {
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
}
}
}
// extra CA certificates: explicit CaData or the DENO_CERT file path
let ca_data =
maybe_ca_data.or_else(|| env::var("DENO_CERT").ok().map(CaData::File));
if let Some(ca_data) = ca_data {
let result = match ca_data {
CaData::File(ca_file) => {
// relative CA file paths are resolved against maybe_root_path
let ca_file = if let Some(root) = &maybe_root_path {
root.join(&ca_file)
} else {
PathBuf::from(ca_file)
};
let certfile = std::fs::File::open(ca_file).map_err(|err| {
RootCertStoreLoadError::CaFileOpenError(err.to_string())
})?;
let mut reader = BufReader::new(certfile);
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
CaData::Bytes(data) => {
let mut reader = BufReader::new(Cursor::new(data));
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
};
match result {
Ok(certs) => {
root_cert_store.add_parsable_certificates(certs);
}
Err(e) => {
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
}
}
}
Ok(root_cert_store)
}
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
// which flavor of npm resolution the parent process was using
pub kind: NpmProcessStateKind,
// path to the local node_modules directory, when one is in use
pub local_node_modules_path: Option<String>,
}
/// The flavor of npm resolution carried in [`NpmProcessState`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
// a serialized snapshot of the resolved npm packages
Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
// "bring your own node_modules" mode
Byonm,
}
// Lazily reads the npm process state that a parent deno process may have
// handed to this subprocess through an inherited file descriptor/handle
// (the fd number is passed in the NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME env
// var). Resolves to None when the env var is absent or the state cannot be
// read/deserialized (errors are logged, not fatal).
static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| {
use deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
// remove it so our own child processes do not inherit it again
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
let fd = fd.parse::<usize>().ok()?;
let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
// SAFETY(review): assumes the env var names a valid, open handle set up
// by the parent process and now owned by this File — TODO confirm no
// other code closes the same handle.
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// seek to beginning. after the file is written the position will be inherited by this subprocess,
// and also this file might have been read before
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
// payload is JSON produced by the parent process
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state)
});
/// Overrides for the options below that when set will
/// use these values over the values derived from the
/// CLI flags or config file.
@ -1081,16 +1227,6 @@ impl CliOptions {
}
}
pub fn resolve_storage_key_resolver(&self) -> StorageKeyResolver {
if let Some(location) = &self.flags.location {
StorageKeyResolver::from_flag(location)
} else if let Some(deno_json) = self.start_dir.maybe_deno_json() {
StorageKeyResolver::from_config_file_url(&deno_json.specifier)
} else {
StorageKeyResolver::new_use_main_module()
}
}
// If the main module should be treated as being in an npm package.
// This is triggered via a secret environment variable which is used
// for functionality like child_process.fork. Users should NOT depend
@ -1145,7 +1281,7 @@ impl CliOptions {
Ok(Some(InspectorServer::new(
host,
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
)?))
}
@ -1839,11 +1975,72 @@ fn resolve_import_map_specifier(
}
}
/// Resolver for the process storage key.
///
/// The nested option encodes three states:
/// - `None`: fall back to the main module specifier at resolve time
/// - `Some(None)`: explicitly no storage key
/// - `Some(Some(key))`: a fixed storage key
pub struct StorageKeyResolver(Option<Option<String>>);
impl StorageKeyResolver {
/// Derives the resolver from the CLI options: the `--location` origin when
/// one is set (only if it is a tuple origin), otherwise the config file
/// specifier, otherwise defer to the main module.
pub fn from_options(options: &CliOptions) -> Self {
Self(if let Some(location) = &options.flags.location {
// if a location is set, then the ascii serialization of the location is
// used, unless the origin is opaque, and then no storage origin is set, as
// we can't expect the origin to be reproducible
let storage_origin = location.origin();
if storage_origin.is_tuple() {
Some(Some(storage_origin.ascii_serialization()))
} else {
Some(None)
}
} else {
// otherwise we will use the path to the config file or None to
// fall back to using the main module's path
options
.start_dir
.maybe_deno_json()
.map(|config_file| Some(config_file.specifier.to_string()))
})
}
/// Creates a storage key resolver that will always resolve to being empty.
pub fn empty() -> Self {
Self(Some(None))
}
/// Resolves the storage key to use based on the current flags, config, or main module.
pub fn resolve_storage_key(
&self,
main_module: &ModuleSpecifier,
) -> Option<String> {
// use the stored value or fall back to using the path of the main module.
if let Some(maybe_value) = &self.0 {
maybe_value.clone()
} else {
Some(main_module.to_string())
}
}
}
/// Resolves the no_prompt value based on the cli flags and environment:
/// prompts are disabled when `--no-prompt` was passed or when the
/// `DENO_NO_PROMPT` environment variable is set to "1".
pub fn resolve_no_prompt(flags: &PermissionFlags) -> bool {
    if flags.no_prompt {
        true
    } else {
        has_flag_env_var("DENO_NO_PROMPT")
    }
}
/// Returns whether permission tracing is enabled via the
/// `DENO_TRACE_PERMISSIONS=1` environment variable.
pub fn has_trace_permissions_enabled() -> bool {
    const TRACE_FLAG: &str = "DENO_TRACE_PERMISSIONS";
    has_flag_env_var(TRACE_FLAG)
}
/// Returns true when the environment variable `name` is set to exactly "1".
/// Any other value (or an unset/invalid variable) counts as false.
pub fn has_flag_env_var(name: &str) -> bool {
    matches!(env::var(name).as_deref(), Ok("1"))
}
/// Resolves the binary command name for an npm package req reference:
/// the reference's sub path when one is given, otherwise the package name.
pub fn npm_pkg_req_ref_to_binary_command(
  req_ref: &NpmPackageReqReference,
) -> String {
  match req_ref.sub_path() {
    Some(sub_path) => sub_path.to_string(),
    None => req_ref.req().name.to_string(),
  }
}
pub fn config_to_deno_graph_workspace_member(
config: &ConfigFile,
) -> Result<deno_graph::WorkspaceMember, AnyError> {
@ -1904,6 +2101,13 @@ pub enum NpmCachingStrategy {
Manual,
}
/// Builds the OpenTelemetry runtime metadata (runtime name and version)
/// reported by Deno's telemetry integration.
pub fn otel_runtime_config() -> OtelRuntimeConfig {
OtelRuntimeConfig {
runtime_name: Cow::Borrowed("deno"),
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
}
}
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
@ -1956,6 +2160,27 @@ mod test {
assert_eq!(actual, None);
}
#[test]
fn storage_key_resolver_test() {
// inner None -> fall back to the main module specifier
let resolver = StorageKeyResolver(None);
let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap();
assert_eq!(
resolver.resolve_storage_key(&specifier),
Some(specifier.to_string())
);
// Some(None) -> explicitly no storage key
let resolver = StorageKeyResolver(Some(None));
assert_eq!(resolver.resolve_storage_key(&specifier), None);
// Some(Some(v)) -> fixed storage key, main module ignored
let resolver = StorageKeyResolver(Some(Some("value".to_string())));
assert_eq!(
resolver.resolve_storage_key(&specifier),
Some("value".to_string())
);
// test empty
let resolver = StorageKeyResolver::empty();
assert_eq!(resolver.resolve_storage_key(&specifier), None);
}
#[test]
fn jsr_urls() {
let reg_url = jsr_url();

View file

@ -5,6 +5,7 @@ use std::path::PathBuf;
use deno_core::snapshot::*;
use deno_runtime::*;
mod shared;
mod ts {
use std::collections::HashMap;
@ -309,6 +310,57 @@ mod ts {
println!("cargo:rerun-if-changed={}", path.display());
}
}
/// Extracts the bundled TypeScript compiler's version string by scanning
/// `tsc/00_typescript.js` for its ` version = "..."` assignment.
///
/// # Panics
/// Panics if the file cannot be read or no version assignment is found.
pub(crate) fn version() -> String {
  let marker = " version = \"";
  let file_text = std::fs::read_to_string("tsc/00_typescript.js").unwrap();
  file_text
    .lines()
    .find_map(|line| {
      // take the text after the marker, up to the closing quote
      let (_, rest) = line.split_once(marker)?;
      let end = rest.find('"').unwrap();
      Some(rest[..end].to_string())
    })
    .unwrap_or_else(|| panic!("Could not find ts version."))
}
}
// Builds the CLI runtime V8 snapshot at `snapshot_path`. Compiled out when
// the "hmr" feature is enabled.
#[cfg(not(feature = "hmr"))]
fn create_cli_snapshot(snapshot_path: PathBuf) {
use deno_runtime::ops::bootstrap::SnapshotOptions;
// metadata embedded into the snapshot (TS compiler, V8, and target triple)
let snapshot_options = SnapshotOptions {
ts_version: ts::version(),
v8_version: deno_core::v8::VERSION_STRING,
target: std::env::var("TARGET").unwrap(),
};
deno_runtime::snapshot::create_runtime_snapshot(
snapshot_path,
snapshot_options,
vec![],
);
}
/// Returns the current git commit hash (the first 40 bytes of
/// `git rev-list -1 HEAD`), or "UNKNOWN" when it cannot be determined —
/// e.g. there is no git binary, this is not a git checkout (such as a
/// `cargo install deno` build), or git produced unexpected output.
fn git_commit_hash() -> String {
    std::process::Command::new("git")
        .args(["rev-list", "-1", "HEAD"])
        .output()
        .ok()
        // a non-zero exit (e.g. not inside a git repository) means "unknown"
        .filter(|output| output.status.success())
        // take the first 40 bytes of stdout; unlike the previous
        // `&output.stdout[..40]` slice + `from_utf8(..).unwrap()`, short or
        // non-UTF-8 output degrades to "UNKNOWN" instead of panicking the
        // build script
        .and_then(|output| {
            let hash = output.stdout.get(..40)?;
            std::str::from_utf8(hash).ok().map(str::to_string)
        })
        .unwrap_or_else(|| "UNKNOWN".to_string())
}
fn main() {
@ -318,7 +370,7 @@ fn main() {
}
deno_napi::print_linker_flags("deno");
deno_webgpu::print_linker_flags("deno");
deno_napi::print_linker_flags("denort");
// Host snapshots won't work when cross compiling.
let target = env::var("TARGET").unwrap();
@ -337,15 +389,51 @@ fn main() {
}
println!("cargo:rerun-if-env-changed=DENO_CANARY");
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash());
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
println!(
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
&git_commit_hash()[..7]
);
let ts_version = ts::version();
debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
println!("cargo:rustc-env=TS_VERSION={}", ts_version);
println!("cargo:rerun-if-env-changed=TS_VERSION");
println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
if cfg!(windows) {
// these dls load slowly, so delay loading them
let dlls = [
// webgpu
"d3dcompiler_47",
"OPENGL32",
// network related functions
"iphlpapi",
];
for dll in dlls {
println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
}
// enable delay loading
println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
}
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());
let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin");
ts::create_compiler_snapshot(compiler_snapshot_path, &c);
#[cfg(not(feature = "hmr"))]
{
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
create_cli_snapshot(cli_snapshot_path);
}
#[cfg(target_os = "windows")]
{
let mut res = winres::WindowsResource::new();

View file

@ -9,13 +9,14 @@ use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::unsync::spawn_blocking;
use deno_lib::util::hash::FastInsecureHasher;
use deno_runtime::deno_webstorage::rusqlite;
use deno_runtime::deno_webstorage::rusqlite::Connection;
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
use deno_runtime::deno_webstorage::rusqlite::Params;
use once_cell::sync::OnceCell;
use super::FastInsecureHasher;
// Newtype over a 64-bit hash value stored in the cache database.
// NOTE(review): presumably produced by FastInsecureHasher — confirm at the
// construction sites, which are outside this view.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CacheDBHash(u64);

11
cli/cache/caches.rs vendored
View file

@ -3,18 +3,17 @@
use std::path::PathBuf;
use std::sync::Arc;
use deno_lib::version::DENO_VERSION_INFO;
use once_cell::sync::OnceCell;
use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::check::TYPE_CHECK_CACHE_DB;
use super::code_cache::CODE_CACHE_DB;
use super::deno_dir::DenoDirProvider;
use super::fast_check::FAST_CHECK_CACHE_DB;
use super::incremental::INCREMENTAL_CACHE_DB;
use super::module_info::MODULE_INFO_CACHE_DB;
use super::node::NODE_ANALYSIS_CACHE_DB;
use crate::cache::DenoDirProvider;
pub struct Caches {
dir_provider: Arc<DenoDirProvider>,
@ -49,9 +48,13 @@ impl Caches {
cell
.get_or_init(|| {
if let Some(path) = path {
CacheDB::from_path(config, path, DENO_VERSION_INFO.deno)
CacheDB::from_path(
config,
path,
crate::version::DENO_VERSION_INFO.deno,
)
} else {
CacheDB::in_memory(config, DENO_VERSION_INFO.deno)
CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno)
}
})
.clone()

View file

@ -1,5 +1,7 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::code_cache;
@ -9,6 +11,7 @@ use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;
use crate::worker::CliCodeCache;
pub static CODE_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
table_initializer: concat!(
@ -82,6 +85,12 @@ impl CodeCache {
}
}
impl CliCodeCache for CodeCache {
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
self
}
}
impl code_cache::CodeCache for CodeCache {
fn get_sync(
&self,

View file

@ -4,6 +4,7 @@ use std::env;
use std::path::PathBuf;
use deno_cache_dir::DenoDirResolutionError;
use once_cell::sync::OnceCell;
use super::DiskCache;
use crate::sys::CliSys;
@ -13,7 +14,7 @@ use crate::sys::CliSys;
pub struct DenoDirProvider {
sys: CliSys,
maybe_custom_root: Option<PathBuf>,
deno_dir: std::sync::OnceLock<Result<DenoDir, DenoDirResolutionError>>,
deno_dir: OnceCell<Result<DenoDir, DenoDirResolutionError>>,
}
impl DenoDirProvider {

View file

@ -9,11 +9,11 @@ use std::path::Prefix;
use std::str;
use deno_cache_dir::url_to_filename;
use deno_cache_dir::CACHE_PERM;
use deno_core::url::Host;
use deno_core::url::Url;
use deno_path_util::fs::atomic_write_file_with_retries;
use super::CACHE_PERM;
use crate::sys::CliSys;
#[derive(Debug, Clone)]
@ -130,9 +130,6 @@ impl DiskCache {
#[cfg(test)]
mod tests {
// ok, testing
#[allow(clippy::disallowed_types)]
use sys_traits::impls::RealSys;
use test_util::TempDir;
use super::*;
@ -141,7 +138,7 @@ mod tests {
fn test_set_get_cache_file() {
let temp_dir = TempDir::new();
let sub_dir = temp_dir.path().join("sub_dir");
let cache = DiskCache::new(RealSys, &sub_dir.to_path_buf());
let cache = DiskCache::new(CliSys::default(), &sub_dir.to_path_buf());
let path = PathBuf::from("foo/bar.txt");
cache.set(&path, b"hello").unwrap();
assert_eq!(cache.get(&path).unwrap(), b"hello");
@ -155,7 +152,7 @@ mod tests {
PathBuf::from("/deno_dir/")
};
let cache = DiskCache::new(RealSys, &cache_location);
let cache = DiskCache::new(CliSys::default(), &cache_location);
let mut test_cases = vec![
(
@ -211,7 +208,7 @@ mod tests {
} else {
"/foo"
};
let cache = DiskCache::new(RealSys, &PathBuf::from(p));
let cache = DiskCache::new(CliSys::default(), &PathBuf::from(p));
let mut test_cases = vec![
(
@ -259,7 +256,7 @@ mod tests {
PathBuf::from("/deno_dir/")
};
let cache = DiskCache::new(RealSys, &cache_location);
let cache = DiskCache::new(CliSys::default(), &cache_location);
let mut test_cases = vec!["unknown://localhost/test.ts"];

5
cli/cache/emit.rs vendored
View file

@ -6,7 +6,6 @@ use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::unsync::sync::AtomicFlag;
use deno_lib::version::DENO_VERSION_INFO;
use super::DiskCache;
@ -24,7 +23,7 @@ impl EmitCache {
disk_cache,
emit_failed_flag: Default::default(),
file_serializer: EmitFileSerializer {
cli_version: DENO_VERSION_INFO.deno,
cli_version: crate::version::DENO_VERSION_INFO.deno,
},
}
}
@ -148,7 +147,7 @@ impl EmitFileSerializer {
// it's ok to use an insecure hash here because
// if someone can change the emit source then they
// can also change the version hash
deno_lib::util::hash::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
.write(bytes)
// emit should not be re-used between cli versions
.write_str(self.cli_version)

7
cli/cache/mod.rs vendored
View file

@ -15,7 +15,6 @@ use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse;
use deno_graph::source::Loader;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::deno_permissions::PermissionsContainer;
use node_resolver::InNpmPackageChecker;
@ -30,6 +29,7 @@ mod cache_db;
mod caches;
mod check;
mod code_cache;
mod common;
mod deno_dir;
mod disk_cache;
mod emit;
@ -43,6 +43,7 @@ pub use cache_db::CacheDBHash;
pub use caches::Caches;
pub use check::TypeCheckCache;
pub use code_cache::CodeCache;
pub use common::FastInsecureHasher;
/// Permissions used to save a file in the disk caches.
pub use deno_cache_dir::CACHE_PERM;
pub use deno_dir::DenoDir;
@ -75,7 +76,7 @@ pub struct FetchCacher {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
file_fetcher: Arc<CliFileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer,
sys: CliSys,
@ -87,7 +88,7 @@ impl FetchCacher {
pub fn new(
file_fetcher: Arc<CliFileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
module_info_cache: Arc<ModuleInfoCache>,
sys: CliSys,
options: FetchCacherOptions,

View file

@ -20,15 +20,15 @@ use deno_error::JsErrorBox;
use deno_graph::MediaType;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_lib::util::hash::FastInsecureHasher;
use crate::cache::EmitCache;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::resolver::CliCjsTracker;
use crate::resolver::CjsTracker;
#[derive(Debug)]
pub struct Emitter {
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
transpile_and_emit_options:
@ -39,7 +39,7 @@ pub struct Emitter {
impl Emitter {
pub fn new(
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions,
@ -112,9 +112,9 @@ impl Emitter {
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
module_kind: ModuleKind,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>,
) -> Result<String, EmitParsedSourceHelperError> {
) -> Result<String, AnyError> {
// Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, module_kind, source) {

View file

@ -11,20 +11,11 @@ use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::FeatureChecker;
use deno_error::JsErrorBox;
use deno_lib::args::get_root_cert_store;
use deno_lib::args::CaData;
use deno_lib::loader::NpmModuleLoader;
use deno_lib::npm::create_npm_process_state_provider;
use deno_lib::npm::NpmRegistryReadPermissionChecker;
use deno_lib::npm::NpmRegistryReadPermissionCheckerMode;
use deno_lib::worker::LibMainWorkerFactory;
use deno_lib::worker::LibMainWorkerOptions;
use deno_npm_cache::NpmCacheSetting;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::DenoResolverOptions;
@ -41,13 +32,17 @@ use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use log::warn;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::InNpmPackageChecker;
use once_cell::sync::OnceCell;
use crate::args::check_warn_tsconfig;
use crate::args::get_root_cert_store;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::args::TsConfigType;
use crate::cache::Caches;
use crate::cache::CodeCache;
@ -73,6 +68,7 @@ use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::create_cli_npm_resolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::NpmResolutionInstaller;
use crate::npm::CliByonmNpmResolverCreateOptions;
@ -84,14 +80,17 @@ use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CliNpmTarballCache;
use crate::npm::NpmRegistryReadPermissionChecker;
use crate::npm::NpmRegistryReadPermissionCheckerMode;
use crate::npm::NpmResolutionInitializer;
use crate::resolver::CliCjsTracker;
use crate::resolver::CjsTracker;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliNpmGraphResolver;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::FoundPackageJsonDepFlag;
use crate::resolver::NpmModuleLoader;
use crate::standalone::binary::DenoCompileBinaryWriter;
use crate::sys::CliSys;
use crate::tools::check::TypeChecker;
@ -191,7 +190,7 @@ impl<T> Deferred<T> {
struct CliFactoryServices {
blob_store: Deferred<Arc<BlobStore>>,
caches: Deferred<Arc<Caches>>,
cjs_tracker: Deferred<Arc<CliCjsTracker>>,
cjs_tracker: Deferred<Arc<CjsTracker>>,
cli_options: Deferred<Arc<CliOptions>>,
code_cache: Deferred<Arc<CodeCache>>,
deno_resolver: Deferred<Arc<CliDenoResolver>>,
@ -204,7 +203,7 @@ struct CliFactoryServices {
global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>,
http_client_provider: Deferred<Arc<HttpClientProvider>>,
in_npm_pkg_checker: Deferred<DenoInNpmPackageChecker>,
in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
@ -224,7 +223,7 @@ struct CliFactoryServices {
npm_resolution: Arc<NpmResolutionCell>,
npm_resolution_initializer: Deferred<Arc<NpmResolutionInitializer>>,
npm_resolution_installer: Deferred<Arc<NpmResolutionInstaller>>,
npm_resolver: Deferred<CliNpmResolver>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
npm_tarball_cache: Deferred<Arc<CliNpmTarballCache>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
permission_desc_parser:
@ -400,7 +399,7 @@ impl CliFactory {
pub fn in_npm_pkg_checker(
&self,
) -> Result<&DenoInNpmPackageChecker, AnyError> {
) -> Result<&Arc<dyn InNpmPackageChecker>, AnyError> {
self.services.in_npm_pkg_checker.get_or_try_init(|| {
let cli_options = self.cli_options()?;
let options = if cli_options.use_byonm() {
@ -415,7 +414,7 @@ impl CliFactory {
},
)
};
Ok(DenoInNpmPackageChecker::new(options))
Ok(deno_resolver::npm::create_in_npm_pkg_checker(options))
})
}
@ -560,14 +559,16 @@ impl CliFactory {
})
}
pub async fn npm_resolver(&self) -> Result<&CliNpmResolver, AnyError> {
pub async fn npm_resolver(
&self,
) -> Result<&Arc<dyn CliNpmResolver>, AnyError> {
self
.services
.npm_resolver
.get_or_try_init_async(
async {
let cli_options = self.cli_options()?;
Ok(CliNpmResolver::new(if cli_options.use_byonm() {
Ok(create_cli_npm_resolver(if cli_options.use_byonm() {
CliNpmResolverCreateOptions::Byonm(
CliByonmNpmResolverCreateOptions {
sys: self.sys(),
@ -795,7 +796,11 @@ impl CliFactory {
Ok(Arc::new(CliNodeResolver::new(
self.in_npm_pkg_checker()?.clone(),
RealIsBuiltInNodeModuleChecker,
self.npm_resolver().await?.clone(),
self
.npm_resolver()
.await?
.clone()
.into_npm_pkg_folder_resolver(),
self.pkg_json_resolver().clone(),
self.sys(),
node_resolver::ConditionsFromResolutionMode::default(),
@ -813,14 +818,26 @@ impl CliFactory {
.services
.node_code_translator
.get_or_try_init_async(async {
let caches = self.caches()?;
let node_analysis_cache =
NodeAnalysisCache::new(caches.node_analysis_db());
let node_resolver = self.node_resolver().await?.clone();
let cjs_code_analyzer = self.create_cjs_code_analyzer()?;
let cjs_esm_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache,
self.cjs_tracker()?.clone(),
self.fs().clone(),
Some(self.parsed_source_cache().clone()),
);
Ok(Arc::new(NodeCodeTranslator::new(
cjs_code_analyzer,
cjs_esm_analyzer,
self.in_npm_pkg_checker()?.clone(),
node_resolver,
self.npm_resolver().await?.clone(),
self
.npm_resolver()
.await?
.clone()
.into_npm_pkg_folder_resolver(),
self.pkg_json_resolver().clone(),
self.sys(),
)))
@ -828,17 +845,6 @@ impl CliFactory {
.await
}
fn create_cjs_code_analyzer(&self) -> Result<CliCjsCodeAnalyzer, AnyError> {
let caches = self.caches()?;
let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db());
Ok(CliCjsCodeAnalyzer::new(
node_analysis_cache,
self.cjs_tracker()?.clone(),
self.fs().clone(),
Some(self.parsed_source_cache().clone()),
))
}
pub async fn npm_req_resolver(
&self,
) -> Result<&Arc<CliNpmReqResolver>, AnyError> {
@ -851,7 +857,7 @@ impl CliFactory {
sys: self.sys(),
in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
node_resolver: self.node_resolver().await?.clone(),
npm_resolver: npm_resolver.clone(),
npm_resolver: npm_resolver.clone().into_byonm_or_managed(),
})))
})
.await
@ -982,10 +988,10 @@ impl CliFactory {
.await
}
pub fn cjs_tracker(&self) -> Result<&Arc<CliCjsTracker>, AnyError> {
pub fn cjs_tracker(&self) -> Result<&Arc<CjsTracker>, AnyError> {
self.services.cjs_tracker.get_or_try_init(|| {
let options = self.cli_options()?;
Ok(Arc::new(CliCjsTracker::new(
Ok(Arc::new(CjsTracker::new(
self.in_npm_pkg_checker()?.clone(),
self.pkg_json_resolver().clone(),
if options.is_node_main() || options.unstable_detect_cjs() {
@ -1028,13 +1034,13 @@ impl CliFactory {
) -> Result<DenoCompileBinaryWriter, AnyError> {
let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new(
self.create_cjs_code_analyzer()?,
self.cjs_tracker()?,
self.cli_options()?,
self.deno_dir()?,
self.emitter()?,
self.file_fetcher()?,
self.http_client_provider(),
self.npm_resolver().await?,
self.npm_resolver().await?.as_ref(),
self.workspace_resolver().await?.as_ref(),
cli_options.npm_system_info(),
))
@ -1088,34 +1094,7 @@ impl CliFactory {
Arc::new(NpmRegistryReadPermissionChecker::new(self.sys(), mode))
};
let module_loader_factory = CliModuleLoaderFactory::new(
cli_options,
cjs_tracker,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
},
self.emitter()?.clone(),
in_npm_pkg_checker.clone(),
self.main_module_graph_container().await?.clone(),
self.module_load_preparer().await?.clone(),
node_code_translator.clone(),
node_resolver.clone(),
NpmModuleLoader::new(
self.cjs_tracker()?.clone(),
node_code_translator.clone(),
self.sys(),
),
npm_registry_permission_checker,
npm_req_resolver.clone(),
cli_npm_resolver.clone(),
self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
self.sys(),
);
let lib_main_worker_factory = LibMainWorkerFactory::new(
Ok(CliMainWorkerFactory::new(
self.blob_store().clone(),
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
@ -1124,67 +1103,50 @@ impl CliFactory {
},
self.feature_checker()?.clone(),
fs.clone(),
self.maybe_inspector_server()?.clone(),
Box::new(module_loader_factory),
node_resolver.clone(),
create_npm_process_state_provider(npm_resolver),
pkg_json_resolver,
self.root_cert_store_provider().clone(),
cli_options.resolve_storage_key_resolver(),
self.sys(),
self.create_lib_main_worker_options()?,
);
Ok(CliMainWorkerFactory::new(
lib_main_worker_factory,
maybe_file_watcher_communicator,
self.maybe_inspector_server()?.clone(),
cli_options.maybe_lockfile().cloned(),
Box::new(CliModuleLoaderFactory::new(
cli_options,
cjs_tracker,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
},
self.emitter()?.clone(),
in_npm_pkg_checker.clone(),
self.main_module_graph_container().await?.clone(),
self.module_load_preparer().await?.clone(),
node_code_translator.clone(),
node_resolver.clone(),
NpmModuleLoader::new(
self.cjs_tracker()?.clone(),
fs.clone(),
node_code_translator.clone(),
),
npm_registry_permission_checker,
npm_req_resolver.clone(),
cli_npm_resolver.clone(),
self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
self.sys(),
)),
node_resolver.clone(),
self.npm_installer_if_managed()?.cloned(),
npm_resolver.clone(),
self.sys(),
self.create_cli_main_worker_options()?,
pkg_json_resolver,
self.root_cert_store_provider().clone(),
self.root_permissions_container()?.clone(),
StorageKeyResolver::from_options(cli_options),
self.sys(),
cli_options.sub_command().clone(),
self.create_cli_main_worker_options()?,
self.cli_options()?.otel_config(),
self.cli_options()?.default_npm_caching_strategy(),
))
}
fn create_lib_main_worker_options(
&self,
) -> Result<LibMainWorkerOptions, AnyError> {
let cli_options = self.cli_options()?;
Ok(LibMainWorkerOptions {
argv: cli_options.argv().clone(),
// This optimization is only available for "run" subcommand
// because we need to register new ops for testing and jupyter
// integration.
skip_op_registration: cli_options.sub_command().is_run(),
log_level: cli_options.log_level().unwrap_or(log::Level::Info).into(),
enable_op_summary_metrics: cli_options.enable_op_summary_metrics(),
enable_testing_features: cli_options.enable_testing_features(),
has_node_modules_dir: cli_options.has_node_modules_dir(),
inspect_brk: cli_options.inspect_brk().is_some(),
inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(),
is_inspecting: cli_options.is_inspecting(),
location: cli_options.location_flag().clone(),
// if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno
argv0: cli_options
.take_binary_npm_command_name()
.or(std::env::args().next()),
node_debug: std::env::var("NODE_DEBUG").ok(),
origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()),
seed: cli_options.seed(),
unsafely_ignore_certificate_errors: cli_options
.unsafely_ignore_certificate_errors()
.clone(),
node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(),
otel_config: self.cli_options()?.otel_config(),
startup_snapshot: crate::js::deno_isolate_init(),
})
}
fn create_cli_main_worker_options(
&self,
) -> Result<CliMainWorkerOptions, AnyError> {
@ -1216,10 +1178,37 @@ impl CliFactory {
};
Ok(CliMainWorkerOptions {
needs_test_modules: cli_options.sub_command().needs_test(),
argv: cli_options.argv().clone(),
// This optimization is only available for "run" subcommand
// because we need to register new ops for testing and jupyter
// integration.
skip_op_registration: cli_options.sub_command().is_run(),
log_level: cli_options.log_level().unwrap_or(log::Level::Info).into(),
enable_op_summary_metrics: cli_options.enable_op_summary_metrics(),
enable_testing_features: cli_options.enable_testing_features(),
has_node_modules_dir: cli_options.has_node_modules_dir(),
hmr: cli_options.has_hmr(),
inspect_brk: cli_options.inspect_brk().is_some(),
inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(),
is_inspecting: cli_options.is_inspecting(),
location: cli_options.location_flag().clone(),
// if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno
argv0: cli_options
.take_binary_npm_command_name()
.or(std::env::args().next()),
node_debug: std::env::var("NODE_DEBUG").ok(),
origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()),
seed: cli_options.seed(),
unsafely_ignore_certificate_errors: cli_options
.unsafely_ignore_certificate_errors()
.clone(),
create_hmr_runner,
create_coverage_collector,
default_npm_caching_strategy: cli_options.default_npm_caching_strategy(),
node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(),
})
}
}

View file

@ -30,7 +30,6 @@ use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_graph::WorkspaceFastCheckOption;
use deno_path_util::url_to_file_path;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_node;
@ -38,6 +37,7 @@ use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::SmallStackString;
use node_resolver::InNpmPackageChecker;
use crate::args::config_to_deno_graph_workspace_member;
use crate::args::jsr_url;
@ -55,7 +55,7 @@ use crate::file_fetcher::CliFileFetcher;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::resolver::CjsTracker;
use crate::resolver::CliNpmGraphResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver;
@ -493,17 +493,17 @@ pub enum BuildGraphWithNpmResolutionError {
pub struct ModuleGraphBuilder {
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<CliFileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: CliNpmResolver,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer,
@ -514,17 +514,17 @@ impl ModuleGraphBuilder {
#[allow(clippy::too_many_arguments)]
pub fn new(
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<CliFileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: CliNpmResolver,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer,
@ -712,7 +712,8 @@ impl ModuleGraphBuilder {
let initial_package_deps_len = graph.packages.package_deps_sum();
let initial_package_mappings_len = graph.packages.mappings().len();
if roots.iter().any(|r| r.scheme() == "npm") && self.npm_resolver.is_byonm()
if roots.iter().any(|r| r.scheme() == "npm")
&& self.npm_resolver.as_byonm().is_some()
{
return Err(BuildGraphWithNpmResolutionError::UnsupportedNpmSpecifierEntrypointResolutionWay);
}
@ -1225,7 +1226,7 @@ fn format_deno_graph_error(err: &dyn Error) -> String {
#[derive(Debug)]
struct CliGraphResolver<'a> {
cjs_tracker: &'a CliCjsTracker,
cjs_tracker: &'a CjsTracker,
resolver: &'a CliResolver,
jsx_import_source_config: Option<JsxImportSourceConfig>,
}

View file

@ -14,7 +14,6 @@ use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_lib::version::DENO_VERSION_INFO;
use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions;
@ -29,6 +28,7 @@ use http_body_util::BodyExt;
use thiserror::Error;
use crate::util::progress_bar::UpdateGuard;
use crate::version;
#[derive(Debug, Error)]
pub enum SendError {
@ -79,7 +79,7 @@ impl HttpClientProvider {
Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())),
Entry::Vacant(entry) => {
let client = create_http_client(
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
root_cert_store: match &self.root_cert_store_provider {
Some(provider) => Some(provider.get_or_try_init()?.clone()),
@ -481,7 +481,7 @@ mod test {
let client = HttpClient::new(
create_http_client(
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path().join("tls/RootCA.pem"),
@ -525,7 +525,7 @@ mod test {
let client = HttpClient::new(
create_http_client(
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions::default(),
)
.unwrap(),
@ -566,7 +566,7 @@ mod test {
let client = HttpClient::new(
create_http_client(
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
root_cert_store: Some(root_cert_store),
..Default::default()
@ -587,7 +587,7 @@ mod test {
.unwrap();
let client = HttpClient::new(
create_http_client(
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path()
@ -620,7 +620,7 @@ mod test {
let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap();
let client = HttpClient::new(
create_http_client(
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path()
@ -661,7 +661,7 @@ mod test {
.unwrap();
let client = HttpClient::new(
create_http_client(
DENO_VERSION_INFO.user_agent,
version::DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path()

View file

@ -1,5 +1,18 @@
// Copyright 2018-2025 the Deno authors. MIT license.
/// Entry point that re-invokes `cargo test` for the `cli_tests` package,
/// forwarding any arguments given to this process to the child.
///
/// This file exists to cause the executable to be built when running
/// `cargo test`.
pub fn main() {
  // arguments given to this process, forwarded after `--`
  let passthrough: Vec<String> = std::env::args().skip(1).collect();
  let mut cargo_args: Vec<&str> =
    vec!["cargo", "test", "-p", "cli_tests", "--features", "run"];
  if cfg!(not(debug_assertions)) {
    cargo_args.push("--release");
  }
  cargo_args.push("--");
  cargo_args.extend(passthrough.iter().map(|arg| arg.as_str()));
  test_util::spawn::exec_replace("cargo", &cargo_args).unwrap();
}

View file

@ -2,7 +2,18 @@
use log::debug;
#[cfg(not(feature = "hmr"))]
static CLI_SNAPSHOT: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
pub fn deno_isolate_init() -> Option<&'static [u8]> {
debug!("Deno isolate init with snapshots.");
deno_snapshots::CLI_SNAPSHOT
#[cfg(not(feature = "hmr"))]
{
Some(CLI_SNAPSHOT)
}
#[cfg(feature = "hmr")]
{
None
}
}

View file

@ -8,7 +8,7 @@ import {
restorePermissions,
} from "ext:cli/40_test_common.js";
import { Console } from "ext:deno_console/01_console.js";
import { setExitHandler } from "ext:deno_os/30_os.js";
import { setExitHandler } from "ext:runtime/30_os.js";
const {
op_register_bench,
op_bench_get_origin,

View file

@ -26,7 +26,7 @@ const {
TypeError,
} = primordials;
import { setExitHandler } from "ext:deno_os/30_os.js";
import { setExitHandler } from "ext:runtime/30_os.js";
// Capture `Deno` global so that users deleting or mangling it, won't
// have impact on our sanitizers.

View file

@ -1,46 +0,0 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "deno_lib"
version = "0.2.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "Shared code between the Deno CLI and denort"
[lib]
path = "lib.rs"
[dependencies]
capacity_builder.workspace = true
deno_config.workspace = true
deno_error.workspace = true
deno_fs = { workspace = true, features = ["sync_fs"] }
deno_media_type.workspace = true
deno_node = { workspace = true, features = ["sync_fs"] }
deno_npm.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime.workspace = true
deno_semver.workspace = true
deno_terminal.workspace = true
env_logger = "=0.10.0"
faster-hex.workspace = true
indexmap.workspace = true
libsui.workspace = true
log.workspace = true
node_resolver = { workspace = true, features = ["sync"] }
parking_lot.workspace = true
ring.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sys_traits = { workspace = true, features = ["getrandom"] }
thiserror.workspace = true
tokio.workspace = true
twox-hash.workspace = true
url.workspace = true
[dev-dependencies]
test_util.workspace = true

View file

@ -1,4 +0,0 @@
# deno_lib
This crate contains the shared code between the Deno CLI and denort. It is
highly unstable.

View file

@ -1,199 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::io::BufReader;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::path::PathBuf;
use std::sync::LazyLock;
use deno_runtime::colors;
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
use deno_runtime::deno_tls::rustls;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::rustls_pemfile;
use deno_runtime::deno_tls::webpki_roots;
use deno_semver::npm::NpmPackageReqReference;
use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;
/// Derives the binary command name for an npm package requirement
/// reference: the sub path when one is present, otherwise the package
/// name.
pub fn npm_pkg_req_ref_to_binary_command(
  req_ref: &NpmPackageReqReference,
) -> String {
  match req_ref.sub_path() {
    Some(sub_path) => sub_path.to_string(),
    None => req_ref.req().name.to_string(),
  }
}
/// Returns `true` when the `DENO_TRACE_PERMISSIONS` env var is set to "1".
pub fn has_trace_permissions_enabled() -> bool {
  has_flag_env_var("DENO_TRACE_PERMISSIONS")
}
/// Returns `true` when the named environment variable is set to exactly
/// `"1"`; unset, unreadable, or any other value yields `false`.
pub fn has_flag_env_var(name: &str) -> bool {
  std::env::var(name).map(|value| value == "1").unwrap_or(false)
}
/// Source of additional certificate-authority data used when building the
/// root certificate store.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum CaData {
  /// The string is a file path
  File(String),
  /// This variant is not exposed as an option in the CLI, it is used internally
  /// for standalone binaries.
  Bytes(Vec<u8>),
}
/// Errors that can occur while building the root certificate store.
#[derive(Error, Debug, Clone, deno_error::JsError)]
#[class(generic)]
pub enum RootCertStoreLoadError {
  /// The requested store name was neither "system" nor "mozilla".
  #[error(
    "Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
  )]
  UnknownStore(String),
  /// PEM certificate data could not be parsed.
  #[error("Unable to add pem file to certificate store: {0}")]
  FailedAddPemFile(String),
  /// The CA file (from options or `DENO_CERT`) could not be opened.
  #[error("Failed opening CA file: {0}")]
  CaFileOpenError(String),
}
/// Create and populate a root cert store based on the passed options and
/// environment.
///
/// Store selection comes from `maybe_ca_stores`, falling back to the
/// comma-separated `DENO_TLS_CA_STORE` env var, and finally to `mozilla`.
/// Extra CA data comes from `maybe_ca_data`, falling back to the
/// `DENO_CERT` env var (interpreted as a file path; relative paths are
/// resolved against `maybe_root_path` when provided).
pub fn get_root_cert_store(
  maybe_root_path: Option<PathBuf>,
  maybe_ca_stores: Option<Vec<String>>,
  maybe_ca_data: Option<CaData>,
) -> Result<RootCertStore, RootCertStoreLoadError> {
  let mut root_cert_store = RootCertStore::empty();
  let ca_stores: Vec<String> = maybe_ca_stores
    .or_else(|| {
      let env_ca_store = std::env::var("DENO_TLS_CA_STORE").ok()?;
      Some(
        env_ca_store
          .split(',')
          .map(|s| s.trim().to_string())
          .filter(|s| !s.is_empty())
          .collect(),
      )
    })
    .unwrap_or_else(|| vec!["mozilla".to_string()]);

  for store in ca_stores.iter() {
    match store.as_str() {
      "mozilla" => {
        root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
      }
      "system" => {
        // NOTE: panics if the platform certificate store cannot be read.
        let roots = load_native_certs().expect("could not load platform certs");
        for root in roots {
          if let Err(err) = root_cert_store
            .add(rustls::pki_types::CertificateDer::from(root.0.clone()))
          {
            // A bad system certificate is logged (with its hex bytes for
            // diagnostics) but does not abort store construction.
            log::error!(
              "{}",
              colors::yellow(&format!(
                "Unable to add system certificate to certificate store: {:?}",
                err
              ))
            );
            let hex_encoded_root = faster_hex::hex_string(&root.0);
            log::error!("{}", colors::gray(&hex_encoded_root));
          }
        }
      }
      _ => {
        return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
      }
    }
  }

  let ca_data =
    maybe_ca_data.or_else(|| std::env::var("DENO_CERT").ok().map(CaData::File));
  if let Some(ca_data) = ca_data {
    let result = match ca_data {
      CaData::File(ca_file) => {
        let ca_file = if let Some(root) = &maybe_root_path {
          root.join(&ca_file)
        } else {
          PathBuf::from(ca_file)
        };
        let certfile = std::fs::File::open(ca_file).map_err(|err| {
          RootCertStoreLoadError::CaFileOpenError(err.to_string())
        })?;
        let mut reader = BufReader::new(certfile);
        rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
      }
      CaData::Bytes(data) => {
        let mut reader = BufReader::new(Cursor::new(data));
        rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
      }
    };
    match result {
      Ok(certs) => {
        root_cert_store.add_parsable_certificates(certs);
      }
      Err(e) => {
        return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
      }
    }
  }
  Ok(root_cert_store)
}
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
  // whether resolution comes from a serialized snapshot or byonm
  pub kind: NpmProcessStateKind,
  // path of the local node_modules directory, when one is in use
  pub local_node_modules_path: Option<String>,
}
/// Kind of npm resolution state inherited from a parent process.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
  /// A serialized snapshot of a managed npm resolution.
  Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
  /// "Bring your own node_modules" resolution.
  Byonm,
}
/// Lazily-read npm process state inherited from a parent process.
///
/// The parent passes a raw io handle number via the
/// `NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME` env var; the JSON-encoded state
/// is read from that handle. Evaluates to `None` when the env var is
/// absent or the state cannot be read/parsed (failures are logged).
pub static NPM_PROCESS_STATE: LazyLock<Option<NpmProcessState>> =
  LazyLock::new(|| {
    use deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
    let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
    // NOTE(review): presumably removed so child processes don't re-read
    // a handle that only this process owns — confirm.
    std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
    let fd = fd.parse::<usize>().ok()?;
    let mut file = {
      use deno_runtime::deno_io::FromRawIoHandle;
      // SAFETY: assumes the env var holds a valid open handle passed by
      // the parent process — TODO confirm this contract with the writer.
      unsafe { std::fs::File::from_raw_io_handle(fd as _) }
    };
    let mut buf = Vec::new();
    // seek to beginning. after the file is written the position will be inherited by this subprocess,
    // and also this file might have been read before
    file.seek(std::io::SeekFrom::Start(0)).unwrap();
    file
      .read_to_end(&mut buf)
      .inspect_err(|e| {
        log::error!("failed to read npm process state from fd {fd}: {e}");
      })
      .ok()?;
    let state: NpmProcessState = serde_json::from_slice(&buf)
      .inspect_err(|e| {
        log::error!(
          "failed to deserialize npm process state: {e} {}",
          String::from_utf8_lossy(&buf)
        )
      })
      .ok()?;
    Some(state)
  });
/// Unstable feature flags parsed from the CLI.
#[derive(Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct UnstableConfig {
  // TODO(bartlomieju): remove in Deno 2.5
  pub legacy_flag_enabled: bool, // --unstable
  pub bare_node_builtins: bool,
  pub detect_cjs: bool,
  pub sloppy_imports: bool,
  pub npm_lazy_caching: bool,
  pub features: Vec<String>, // --unstable-kv --unstable-cron
}

View file

@ -1,42 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
/// Build script: exposes the crate version and git commit hash to the
/// compiler via `cargo:rustc-env` directives.
fn main() {
  // todo(dsherret): remove this after Deno 2.2.0 is published and then
  // align the version of this crate with Deno then. We need to wait because
  // there was previously a deno_lib 2.2.0 published (https://crates.io/crates/deno_lib/versions)
  let version_path = std::path::Path::new(".").join("version.txt");
  // re-run the build script whenever the version file changes
  println!("cargo:rerun-if-changed={}", version_path.display());
  #[allow(clippy::disallowed_methods)]
  let text = std::fs::read_to_string(version_path).unwrap();
  println!("cargo:rustc-env=DENO_VERSION={}", text);
  let commit_hash = git_commit_hash();
  println!("cargo:rustc-env=GIT_COMMIT_HASH={}", commit_hash);
  println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
  // NOTE: relies on the hash being at least 7 chars; the "UNKNOWN"
  // fallback is exactly 7.
  println!(
    "cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
    &commit_hash[..7]
  );
}
/// Returns the full (40 hex chars) git commit hash of HEAD, or `"UNKNOWN"`
/// when it cannot be determined — no git binary, not a git repository
/// (e.g. when the user installs via `cargo install deno`), or unexpected
/// output.
///
/// The fallback is exactly 7 characters long so callers may safely take a
/// 7-char "short hash" slice of the returned value.
fn git_commit_hash() -> String {
  std::process::Command::new("git")
    .arg("rev-list")
    .arg("-1")
    .arg("HEAD")
    .output()
    .ok()
    .filter(|output| output.status.success())
    .and_then(|output| {
      // take the first 40 bytes (the hash) and require valid UTF-8;
      // previously this sliced `stdout[..40]` unconditionally, which
      // panicked when git printed fewer than 40 bytes, and `unwrap()`ed
      // on invalid UTF-8
      let hash = output.stdout.get(..40)?;
      std::str::from_utf8(hash).ok().map(str::to_string)
    })
    .unwrap_or_else(|| "UNKNOWN".to_string())
}

View file

@ -1,48 +0,0 @@
disallowed-methods = [
{ path = "std::env::current_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::env::set_current_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::env::temp_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::copy", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::create_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::DirBuilder::new", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::hard_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::OpenOptions::new", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_to_string", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::rename", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::set_permissions", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::write", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "url::Url::to_file_path", reason = "Use deno_path_util instead" },
{ path = "url::Url::from_file_path", reason = "Use deno_path_util instead" },
{ path = "url::Url::from_directory_path", reason = "Use deno_path_util instead" },
]

View file

@ -1,11 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod args;
pub mod loader;
pub mod npm;
pub mod shared;
pub mod standalone;
pub mod sys;
pub mod util;
pub mod version;
pub mod worker;

View file

@ -1,213 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use deno_media_type::MediaType;
use deno_resolver::cjs::CjsTracker;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::deno_core::ModuleSourceCode;
use node_resolver::analyze::CjsCodeAnalyzer;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::InNpmPackageChecker;
use node_resolver::IsBuiltInNodeModuleChecker;
use node_resolver::NpmPackageFolderResolver;
use thiserror::Error;
use url::Url;
use crate::sys::DenoLibSys;
use crate::util::text_encoding::from_utf8_lossy_cow;
/// Loaded module source code together with the URL it was found at and
/// its media type.
pub struct ModuleCodeStringSource {
  pub code: ModuleSourceCode,
  pub found_url: Url,
  pub media_type: MediaType,
}
/// Raised when a module inside an npm package has an emittable media type
/// (one that would require transpilation), which is not supported.
#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("{media_type} files are not supported in npm packages: {specifier}")]
pub struct NotSupportedKindInNpmError {
  pub media_type: MediaType,
  pub specifier: Url,
}
/// Errors that can occur while loading a module from an npm package.
#[derive(Debug, Error, deno_error::JsError)]
pub enum NpmModuleLoadError {
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
  #[class(inherit)]
  #[error(transparent)]
  NotSupportedKindInNpm(#[from] NotSupportedKindInNpmError),
  #[class(inherit)]
  #[error(transparent)]
  ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
  #[class(inherit)]
  #[error(transparent)]
  TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
  /// The module file could not be read (and is not a directory).
  #[class(inherit)]
  #[error("Unable to load {}{}", file_path.display(), maybe_referrer.as_ref().map(|r| format!(" imported from {}", r)).unwrap_or_default())]
  UnableToLoad {
    file_path: PathBuf,
    maybe_referrer: Option<Url>,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
  /// The specifier resolved to a directory; `suggestion` optionally names
  /// an index file that exists within it.
  #[class(inherit)]
  #[error(
    "{}",
    format_dir_import_message(file_path, maybe_referrer, suggestion)
  )]
  DirImport {
    file_path: PathBuf,
    maybe_referrer: Option<Url>,
    suggestion: Option<&'static str>,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
}
/// Builds the error message for a directory import, naming the referrer
/// and (when available) a candidate index file the user may have meant.
fn format_dir_import_message(
  file_path: &std::path::Path,
  maybe_referrer: &Option<Url>,
  suggestion: &Option<&'static str>,
) -> String {
  // directory imports are not allowed when importing from an
  // ES module, so provide the user with a helpful error message
  let mut message =
    format!("Directory import {}", file_path.to_string_lossy());
  if let Some(referrer) = maybe_referrer {
    message.push_str(" is not supported resolving import from ");
    message.push_str(referrer.as_str());
    if let Some(entrypoint_name) = suggestion {
      message.push_str(&format!(
        "\nDid you mean to import {entrypoint_name} within the directory?"
      ));
    }
  }
  message
}
/// Loads module source code from npm packages, translating CommonJS to
/// ESM when needed.
#[derive(Clone)]
pub struct NpmModuleLoader<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoLibSys,
> {
  // decides whether a module should be treated as CommonJS
  cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
  sys: TSys,
  // performs the CJS -> ESM translation
  node_code_translator: Arc<
    NodeCodeTranslator<
      TCjsCodeAnalyzer,
      TInNpmPackageChecker,
      TIsBuiltInNodeModuleChecker,
      TNpmPackageFolderResolver,
      TSys,
    >,
  >,
}
impl<
    TCjsCodeAnalyzer: CjsCodeAnalyzer,
    TInNpmPackageChecker: InNpmPackageChecker,
    TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver: NpmPackageFolderResolver,
    TSys: DenoLibSys,
  >
  NpmModuleLoader<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  /// Creates a new loader from its collaborators.
  pub fn new(
    cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
    node_code_translator: Arc<
      NodeCodeTranslator<
        TCjsCodeAnalyzer,
        TInNpmPackageChecker,
        TIsBuiltInNodeModuleChecker,
        TNpmPackageFolderResolver,
        TSys,
      >,
    >,
    sys: TSys,
  ) -> Self {
    Self {
      cjs_tracker,
      node_code_translator,
      sys,
    }
  }

  /// Reads the module at `specifier` from disk and prepares its source:
  /// modules classified as CommonJS are translated to ESM, while ESM and
  /// other code is passed through untouched. Emittable media types (ones
  /// requiring transpilation) are rejected.
  pub async fn load(
    &self,
    specifier: &Url,
    maybe_referrer: Option<&Url>,
  ) -> Result<ModuleCodeStringSource, NpmModuleLoadError> {
    let file_path = deno_path_util::url_to_file_path(specifier)?;
    let code = self.sys.fs_read(&file_path).map_err(|source| {
      if self.sys.fs_is_dir_no_err(&file_path) {
        // the path is a directory: suggest a conventional index file that
        // actually exists inside it, if any
        let suggestion = ["index.mjs", "index.js", "index.cjs"]
          .into_iter()
          .find(|e| self.sys.fs_is_file_no_err(file_path.join(e)));
        NpmModuleLoadError::DirImport {
          file_path,
          maybe_referrer: maybe_referrer.cloned(),
          suggestion,
          source,
        }
      } else {
        NpmModuleLoadError::UnableToLoad {
          file_path,
          maybe_referrer: maybe_referrer.cloned(),
          source,
        }
      }
    })?;

    let media_type = MediaType::from_specifier(specifier);
    if media_type.is_emittable() {
      return Err(NpmModuleLoadError::NotSupportedKindInNpm(
        NotSupportedKindInNpmError {
          media_type,
          specifier: specifier.clone(),
        },
      ));
    }

    let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
      // translate cjs to esm if it's cjs and inject node globals
      let code = from_utf8_lossy_cow(code);
      ModuleSourceCode::String(
        self
          .node_code_translator
          .translate_cjs_to_esm(specifier, Some(code))
          .await?
          .into_owned()
          .into(),
      )
    } else {
      // esm and json code is untouched
      ModuleSourceCode::Bytes(match code {
        Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
        Cow::Borrowed(bytes) => bytes.into(),
      })
    };
    Ok(ModuleCodeStringSource {
      code,
      found_url: specifier.clone(),
      media_type: MediaType::from_specifier(specifier),
    })
  }
}

View file

@ -1,80 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod permission_checker;
use std::path::Path;
use std::sync::Arc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ManagedNpmResolverRc;
use deno_resolver::npm::NpmResolver;
use deno_runtime::deno_process::NpmProcessStateProvider;
use deno_runtime::deno_process::NpmProcessStateProviderRc;
pub use permission_checker::NpmRegistryReadPermissionChecker;
pub use permission_checker::NpmRegistryReadPermissionCheckerMode;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::sys::DenoLibSys;
/// Creates the process-state provider matching the kind of npm resolver
/// in use (byonm vs managed).
pub fn create_npm_process_state_provider<TSys: DenoLibSys>(
  npm_resolver: &NpmResolver<TSys>,
) -> NpmProcessStateProviderRc {
  match npm_resolver {
    NpmResolver::Byonm(byonm_npm_resolver) => {
      Arc::new(ByonmNpmProcessStateProvider(byonm_npm_resolver.clone()))
    }
    NpmResolver::Managed(managed_npm_resolver) => {
      Arc::new(ManagedNpmProcessStateProvider(managed_npm_resolver.clone()))
    }
  }
}
/// Serializes a managed npm resolution snapshot (plus the optional local
/// node_modules path) to the JSON form handed to child processes.
pub fn npm_process_state(
  snapshot: ValidSerializedNpmResolutionSnapshot,
  node_modules_path: Option<&Path>,
) -> String {
  let local_node_modules_path =
    node_modules_path.map(|path| path.to_string_lossy().to_string());
  let state = NpmProcessState {
    kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
    local_node_modules_path,
  };
  serde_json::to_string(&state).unwrap()
}
/// Provides npm process state for the managed resolver by serializing the
/// current resolution snapshot.
#[derive(Debug)]
pub struct ManagedNpmProcessStateProvider<TSys: DenoLibSys>(
  pub ManagedNpmResolverRc<TSys>,
);

impl<TSys: DenoLibSys> NpmProcessStateProvider
  for ManagedNpmProcessStateProvider<TSys>
{
  fn get_npm_process_state(&self) -> String {
    npm_process_state(
      self.0.resolution().serialized_valid_snapshot(),
      self.0.root_node_modules_path(),
    )
  }
}
/// Provides npm process state for the "bring your own node_modules"
/// resolver.
#[derive(Debug)]
pub struct ByonmNpmProcessStateProvider<TSys: DenoLibSys>(
  pub Arc<ByonmNpmResolver<TSys>>,
);

impl<TSys: DenoLibSys> NpmProcessStateProvider
  for ByonmNpmProcessStateProvider<TSys>
{
  fn get_npm_process_state(&self) -> String {
    serde_json::to_string(&NpmProcessState {
      kind: NpmProcessStateKind::Byonm,
      local_node_modules_path: self
        .0
        .root_node_modules_path()
        .map(|p| p.to_string_lossy().to_string()),
    })
    .unwrap()
  }
}

View file

@ -1,389 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use deno_config::workspace::PackageJsonDepResolution;
use deno_media_type::MediaType;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_semver::Version;
use indexmap::IndexMap;
use node_resolver::analyze::CjsAnalysisExports;
use serde::Deserialize;
use serde::Serialize;
use url::Url;
use super::virtual_fs::FileSystemCaseSensitivity;
use crate::args::UnstableConfig;
pub const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
/// A type that can be deserialized from the denort binary format,
/// borrowing from the input where possible.
pub trait DenoRtDeserializable<'a>: Sized {
  /// Parses `Self` from `input`, returning the remaining unread bytes.
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)>;
}
impl<'a> DenoRtDeserializable<'a> for Cow<'a, [u8]> {
  /// Reads a u32 length prefix followed by that many bytes, borrowing
  /// (zero-copy) from the input.
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    let (input, data) = read_bytes_with_u32_len(input)?;
    Ok((input, Cow::Borrowed(data)))
  }
}
/// A type that can be serialized into the denort binary format via a
/// capacity-computing byte builder.
pub trait DenoRtSerializable<'a> {
  /// Appends this value's serialized form to `builder`.
  fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  );
}
/// How node_modules are provided in a standalone binary.
#[derive(Deserialize, Serialize)]
pub enum NodeModules {
  Managed {
    /// Relative path for the node_modules directory in the vfs.
    node_modules_dir: Option<String>,
  },
  Byonm {
    // root node_modules directory, when one exists
    root_node_modules_dir: Option<String>,
  },
}
/// An import map inlined (specifier plus raw JSON text) into the
/// serialized workspace resolver.
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolverImportMap {
  pub specifier: String,
  pub json: String,
}
/// Serialized form of a workspace JSR package.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SerializedResolverWorkspaceJsrPackage {
  // base path of the package (relative — NOTE(review): presumably
  // relative to the vfs root; confirm against the writer)
  pub relative_base: String,
  pub name: String,
  pub version: Option<Version>,
  // export name -> export path
  pub exports: IndexMap<String, String>,
}
/// Serialized form of the workspace resolver configuration.
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
  pub import_map: Option<SerializedWorkspaceResolverImportMap>,
  pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
  // keyed by path; BTreeMap keeps serialization deterministic
  pub package_jsons: BTreeMap<String, serde_json::Value>,
  pub pkg_json_resolution: PackageJsonDepResolution,
}
// Note: Don't use hashmaps/hashsets. Ensure the serialization
// is deterministic.
/// Top-level metadata embedded in a standalone binary describing how to
/// run the bundled program (CLI args, permissions, resolver config, etc.).
#[derive(Deserialize, Serialize)]
pub struct Metadata {
  pub argv: Vec<String>,
  pub seed: Option<u64>,
  pub code_cache_key: Option<u64>,
  pub permissions: PermissionsOptions,
  pub location: Option<Url>,
  pub v8_flags: Vec<String>,
  pub log_level: Option<log::Level>,
  pub ca_stores: Option<Vec<String>>,
  pub ca_data: Option<Vec<u8>>,
  pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
  pub env_vars_from_env_file: IndexMap<String, String>,
  pub workspace_resolver: SerializedWorkspaceResolver,
  pub entrypoint_key: String,
  pub node_modules: Option<NodeModules>,
  pub unstable_config: UnstableConfig,
  pub otel_config: OtelConfig,
  pub vfs_case_sensitivity: FileSystemCaseSensitivity,
}
/// Compact numeric id referring to a specifier in the serialized data.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpecifierId(u32);

impl SpecifierId {
  /// Wraps a raw id value.
  pub fn new(id: u32) -> Self {
    Self(id)
  }
}
/// Allows appending a `SpecifierId` directly to a bytes builder; the wire
/// form is a little-endian u32, matching the serialize/deserialize impls.
impl<'a> capacity_builder::BytesAppendable<'a> for SpecifierId {
  fn append_to_builder<TBytes: capacity_builder::BytesType>(
    self,
    builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
  ) {
    builder.append_le(self.0);
  }
}
/// Writes the id as a little-endian u32 (inverse of `deserialize`).
impl<'a> DenoRtSerializable<'a> for SpecifierId {
  fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  ) {
    builder.append_le(self.0);
  }
}
impl<'a> DenoRtDeserializable<'a> for SpecifierId {
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
let (input, id) = read_u32(input)?;
Ok((input, Self(id)))
}
}
/// Result of analyzing a module for CommonJS exports.
#[derive(Deserialize, Serialize)]
pub enum CjsExportAnalysisEntry {
  /// The module turned out to be ESM; no export analysis applies.
  Esm,
  /// The module is CommonJS, with its discovered exports.
  Cjs(CjsAnalysisExports),
}
// Bit flags describing which optional payloads follow a module entry on
// the wire (see the RemoteModuleEntry serialize/deserialize impls).
const HAS_TRANSPILED_FLAG: u8 = 1 << 0;
const HAS_SOURCE_MAP_FLAG: u8 = 1 << 1;
const HAS_CJS_EXPORT_ANALYSIS_FLAG: u8 = 1 << 2;
/// A remote module stored in the binary: its media type, source bytes,
/// and optional transpiled source / source map / CJS export analysis.
pub struct RemoteModuleEntry<'a> {
  pub media_type: MediaType,
  pub data: Cow<'a, [u8]>,
  pub maybe_transpiled: Option<Cow<'a, [u8]>>,
  pub maybe_source_map: Option<Cow<'a, [u8]>>,
  pub maybe_cjs_export_analysis: Option<Cow<'a, [u8]>>,
}
impl<'a> DenoRtSerializable<'a> for RemoteModuleEntry<'a> {
  /// Wire format: media-type byte, u32-length-prefixed source bytes, a
  /// flags byte, then each present optional payload (in flag-bit order)
  /// as u32-length-prefixed bytes.
  fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  ) {
    // Appends a length-prefixed payload only when present; absence is
    // signaled solely via the flags byte.
    fn append_maybe_data<'a>(
      builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
      maybe_data: Option<&'a [u8]>,
    ) {
      if let Some(data) = maybe_data {
        builder.append_le(data.len() as u32);
        builder.append(data);
      }
    }
    let mut has_data_flags = 0;
    if self.maybe_transpiled.is_some() {
      has_data_flags |= HAS_TRANSPILED_FLAG;
    }
    if self.maybe_source_map.is_some() {
      has_data_flags |= HAS_SOURCE_MAP_FLAG;
    }
    if self.maybe_cjs_export_analysis.is_some() {
      has_data_flags |= HAS_CJS_EXPORT_ANALYSIS_FLAG;
    }
    builder.append(serialize_media_type(self.media_type));
    builder.append_le(self.data.len() as u32);
    builder.append(self.data.as_ref());
    builder.append(has_data_flags);
    // Order must match the deserializer: transpiled, source map, cjs.
    append_maybe_data(builder, self.maybe_transpiled.as_deref());
    append_maybe_data(builder, self.maybe_source_map.as_deref());
    append_maybe_data(builder, self.maybe_cjs_export_analysis.as_deref());
  }
}
impl<'a> DenoRtDeserializable<'a> for RemoteModuleEntry<'a> {
  /// Inverse of the serialize impl above: reads the media type, source
  /// bytes, the flags byte, then each flagged optional payload in order.
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    // Reads a length-prefixed payload only when `flag` is set in
    // `has_data_flags`; otherwise leaves the input untouched.
    #[allow(clippy::type_complexity)]
    fn deserialize_data_if_has_flag(
      input: &[u8],
      has_data_flags: u8,
      flag: u8,
    ) -> std::io::Result<(&[u8], Option<Cow<[u8]>>)> {
      if has_data_flags & flag != 0 {
        let (input, bytes) = read_bytes_with_u32_len(input)?;
        Ok((input, Some(Cow::Borrowed(bytes))))
      } else {
        Ok((input, None))
      }
    }
    let (input, media_type) = MediaType::deserialize(input)?;
    let (input, data) = read_bytes_with_u32_len(input)?;
    let (input, has_data_flags) = read_u8(input)?;
    // Order must match the serializer: transpiled, source map, cjs.
    let (input, maybe_transpiled) =
      deserialize_data_if_has_flag(input, has_data_flags, HAS_TRANSPILED_FLAG)?;
    let (input, maybe_source_map) =
      deserialize_data_if_has_flag(input, has_data_flags, HAS_SOURCE_MAP_FLAG)?;
    let (input, maybe_cjs_export_analysis) = deserialize_data_if_has_flag(
      input,
      has_data_flags,
      HAS_CJS_EXPORT_ANALYSIS_FLAG,
    )?;
    Ok((
      input,
      Self {
        media_type,
        data: Cow::Borrowed(data),
        maybe_transpiled,
        maybe_source_map,
        maybe_cjs_export_analysis,
      },
    ))
  }
}
/// Maps a `MediaType` to its stable one-byte wire value.
///
/// These values are part of the binary format and must stay in exact
/// correspondence with `MediaType::deserialize` below; do not reorder or
/// renumber existing entries.
fn serialize_media_type(media_type: MediaType) -> u8 {
  match media_type {
    MediaType::JavaScript => 0,
    MediaType::Jsx => 1,
    MediaType::Mjs => 2,
    MediaType::Cjs => 3,
    MediaType::TypeScript => 4,
    MediaType::Mts => 5,
    MediaType::Cts => 6,
    MediaType::Dts => 7,
    MediaType::Dmts => 8,
    MediaType::Dcts => 9,
    MediaType::Tsx => 10,
    MediaType::Json => 11,
    MediaType::Wasm => 12,
    MediaType::Css => 13,
    MediaType::SourceMap => 14,
    MediaType::Unknown => 15,
  }
}
impl<'a> DenoRtDeserializable<'a> for MediaType {
  /// Reads a media type from its one-byte wire value; must stay in exact
  /// correspondence with `serialize_media_type` above.
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    let (input, value) = read_u8(input)?;
    let value = match value {
      0 => MediaType::JavaScript,
      1 => MediaType::Jsx,
      2 => MediaType::Mjs,
      3 => MediaType::Cjs,
      4 => MediaType::TypeScript,
      5 => MediaType::Mts,
      6 => MediaType::Cts,
      7 => MediaType::Dts,
      8 => MediaType::Dmts,
      9 => MediaType::Dcts,
      10 => MediaType::Tsx,
      11 => MediaType::Json,
      12 => MediaType::Wasm,
      13 => MediaType::Css,
      14 => MediaType::SourceMap,
      15 => MediaType::Unknown,
      // Unknown byte: corrupt or incompatible binary data.
      value => {
        return Err(std::io::Error::new(
          std::io::ErrorKind::InvalidData,
          format!("Unknown media type value: {value}"),
        ))
      }
    };
    Ok((input, value))
  }
}
/// Data stored keyed by specifier.
pub struct SpecifierDataStore<TData> {
  // Insertion-ordered map so serialization output is deterministic.
  data: IndexMap<SpecifierId, TData>,
}
impl<TData> Default for SpecifierDataStore<TData> {
  fn default() -> Self {
    Self {
      data: IndexMap::new(),
    }
  }
}
impl<TData> SpecifierDataStore<TData> {
  /// Creates an empty store with room for `capacity` entries.
  pub fn with_capacity(capacity: usize) -> Self {
    Self {
      data: IndexMap::with_capacity(capacity),
    }
  }
  /// Iterates entries in insertion order as `(id, value)` pairs.
  pub fn iter(&self) -> impl Iterator<Item = (SpecifierId, &TData)> {
    self.data.iter().map(|(k, v)| (*k, v))
  }
  #[allow(clippy::len_without_is_empty)]
  pub fn len(&self) -> usize {
    self.data.len()
  }
  /// Whether an entry exists for the given specifier id.
  pub fn contains(&self, specifier: SpecifierId) -> bool {
    self.data.contains_key(&specifier)
  }
  /// Inserts or replaces the value for the given specifier id.
  pub fn add(&mut self, specifier: SpecifierId, value: TData) {
    self.data.insert(specifier, value);
  }
  pub fn get(&self, specifier: SpecifierId) -> Option<&TData> {
    self.data.get(&specifier)
  }
}
impl<'a, TData> SpecifierDataStore<TData>
where
  TData: DenoRtSerializable<'a> + 'a,
{
  /// Wire format: entry count as little-endian u32, then each entry as
  /// its specifier id followed by the value's own serialization.
  pub fn serialize(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
  ) {
    builder.append_le(self.len() as u32);
    for (specifier, value) in self.iter() {
      builder.append(specifier);
      value.serialize(builder);
    }
  }
}
impl<'a, TData> DenoRtDeserializable<'a> for SpecifierDataStore<TData>
where
  TData: DenoRtDeserializable<'a>,
{
  /// Inverse of `serialize`: reads the u32 entry count, then that many
  /// (specifier id, value) pairs in order.
  fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
    let (mut remaining, len) = read_u32_as_usize(input)?;
    let mut data = IndexMap::with_capacity(len);
    for _ in 0..len {
      let (rest, specifier) = SpecifierId::deserialize(remaining)?;
      let (rest, value) = TData::deserialize(rest)?;
      data.insert(specifier, value);
      remaining = rest;
    }
    Ok((remaining, Self { data }))
  }
}
/// Reads a little-endian u32 length prefix, then that many bytes.
/// Returns `(remaining_input, bytes)`.
fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
  let (input, len) = read_u32_as_usize(input)?;
  read_bytes(input, len)
}
/// Reads a little-endian u32 and widens it to `usize`.
fn read_u32_as_usize(input: &[u8]) -> std::io::Result<(&[u8], usize)> {
  let (input, value) = read_u32(input)?;
  Ok((input, value as usize))
}
/// Reads a little-endian u32 from the front of `input`.
fn read_u32(input: &[u8]) -> std::io::Result<(&[u8], u32)> {
  let (rest, bytes) = read_bytes(input, 4)?;
  let mut raw = [0u8; 4];
  raw.copy_from_slice(bytes);
  Ok((rest, u32::from_le_bytes(raw)))
}
/// Reads a single byte from the front of `input`.
fn read_u8(input: &[u8]) -> std::io::Result<(&[u8], u8)> {
  check_has_len(input, 1)?;
  Ok((&input[1..], input[0]))
}
/// Splits off `len` bytes, returning `(remaining_input, taken_bytes)`.
fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> {
  check_has_len(input, len)?;
  let (taken, rest) = input.split_at(len);
  Ok((rest, taken))
}
/// Errors with `InvalidData` when fewer than `len` bytes remain.
#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> {
  if input.len() >= len {
    Ok(())
  } else {
    Err(std::io::Error::new(
      std::io::ErrorKind::InvalidData,
      "Unexpected end of data",
    ))
  }
}

View file

@ -1,4 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod binary;
pub mod virtual_fs;

View file

@ -1,999 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use deno_path_util::normalize_path;
use deno_path_util::strip_unc_prefix;
use deno_runtime::colors;
use deno_runtime::deno_core::anyhow::bail;
use deno_runtime::deno_core::anyhow::Context;
use deno_runtime::deno_core::error::AnyError;
use indexmap::IndexSet;
use serde::de;
use serde::de::SeqAccess;
use serde::de::Visitor;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::Serializer;
/// A path that can additionally represent the Windows system root, which
/// sits conceptually above the individual drive letters.
#[derive(Debug, PartialEq, Eq)]
pub enum WindowsSystemRootablePath {
  /// The root of the system above any drive letters.
  WindowSystemRoot,
  Path(PathBuf),
}
impl WindowsSystemRootablePath {
  /// The filesystem root for the OS this binary was compiled for:
  /// the system root on Windows, `/` elsewhere.
  pub fn root_for_current_os() -> Self {
    if cfg!(windows) {
      WindowsSystemRootablePath::WindowSystemRoot
    } else {
      WindowsSystemRootablePath::Path(PathBuf::from("/"))
    }
  }
  /// Joins a single path component onto this path.
  pub fn join(&self, name_component: &str) -> PathBuf {
    // this method doesn't handle multiple components
    debug_assert!(
      !name_component.contains('\\'),
      "Invalid component: {}",
      name_component
    );
    debug_assert!(
      !name_component.contains('/'),
      "Invalid component: {}",
      name_component
    );
    match self {
      // windows drive letter — joining onto the system root yields a
      // drive root like "C:\".
      WindowsSystemRootablePath::WindowSystemRoot => {
        PathBuf::from(format!("{}\\", name_component))
      }
      WindowsSystemRootablePath::Path(path) => path.join(name_component),
    }
  }
}
/// Whether a file system treats names case-sensitively.
///
/// Serialized with single-letter variant names to keep the embedded
/// metadata small.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum FileSystemCaseSensitivity {
  #[serde(rename = "s")]
  Sensitive,
  #[serde(rename = "i")]
  Insensitive,
}
/// Entries of a virtual directory, kept sorted by name so lookups can
/// use binary search.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct VirtualDirectoryEntries(Vec<VfsEntry>);
impl VirtualDirectoryEntries {
  pub fn new(mut entries: Vec<VfsEntry>) -> Self {
    // needs to be sorted by name
    entries.sort_by(|a, b| a.name().cmp(b.name()));
    Self(entries)
  }
  pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, VfsEntry> {
    self.0.iter_mut()
  }
  pub fn iter(&self) -> std::slice::Iter<'_, VfsEntry> {
    self.0.iter()
  }
  /// Takes the entries out, leaving this collection empty.
  pub fn take_inner(&mut self) -> Vec<VfsEntry> {
    std::mem::take(&mut self.0)
  }
  pub fn is_empty(&self) -> bool {
    self.0.is_empty()
  }
  pub fn len(&self) -> usize {
    self.0.len()
  }
  /// Looks up an entry by name via binary search.
  pub fn get_by_name(
    &self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> Option<&VfsEntry> {
    self
      .binary_search(name, case_sensitivity)
      .ok()
      .map(|index| &self.0[index])
  }
  /// Mutable variant of `get_by_name`.
  pub fn get_mut_by_name(
    &mut self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> Option<&mut VfsEntry> {
    self
      .binary_search(name, case_sensitivity)
      .ok()
      .map(|index| &mut self.0[index])
  }
  pub fn get_mut_by_index(&mut self, index: usize) -> Option<&mut VfsEntry> {
    self.0.get_mut(index)
  }
  pub fn get_by_index(&self, index: usize) -> Option<&VfsEntry> {
    self.0.get(index)
  }
  /// Binary searches for `name`, returning `Ok(index)` when found or
  /// `Err(insertion_index)` when absent (like `slice::binary_search`).
  ///
  /// The insensitive mode compares char-by-char using ASCII lowercasing
  /// and falls back to comparing byte lengths when one name is a prefix
  /// of the other. NOTE(review): non-ASCII case folding is not handled —
  /// presumably acceptable for the names stored here; confirm.
  pub fn binary_search(
    &self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> Result<usize, usize> {
    match case_sensitivity {
      FileSystemCaseSensitivity::Sensitive => {
        self.0.binary_search_by(|e| e.name().cmp(name))
      }
      FileSystemCaseSensitivity::Insensitive => self.0.binary_search_by(|e| {
        e.name()
          .chars()
          .zip(name.chars())
          .map(|(a, b)| a.to_ascii_lowercase().cmp(&b.to_ascii_lowercase()))
          .find(|&ord| ord != Ordering::Equal)
          .unwrap_or_else(|| e.name().len().cmp(&name.len()))
      }),
    }
  }
  /// Inserts `entry`, replacing any existing entry with the same name;
  /// returns the entry's index.
  pub fn insert(
    &mut self,
    entry: VfsEntry,
    case_sensitivity: FileSystemCaseSensitivity,
  ) -> usize {
    match self.binary_search(entry.name(), case_sensitivity) {
      Ok(index) => {
        self.0[index] = entry;
        index
      }
      Err(insert_index) => {
        self.0.insert(insert_index, entry);
        insert_index
      }
    }
  }
  /// Inserts via `on_insert` when `name` is absent, otherwise mutates the
  /// existing entry via `on_modify`; returns the entry's index.
  pub fn insert_or_modify(
    &mut self,
    name: &str,
    case_sensitivity: FileSystemCaseSensitivity,
    on_insert: impl FnOnce() -> VfsEntry,
    on_modify: impl FnOnce(&mut VfsEntry),
  ) -> usize {
    match self.binary_search(name, case_sensitivity) {
      Ok(index) => {
        on_modify(&mut self.0[index]);
        index
      }
      Err(insert_index) => {
        self.0.insert(insert_index, on_insert());
        insert_index
      }
    }
  }
  /// Removes and returns the entry at `index`; panics if out of bounds.
  pub fn remove(&mut self, index: usize) -> VfsEntry {
    self.0.remove(index)
  }
}
/// A directory node in the virtual file system; short serde names keep
/// the serialized form compact.
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualDirectory {
  #[serde(rename = "n")]
  pub name: String,
  // should be sorted by name
  #[serde(rename = "e")]
  pub entries: VirtualDirectoryEntries,
}
/// Byte range of stored file contents: start offset plus length.
#[derive(Debug, Clone, Copy)]
pub struct OffsetWithLength {
  pub offset: u64,
  pub len: u64,
}
// serialize as an array in order to save space
impl Serialize for OffsetWithLength {
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    // A two-element `[offset, len]` array instead of a keyed struct.
    [self.offset, self.len].serialize(serializer)
  }
}
impl<'de> Deserialize<'de> for OffsetWithLength {
  /// Deserializes the compact `[offset, len]` array form produced by the
  /// `Serialize` impl above.
  fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
  where
    D: Deserializer<'de>,
  {
    struct OffsetWithLengthVisitor;
    impl<'de> Visitor<'de> for OffsetWithLengthVisitor {
      type Value = OffsetWithLength;
      fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an array with two elements: [offset, len]")
      }
      fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
      where
        A: SeqAccess<'de>,
      {
        let offset = seq
          .next_element()?
          .ok_or_else(|| de::Error::invalid_length(0, &self))?;
        let len = seq
          .next_element()?
          .ok_or_else(|| de::Error::invalid_length(1, &self))?;
        Ok(OffsetWithLength { offset, len })
      }
    }
    deserializer.deserialize_seq(OffsetWithLengthVisitor)
  }
}
/// A file node in the virtual file system. All offsets index into the
/// vfs file-data section; short serde names keep the serialized form
/// compact.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VirtualFile {
  #[serde(rename = "n")]
  pub name: String,
  /// Location of the original source bytes.
  #[serde(rename = "o")]
  pub offset: OffsetWithLength,
  /// Location of the transpiled source, if any.
  #[serde(rename = "m", skip_serializing_if = "Option::is_none")]
  pub transpiled_offset: Option<OffsetWithLength>,
  /// Location of the CJS export analysis, if any.
  #[serde(rename = "c", skip_serializing_if = "Option::is_none")]
  pub cjs_export_analysis_offset: Option<OffsetWithLength>,
  /// Location of the source map, if any.
  #[serde(rename = "s", skip_serializing_if = "Option::is_none")]
  pub source_map_offset: Option<OffsetWithLength>,
}
/// The components of a symlink destination, stored without any root
/// component.
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualSymlinkParts(Vec<String>);
impl VirtualSymlinkParts {
  /// Builds parts from a path, dropping the root component.
  pub fn from_path(path: &Path) -> Self {
    let parts = path
      .components()
      .filter_map(|component| match component {
        std::path::Component::RootDir => None,
        other => Some(other.as_os_str().to_string_lossy().into_owned()),
      })
      .collect();
    Self(parts)
  }
  /// Takes ownership of the parts, leaving this empty.
  pub fn take_parts(&mut self) -> Vec<String> {
    std::mem::take(&mut self.0)
  }
  /// Borrows the parts.
  pub fn parts(&self) -> &[String] {
    &self.0
  }
  /// Replaces the parts.
  pub fn set_parts(&mut self, parts: Vec<String>) {
    self.0 = parts;
  }
  /// Joins the parts with `/` for display purposes.
  pub fn display(&self) -> String {
    self.0.join("/")
  }
}
/// A symlink node in the virtual file system; short serde names keep the
/// serialized form compact.
#[derive(Debug, Serialize, Deserialize)]
pub struct VirtualSymlink {
  #[serde(rename = "n")]
  pub name: String,
  #[serde(rename = "p")]
  pub dest_parts: VirtualSymlinkParts,
}
impl VirtualSymlink {
  /// Resolves the destination by pushing every stored component onto the
  /// provided vfs root.
  pub fn resolve_dest_from_root(&self, root: &Path) -> PathBuf {
    self
      .dest_parts
      .0
      .iter()
      .fold(root.to_path_buf(), |mut dest, part| {
        dest.push(part);
        dest
      })
  }
}
/// Borrowed view over any kind of [`VfsEntry`].
#[derive(Debug, Copy, Clone)]
pub enum VfsEntryRef<'a> {
  Dir(&'a VirtualDirectory),
  File(&'a VirtualFile),
  Symlink(&'a VirtualSymlink),
}
impl VfsEntryRef<'_> {
  /// Name of the underlying entry, regardless of its kind.
  pub fn name(&self) -> &str {
    match self {
      Self::Dir(dir) => &dir.name,
      Self::File(file) => &file.name,
      Self::Symlink(symlink) => &symlink.name,
    }
  }
}
// todo(dsherret): we should store this more efficiently in the binary
/// An owned entry in the virtual file system.
#[derive(Debug, Serialize, Deserialize)]
pub enum VfsEntry {
  Dir(VirtualDirectory),
  File(VirtualFile),
  Symlink(VirtualSymlink),
}
impl VfsEntry {
  /// Name of the entry, regardless of its kind.
  pub fn name(&self) -> &str {
    match self {
      Self::Dir(dir) => &dir.name,
      Self::File(file) => &file.name,
      Self::Symlink(symlink) => &symlink.name,
    }
  }
  /// Borrowed view of this entry.
  pub fn as_ref(&self) -> VfsEntryRef {
    match self {
      VfsEntry::Dir(dir) => VfsEntryRef::Dir(dir),
      VfsEntry::File(file) => VfsEntryRef::File(file),
      VfsEntry::Symlink(symlink) => VfsEntryRef::Symlink(symlink),
    }
  }
}
/// Name of the special node_modules directory that is deliberately
/// preserved during root collapsing in [`VfsBuilder::build`].
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
  ".deno_compile_node_modules";
/// Finished output of [`VfsBuilder::build`]: the entry tree plus the
/// deduplicated file contents, rooted at `root_path`.
#[derive(Debug)]
pub struct BuiltVfs {
  pub root_path: WindowsSystemRootablePath,
  pub case_sensitivity: FileSystemCaseSensitivity,
  pub entries: VirtualDirectoryEntries,
  /// File contents; entries reference these via offsets.
  pub files: Vec<Vec<u8>>,
}
/// Deduplicated file contents collected while building the vfs; offsets
/// index into the logical concatenation of `files`.
#[derive(Debug, Default)]
struct FilesData {
  files: Vec<Vec<u8>>,
  // Total byte length of everything in `files`, i.e. the offset the next
  // appended chunk will receive.
  current_offset: u64,
  // (checksum, length) -> previously stored location, for deduplication.
  file_offsets: HashMap<(String, usize), OffsetWithLength>,
}
impl FilesData {
  /// Finds the stored chunk starting exactly at `offset.offset`; returns
  /// `None` (after debug-asserting) for offsets not produced by
  /// `add_data`.
  pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
    if offset.len == 0 {
      return Some(&[]);
    }
    // the debug assertions in this method should never happen
    // because it would indicate providing an offset not in the vfs
    let mut count: u64 = 0;
    for file in &self.files {
      // clippy wanted a match
      match count.cmp(&offset.offset) {
        Ordering::Equal => {
          debug_assert_eq!(offset.len, file.len() as u64);
          if offset.len == file.len() as u64 {
            return Some(file);
          } else {
            return None;
          }
        }
        Ordering::Less => {
          count += file.len() as u64;
        }
        Ordering::Greater => {
          debug_assert!(false);
          return None;
        }
      }
    }
    debug_assert!(false);
    None
  }
  /// Stores `data` and returns its location; identical contents (matched
  /// by checksum plus length) are stored only once.
  pub fn add_data(&mut self, data: Vec<u8>) -> OffsetWithLength {
    if data.is_empty() {
      return OffsetWithLength { offset: 0, len: 0 };
    }
    let checksum = crate::util::checksum::gen(&[&data]);
    match self.file_offsets.entry((checksum, data.len())) {
      Entry::Occupied(occupied_entry) => {
        let offset_and_len = *occupied_entry.get();
        debug_assert_eq!(data.len() as u64, offset_and_len.len);
        offset_and_len
      }
      Entry::Vacant(vacant_entry) => {
        let offset_and_len = OffsetWithLength {
          offset: self.current_offset,
          len: data.len() as u64,
        };
        vacant_entry.insert(offset_and_len);
        self.current_offset += offset_and_len.len;
        self.files.push(data);
        offset_and_len
      }
    }
  }
}
/// Payloads to store for a file added to the vfs: the source bytes plus
/// optional derived artifacts.
pub struct AddFileDataOptions {
  pub data: Vec<u8>,
  pub maybe_transpiled: Option<Vec<u8>>,
  pub maybe_source_map: Option<Vec<u8>>,
  pub maybe_cjs_export_analysis: Option<Vec<u8>>,
}
/// Builds a virtual file system from real file-system contents and/or
/// in-memory data, finalized via [`VfsBuilder::build`].
#[derive(Debug)]
pub struct VfsBuilder {
  // Root directory above any drive letters; named "/".
  executable_root: VirtualDirectory,
  files: FilesData,
  /// The minimum root directory that should be included in the VFS.
  min_root_dir: Option<WindowsSystemRootablePath>,
  case_sensitivity: FileSystemCaseSensitivity,
}
impl Default for VfsBuilder {
  fn default() -> Self {
    Self::new()
  }
}
impl VfsBuilder {
  /// Creates an empty builder rooted at the executable root ("/").
  pub fn new() -> Self {
    Self {
      executable_root: VirtualDirectory {
        name: "/".to_string(),
        entries: Default::default(),
      },
      files: Default::default(),
      min_root_dir: Default::default(),
      // This is not exactly correct because file systems on these OSes
      // may be case-sensitive or not based on the directory, but this
      // is a good enough approximation and limitation. In the future,
      // we may want to store this information per directory instead
      // depending on the feedback we get.
      case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") {
        FileSystemCaseSensitivity::Insensitive
      } else {
        FileSystemCaseSensitivity::Sensitive
      },
    }
  }
  /// The case sensitivity this builder was created with.
  pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity {
    self.case_sensitivity
  }
  /// Number of distinct file contents stored so far.
  pub fn files_len(&self) -> usize {
    self.files.files.len()
  }
  /// Looks up stored file bytes by their offset/length pair.
  pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
    self.files.file_bytes(offset)
  }
  /// Add a directory that might be the minimum root directory
  /// of the VFS.
  ///
  /// For example, say the user has a deno.json and specifies an
  /// import map in a parent directory. The import map won't be
  /// included in the VFS, but its base will meaning we need to
  /// tell the VFS builder to include the base of the import map
  /// by calling this method.
  pub fn add_possible_min_root_dir(&mut self, path: &Path) {
    self.add_dir_raw(path);
    match &self.min_root_dir {
      Some(WindowsSystemRootablePath::WindowSystemRoot) => {
        // already the root dir
      }
      Some(WindowsSystemRootablePath::Path(current_path)) => {
        // Shrink the min root to the longest common prefix of the
        // previous min root and the new path.
        let mut common_components = Vec::new();
        for (a, b) in current_path.components().zip(path.components()) {
          if a != b {
            break;
          }
          common_components.push(a);
        }
        if common_components.is_empty() {
          self.min_root_dir =
            Some(WindowsSystemRootablePath::root_for_current_os());
        } else {
          self.min_root_dir = Some(WindowsSystemRootablePath::Path(
            common_components.iter().collect(),
          ));
        }
      }
      None => {
        self.min_root_dir =
          Some(WindowsSystemRootablePath::Path(path.to_path_buf()));
      }
    }
  }
  /// Recursively adds a directory, resolving it first if it is a symlink.
  pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
    let target_path = self.resolve_target_path(path)?;
    self.add_dir_recursive_not_symlink(&target_path)
  }
  /// Recursively adds a directory known to not be a symlink, following
  /// and materializing any symlinks found inside it.
  fn add_dir_recursive_not_symlink(
    &mut self,
    path: &Path,
  ) -> Result<(), AnyError> {
    self.add_dir_raw(path);
    // ok, building fs implementation
    #[allow(clippy::disallowed_methods)]
    let read_dir = std::fs::read_dir(path)
      .with_context(|| format!("Reading {}", path.display()))?;
    let mut dir_entries =
      read_dir.into_iter().collect::<Result<Vec<_>, _>>()?;
    dir_entries.sort_by_cached_key(|entry| entry.file_name()); // determinism
    for entry in dir_entries {
      let file_type = entry.file_type()?;
      let path = entry.path();
      if file_type.is_dir() {
        self.add_dir_recursive_not_symlink(&path)?;
      } else if file_type.is_file() {
        self.add_file_at_path_not_symlink(&path)?;
      } else if file_type.is_symlink() {
        match self.add_symlink(&path) {
          Ok(target) => match target {
            SymlinkTarget::File(target) => {
              self.add_file_at_path_not_symlink(&target)?
            }
            SymlinkTarget::Dir(target) => {
              self.add_dir_recursive_not_symlink(&target)?;
            }
          },
          Err(err) => {
            // Broken/unresolvable symlinks are skipped with a warning
            // instead of failing the whole build.
            log::warn!(
              "{} Failed resolving symlink. Ignoring.\n Path: {}\n Message: {:#}",
              colors::yellow("Warning"),
              path.display(),
              err
            );
          }
        }
      }
    }
    Ok(())
  }
  /// Ensures every directory along the absolute `path` exists in the
  /// vfs, returning the innermost directory.
  ///
  /// Panics (via `unreachable!`) if a non-directory entry already exists
  /// at some component.
  fn add_dir_raw(&mut self, path: &Path) -> &mut VirtualDirectory {
    log::debug!("Ensuring directory '{}'", path.display());
    debug_assert!(path.is_absolute());
    let mut current_dir = &mut self.executable_root;
    for component in path.components() {
      if matches!(component, std::path::Component::RootDir) {
        continue;
      }
      let name = component.as_os_str().to_string_lossy();
      let index = current_dir.entries.insert_or_modify(
        &name,
        self.case_sensitivity,
        || {
          VfsEntry::Dir(VirtualDirectory {
            name: name.to_string(),
            entries: Default::default(),
          })
        },
        |_| {
          // ignore
        },
      );
      match current_dir.entries.get_mut_by_index(index) {
        Some(VfsEntry::Dir(dir)) => {
          current_dir = dir;
        }
        _ => unreachable!(),
      };
    }
    current_dir
  }
  /// The root directory above any drive letters.
  pub fn get_system_root_dir_mut(&mut self) -> &mut VirtualDirectory {
    &mut self.executable_root
  }
  /// Walks to the directory at the absolute `path`, returning `None`
  /// when any component is missing. Panics (via `unreachable!`) if a
  /// component exists but is a file or symlink.
  pub fn get_dir_mut(&mut self, path: &Path) -> Option<&mut VirtualDirectory> {
    debug_assert!(path.is_absolute());
    let mut current_dir = &mut self.executable_root;
    for component in path.components() {
      if matches!(component, std::path::Component::RootDir) {
        continue;
      }
      let name = component.as_os_str().to_string_lossy();
      let entry = current_dir
        .entries
        .get_mut_by_name(&name, self.case_sensitivity)?;
      match entry {
        VfsEntry::Dir(dir) => {
          current_dir = dir;
        }
        _ => unreachable!("{}", path.display()),
      };
    }
    Some(current_dir)
  }
  /// Reads the file at `path` from disk and adds it (following a symlink
  /// if necessary), with no derived artifacts.
  pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
    // ok, building fs implementation
    #[allow(clippy::disallowed_methods)]
    let file_bytes = std::fs::read(path)
      .with_context(|| format!("Reading {}", path.display()))?;
    self.add_file_with_data(
      path,
      AddFileDataOptions {
        data: file_bytes,
        maybe_cjs_export_analysis: None,
        maybe_transpiled: None,
        maybe_source_map: None,
      },
    )
  }
  /// Reads and adds a file already known to not be a symlink.
  fn add_file_at_path_not_symlink(
    &mut self,
    path: &Path,
  ) -> Result<(), AnyError> {
    // ok, building fs implementation
    #[allow(clippy::disallowed_methods)]
    let file_bytes = std::fs::read(path)
      .with_context(|| format!("Reading {}", path.display()))?;
    self.add_file_with_data_raw(path, file_bytes)
  }
  /// Adds a file with the provided payloads, first resolving `path`
  /// when it is a symlink so the data is stored at the link target.
  pub fn add_file_with_data(
    &mut self,
    path: &Path,
    options: AddFileDataOptions,
  ) -> Result<(), AnyError> {
    // ok, fs implementation
    #[allow(clippy::disallowed_methods)]
    let metadata = std::fs::symlink_metadata(path).with_context(|| {
      format!("Resolving target path for '{}'", path.display())
    })?;
    if metadata.is_symlink() {
      let target = self.add_symlink(path)?.into_path_buf();
      self.add_file_with_data_raw_options(&target, options)
    } else {
      self.add_file_with_data_raw_options(path, options)
    }
  }
  /// Adds a file's bytes at `path` without consulting the real file
  /// system and with no derived artifacts.
  pub fn add_file_with_data_raw(
    &mut self,
    path: &Path,
    data: Vec<u8>,
  ) -> Result<(), AnyError> {
    self.add_file_with_data_raw_options(
      path,
      AddFileDataOptions {
        data,
        maybe_transpiled: None,
        maybe_cjs_export_analysis: None,
        maybe_source_map: None,
      },
    )
  }
  /// Core file insertion: stores the (deduplicated) payloads and creates
  /// or updates the `VirtualFile` entry at `path`.
  fn add_file_with_data_raw_options(
    &mut self,
    path: &Path,
    options: AddFileDataOptions,
  ) -> Result<(), AnyError> {
    log::debug!("Adding file '{}'", path.display());
    let case_sensitivity = self.case_sensitivity;
    let offset_and_len = self.files.add_data(options.data);
    let transpiled_offset = options
      .maybe_transpiled
      .map(|data| self.files.add_data(data));
    let source_map_offset = options
      .maybe_source_map
      .map(|data| self.files.add_data(data));
    let cjs_export_analysis_offset = options
      .maybe_cjs_export_analysis
      .map(|data| self.files.add_data(data));
    let dir = self.add_dir_raw(path.parent().unwrap());
    let name = path.file_name().unwrap().to_string_lossy();
    dir.entries.insert_or_modify(
      &name,
      case_sensitivity,
      || {
        VfsEntry::File(VirtualFile {
          name: name.to_string(),
          offset: offset_and_len,
          transpiled_offset,
          cjs_export_analysis_offset,
          source_map_offset,
        })
      },
      |entry| match entry {
        VfsEntry::File(virtual_file) => {
          virtual_file.offset = offset_and_len;
          // doesn't overwrite to None
          if transpiled_offset.is_some() {
            virtual_file.transpiled_offset = transpiled_offset;
          }
          if source_map_offset.is_some() {
            virtual_file.source_map_offset = source_map_offset;
          }
          if cjs_export_analysis_offset.is_some() {
            virtual_file.cjs_export_analysis_offset =
              cjs_export_analysis_offset;
          }
        }
        VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(),
      },
    );
    Ok(())
  }
  /// Resolves `path` to its symlink target (adding the symlink chain to
  /// the vfs), or returns it unchanged when it isn't a symlink.
  fn resolve_target_path(&mut self, path: &Path) -> Result<PathBuf, AnyError> {
    // ok, fs implementation
    #[allow(clippy::disallowed_methods)]
    let metadata = std::fs::symlink_metadata(path).with_context(|| {
      format!("Resolving target path for '{}'", path.display())
    })?;
    if metadata.is_symlink() {
      Ok(self.add_symlink(path)?.into_path_buf())
    } else {
      Ok(path.to_path_buf())
    }
  }
  /// Adds a symlink entry for `path`, following chained symlinks until a
  /// file or directory is reached. Errors on circular symlink chains.
  pub fn add_symlink(
    &mut self,
    path: &Path,
  ) -> Result<SymlinkTarget, AnyError> {
    self.add_symlink_inner(path, &mut IndexSet::new())
  }
  /// Recursive worker for [`Self::add_symlink`]; `visited` tracks the
  /// chain of link targets for cycle detection.
  fn add_symlink_inner(
    &mut self,
    path: &Path,
    visited: &mut IndexSet<PathBuf>,
  ) -> Result<SymlinkTarget, AnyError> {
    log::debug!("Adding symlink '{}'", path.display());
    let target = strip_unc_prefix(
      // ok, fs implementation
      #[allow(clippy::disallowed_methods)]
      std::fs::read_link(path)
        .with_context(|| format!("Reading symlink '{}'", path.display()))?,
    );
    let case_sensitivity = self.case_sensitivity;
    // Makes relative link targets absolute against the link's parent.
    let target = normalize_path(path.parent().unwrap().join(&target));
    let dir = self.add_dir_raw(path.parent().unwrap());
    let name = path.file_name().unwrap().to_string_lossy();
    dir.entries.insert_or_modify(
      &name,
      case_sensitivity,
      || {
        VfsEntry::Symlink(VirtualSymlink {
          name: name.to_string(),
          dest_parts: VirtualSymlinkParts::from_path(&target),
        })
      },
      |_| {
        // ignore previously inserted
      },
    );
    // ok, fs implementation
    #[allow(clippy::disallowed_methods)]
    let target_metadata =
      std::fs::symlink_metadata(&target).with_context(|| {
        format!("Reading symlink target '{}'", target.display())
      })?;
    if target_metadata.is_symlink() {
      if !visited.insert(target.clone()) {
        // todo: probably don't error in this scenario
        bail!(
          "Circular symlink detected: {} -> {}",
          visited
            .iter()
            .map(|p| p.display().to_string())
            .collect::<Vec<_>>()
            .join(" -> "),
          target.display()
        );
      }
      self.add_symlink_inner(&target, visited)
    } else if target_metadata.is_dir() {
      Ok(SymlinkTarget::Dir(target))
    } else {
      Ok(SymlinkTarget::File(target))
    }
  }
  /// Adds the CJS export analysis to the provided file.
  ///
  /// Warning: This will panic if the file wasn't properly
  /// setup before calling this.
  pub fn add_cjs_export_analysis(&mut self, path: &Path, data: Vec<u8>) {
    self.add_data_for_file_or_panic(path, data, |file, offset_with_length| {
      file.cjs_export_analysis_offset = Some(offset_with_length);
    })
  }
  /// Stores `data` and attaches its offset to the existing file entry at
  /// `path` via `update_file`; panics when the entry is missing or is
  /// not a file.
  fn add_data_for_file_or_panic(
    &mut self,
    path: &Path,
    data: Vec<u8>,
    update_file: impl FnOnce(&mut VirtualFile, OffsetWithLength),
  ) {
    let offset_with_length = self.files.add_data(data);
    let case_sensitivity = self.case_sensitivity;
    let dir = self.get_dir_mut(path.parent().unwrap()).unwrap();
    let name = path.file_name().unwrap().to_string_lossy();
    let file = dir
      .entries
      .get_mut_by_name(&name, case_sensitivity)
      .unwrap();
    match file {
      VfsEntry::File(virtual_file) => {
        update_file(virtual_file, offset_with_length);
      }
      VfsEntry::Dir(_) | VfsEntry::Symlink(_) => {
        unreachable!()
      }
    }
  }
  /// Iterates through all the files in the virtual file system.
  pub fn iter_files(
    &self,
  ) -> impl Iterator<Item = (PathBuf, &VirtualFile)> + '_ {
    // Breadth-first traversal starting at the executable root.
    FileIterator {
      pending_dirs: VecDeque::from([(
        WindowsSystemRootablePath::root_for_current_os(),
        &self.executable_root,
      )]),
      current_dir_index: 0,
    }
  }
  /// Finalizes the vfs: collapses chains of single-child directories at
  /// the top down to the minimum root (rewriting stored symlink
  /// destinations accordingly) and returns the built entries and file
  /// data.
  pub fn build(self) -> BuiltVfs {
    // Drops the collapsed root prefix from every symlink destination so
    // they stay correct relative to the new root.
    fn strip_prefix_from_symlinks(
      dir: &mut VirtualDirectory,
      parts: &[String],
    ) {
      for entry in dir.entries.iter_mut() {
        match entry {
          VfsEntry::Dir(dir) => {
            strip_prefix_from_symlinks(dir, parts);
          }
          VfsEntry::File(_) => {}
          VfsEntry::Symlink(symlink) => {
            let parts = symlink
              .dest_parts
              .take_parts()
              .into_iter()
              .skip(parts.len())
              .collect();
            symlink.dest_parts.set_parts(parts);
          }
        }
      }
    }
    let mut current_dir = self.executable_root;
    let mut current_path = WindowsSystemRootablePath::root_for_current_os();
    loop {
      if current_dir.entries.len() != 1 {
        break;
      }
      if self.min_root_dir.as_ref() == Some(&current_path) {
        break;
      }
      match current_dir.entries.iter().next().unwrap() {
        VfsEntry::Dir(dir) => {
          if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME {
            // special directory we want to maintain
            break;
          }
          match current_dir.entries.remove(0) {
            VfsEntry::Dir(dir) => {
              current_path =
                WindowsSystemRootablePath::Path(current_path.join(&dir.name));
              current_dir = dir;
            }
            _ => unreachable!(),
          };
        }
        VfsEntry::File(_) | VfsEntry::Symlink(_) => break,
      }
    }
    if let WindowsSystemRootablePath::Path(path) = &current_path {
      strip_prefix_from_symlinks(
        &mut current_dir,
        VirtualSymlinkParts::from_path(path).parts(),
      );
    }
    BuiltVfs {
      root_path: current_path,
      case_sensitivity: self.case_sensitivity,
      entries: current_dir.entries,
      files: self.files.files,
    }
  }
}
/// Breadth-first traversal state backing [`VfsBuilder::iter_files`].
struct FileIterator<'a> {
  pending_dirs: VecDeque<(WindowsSystemRootablePath, &'a VirtualDirectory)>,
  // Index of the next entry to visit within the front pending directory.
  current_dir_index: usize,
}
impl<'a> Iterator for FileIterator<'a> {
  type Item = (PathBuf, &'a VirtualFile);
  /// Yields `(path, file)` pairs breadth-first; subdirectories are
  /// queued for later and symlinks are skipped.
  fn next(&mut self) -> Option<Self::Item> {
    while !self.pending_dirs.is_empty() {
      let (dir_path, current_dir) = self.pending_dirs.front()?;
      if let Some(entry) =
        current_dir.entries.get_by_index(self.current_dir_index)
      {
        self.current_dir_index += 1;
        match entry {
          VfsEntry::Dir(virtual_directory) => {
            self.pending_dirs.push_back((
              WindowsSystemRootablePath::Path(
                dir_path.join(&virtual_directory.name),
              ),
              virtual_directory,
            ));
          }
          VfsEntry::File(virtual_file) => {
            return Some((dir_path.join(&virtual_file.name), virtual_file));
          }
          VfsEntry::Symlink(_) => {
            // ignore
          }
        }
      } else {
        // Exhausted this directory; move on to the next queued one.
        self.pending_dirs.pop_front();
        self.current_dir_index = 0;
      }
    }
    None
  }
}
/// The fully-resolved target of a symlink and whether it is a file or a
/// directory.
#[derive(Debug)]
pub enum SymlinkTarget {
  File(PathBuf),
  Dir(PathBuf),
}
impl SymlinkTarget {
  /// Unwraps the target path, discarding the file/dir distinction.
  pub fn into_path_buf(self) -> PathBuf {
    match self {
      Self::File(path) | Self::Dir(path) => path,
    }
  }
}

View file

@ -1,37 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_node::ExtNodeSys;
use sys_traits::FsCanonicalize;
use sys_traits::FsCreateDirAll;
use sys_traits::FsMetadata;
use sys_traits::FsOpen;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use sys_traits::FsRemoveFile;
use sys_traits::FsRename;
use sys_traits::SystemRandom;
use sys_traits::ThreadSleep;
/// Umbrella trait bundling the `sys_traits` capabilities (file system
/// operations, sleeping, randomness) plus the node system extension that
/// this library requires from its system implementation.
pub trait DenoLibSys:
  FsCanonicalize
  + FsCreateDirAll
  + FsReadDir
  + FsMetadata
  + FsOpen
  + FsRemoveFile
  + FsRename
  + FsRead
  + ThreadSleep
  + SystemRandom
  + ExtNodeSys
  + Clone
  + Send
  + Sync
  + std::fmt::Debug
  + 'static
{
}
// ok, implementation
#[allow(clippy::disallowed_types)]
impl DenoLibSys for sys_traits::impls::RealSys {}

View file

@ -1,8 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod checksum;
pub mod hash;
pub mod logger;
pub mod result;
pub mod text_encoding;
pub mod v8;

View file

@ -1,45 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
/// Converts bytes into a `String`, replacing invalid UTF-8 sequences
/// with U+FFFD (like `String::from_utf8_lossy`), but taking ownership so
/// the allocation is reused without copying when the bytes are already
/// valid UTF-8.
///
/// Behaviorally identical to the previous implementation, but avoids
/// `unsafe`: `String::from_utf8` hands the buffer back on success.
#[inline(always)]
pub fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String {
  match String::from_utf8(bytes) {
    // Already valid UTF-8: take ownership of the buffer, no copy.
    Ok(text) => text,
    // Invalid UTF-8: fall back to a lossy copy of the original bytes.
    Err(err) => String::from_utf8_lossy(err.as_bytes()).into_owned(),
  }
}
/// Lossily converts a `Cow` of bytes into a `Cow<str>`, keeping the
/// borrowed form when the input is borrowed and already valid UTF-8.
#[inline(always)]
pub fn from_utf8_lossy_cow(bytes: Cow<[u8]>) -> Cow<str> {
  match bytes {
    Cow::Owned(owned_bytes) => Cow::Owned(from_utf8_lossy_owned(owned_bytes)),
    Cow::Borrowed(borrowed_bytes) => String::from_utf8_lossy(borrowed_bytes),
  }
}
/// Converts an `Arc<str>` to an `Arc<[u8]>` without copying the data;
/// the reference count carries over unchanged.
#[allow(dead_code)]
pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
  let raw = Arc::into_raw(arc_str);
  // SAFETY: This is safe because they have the same memory layout.
  unsafe { Arc::from_raw(raw as *const [u8]) }
}
/// Converts an `Arc<u8>` to an `Arc<str>` if able.
///
/// Returns `Err` (without consuming extra allocations) when the bytes are
/// not valid UTF-8; on success the original allocation is reused.
#[allow(dead_code)]
pub fn arc_u8_to_arc_str(
  arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
  // Check that the string is valid UTF-8.
  std::str::from_utf8(&arc_u8)?;
  // SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
  // Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
  // standard library.
  Ok(unsafe {
    std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
  })
}

View file

@ -1,14 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
#[inline(always)]
/// Assembles the argv-style flag list handed to V8.
///
/// The placeholder argv[0] always comes first, followed by built-in
/// defaults, then env-provided flags, then explicit CLI flags.
pub fn construct_v8_flags(
  default_v8_flags: &[String],
  v8_flags: &[String],
  env_v8_flags: Vec<String>,
) -> Vec<String> {
  // Reserve the exact final size up front so the pushes never reallocate.
  let mut flags = Vec::with_capacity(
    1 + default_v8_flags.len() + env_v8_flags.len() + v8_flags.len(),
  );
  // V8 skips argv[0], so the first entry is a throwaway placeholder.
  flags.push("UNUSED_BUT_NECESSARY_ARG0".to_owned());
  flags.extend_from_slice(default_v8_flags);
  flags.extend(env_v8_flags);
  flags.extend_from_slice(v8_flags);
  flags
}

View file

@ -1,94 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use deno_runtime::deno_telemetry::OtelRuntimeConfig;
use crate::shared::ReleaseChannel;
/// Builds the OpenTelemetry runtime descriptor for this binary: the
/// runtime name is always `"deno"` and the version mirrors
/// `DENO_VERSION_INFO.deno`.
pub fn otel_runtime_config() -> OtelRuntimeConfig {
  OtelRuntimeConfig {
    runtime_name: Cow::Borrowed("deno"),
    runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
  }
}
// Full git hash baked in at build time.
const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
// Version of the TypeScript compiler reported by this binary.
const TYPESCRIPT: &str = "5.6.2";
const DENO_VERSION: &str = env!("DENO_VERSION");
// TODO(bartlomieju): ideally we could remove this const.
const IS_CANARY: bool = option_env!("DENO_CANARY").is_some();
// TODO(bartlomieju): this is temporary, to allow Homebrew to cut RC releases as well
const IS_RC: bool = option_env!("DENO_RC").is_some();
/// Lazily-computed version metadata for the running binary.
///
/// The release channel is taken from a `denover` section embedded in the
/// executable when present (via libsui — presumably written into patched
/// or compiled binaries; confirm), falling back to the compile-time
/// DENO_CANARY / DENO_RC env flags.
pub static DENO_VERSION_INFO: std::sync::LazyLock<DenoVersionInfo> =
  std::sync::LazyLock::new(|| {
    let release_channel = libsui::find_section("denover")
      .and_then(|buf| std::str::from_utf8(buf).ok())
      .and_then(|str_| ReleaseChannel::deserialize(str_).ok())
      .unwrap_or({
        if IS_CANARY {
          ReleaseChannel::Canary
        } else if IS_RC {
          ReleaseChannel::Rc
        } else {
          ReleaseChannel::Stable
        }
      });
    DenoVersionInfo {
      // Canary builds append the short commit hash to the semver.
      deno: if release_channel == ReleaseChannel::Canary {
        concat!(env!("DENO_VERSION"), "+", env!("GIT_COMMIT_HASH_SHORT"))
      } else {
        env!("DENO_VERSION")
      },
      release_channel,
      git_hash: GIT_COMMIT_HASH,
      // Keep in sync with `deno` field.
      user_agent: if release_channel == ReleaseChannel::Canary {
        concat!(
          "Deno/",
          env!("DENO_VERSION"),
          "+",
          env!("GIT_COMMIT_HASH_SHORT")
        )
      } else {
        concat!("Deno/", env!("DENO_VERSION"))
      },
      typescript: TYPESCRIPT,
    }
  });
/// Version metadata describing the current Deno binary.
pub struct DenoVersionInfo {
  /// Human-readable version of the current Deno binary.
  ///
  /// For stable release, a semver, eg. `v1.46.2`.
  /// For canary release, a semver + 7-char git hash, eg. `v1.46.3+asdfqwq`.
  pub deno: &'static str,
  /// The channel this binary was built for (stable, canary or rc).
  pub release_channel: ReleaseChannel,
  /// A full git hash.
  pub git_hash: &'static str,
  /// A user-agent header that will be used in HTTP client.
  pub user_agent: &'static str,
  /// Version string of the TypeScript compiler this binary reports.
  pub typescript: &'static str,
}
impl DenoVersionInfo {
  /// For stable release, a semver like, eg. `v1.46.2`.
  /// For canary release a full git hash, eg. `9bdab6fb6b93eb43b1930f40987fa4997287f9c8`.
  pub fn version_or_git_hash(&self) -> &'static str {
    match self.release_channel {
      ReleaseChannel::Canary => self.git_hash,
      _ => DENO_VERSION,
    }
  }
}

View file

@ -1 +0,0 @@
2.1.6

View file

@ -1,716 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use deno_core::error::JsError;
use deno_node::NodeRequireLoaderRc;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmResolver;
use deno_runtime::colors;
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
use deno_runtime::deno_core;
use deno_runtime::deno_core::error::CoreError;
use deno_runtime::deno_core::v8;
use deno_runtime::deno_core::CompiledWasmModuleStore;
use deno_runtime::deno_core::Extension;
use deno_runtime::deno_core::FeatureChecker;
use deno_runtime::deno_core::JsRuntime;
use deno_runtime::deno_core::LocalInspectorSession;
use deno_runtime::deno_core::ModuleLoader;
use deno_runtime::deno_core::SharedArrayBufferStore;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeExtInitServices;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_process::NpmProcessStateProviderRc;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::ops::worker_host::CreateWebWorkerCb;
use deno_runtime::web_worker::WebWorker;
use deno_runtime::web_worker::WebWorkerOptions;
use deno_runtime::web_worker::WebWorkerServiceOptions;
use deno_runtime::worker::MainWorker;
use deno_runtime::worker::WorkerOptions;
use deno_runtime::worker::WorkerServiceOptions;
use deno_runtime::BootstrapOptions;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use node_resolver::errors::ResolvePkgJsonBinExportError;
use url::Url;
use crate::args::has_trace_permissions_enabled;
use crate::sys::DenoLibSys;
use crate::util::checksum;
/// The pair of loaders produced for one module graph: the ES module
/// loader plus the loader used for Node-style `require()`.
pub struct CreateModuleLoaderResult {
  pub module_loader: Rc<dyn ModuleLoader>,
  pub node_require_loader: Rc<dyn NodeRequireLoader>,
}
/// Factory producing module loaders scoped to a permission set — one
/// for the main worker and one per spawned web worker.
pub trait ModuleLoaderFactory: Send + Sync {
  /// Creates the loaders for the main worker.
  fn create_for_main(
    &self,
    root_permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult;
  /// Creates the loaders for a web worker, given both the parent's and
  /// the worker's own permissions.
  fn create_for_worker(
    &self,
    parent_permissions: PermissionsContainer,
    permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult;
}
/// How the storage key (origin) used by web storage is determined.
enum StorageKeyResolverStrategy {
  /// Use this exact value; `None` means no storage key at all.
  Specified(Option<String>),
  /// Derive the key from the main module's URL.
  UseMainModule,
}
/// Resolves the storage key for a run, based on flags, config, or the
/// main module (see [`StorageKeyResolverStrategy`]).
pub struct StorageKeyResolver(StorageKeyResolverStrategy);
impl StorageKeyResolver {
  /// Builds a resolver from a `--location` flag value.
  pub fn from_flag(location: &Url) -> Self {
    // When a location is set, its ascii origin serialization becomes the
    // storage key — unless the origin is opaque, in which case no storage
    // origin is set, since an opaque origin is not reproducible.
    let origin = location.origin();
    let key = if origin.is_tuple() {
      Some(origin.ascii_serialization())
    } else {
      None
    };
    Self(StorageKeyResolverStrategy::Specified(key))
  }
  /// Builds a resolver keyed off the config file's URL.
  pub fn from_config_file_url(url: &Url) -> Self {
    let key = Some(url.to_string());
    Self(StorageKeyResolverStrategy::Specified(key))
  }
  /// Builds a resolver that derives the key from the main module.
  pub fn new_use_main_module() -> Self {
    Self(StorageKeyResolverStrategy::UseMainModule)
  }
  /// Creates a storage key resolver that will always resolve to being empty.
  pub fn empty() -> Self {
    Self(StorageKeyResolverStrategy::Specified(None))
  }
  /// Resolves the storage key to use based on the current flags, config, or main module.
  pub fn resolve_storage_key(&self, main_module: &Url) -> Option<String> {
    // Either return the stored value or fall back to the main module URL.
    match &self.0 {
      StorageKeyResolverStrategy::UseMainModule => {
        Some(main_module.to_string())
      }
      StorageKeyResolverStrategy::Specified(value) => value.clone(),
    }
  }
}
/// Returns the directory used for the Cache API's backing storage.
pub fn get_cache_storage_dir() -> PathBuf {
  // The OS temp dir is used deliberately so storage size never has to be
  // managed by us. This path is never used by the JS runtime itself,
  // which is why the clippy-disallowed method is acceptable here.
  #[allow(clippy::disallowed_methods)]
  let base = std::env::temp_dir();
  base.join("deno_cache")
}
/// By default V8 uses 1.4Gb heap limit which is meant for browser tabs.
/// Instead probe for the total memory on the system and use it instead
/// as a default. Returns `None` when system memory cannot be queried.
pub fn create_isolate_create_params() -> Option<v8::CreateParams> {
  let mem_info = deno_runtime::deno_os::sys_info::mem_info()?;
  Some(
    v8::CreateParams::default()
      .heap_limits_from_system_memory(mem_info.total, 0),
  )
}
/// Error resolving the binary ("bin") entrypoint of an npm package.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveNpmBinaryEntrypointError {
  #[class(inherit)]
  #[error(transparent)]
  ResolvePkgJsonBinExport(ResolvePkgJsonBinExportError),
  /// Both the package.json "bin" lookup and the regular node-resolution
  /// fallback failed; both errors are reported together.
  #[class(generic)]
  #[error("{original:#}\n\nFallback failed: {fallback:#}")]
  Fallback {
    fallback: ResolveNpmBinaryEntrypointFallbackError,
    original: ResolvePkgJsonBinExportError,
  },
}
/// Error from the regular node-resolution fallback used when an npm
/// binary entrypoint is not found via package.json "bin".
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveNpmBinaryEntrypointFallbackError {
  #[class(inherit)]
  #[error(transparent)]
  PackageSubpathResolve(node_resolver::errors::PackageSubpathResolveError),
  /// Resolution produced a specifier, but no file exists at that path.
  #[class(generic)]
  #[error("Cannot find module '{0}'")]
  ModuleNotFound(Url),
}
/// Static configuration consumed by [`LibMainWorkerFactory`] when
/// bootstrapping the main worker and any web workers it spawns.
pub struct LibMainWorkerOptions {
  /// Script arguments exposed to the runtime.
  pub argv: Vec<String>,
  pub log_level: WorkerLogLevel,
  pub enable_op_summary_metrics: bool,
  pub enable_testing_features: bool,
  pub has_node_modules_dir: bool,
  /// Break on the first statement when inspecting.
  pub inspect_brk: bool,
  /// Wait for an inspector session before running.
  pub inspect_wait: bool,
  pub strace_ops: Option<Vec<String>>,
  pub is_inspecting: bool,
  pub location: Option<Url>,
  pub argv0: Option<String>,
  pub node_debug: Option<String>,
  pub otel_config: OtelConfig,
  // Must be `Some` whenever the storage key resolver yields a key; see
  // `create_custom_worker`.
  pub origin_data_folder_path: Option<PathBuf>,
  /// Seed for deterministic randomness, when set.
  pub seed: Option<u64>,
  pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
  pub skip_op_registration: bool,
  pub node_ipc: Option<i64>,
  pub startup_snapshot: Option<&'static [u8]>,
  pub serve_port: Option<u16>,
  pub serve_host: Option<String>,
}
/// Services shared between the factory and every worker it creates;
/// cloned cheaply (via `Arc`) into the web-worker creation callback.
struct LibWorkerFactorySharedState<TSys: DenoLibSys> {
  blob_store: Arc<BlobStore>,
  broadcast_channel: InMemoryBroadcastChannel,
  code_cache: Option<Arc<dyn deno_runtime::code_cache::CodeCache>>,
  compiled_wasm_module_store: CompiledWasmModuleStore,
  feature_checker: Arc<FeatureChecker>,
  fs: Arc<dyn deno_fs::FileSystem>,
  maybe_inspector_server: Option<Arc<InspectorServer>>,
  module_loader_factory: Box<dyn ModuleLoaderFactory>,
  node_resolver:
    Arc<NodeResolver<DenoInNpmPackageChecker, NpmResolver<TSys>, TSys>>,
  npm_process_state_provider: NpmProcessStateProviderRc,
  pkg_json_resolver: Arc<node_resolver::PackageJsonResolver<TSys>>,
  root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
  shared_array_buffer_store: SharedArrayBufferStore,
  storage_key_resolver: StorageKeyResolver,
  sys: TSys,
  options: LibMainWorkerOptions,
}
impl<TSys: DenoLibSys> LibWorkerFactorySharedState<TSys> {
  /// Returns the ids of the unstable granular flags currently enabled on
  /// the given feature checker.
  fn resolve_unstable_features(
    &self,
    feature_checker: &FeatureChecker,
  ) -> Vec<i32> {
    let mut unstable_features =
      Vec::with_capacity(UNSTABLE_GRANULAR_FLAGS.len());
    for granular_flag in UNSTABLE_GRANULAR_FLAGS {
      if feature_checker.check(granular_flag.name) {
        unstable_features.push(granular_flag.id);
      }
    }
    unstable_features
  }
  /// Bundles the Node-compat services handed to the `deno_node` extension.
  fn create_node_init_services(
    &self,
    node_require_loader: NodeRequireLoaderRc,
  ) -> NodeExtInitServices<DenoInNpmPackageChecker, NpmResolver<TSys>, TSys> {
    NodeExtInitServices {
      node_require_loader,
      node_resolver: self.node_resolver.clone(),
      pkg_json_resolver: self.pkg_json_resolver.clone(),
      sys: self.sys.clone(),
    }
  }
  /// Builds the callback invoked whenever JS spawns a `new Worker()`.
  /// The callback clones the shared services into per-worker options and
  /// recursively installs itself so nested workers can spawn workers too.
  fn create_web_worker_callback(
    self: &Arc<Self>,
    stdio: deno_runtime::deno_io::Stdio,
  ) -> Arc<CreateWebWorkerCb> {
    let shared = self.clone();
    Arc::new(move |args| {
      let maybe_inspector_server = shared.maybe_inspector_server.clone();
      let CreateModuleLoaderResult {
        module_loader,
        node_require_loader,
      } = shared.module_loader_factory.create_for_worker(
        args.parent_permissions.clone(),
        args.permissions.clone(),
      );
      let create_web_worker_cb =
        shared.create_web_worker_callback(stdio.clone());
      let maybe_storage_key = shared
        .storage_key_resolver
        .resolve_storage_key(&args.main_module);
      let cache_storage_dir = maybe_storage_key.map(|key| {
        // TODO(@satyarohith): storage quota management
        get_cache_storage_dir().join(checksum::gen(&[key.as_bytes()]))
      });
      // TODO(bartlomieju): this is cruft, update FeatureChecker to spit out
      // list of enabled features.
      let feature_checker = shared.feature_checker.clone();
      let unstable_features =
        shared.resolve_unstable_features(feature_checker.as_ref());
      let services = WebWorkerServiceOptions {
        root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
        module_loader,
        fs: shared.fs.clone(),
        node_services: Some(
          shared.create_node_init_services(node_require_loader),
        ),
        blob_store: shared.blob_store.clone(),
        broadcast_channel: shared.broadcast_channel.clone(),
        shared_array_buffer_store: Some(
          shared.shared_array_buffer_store.clone(),
        ),
        compiled_wasm_module_store: Some(
          shared.compiled_wasm_module_store.clone(),
        ),
        maybe_inspector_server,
        feature_checker,
        npm_process_state_provider: Some(
          shared.npm_process_state_provider.clone(),
        ),
        permissions: args.permissions,
      };
      let options = WebWorkerOptions {
        name: args.name,
        main_module: args.main_module.clone(),
        worker_id: args.worker_id,
        bootstrap: BootstrapOptions {
          deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
          args: shared.options.argv.clone(),
          cpu_count: std::thread::available_parallelism()
            .map(|p| p.get())
            .unwrap_or(1),
          log_level: shared.options.log_level,
          enable_op_summary_metrics: shared.options.enable_op_summary_metrics,
          enable_testing_features: shared.options.enable_testing_features,
          locale: deno_core::v8::icu::get_language_tag(),
          // Web workers use their own main module as the location, unlike
          // the main worker which uses the configured `--location`.
          location: Some(args.main_module),
          no_color: !colors::use_color(),
          color_level: colors::get_color_level(),
          is_stdout_tty: deno_terminal::is_stdout_tty(),
          is_stderr_tty: deno_terminal::is_stderr_tty(),
          unstable_features,
          user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(),
          inspect: shared.options.is_inspecting,
          has_node_modules_dir: shared.options.has_node_modules_dir,
          argv0: shared.options.argv0.clone(),
          node_debug: shared.options.node_debug.clone(),
          node_ipc_fd: None,
          mode: WorkerExecutionMode::Worker,
          serve_port: shared.options.serve_port,
          serve_host: shared.options.serve_host.clone(),
          otel_config: shared.options.otel_config.clone(),
          close_on_idle: args.close_on_idle,
        },
        extensions: vec![],
        startup_snapshot: shared.options.startup_snapshot,
        create_params: create_isolate_create_params(),
        unsafely_ignore_certificate_errors: shared
          .options
          .unsafely_ignore_certificate_errors
          .clone(),
        seed: shared.options.seed,
        create_web_worker_cb,
        format_js_error_fn: Some(Arc::new(format_js_error)),
        worker_type: args.worker_type,
        stdio: stdio.clone(),
        cache_storage_dir,
        strace_ops: shared.options.strace_ops.clone(),
        close_on_idle: args.close_on_idle,
        maybe_worker_metadata: args.maybe_worker_metadata,
        enable_stack_trace_arg_in_ops: has_trace_permissions_enabled(),
      };
      WebWorker::bootstrap_from_options(services, options)
    })
  }
}
/// Factory for bootstrapping main workers (and, transitively, web
/// workers) from a set of shared services.
pub struct LibMainWorkerFactory<TSys: DenoLibSys> {
  shared: Arc<LibWorkerFactorySharedState<TSys>>,
}
impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
  /// Bundles the provided services into the shared state. The broadcast
  /// channel and the wasm/shared-array-buffer stores start at their
  /// defaults.
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    blob_store: Arc<BlobStore>,
    code_cache: Option<Arc<dyn deno_runtime::code_cache::CodeCache>>,
    feature_checker: Arc<FeatureChecker>,
    fs: Arc<dyn deno_fs::FileSystem>,
    maybe_inspector_server: Option<Arc<InspectorServer>>,
    module_loader_factory: Box<dyn ModuleLoaderFactory>,
    node_resolver: Arc<
      NodeResolver<DenoInNpmPackageChecker, NpmResolver<TSys>, TSys>,
    >,
    npm_process_state_provider: NpmProcessStateProviderRc,
    pkg_json_resolver: Arc<node_resolver::PackageJsonResolver<TSys>>,
    root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
    storage_key_resolver: StorageKeyResolver,
    sys: TSys,
    options: LibMainWorkerOptions,
  ) -> Self {
    Self {
      shared: Arc::new(LibWorkerFactorySharedState {
        blob_store,
        broadcast_channel: Default::default(),
        code_cache,
        compiled_wasm_module_store: Default::default(),
        feature_checker,
        fs,
        maybe_inspector_server,
        module_loader_factory,
        node_resolver,
        npm_process_state_provider,
        pkg_json_resolver,
        root_cert_store_provider,
        shared_array_buffer_store: Default::default(),
        storage_key_resolver,
        sys,
        options,
      }),
    }
  }
  /// Convenience wrapper around [`Self::create_custom_worker`] with no
  /// extra extensions and default stdio.
  pub fn create_main_worker(
    &self,
    mode: WorkerExecutionMode,
    permissions: PermissionsContainer,
    main_module: Url,
  ) -> Result<LibMainWorker, CoreError> {
    self.create_custom_worker(
      mode,
      main_module,
      permissions,
      vec![],
      Default::default(),
    )
  }
  /// Creates a main worker with custom extensions and stdio, wiring up
  /// module loading, Node compat services, and storage directories.
  pub fn create_custom_worker(
    &self,
    mode: WorkerExecutionMode,
    main_module: Url,
    permissions: PermissionsContainer,
    custom_extensions: Vec<Extension>,
    stdio: deno_runtime::deno_io::Stdio,
  ) -> Result<LibMainWorker, CoreError> {
    let shared = &self.shared;
    let CreateModuleLoaderResult {
      module_loader,
      node_require_loader,
    } = shared
      .module_loader_factory
      .create_for_main(permissions.clone());
    // TODO(bartlomieju): this is cruft, update FeatureChecker to spit out
    // list of enabled features.
    let feature_checker = shared.feature_checker.clone();
    let unstable_features =
      shared.resolve_unstable_features(feature_checker.as_ref());
    let maybe_storage_key = shared
      .storage_key_resolver
      .resolve_storage_key(&main_module);
    // Origin storage (localStorage etc.) lives under the configured data
    // folder, keyed by a checksum of the storage key.
    let origin_storage_dir = maybe_storage_key.as_ref().map(|key| {
      shared
        .options
        .origin_data_folder_path
        .as_ref()
        .unwrap() // must be set if storage key resolver returns a value
        .join(checksum::gen(&[key.as_bytes()]))
    });
    let cache_storage_dir = maybe_storage_key.map(|key| {
      // TODO(@satyarohith): storage quota management
      get_cache_storage_dir().join(checksum::gen(&[key.as_bytes()]))
    });
    let services = WorkerServiceOptions {
      root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
      module_loader,
      fs: shared.fs.clone(),
      node_services: Some(
        shared.create_node_init_services(node_require_loader),
      ),
      npm_process_state_provider: Some(
        shared.npm_process_state_provider.clone(),
      ),
      blob_store: shared.blob_store.clone(),
      broadcast_channel: shared.broadcast_channel.clone(),
      fetch_dns_resolver: Default::default(),
      shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
      compiled_wasm_module_store: Some(
        shared.compiled_wasm_module_store.clone(),
      ),
      feature_checker,
      permissions,
      v8_code_cache: shared.code_cache.clone(),
    };
    let options = WorkerOptions {
      bootstrap: BootstrapOptions {
        deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
        args: shared.options.argv.clone(),
        cpu_count: std::thread::available_parallelism()
          .map(|p| p.get())
          .unwrap_or(1),
        log_level: shared.options.log_level,
        enable_op_summary_metrics: shared.options.enable_op_summary_metrics,
        enable_testing_features: shared.options.enable_testing_features,
        locale: deno_core::v8::icu::get_language_tag(),
        location: shared.options.location.clone(),
        no_color: !colors::use_color(),
        is_stdout_tty: deno_terminal::is_stdout_tty(),
        is_stderr_tty: deno_terminal::is_stderr_tty(),
        color_level: colors::get_color_level(),
        unstable_features,
        user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(),
        inspect: shared.options.is_inspecting,
        has_node_modules_dir: shared.options.has_node_modules_dir,
        argv0: shared.options.argv0.clone(),
        node_debug: shared.options.node_debug.clone(),
        node_ipc_fd: shared.options.node_ipc,
        mode,
        serve_port: shared.options.serve_port,
        serve_host: shared.options.serve_host.clone(),
        otel_config: shared.options.otel_config.clone(),
        close_on_idle: true,
      },
      extensions: custom_extensions,
      startup_snapshot: shared.options.startup_snapshot,
      create_params: create_isolate_create_params(),
      unsafely_ignore_certificate_errors: shared
        .options
        .unsafely_ignore_certificate_errors
        .clone(),
      seed: shared.options.seed,
      format_js_error_fn: Some(Arc::new(format_js_error)),
      create_web_worker_cb: shared.create_web_worker_callback(stdio.clone()),
      maybe_inspector_server: shared.maybe_inspector_server.clone(),
      should_break_on_first_statement: shared.options.inspect_brk,
      should_wait_for_inspector_session: shared.options.inspect_wait,
      strace_ops: shared.options.strace_ops.clone(),
      cache_storage_dir,
      origin_storage_dir,
      stdio,
      skip_op_registration: shared.options.skip_op_registration,
      enable_stack_trace_arg_in_ops: has_trace_permissions_enabled(),
    };
    let worker =
      MainWorker::bootstrap_from_options(&main_module, services, options);
    Ok(LibMainWorker {
      main_module,
      worker,
    })
  }
  /// Resolves the module specifier for an npm package's binary
  /// entrypoint, trying the package.json "bin" export first and falling
  /// back to regular node resolution of the sub path.
  pub fn resolve_npm_binary_entrypoint(
    &self,
    package_folder: &Path,
    sub_path: Option<&str>,
  ) -> Result<Url, ResolveNpmBinaryEntrypointError> {
    match self
      .shared
      .node_resolver
      .resolve_binary_export(package_folder, sub_path)
    {
      Ok(specifier) => Ok(specifier),
      Err(original_err) => {
        // if the binary entrypoint was not found, fallback to regular node resolution
        let result =
          self.resolve_binary_entrypoint_fallback(package_folder, sub_path);
        match result {
          Ok(Some(specifier)) => Ok(specifier),
          Ok(None) => {
            Err(ResolveNpmBinaryEntrypointError::ResolvePkgJsonBinExport(
              original_err,
            ))
          }
          Err(fallback_err) => Err(ResolveNpmBinaryEntrypointError::Fallback {
            original: original_err,
            fallback: fallback_err,
          }),
        }
      }
    }
  }
  /// resolve the binary entrypoint using regular node resolution
  fn resolve_binary_entrypoint_fallback(
    &self,
    package_folder: &Path,
    sub_path: Option<&str>,
  ) -> Result<Option<Url>, ResolveNpmBinaryEntrypointFallbackError> {
    // only fallback if the user specified a sub path
    if sub_path.is_none() {
      // it's confusing to users if the package doesn't have any binary
      // entrypoint and we just execute the main script which will likely
      // have blank output, so do not resolve the entrypoint in this case
      return Ok(None);
    }
    let specifier = self
      .shared
      .node_resolver
      .resolve_package_subpath_from_deno_module(
        package_folder,
        sub_path,
        /* referrer */ None,
        node_resolver::ResolutionMode::Import,
        node_resolver::NodeResolutionKind::Execution,
      )
      .map_err(
        ResolveNpmBinaryEntrypointFallbackError::PackageSubpathResolve,
      )?;
    // Only accept the resolved specifier if a file actually exists there.
    if deno_path_util::url_to_file_path(&specifier)
      .map(|p| self.shared.sys.fs_exists_no_err(p))
      .unwrap_or(false)
    {
      Ok(Some(specifier))
    } else {
      Err(ResolveNpmBinaryEntrypointFallbackError::ModuleNotFound(
        specifier,
      ))
    }
  }
}
/// A bootstrapped main worker paired with the module it will execute.
/// Most methods are thin delegations to the underlying [`MainWorker`].
pub struct LibMainWorker {
  main_module: Url,
  worker: MainWorker,
}
impl LibMainWorker {
  /// Unwraps the underlying [`MainWorker`], consuming `self`.
  pub fn into_main_worker(self) -> MainWorker {
    self.worker
  }
  /// The module this worker was created to run.
  pub fn main_module(&self) -> &Url {
    &self.main_module
  }
  pub fn js_runtime(&mut self) -> &mut JsRuntime {
    &mut self.worker.js_runtime
  }
  #[inline]
  pub fn create_inspector_session(&mut self) -> LocalInspectorSession {
    self.worker.create_inspector_session()
  }
  #[inline]
  pub fn dispatch_load_event(&mut self) -> Result<(), JsError> {
    self.worker.dispatch_load_event()
  }
  #[inline]
  pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, JsError> {
    self.worker.dispatch_beforeunload_event()
  }
  #[inline]
  pub fn dispatch_process_beforeexit_event(&mut self) -> Result<bool, JsError> {
    self.worker.dispatch_process_beforeexit_event()
  }
  #[inline]
  pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> {
    self.worker.dispatch_unload_event()
  }
  #[inline]
  pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> {
    self.worker.dispatch_process_exit_event()
  }
  /// Loads and evaluates the main module as the graph root.
  pub async fn execute_main_module(&mut self) -> Result<(), CoreError> {
    let id = self.worker.preload_main_module(&self.main_module).await?;
    self.worker.evaluate_module(id).await
  }
  /// Loads and evaluates the main module as a side module.
  pub async fn execute_side_module(&mut self) -> Result<(), CoreError> {
    let id = self.worker.preload_side_module(&self.main_module).await?;
    self.worker.evaluate_module(id).await
  }
  /// Runs the main module to completion: executes it, dispatches the
  /// `load` event, drives the event loop (re-entering when a
  /// `beforeunload` or Node `beforeExit` handler keeps the worker alive),
  /// then dispatches `unload`/`exit` and returns the exit code.
  pub async fn run(&mut self) -> Result<i32, CoreError> {
    log::debug!("main_module {}", self.main_module);
    self.execute_main_module().await?;
    self.worker.dispatch_load_event()?;
    loop {
      self
        .worker
        .run_event_loop(/* wait for inspector */ false)
        .await?;
      // Either event returning `true` means a handler scheduled more work,
      // so the loop runs again instead of shutting down.
      let web_continue = self.worker.dispatch_beforeunload_event()?;
      if !web_continue {
        let node_continue = self.worker.dispatch_process_beforeexit_event()?;
        if !node_continue {
          break;
        }
      }
    }
    self.worker.dispatch_unload_event()?;
    self.worker.dispatch_process_exit_event()?;
    Ok(self.worker.exit_code())
  }
  #[inline]
  pub async fn run_event_loop(
    &mut self,
    wait_for_inspector: bool,
  ) -> Result<(), CoreError> {
    self.worker.run_event_loop(wait_for_inspector).await
  }
  #[inline]
  pub fn exit_code(&self) -> i32 {
    self.worker.exit_code()
  }
}
#[cfg(test)]
mod test {
  use super::*;
  // Covers all three resolution strategies: main-module derived,
  // explicitly absent, and explicitly specified.
  #[test]
  fn storage_key_resolver_test() {
    let resolver =
      StorageKeyResolver(StorageKeyResolverStrategy::UseMainModule);
    let specifier = Url::parse("file:///a.ts").unwrap();
    assert_eq!(
      resolver.resolve_storage_key(&specifier),
      Some(specifier.to_string())
    );
    let resolver =
      StorageKeyResolver(StorageKeyResolverStrategy::Specified(None));
    assert_eq!(resolver.resolve_storage_key(&specifier), None);
    let resolver = StorageKeyResolver(StorageKeyResolverStrategy::Specified(
      Some("value".to_string()),
    ));
    assert_eq!(
      resolver.resolve_storage_key(&specifier),
      Some("value".to_string())
    );
    // test empty
    let resolver = StorageKeyResolver::empty();
    assert_eq!(resolver.resolve_storage_key(&specifier), None);
  }
}

View file

@ -19,7 +19,6 @@ use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_path_util::url_to_file_path;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_runtime::deno_node::PathClean;
use deno_semver::jsr::JsrPackageNvReference;
use deno_semver::jsr::JsrPackageReqReference;
@ -32,7 +31,6 @@ use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::Version;
use import_map::ImportMap;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
@ -367,9 +365,7 @@ impl<'a> TsResponseImportMapper<'a> {
if let Ok(Some(pkg_id)) =
npm_resolver.resolve_pkg_id_from_specifier(specifier)
{
let pkg_reqs = npm_resolver
.resolution()
.resolve_pkg_reqs_from_pkg_id(&pkg_id);
let pkg_reqs = npm_resolver.resolve_pkg_reqs_from_pkg_id(&pkg_id);
// check if any pkg reqs match what is found in an import map
if !pkg_reqs.is_empty() {
let sub_path = npm_resolver
@ -1299,19 +1295,6 @@ impl CodeActionCollection {
range: &lsp::Range,
language_server: &language_server::Inner,
) -> Option<lsp::CodeAction> {
fn top_package_req_for_name(
resolution: &NpmResolutionCell,
name: &str,
) -> Option<PackageReq> {
let package_reqs = resolution.package_reqs();
let mut entries = package_reqs
.into_iter()
.filter(|(_, nv)| nv.name == name)
.collect::<Vec<_>>();
entries.sort_by(|a, b| a.1.version.cmp(&b.1.version));
Some(entries.pop()?.0)
}
let (dep_key, dependency, _) =
document.get_maybe_dependency(&range.end)?;
if dependency.maybe_deno_types_specifier.is_some() {
@ -1399,10 +1382,9 @@ impl CodeActionCollection {
.and_then(|versions| versions.first().cloned())?;
let types_specifier_text =
if let Some(npm_resolver) = managed_npm_resolver {
let mut specifier_text = if let Some(req) = top_package_req_for_name(
npm_resolver.resolution(),
&types_package_name,
) {
let mut specifier_text = if let Some(req) =
npm_resolver.top_package_req_for_name(&types_package_name)
{
format!("npm:{req}")
} else {
format!("npm:{}@^{}", &types_package_name, types_package_version)

View file

@ -41,8 +41,6 @@ use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_lib::args::has_flag_env_var;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache;
@ -57,11 +55,13 @@ use super::logging::lsp_log;
use super::lsp_custom;
use super::urls::url_to_uri;
use crate::args::discover_npmrc_from_workspace;
use crate::args::has_flag_env_var;
use crate::args::CliLockfile;
use crate::args::CliLockfileReadFromPathOptions;
use crate::args::ConfigFile;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::cache::FastInsecureHasher;
use crate::file_fetcher::CliFileFetcher;
use crate::lsp::logging::lsp_warn;
use crate::resolver::CliSloppyImportsResolver;

View file

@ -265,7 +265,7 @@ impl TsDiagnosticsStore {
}
pub fn should_send_diagnostic_batch_index_notifications() -> bool {
deno_lib::args::has_flag_env_var(
crate::args::has_flag_env_var(
"DENO_DONT_USE_INTERNAL_LSP_DIAGNOSTIC_SYNC_FLAG",
)
}

View file

@ -27,10 +27,6 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
use deno_lib::args::get_root_cert_store;
use deno_lib::args::has_flag_env_var;
use deno_lib::args::CaData;
use deno_lib::version::DENO_VERSION_INFO;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
@ -98,6 +94,9 @@ use super::urls;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::has_flag_env_var;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::InternalFlags;
@ -704,7 +703,7 @@ impl Inner {
let version = format!(
"{} ({}, {})",
DENO_VERSION_INFO.deno,
crate::version::DENO_VERSION_INFO.deno,
env!("PROFILE"),
env!("TARGET")
);

View file

@ -26,7 +26,6 @@ use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
@ -36,6 +35,7 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
@ -56,10 +56,10 @@ use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::create_cli_npm_resolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::NpmResolutionInstaller;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedNpmResolver;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmCacheHttpClient;
@ -67,13 +67,14 @@ use crate::npm::CliNpmRegistryInfoProvider;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::npm::NpmResolutionInitializer;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliIsCjsResolver;
use crate::resolver::CliNpmGraphResolver;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::FoundPackageJsonDepFlag;
use crate::resolver::IsCjsResolver;
use crate::sys::CliSys;
use crate::tsc::into_specifier_and_media_type;
use crate::util::progress_bar::ProgressBar;
@ -82,13 +83,12 @@ use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)]
struct LspScopeResolver {
resolver: Arc<CliResolver>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
is_cjs_resolver: Arc<CliIsCjsResolver>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
is_cjs_resolver: Arc<IsCjsResolver>,
jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolution: Arc<NpmResolutionCell>,
npm_resolver: Option<CliNpmResolver>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>,
npm_pkg_req_resolver: Option<Arc<CliNpmReqResolver>>,
pkg_json_resolver: Arc<CliPackageJsonResolver>,
@ -111,7 +111,6 @@ impl Default for LspScopeResolver {
npm_installer: None,
npm_resolver: None,
node_resolver: None,
npm_resolution: factory.services.npm_resolution.clone(),
npm_pkg_req_resolver: None,
pkg_json_resolver: factory.pkg_json_resolver().clone(),
redirect_resolver: None,
@ -225,7 +224,6 @@ impl LspScopeResolver {
npm_pkg_req_resolver,
npm_resolver,
npm_installer,
npm_resolution: factory.services.npm_resolution.clone(),
node_resolver,
pkg_json_resolver,
redirect_resolver,
@ -237,58 +235,12 @@ impl LspScopeResolver {
}
fn snapshot(&self) -> Arc<Self> {
// create a copy of the resolution and then re-initialize the npm resolver from that
// todo(dsherret): this is pretty terrible... we should improve this. It should
// be possible to just change the npm_resolution on the new factory then access
// another method to create a new npm resolver
let mut factory = ResolverFactory::new(self.config_data.as_ref());
factory
.services
.npm_resolution
.set_snapshot(self.npm_resolution.snapshot());
let npm_resolver = self.npm_resolver.as_ref();
let npm_resolver =
self.npm_resolver.as_ref().map(|r| r.clone_snapshotted());
if let Some(npm_resolver) = &npm_resolver {
factory.set_npm_resolver(CliNpmResolver::new::<CliSys>(
match npm_resolver {
CliNpmResolver::Byonm(byonm_npm_resolver) => {
CliNpmResolverCreateOptions::Byonm(
CliByonmNpmResolverCreateOptions {
root_node_modules_dir: byonm_npm_resolver
.root_node_modules_path()
.map(|p| p.to_path_buf()),
sys: CliSys::default(),
pkg_json_resolver: self.pkg_json_resolver.clone(),
},
)
}
CliNpmResolver::Managed(managed_npm_resolver) => {
CliNpmResolverCreateOptions::Managed({
let npmrc = self
.config_data
.as_ref()
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc);
let npm_cache_dir = Arc::new(NpmCacheDir::new(
&CliSys::default(),
managed_npm_resolver.global_cache_root_path().to_path_buf(),
npmrc.get_all_known_registries_urls(),
));
CliManagedNpmResolverCreateOptions {
sys: CliSys::default(),
npm_cache_dir,
maybe_node_modules_path: managed_npm_resolver
.root_node_modules_path()
.map(|p| p.to_path_buf()),
npmrc,
npm_resolution: factory.services.npm_resolution.clone(),
npm_system_info: NpmSystemInfo::default(),
}
})
}
},
));
factory.set_npm_resolver(npm_resolver.clone());
}
Arc::new(Self {
resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
@ -298,7 +250,6 @@ impl LspScopeResolver {
// npm installer isn't necessary for a snapshot
npm_installer: None,
npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
npm_resolution: factory.services.npm_resolution.clone(),
npm_resolver: factory.npm_resolver().cloned(),
node_resolver: factory.node_resolver().cloned(),
redirect_resolver: self.redirect_resolver.clone(),
@ -415,7 +366,7 @@ impl LspResolver {
pub fn as_is_cjs_resolver(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> &CliIsCjsResolver {
) -> &IsCjsResolver {
let resolver = self.get_scope_resolver(file_referrer);
resolver.is_cjs_resolver.as_ref()
}
@ -431,7 +382,7 @@ impl LspResolver {
pub fn in_npm_pkg_checker(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> &DenoInNpmPackageChecker {
) -> &Arc<dyn InNpmPackageChecker> {
let resolver = self.get_scope_resolver(file_referrer);
&resolver.in_npm_pkg_checker
}
@ -439,7 +390,7 @@ impl LspResolver {
pub fn maybe_managed_npm_resolver(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&CliManagedNpmResolver> {
) -> Option<&ManagedCliNpmResolver> {
let resolver = self.get_scope_resolver(file_referrer);
resolver.npm_resolver.as_ref().and_then(|r| r.as_managed())
}
@ -654,14 +605,13 @@ pub struct ScopeDepInfo {
struct ResolverFactoryServices {
cli_resolver: Deferred<Arc<CliResolver>>,
found_pkg_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
in_npm_pkg_checker: Deferred<DenoInNpmPackageChecker>,
is_cjs_resolver: Deferred<Arc<CliIsCjsResolver>>,
in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
is_cjs_resolver: Deferred<Arc<IsCjsResolver>>,
node_resolver: Deferred<Option<Arc<CliNodeResolver>>>,
npm_graph_resolver: Deferred<Arc<CliNpmGraphResolver>>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
npm_resolver: Option<CliNpmResolver>,
npm_resolution: Arc<NpmResolutionCell>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
}
struct ResolverFactory<'a> {
@ -736,9 +686,10 @@ impl<'a> ResolverFactory<'a> {
npm_client.clone(),
npmrc.clone(),
));
let npm_resolution = Arc::new(NpmResolutionCell::default());
let npm_resolution_initializer = Arc::new(NpmResolutionInitializer::new(
registry_info_provider.clone(),
self.services.npm_resolution.clone(),
npm_resolution.clone(),
match self.config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
@ -761,13 +712,13 @@ impl<'a> ResolverFactory<'a> {
));
let npm_resolution_installer = Arc::new(NpmResolutionInstaller::new(
registry_info_provider,
self.services.npm_resolution.clone(),
npm_resolution.clone(),
maybe_lockfile.clone(),
));
let npm_installer = Arc::new(NpmInstaller::new(
npm_cache.clone(),
Arc::new(NpmInstallDepsProvider::empty()),
self.services.npm_resolution.clone(),
npm_resolution.clone(),
npm_resolution_initializer.clone(),
npm_resolution_installer,
&pb,
@ -794,22 +745,22 @@ impl<'a> ResolverFactory<'a> {
npm_cache_dir,
maybe_node_modules_path,
npmrc,
npm_resolution: self.services.npm_resolution.clone(),
npm_resolution,
npm_system_info: NpmSystemInfo::default(),
})
};
self.set_npm_resolver(CliNpmResolver::new(options));
self.set_npm_resolver(create_cli_npm_resolver(options));
}
pub fn set_npm_installer(&mut self, npm_installer: Arc<NpmInstaller>) {
self.services.npm_installer = Some(npm_installer);
}
pub fn set_npm_resolver(&mut self, npm_resolver: CliNpmResolver) {
pub fn set_npm_resolver(&mut self, npm_resolver: Arc<dyn CliNpmResolver>) {
self.services.npm_resolver = Some(npm_resolver);
}
pub fn npm_resolver(&self) -> Option<&CliNpmResolver> {
pub fn npm_resolver(&self) -> Option<&Arc<dyn CliNpmResolver>> {
self.services.npm_resolver.as_ref()
}
@ -874,27 +825,29 @@ impl<'a> ResolverFactory<'a> {
&self.pkg_json_resolver
}
pub fn in_npm_pkg_checker(&self) -> &DenoInNpmPackageChecker {
pub fn in_npm_pkg_checker(&self) -> &Arc<dyn InNpmPackageChecker> {
self.services.in_npm_pkg_checker.get_or_init(|| {
DenoInNpmPackageChecker::new(match &self.services.npm_resolver {
Some(CliNpmResolver::Byonm(_)) | None => {
CreateInNpmPkgCheckerOptions::Byonm
}
Some(CliNpmResolver::Managed(m)) => {
CreateInNpmPkgCheckerOptions::Managed(
ManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.root_node_modules_path(),
},
)
}
})
deno_resolver::npm::create_in_npm_pkg_checker(
match self.services.npm_resolver.as_ref().map(|r| r.as_inner()) {
Some(crate::npm::InnerCliNpmResolverRef::Byonm(_)) | None => {
CreateInNpmPkgCheckerOptions::Byonm
}
Some(crate::npm::InnerCliNpmResolverRef::Managed(m)) => {
CreateInNpmPkgCheckerOptions::Managed(
ManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.maybe_node_modules_path(),
},
)
}
},
)
})
}
pub fn is_cjs_resolver(&self) -> &Arc<CliIsCjsResolver> {
pub fn is_cjs_resolver(&self) -> &Arc<IsCjsResolver> {
self.services.is_cjs_resolver.get_or_init(|| {
Arc::new(CliIsCjsResolver::new(
Arc::new(IsCjsResolver::new(
self.in_npm_pkg_checker().clone(),
self.pkg_json_resolver().clone(),
if self
@ -918,7 +871,7 @@ impl<'a> ResolverFactory<'a> {
Some(Arc::new(CliNodeResolver::new(
self.in_npm_pkg_checker().clone(),
RealIsBuiltInNodeModuleChecker,
npm_resolver.clone(),
npm_resolver.clone().into_npm_pkg_folder_resolver(),
self.pkg_json_resolver.clone(),
self.sys.clone(),
node_resolver::ConditionsFromResolutionMode::default(),
@ -937,7 +890,7 @@ impl<'a> ResolverFactory<'a> {
Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
node_resolver: node_resolver.clone(),
npm_resolver: npm_resolver.clone(),
npm_resolver: npm_resolver.clone().into_byonm_or_managed(),
sys: self.sys.clone(),
})))
})

View file

@ -5,7 +5,6 @@ use std::collections::HashSet;
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier;
use deno_lib::util::checksum;
use lsp::Range;
use tower_lsp::lsp_types as lsp;
@ -16,6 +15,7 @@ use crate::lsp::logging::lsp_warn;
use crate::lsp::urls::url_to_uri;
use crate::tools::test::TestDescription;
use crate::tools::test::TestStepDescription;
use crate::util::checksum;
#[derive(Debug, Clone, PartialEq)]
pub struct TestDefinition {

View file

@ -39,8 +39,6 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;
use deno_lib::util::result::InfallibleResultExt;
use deno_lib::worker::create_isolate_create_params;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_runtime::inspector_server::InspectorServer;
@ -74,7 +72,6 @@ use super::documents::Document;
use super::documents::DocumentsFilter;
use super::language_server;
use super::language_server::StateSnapshot;
use super::logging::lsp_log;
use super::performance::Performance;
use super::performance::PerformanceMark;
use super::refactor::RefactorCodeActionData;
@ -97,7 +94,9 @@ use crate::tsc::ResolveArgs;
use crate::tsc::MISSING_DEPENDENCY_SPECIFIER;
use crate::util::path::relative_specifier;
use crate::util::path::to_percent_decoded_str;
use crate::util::result::InfallibleResultExt;
use crate::util::v8::convert;
use crate::worker::create_isolate_create_params;
static BRACKET_ACCESSOR_RE: Lazy<Regex> =
lazy_regex!(r#"^\[['"](.+)[\['"]\]$"#);
@ -4341,9 +4340,7 @@ impl TscSpecifierMap {
if let Some(specifier) = self.normalized_specifiers.get(original) {
return Ok(specifier.clone());
}
let specifier_str = original
.replace(".d.ts.d.ts", ".d.ts")
.replace("$node_modules", "node_modules");
let specifier_str = original.replace(".d.ts.d.ts", ".d.ts");
let specifier = match ModuleSpecifier::parse(&specifier_str) {
Ok(s) => s,
Err(err) => return Err(err),
@ -4698,24 +4695,7 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
.graph_imports_by_referrer(scope)
{
for specifier in specifiers {
if let Ok(req_ref) =
deno_semver::npm::NpmPackageReqReference::from_specifier(specifier)
{
let Some((resolved, _)) =
state.state_snapshot.resolver.npm_to_file_url(
&req_ref,
scope,
ResolutionMode::Import,
Some(scope),
)
else {
lsp_log!("failed to resolve {req_ref} to file URL");
continue;
};
script_names.insert(resolved.to_string());
} else {
script_names.insert(specifier.to_string());
}
script_names.insert(specifier.to_string());
}
}
}
@ -6265,40 +6245,7 @@ mod tests {
"kind": "keyword"
}
],
"documentation": [
{
"text": "Outputs a message to the console",
"kind": "text",
},
],
"tags": [
{
"name": "param",
"text": [
{
"text": "data",
"kind": "parameterName",
},
{
"text": " ",
"kind": "space",
},
{
"text": "Values to be printed to the console",
"kind": "text",
},
],
},
{
"name": "example",
"text": [
{
"text": "```ts\nconsole.log('Hello', 'World', 123);\n```",
"kind": "text",
},
],
},
]
"documentation": []
})
);
}

View file

@ -81,7 +81,7 @@ fn hash_data_specifier(specifier: &ModuleSpecifier) -> String {
file_name_str.push('?');
file_name_str.push_str(query);
}
deno_lib::util::checksum::gen(&[file_name_str.as_bytes()])
crate::util::checksum::gen(&[file_name_str.as_bytes()])
}
fn to_deno_uri(specifier: &Url) -> String {

View file

@ -17,18 +17,16 @@ mod node;
mod npm;
mod ops;
mod resolver;
mod shared;
mod standalone;
mod sys;
mod task_runner;
mod tools;
mod tsc;
mod util;
mod version;
mod worker;
pub mod sys {
#[allow(clippy::disallowed_types)] // ok, definition
pub type CliSys = sys_traits::impls::RealSys;
}
use std::env;
use std::future::Future;
use std::io::IsTerminal;
@ -42,22 +40,18 @@ use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::FutureExt;
use deno_core::unsync::JoinHandle;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use deno_telemetry::OtelConfig;
use deno_terminal::colors;
use factory::CliFactory;
const MODULE_NOT_FOUND: &str = "Module not found";
const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;
use self::npm::ResolveSnapshotError;
use self::util::draw_thread::DrawThread;
use crate::args::flags_from_vec;
use crate::args::DenoSubcommand;
use crate::args::Flags;
@ -207,7 +201,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
match result {
Ok(v) => Ok(v),
Err(script_err) => {
if let Some(worker::CreateCustomWorkerError::ResolvePkgFolderFromDenoReq(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_)))) = any_and_jserrorbox_downcast_ref::<worker::CreateCustomWorkerError>(&script_err) {
if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = util::result::any_and_jserrorbox_downcast_ref::<ResolvePkgFolderFromDenoReqError>(&script_err) {
if flags.node_modules_dir.is_none() {
let mut flags = flags.deref().clone();
let watch = match &flags.subcommand {
@ -357,7 +351,7 @@ fn setup_panic_hook() {
eprintln!("var set and include the backtrace in your report.");
eprintln!();
eprintln!("Platform: {} {}", env::consts::OS, env::consts::ARCH);
eprintln!("Version: {}", deno_lib::version::DENO_VERSION_INFO.deno);
eprintln!("Version: {}", version::DENO_VERSION_INFO.deno);
eprintln!("Args: {:?}", env::args().collect::<Vec<_>>());
eprintln!();
orig_hook(panic_info);
@ -379,11 +373,13 @@ fn exit_for_error(error: AnyError) -> ! {
let mut error_code = 1;
if let Some(CoreError::Js(e)) =
any_and_jserrorbox_downcast_ref::<CoreError>(&error)
util::result::any_and_jserrorbox_downcast_ref::<CoreError>(&error)
{
error_string = format_js_error(e);
} else if let Some(e @ ResolveSnapshotError { .. }) =
any_and_jserrorbox_downcast_ref::<ResolveSnapshotError>(&error)
util::result::any_and_jserrorbox_downcast_ref::<ResolveSnapshotError>(
&error,
)
{
if let Some(e) = e.maybe_integrity_check_error() {
error_string = e.to_string();
@ -446,19 +442,19 @@ fn resolve_flags_and_init(
if err.kind() == clap::error::ErrorKind::DisplayVersion =>
{
// Ignore results to avoid BrokenPipe errors.
init_logging(None, None);
util::logger::init(None, None);
let _ = err.print();
deno_runtime::exit(0);
}
Err(err) => {
init_logging(None, None);
util::logger::init(None, None);
exit_for_error(AnyError::from(err))
}
};
let otel_config = flags.otel_config();
deno_telemetry::init(deno_lib::version::otel_runtime_config(), &otel_config)?;
init_logging(flags.log_level, Some(otel_config));
deno_telemetry::init(crate::args::otel_runtime_config(), &otel_config)?;
util::logger::init(flags.log_level, Some(otel_config));
// TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled {
@ -491,19 +487,3 @@ fn resolve_flags_and_init(
Ok(flags)
}
fn init_logging(
maybe_level: Option<log::Level>,
otel_config: Option<OtelConfig>,
) {
deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions {
maybe_level,
otel_config,
// it was considered to hold the draw thread's internal lock
// across logging, but if outputting to stderr blocks then that
// could potentially block other threads that access the draw
// thread's state
on_log_start: DrawThread::hide,
on_log_end: DrawThread::show,
})
}

View file

@ -1,27 +1,46 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Allow unused code warnings because we share
// code between the two bin targets.
#![allow(dead_code)]
#![allow(unused_imports)]
mod standalone;
mod args;
mod cache;
mod emit;
mod file_fetcher;
mod http_util;
mod js;
mod node;
mod npm;
mod resolver;
mod shared;
mod sys;
mod task_runner;
mod util;
mod version;
mod worker;
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::env::current_exe;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_lib::version::otel_runtime_config;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_core::error::JsError;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use deno_terminal::colors;
use indexmap::IndexMap;
use standalone::DenoCompileFileSystem;
use self::binary::extract_standalone;
use self::file_system::DenoRtSys;
mod binary;
mod code_cache;
mod file_system;
mod node;
mod run;
use crate::args::Flags;
use crate::util::result::any_and_jserrorbox_downcast_ref;
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
log::error!(
@ -68,26 +87,27 @@ fn load_env_vars(env_vars: &IndexMap<String, String>) {
fn main() {
deno_runtime::deno_permissions::mark_standalone();
let args: Vec<_> = env::args_os().collect();
let standalone = extract_standalone(Cow::Owned(args));
let standalone = standalone::extract_standalone(Cow::Owned(args));
let future = async move {
match standalone {
Ok(Some(data)) => {
deno_runtime::deno_telemetry::init(
otel_runtime_config(),
deno_telemetry::init(
crate::args::otel_runtime_config(),
&data.metadata.otel_config,
)?;
init_logging(
util::logger::init(
data.metadata.log_level,
Some(data.metadata.otel_config.clone()),
);
load_env_vars(&data.metadata.env_vars_from_env_file);
let sys = DenoRtSys::new(data.vfs.clone());
let exit_code = run::run(Arc::new(sys.clone()), sys, data).await?;
let fs = DenoCompileFileSystem::new(data.vfs.clone());
let sys = crate::sys::CliSys::DenoCompile(fs.clone());
let exit_code = standalone::run(Arc::new(fs), sys, data).await?;
deno_runtime::exit(exit_code);
}
Ok(None) => Ok(()),
Err(err) => {
init_logging(None, None);
util::logger::init(None, None);
Err(err)
}
}
@ -95,15 +115,3 @@ fn main() {
unwrap_or_exit(create_and_run_current_thread_with_maybe_metrics(future));
}
fn init_logging(
maybe_level: Option<log::Level>,
otel_config: Option<OtelConfig>,
) {
deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions {
maybe_level,
otel_config,
on_log_start: || {},
on_log_end: || {},
})
}

View file

@ -13,6 +13,8 @@ use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::error::ModuleLoaderError;
use deno_core::futures::future::FutureExt;
@ -37,19 +39,9 @@ use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::Resolution;
use deno_graph::WasmModule;
use deno_lib::loader::ModuleCodeStringSource;
use deno_lib::loader::NotSupportedKindInNpmError;
use deno_lib::loader::NpmModuleLoadError;
use deno_lib::npm::NpmRegistryReadPermissionChecker;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lib::worker::CreateModuleLoaderResult;
use deno_lib::worker::ModuleLoaderFactory;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::code_cache;
use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::ops::require::UnableToGetCwdError;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::ClosestPkgJsonError;
@ -64,6 +56,7 @@ use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::TsTypeLib;
use crate::cache::CodeCache;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::graph_container::MainModuleGraphContainer;
@ -73,13 +66,16 @@ use crate::graph_util::enhance_graph_error;
use crate::graph_util::CreateGraphOptions;
use crate::graph_util::EnhanceGraphErrorMode;
use crate::graph_util::ModuleGraphBuilder;
use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::npm::NpmRegistryReadPermissionChecker;
use crate::resolver::CjsTracker;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::ModuleCodeStringSource;
use crate::resolver::NotSupportedKindInNpmError;
use crate::resolver::NpmModuleLoader;
use crate::sys::CliSys;
use crate::tools::check;
use crate::tools::check::CheckError;
@ -87,14 +83,8 @@ use crate::tools::check::TypeChecker;
use crate::util::progress_bar::ProgressBar;
use crate::util::text_encoding::code_without_source_map;
use crate::util::text_encoding::source_map_from_code;
pub type CliNpmModuleLoader = deno_lib::loader::NpmModuleLoader<
CliCjsCodeAnalyzer,
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
CliNpmResolver,
CliSys,
>;
use crate::worker::CreateModuleLoaderResult;
use crate::worker::ModuleLoaderFactory;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum PrepareModuleLoadError {
@ -108,11 +98,6 @@ pub enum PrepareModuleLoadError {
Check(#[from] CheckError),
#[class(inherit)]
#[error(transparent)]
AtomicWriteFileWithRetries(
#[from] crate::args::AtomicWriteFileWithRetriesError,
),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
@ -248,19 +233,18 @@ struct SharedCliModuleLoaderState {
initial_cwd: PathBuf,
is_inspecting: bool,
is_repl: bool,
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<CodeCache>>,
emitter: Arc<Emitter>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: CliNpmModuleLoader,
npm_registry_permission_checker:
Arc<NpmRegistryReadPermissionChecker<CliSys>>,
npm_module_loader: NpmModuleLoader,
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: CliNpmResolver,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
sys: CliSys,
@ -310,20 +294,18 @@ impl CliModuleLoaderFactory {
#[allow(clippy::too_many_arguments)]
pub fn new(
options: &CliOptions,
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<CodeCache>>,
emitter: Arc<Emitter>,
in_npm_pkg_checker: DenoInNpmPackageChecker,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: CliNpmModuleLoader,
npm_registry_permission_checker: Arc<
NpmRegistryReadPermissionChecker<CliSys>,
>,
npm_module_loader: NpmModuleLoader,
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: CliNpmResolver,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
sys: CliSys,
@ -433,55 +415,6 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
}
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LoadCodeSourceError {
#[class(inherit)]
#[error(transparent)]
NpmModuleLoad(NpmModuleLoadError),
#[class(inherit)]
#[error(transparent)]
LoadPreparedModule(#[from] LoadPreparedModuleError),
#[class(generic)]
#[error("Loading unprepared module: {}{}", .specifier, .maybe_referrer.as_ref().map(|r| format!(", imported from: {}", r)).unwrap_or_default())]
LoadUnpreparedModule {
specifier: ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
},
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LoadPreparedModuleError {
#[class(inherit)]
#[error(transparent)]
NpmModuleLoad(#[from] crate::emit::EmitParsedSourceHelperError),
#[class(inherit)]
#[error(transparent)]
LoadMaybeCjs(#[from] LoadMaybeCjsError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LoadMaybeCjsError {
#[class(inherit)]
#[error(transparent)]
NpmModuleLoad(#[from] crate::emit::EmitParsedSourceHelperError),
#[class(inherit)]
#[error(transparent)]
TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(inherit)]
#[error("Could not resolve '{reference}'")]
pub struct CouldNotResolveError {
reference: deno_semver::npm::NpmPackageNvReference,
#[source]
#[inherit]
source: node_resolver::errors::PackageSubpathResolveError,
}
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
lib: TsTypeLib,
is_worker: bool,
@ -506,10 +439,7 @@ impl<TGraphContainer: ModuleGraphContainer>
maybe_referrer: Option<&ModuleSpecifier>,
requested_module_type: RequestedModuleType,
) -> Result<ModuleSource, ModuleLoaderError> {
let code_source = self
.load_code_source(specifier, maybe_referrer)
.await
.map_err(JsErrorBox::from_err)?;
let code_source = self.load_code_source(specifier, maybe_referrer).await?;
let code = if self.shared.is_inspecting
|| code_source.media_type == MediaType::Wasm
{
@ -570,7 +500,7 @@ impl<TGraphContainer: ModuleGraphContainer>
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, LoadCodeSourceError> {
) -> Result<ModuleCodeStringSource, AnyError> {
if let Some(code_source) = self.load_prepared_module(specifier).await? {
return Ok(code_source);
}
@ -579,14 +509,14 @@ impl<TGraphContainer: ModuleGraphContainer>
.shared
.npm_module_loader
.load(specifier, maybe_referrer)
.await
.map_err(LoadCodeSourceError::NpmModuleLoad);
.await;
}
Err(LoadCodeSourceError::LoadUnpreparedModule {
specifier: specifier.clone(),
maybe_referrer: maybe_referrer.cloned(),
})
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
Err(anyhow!(msg))
}
fn resolve_referrer(
@ -609,8 +539,7 @@ impl<TGraphContainer: ModuleGraphContainer>
.map_err(|e| e.into())
} else {
// this cwd check is slow, so try to avoid it
let cwd = std::env::current_dir()
.map_err(|e| JsErrorBox::from_err(UnableToGetCwdError(e)))?;
let cwd = std::env::current_dir().context("Unable to get CWD")?;
deno_core::resolve_path(referrer, &cwd).map_err(|e| e.into())
}
}
@ -689,11 +618,8 @@ impl<TGraphContainer: ModuleGraphContainer>
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.map_err(|source| {
JsErrorBox::from_err(CouldNotResolveError {
reference: module.nv_reference.clone(),
source,
})
.with_context(|| {
format!("Could not resolve '{}'.", module.nv_reference)
})?
}
Some(Module::Node(module)) => module.specifier.clone(),
@ -714,7 +640,7 @@ impl<TGraphContainer: ModuleGraphContainer>
async fn load_prepared_module(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<ModuleCodeStringSource>, LoadPreparedModuleError> {
) -> Result<Option<ModuleCodeStringSource>, AnyError> {
// Note: keep this in sync with the sync version below
let graph = self.graph_container.graph();
match self.load_prepared_module_or_defer_emit(&graph, specifier)? {
@ -746,8 +672,7 @@ impl<TGraphContainer: ModuleGraphContainer>
}) => self
.load_maybe_cjs(specifier, media_type, source)
.await
.map(Some)
.map_err(LoadPreparedModuleError::LoadMaybeCjs),
.map(Some),
None => Ok(None),
}
}
@ -908,7 +833,7 @@ impl<TGraphContainer: ModuleGraphContainer>
specifier: &ModuleSpecifier,
media_type: MediaType,
original_source: &Arc<str>,
) -> Result<ModuleCodeStringSource, LoadMaybeCjsError> {
) -> Result<ModuleCodeStringSource, AnyError> {
let js_source = if media_type.is_emittable() {
Cow::Owned(
self
@ -1214,13 +1139,12 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit {
#[derive(Debug)]
struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
emitter: Arc<Emitter>,
sys: CliSys,
graph_container: TGraphContainer,
in_npm_pkg_checker: DenoInNpmPackageChecker,
npm_registry_permission_checker:
Arc<NpmRegistryReadPermissionChecker<CliSys>>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
}
impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader

View file

@ -5,9 +5,8 @@ use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_core::error::AnyError;
use deno_graph::ParsedSourceStore;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
@ -20,22 +19,15 @@ use serde::Serialize;
use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::resolver::CjsTracker;
use crate::sys::CliSys;
pub type CliNodeCodeTranslator = NodeCodeTranslator<
CliCjsCodeAnalyzer,
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
CliNpmResolver,
CliSys,
>;
pub type CliNodeResolver = deno_runtime::deno_node::NodeResolver<
DenoInNpmPackageChecker,
CliNpmResolver,
CliSys,
>;
pub type CliNodeResolver = deno_runtime::deno_node::NodeResolver<CliSys>;
pub type CliPackageJsonResolver = node_resolver::PackageJsonResolver<CliSys>;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@ -51,7 +43,7 @@ pub enum CliCjsAnalysis {
pub struct CliCjsCodeAnalyzer {
cache: NodeAnalysisCache,
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
}
@ -59,7 +51,7 @@ pub struct CliCjsCodeAnalyzer {
impl CliCjsCodeAnalyzer {
pub fn new(
cache: NodeAnalysisCache,
cjs_tracker: Arc<CliCjsTracker>,
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
) -> Self {
@ -75,7 +67,7 @@ impl CliCjsCodeAnalyzer {
&self,
specifier: &ModuleSpecifier,
source: &str,
) -> Result<CliCjsAnalysis, JsErrorBox> {
) -> Result<CliCjsAnalysis, AnyError> {
let source_hash = CacheDBHash::from_hashable(source);
if let Some(analysis) =
self.cache.get_cjs_analysis(specifier.as_str(), source_hash)
@ -92,9 +84,7 @@ impl CliCjsCodeAnalyzer {
}
let cjs_tracker = self.cjs_tracker.clone();
let is_maybe_cjs = cjs_tracker
.is_maybe_cjs(specifier, media_type)
.map_err(JsErrorBox::from_err)?;
let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?;
let analysis = if is_maybe_cjs {
let maybe_parsed_source = self
.parsed_source_cache
@ -104,10 +94,9 @@ impl CliCjsCodeAnalyzer {
deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source: Arc<str> = source.into();
move || -> Result<_, JsErrorBox> {
let parsed_source = maybe_parsed_source
.map(Ok)
.unwrap_or_else(|| {
move || -> Result<_, AnyError> {
let parsed_source =
maybe_parsed_source.map(Ok).unwrap_or_else(|| {
deno_ast::parse_program(deno_ast::ParseParams {
specifier,
text: source,
@ -116,16 +105,13 @@ impl CliCjsCodeAnalyzer {
scope_analysis: false,
maybe_syntax: None,
})
})
.map_err(JsErrorBox::from_err)?;
})?;
let is_script = parsed_source.compute_is_script();
let is_cjs = cjs_tracker
.is_cjs_with_known_is_script(
parsed_source.specifier(),
media_type,
is_script,
)
.map_err(JsErrorBox::from_err)?;
let is_cjs = cjs_tracker.is_cjs_with_known_is_script(
parsed_source.specifier(),
media_type,
is_script,
)?;
if is_cjs {
let analysis = parsed_source.analyze_cjs();
Ok(CliCjsAnalysis::Cjs {
@ -157,7 +143,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
&self,
specifier: &ModuleSpecifier,
source: Option<Cow<'a, str>>,
) -> Result<ExtNodeCjsAnalysis<'a>, JsErrorBox> {
) -> Result<ExtNodeCjsAnalysis<'a>, AnyError> {
let source = match source {
Some(source) => source,
None => {

90
cli/npm/byonm.rs Normal file
View file

@ -0,0 +1,90 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::ByonmOrManagedNpmResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageReq;
use node_resolver::NpmPackageFolderResolver;
use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::sys::CliSys;
pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliSys>;
pub type CliByonmNpmResolver = ByonmNpmResolver<CliSys>;
// todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple.
#[derive(Debug)]
struct CliByonmWrapper(Arc<CliByonmNpmResolver>);
impl NpmProcessStateProvider for CliByonmWrapper {
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self
.0
.root_node_modules_dir()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
}
impl CliNpmResolver for CliByonmNpmResolver {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
Arc::new(CliByonmWrapper(self))
}
fn into_byonm_or_managed(
self: Arc<Self>,
) -> ByonmOrManagedNpmResolver<CliSys> {
ByonmOrManagedNpmResolver::Byonm(self)
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
Arc::new(self.clone())
}
fn as_inner(&self) -> InnerCliNpmResolverRef {
InnerCliNpmResolverRef::Byonm(self)
}
fn root_node_modules_path(&self) -> Option<&Path> {
self.root_node_modules_dir()
}
fn check_state_hash(&self) -> Option<u64> {
// it is very difficult to determine the check state hash for byonm
// so we just return None to signify check caching is not supported
None
}
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
self
.resolve_pkg_folder_from_deno_module_req(req, referrer)
.map_err(ResolvePkgFolderFromDenoReqError::Byonm)
}
}

View file

@ -220,7 +220,7 @@ impl<'a> LifecycleScripts<'a> {
get_package_path,
);
let init_cwd = &self.config.initial_cwd;
let process_state = deno_lib::npm::npm_process_state(
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
Some(root_node_modules_dir_path),
);
@ -240,7 +240,7 @@ impl<'a> LifecycleScripts<'a> {
// However, if we concurrently run scripts in the future we will
// have to have multiple temp files.
let temp_file_fd =
deno_runtime::deno_process::npm_process_state_tempfile(
deno_runtime::ops::process::npm_process_state_tempfile(
process_state.as_bytes(),
)
.map_err(LifecycleScriptsError::CreateNpmProcessState)?;
@ -248,7 +248,7 @@ impl<'a> LifecycleScripts<'a> {
let _temp_file =
unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
env_vars.insert(
deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME
deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME
.to_string(),
(temp_file_fd as usize).to_string(),
);

View file

@ -9,7 +9,6 @@ use async_trait::async_trait;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_error::JsErrorBox;
use deno_lib::util::hash::FastInsecureHasher;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::managed::NpmResolutionCell;
@ -18,6 +17,7 @@ use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
use super::common::NpmPackageFsInstaller;
use super::PackageCaching;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;
use crate::colors;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;

View file

@ -1,25 +1,47 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::PackageReqNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::managed::ResolvePkgFolderFromDenoModuleError;
use deno_resolver::npm::managed::ResolvePkgFolderFromPkgIdError;
use deno_resolver::npm::managed::ResolvePkgIdFromSpecifierError;
use deno_resolver::npm::ByonmOrManagedNpmResolver;
use deno_resolver::npm::ManagedNpmResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::NpmPackageFolderResolver;
use sys_traits::FsMetadata;
use thiserror::Error;
use super::CliNpmRegistryInfoProvider;
use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
use crate::args::CliLockfile;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::cache::FastInsecureHasher;
use crate::sys::CliSys;
pub type CliManagedNpmResolverCreateOptions =
ManagedNpmResolverCreateOptions<CliSys>;
#[derive(Debug, Clone)]
pub enum CliNpmResolverManagedSnapshotOption {
ResolveFromLockfile(Arc<CliLockfile>),
@ -117,6 +139,36 @@ impl NpmResolutionInitializer {
}
}
pub struct CliManagedNpmResolverCreateOptions {
pub npm_cache_dir: Arc<NpmCacheDir>,
pub sys: CliSys,
pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo,
pub npmrc: Arc<ResolvedNpmRc>,
pub npm_resolution: Arc<NpmResolutionCell>,
}
pub fn create_managed_npm_resolver(
options: CliManagedNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> {
let managed_npm_resolver =
Arc::new(ManagedNpmResolver::<CliSys>::new::<CliSys>(
&options.npm_cache_dir,
&options.npmrc,
options.npm_resolution.clone(),
options.sys.clone(),
options.maybe_node_modules_path,
));
Arc::new(ManagedCliNpmResolver::new(
managed_npm_resolver,
options.npm_cache_dir,
options.npmrc,
options.npm_resolution,
options.sys,
options.npm_system_info,
))
}
#[derive(Debug, Error, Clone, JsError)]
#[error("failed reading lockfile '{}'", lockfile_path.display())]
#[class(inherit)]
@ -201,3 +253,244 @@ async fn snapshot_from_lockfile(
.await?;
Ok(snapshot)
}
/// An npm resolver where the resolution is managed by Deno rather than
/// the user bringing their own node_modules (BYONM) on the file system.
pub struct ManagedCliNpmResolver {
managed_npm_resolver: Arc<ManagedNpmResolver<CliSys>>,
npm_cache_dir: Arc<NpmCacheDir>,
npm_rc: Arc<ResolvedNpmRc>,
sys: CliSys,
resolution: Arc<NpmResolutionCell>,
system_info: NpmSystemInfo,
}
impl std::fmt::Debug for ManagedCliNpmResolver {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ManagedCliNpmResolver")
.field("<omitted>", &"<omitted>")
.finish()
}
}
impl ManagedCliNpmResolver {
#[allow(clippy::too_many_arguments)]
pub fn new(
managed_npm_resolver: Arc<ManagedNpmResolver<CliSys>>,
npm_cache_dir: Arc<NpmCacheDir>,
npm_rc: Arc<ResolvedNpmRc>,
resolution: Arc<NpmResolutionCell>,
sys: CliSys,
system_info: NpmSystemInfo,
) -> Self {
Self {
managed_npm_resolver,
npm_cache_dir,
npm_rc,
resolution,
sys,
system_info,
}
}
pub fn resolve_pkg_folder_from_pkg_id(
&self,
pkg_id: &NpmPackageId,
) -> Result<PathBuf, ResolvePkgFolderFromPkgIdError> {
self
.managed_npm_resolver
.resolve_pkg_folder_from_pkg_id(pkg_id)
}
/// Resolves the package id from the provided specifier.
pub fn resolve_pkg_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageId>, ResolvePkgIdFromSpecifierError> {
self
.managed_npm_resolver
.resolve_pkg_id_from_specifier(specifier)
}
pub fn resolve_pkg_reqs_from_pkg_id(
&self,
id: &NpmPackageId,
) -> Vec<PackageReq> {
self.resolution.resolve_pkg_reqs_from_pkg_id(id)
}
pub fn all_system_packages(
&self,
system_info: &NpmSystemInfo,
) -> Vec<NpmResolutionPackage> {
self.resolution.all_system_packages(system_info)
}
/// Checks if the provided package req's folder is cached.
pub fn is_pkg_req_folder_cached(&self, req: &PackageReq) -> bool {
self
.resolve_pkg_id_from_pkg_req(req)
.ok()
.and_then(|id| {
self
.managed_npm_resolver
.resolve_pkg_folder_from_pkg_id(&id)
.ok()
})
.map(|folder| self.sys.fs_exists_no_err(folder))
.unwrap_or(false)
}
pub fn snapshot(&self) -> NpmResolutionSnapshot {
self.resolution.snapshot()
}
pub fn top_package_req_for_name(&self, name: &str) -> Option<PackageReq> {
let package_reqs = self.resolution.package_reqs();
let mut entries = package_reqs
.iter()
.filter(|(_, nv)| nv.name == name)
.collect::<Vec<_>>();
entries.sort_by_key(|(_, nv)| &nv.version);
Some(entries.last()?.0.clone())
}
pub fn serialized_valid_snapshot_for_system(
&self,
system_info: &NpmSystemInfo,
) -> ValidSerializedNpmResolutionSnapshot {
self
.resolution
.serialized_valid_snapshot_for_system(system_info)
}
pub fn resolve_pkg_folder_from_deno_module(
&self,
nv: &PackageNv,
) -> Result<PathBuf, ResolvePkgFolderFromDenoModuleError> {
self
.managed_npm_resolver
.resolve_pkg_folder_from_deno_module(nv)
}
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> {
self.resolution.resolve_pkg_id_from_pkg_req(req)
}
pub fn maybe_node_modules_path(&self) -> Option<&Path> {
self.managed_npm_resolver.node_modules_path()
}
pub fn global_cache_root_path(&self) -> &Path {
self.npm_cache_dir.root_dir()
}
pub fn global_cache_root_url(&self) -> &Url {
self.npm_cache_dir.root_dir_url()
}
}
pub fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
impl NpmProcessStateProvider for ManagedCliNpmResolver {
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.resolution.serialized_valid_snapshot(),
self.managed_npm_resolver.node_modules_path(),
)
}
}
impl CliNpmResolver for ManagedCliNpmResolver {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self.managed_npm_resolver.clone()
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
self
}
fn into_byonm_or_managed(
self: Arc<Self>,
) -> ByonmOrManagedNpmResolver<CliSys> {
ByonmOrManagedNpmResolver::Managed(self.managed_npm_resolver.clone())
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
// create a new snapshotted npm resolution and resolver
let npm_resolution =
Arc::new(NpmResolutionCell::new(self.resolution.snapshot()));
Arc::new(ManagedCliNpmResolver::new(
Arc::new(ManagedNpmResolver::<CliSys>::new::<CliSys>(
&self.npm_cache_dir,
&self.npm_rc,
npm_resolution.clone(),
self.sys.clone(),
self.root_node_modules_path().map(ToOwned::to_owned),
)),
self.npm_cache_dir.clone(),
self.npm_rc.clone(),
npm_resolution,
self.sys.clone(),
self.system_info.clone(),
))
}
fn as_inner(&self) -> InnerCliNpmResolverRef {
InnerCliNpmResolverRef::Managed(self)
}
fn root_node_modules_path(&self) -> Option<&Path> {
self.managed_npm_resolver.node_modules_path()
}
fn check_state_hash(&self) -> Option<u64> {
// We could go further and check all the individual
// npm packages, but that's probably overkill.
let mut package_reqs = self
.resolution
.package_reqs()
.into_iter()
.collect::<Vec<_>>();
package_reqs.sort_by(|a, b| a.0.cmp(&b.0)); // determinism
let mut hasher = FastInsecureHasher::new_without_deno_version();
// ensure the cache gets busted when turning nodeModulesDir on or off
// as this could cause changes in resolution
hasher
.write_hashable(self.managed_npm_resolver.node_modules_path().is_some());
for (pkg_req, pkg_nv) in package_reqs {
hasher.write_hashable(&pkg_req);
hasher.write_hashable(&pkg_nv);
}
Some(hasher.finish())
}
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
self
.managed_npm_resolver
.resolve_pkg_folder_from_deno_module_req(req, referrer)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
}
}

View file

@ -1,27 +1,39 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod byonm;
pub mod installer;
mod managed;
mod permission_checker;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use dashmap::DashMap;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmOrManagedNpmResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use http::HeaderName;
use http::HeaderValue;
use node_resolver::NpmPackageFolderResolver;
pub use self::byonm::CliByonmNpmResolver;
pub use self::byonm::CliByonmNpmResolverCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver;
pub use self::managed::NpmResolutionInitializer;
pub use self::managed::ResolveSnapshotError;
pub use self::permission_checker::NpmRegistryReadPermissionChecker;
pub use self::permission_checker::NpmRegistryReadPermissionCheckerMode;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::sys::CliSys;
@ -32,12 +44,6 @@ pub type CliNpmTarballCache =
pub type CliNpmCache = deno_npm_cache::NpmCache<CliSys>;
pub type CliNpmRegistryInfoProvider =
deno_npm_cache::RegistryInfoProvider<CliNpmCacheHttpClient, CliSys>;
pub type CliNpmResolver = deno_resolver::npm::NpmResolver<CliSys>;
pub type CliManagedNpmResolver = deno_resolver::npm::ManagedNpmResolver<CliSys>;
pub type CliNpmResolverCreateOptions =
deno_resolver::npm::NpmResolverCreateOptions<CliSys>;
pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliSys>;
#[derive(Debug)]
pub struct CliNpmCacheHttpClient {
@ -98,6 +104,70 @@ impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient {
}
}
pub enum CliNpmResolverCreateOptions {
Managed(CliManagedNpmResolverCreateOptions),
Byonm(CliByonmNpmResolverCreateOptions),
}
pub fn create_cli_npm_resolver(
options: CliNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> {
use CliNpmResolverCreateOptions::*;
match options {
Managed(options) => managed::create_managed_npm_resolver(options),
Byonm(options) => Arc::new(ByonmNpmResolver::new(options)),
}
}
pub enum InnerCliNpmResolverRef<'a> {
Managed(&'a ManagedCliNpmResolver),
#[allow(dead_code)]
Byonm(&'a CliByonmNpmResolver),
}
// todo(dsherret): replace with an enum
pub trait CliNpmResolver: Send + Sync + std::fmt::Debug {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver>;
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider>;
fn into_byonm_or_managed(
self: Arc<Self>,
) -> ByonmOrManagedNpmResolver<CliSys>;
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>;
fn as_inner(&self) -> InnerCliNpmResolverRef;
fn as_managed(&self) -> Option<&ManagedCliNpmResolver> {
match self.as_inner() {
InnerCliNpmResolverRef::Managed(inner) => Some(inner),
InnerCliNpmResolverRef::Byonm(_) => None,
}
}
fn as_byonm(&self) -> Option<&CliByonmNpmResolver> {
match self.as_inner() {
InnerCliNpmResolverRef::Managed(_) => None,
InnerCliNpmResolverRef::Byonm(inner) => Some(inner),
}
}
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>;
fn root_node_modules_path(&self) -> Option<&Path>;
/// Returns a hash returning the state of the npm resolver
/// or `None` if the state currently can't be determined.
fn check_state_hash(&self) -> Option<u64>;
}
#[derive(Debug)]
pub struct NpmFetchResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
@ -183,8 +253,8 @@ pub const NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent";
pub fn get_npm_config_user_agent() -> String {
format!(
"deno/{} npm/? deno/{} {} {}",
DENO_VERSION_INFO.deno,
DENO_VERSION_INFO.deno,
env!("CARGO_PKG_VERSION"),
env!("CARGO_PKG_VERSION"),
std::env::consts::OS,
std::env::consts::ARCH
)

View file

@ -6,11 +6,12 @@ use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use deno_core::parking_lot::Mutex;
use deno_error::JsErrorBox;
use deno_runtime::deno_node::NodePermissions;
use parking_lot::Mutex;
use sys_traits::FsCanonicalize;
use crate::sys::DenoLibSys;
use crate::sys::CliSys;
#[derive(Debug)]
pub enum NpmRegistryReadPermissionCheckerMode {
@ -20,8 +21,8 @@ pub enum NpmRegistryReadPermissionCheckerMode {
}
#[derive(Debug)]
pub struct NpmRegistryReadPermissionChecker<TSys: DenoLibSys> {
sys: TSys,
pub struct NpmRegistryReadPermissionChecker {
sys: CliSys,
cache: Mutex<HashMap<PathBuf, PathBuf>>,
mode: NpmRegistryReadPermissionCheckerMode,
}
@ -36,8 +37,8 @@ struct EnsureRegistryReadPermissionError {
source: std::io::Error,
}
impl<TSys: DenoLibSys> NpmRegistryReadPermissionChecker<TSys> {
pub fn new(sys: TSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
impl NpmRegistryReadPermissionChecker {
pub fn new(sys: CliSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
Self {
sys,
cache: Default::default(),

View file

@ -1,11 +1,17 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use async_trait::async_trait;
use dashmap::DashSet;
use deno_ast::MediaType;
use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::source::ResolveError;
@ -13,44 +19,150 @@ use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::sloppy_imports::SloppyImportsResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use deno_semver::package::PackageReq;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use thiserror::Error;
use crate::args::NpmCachingStrategy;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver;
use crate::sys::CliSys;
use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_cow;
pub type CliCjsTracker =
deno_resolver::cjs::CjsTracker<DenoInNpmPackageChecker, CliSys>;
pub type CliIsCjsResolver =
deno_resolver::cjs::IsCjsResolver<DenoInNpmPackageChecker, CliSys>;
pub type CjsTracker = deno_resolver::cjs::CjsTracker<CliSys>;
pub type IsCjsResolver = deno_resolver::cjs::IsCjsResolver<CliSys>;
pub type CliSloppyImportsCachedFs = SloppyImportsCachedFs<CliSys>;
pub type CliSloppyImportsResolver =
SloppyImportsResolver<CliSloppyImportsCachedFs>;
pub type CliDenoResolver = deno_resolver::DenoResolver<
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
CliNpmResolver,
CliSloppyImportsCachedFs,
CliSys,
>;
pub type CliNpmReqResolver = deno_resolver::npm::NpmReqResolver<
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
CliNpmResolver,
CliSys,
>;
pub type CliNpmReqResolver =
deno_resolver::npm::NpmReqResolver<RealIsBuiltInNodeModuleChecker, CliSys>;
pub struct ModuleCodeStringSource {
pub code: ModuleSourceCode,
pub found_url: ModuleSpecifier,
pub media_type: MediaType,
}
#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("{media_type} files are not supported in npm packages: {specifier}")]
pub struct NotSupportedKindInNpmError {
pub media_type: MediaType,
pub specifier: Url,
}
// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone)
#[derive(Clone)]
pub struct NpmModuleLoader {
cjs_tracker: Arc<CjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>,
node_code_translator: Arc<CliNodeCodeTranslator>,
}
impl NpmModuleLoader {
pub fn new(
cjs_tracker: Arc<CjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>,
node_code_translator: Arc<CliNodeCodeTranslator>,
) -> Self {
Self {
cjs_tracker,
node_code_translator,
fs,
}
}
pub async fn load(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, AnyError> {
let file_path = specifier.to_file_path().unwrap();
let code = self
.fs
.read_file_async(file_path.clone(), None)
.await
.map_err(AnyError::from)
.with_context(|| {
if file_path.is_dir() {
// directory imports are not allowed when importing from an
// ES module, so provide the user with a helpful error message
let dir_path = file_path;
let mut msg = "Directory import ".to_string();
msg.push_str(&dir_path.to_string_lossy());
if let Some(referrer) = &maybe_referrer {
msg.push_str(" is not supported resolving import from ");
msg.push_str(referrer.as_str());
let entrypoint_name = ["index.mjs", "index.js", "index.cjs"]
.iter()
.find(|e| dir_path.join(e).is_file());
if let Some(entrypoint_name) = entrypoint_name {
msg.push_str("\nDid you mean to import ");
msg.push_str(entrypoint_name);
msg.push_str(" within the directory?");
}
}
msg
} else {
let mut msg = "Unable to load ".to_string();
msg.push_str(&file_path.to_string_lossy());
if let Some(referrer) = &maybe_referrer {
msg.push_str(" imported from ");
msg.push_str(referrer.as_str());
}
msg
}
})?;
let media_type = MediaType::from_specifier(specifier);
if media_type.is_emittable() {
return Err(AnyError::from(NotSupportedKindInNpmError {
media_type,
specifier: specifier.clone(),
}));
}
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals
let code = from_utf8_lossy_cow(code);
ModuleSourceCode::String(
self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(code))
.await?
.into_owned()
.into(),
)
} else {
// esm and json code is untouched
ModuleSourceCode::Bytes(match code {
Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
Cow::Borrowed(bytes) => bytes.into(),
})
};
Ok(ModuleCodeStringSource {
code,
found_url: specifier.clone(),
media_type: MediaType::from_specifier(specifier),
})
}
}
#[derive(Debug, Default)]
pub struct FoundPackageJsonDepFlag(AtomicFlag);

View file

@ -1,63 +0,0 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "denort"
version = "2.1.5"
authors.workspace = true
default-run = "denort"
edition.workspace = true
license.workspace = true
publish = false
repository.workspace = true
description = "Provides the denort executable"
[[bin]]
name = "denort"
path = "main.rs"
doc = false
[[test]]
name = "integration"
path = "integration_tests_runner.rs"
harness = false
[build-dependencies]
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
[dependencies]
deno_cache_dir.workspace = true
deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_error.workspace = true
deno_lib.workspace = true
deno_media_type = { workspace = true, features = ["data_url", "decoding"] }
deno_npm.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_snapshots.workspace = true
deno_terminal.workspace = true
libsui = "0.5.0"
node_resolver.workspace = true
async-trait.workspace = true
bincode = "=1.3.3"
import_map = { version = "=0.21.0", features = ["ext"] }
indexmap.workspace = true
log = { workspace = true, features = ["serde"] }
serde.workspace = true
serde_json.workspace = true
sys_traits = { workspace = true, features = ["getrandom", "filetime", "libc", "real", "strip_unc", "winapi"] }
thiserror.workspace = true
tokio.workspace = true
tokio-util.workspace = true
twox-hash.workspace = true
url.workspace = true
[dev-dependencies]
pretty_assertions.workspace = true
sys_traits = { workspace = true, features = ["memory"] }
test_util.workspace = true

View file

@ -1,682 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::ffi::OsString;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_lib::standalone::binary::DenoRtDeserializable;
use deno_lib::standalone::binary::Metadata;
use deno_lib::standalone::binary::RemoteModuleEntry;
use deno_lib::standalone::binary::SpecifierDataStore;
use deno_lib::standalone::binary::SpecifierId;
use deno_lib::standalone::binary::MAGIC_BYTES;
use deno_lib::standalone::virtual_fs::VirtualDirectory;
use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries;
use deno_media_type::MediaType;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use indexmap::IndexMap;
use thiserror::Error;
use crate::file_system::FileBackedVfs;
use crate::file_system::VfsRoot;
pub struct StandaloneData {
pub metadata: Metadata,
pub modules: Arc<StandaloneModules>,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub vfs: Arc<FileBackedVfs>,
}
/// This function will try to run this binary as a standalone binary
/// produced by `deno compile`. It determines if this is a standalone
/// binary by skipping over the trailer width at the end of the file,
/// then checking for the magic trailer string `d3n0l4nd`. If found,
/// the bundle is executed. If not, this function exits with `Ok(None)`.
pub fn extract_standalone(
cli_args: Cow<Vec<OsString>>,
) -> Result<Option<StandaloneData>, AnyError> {
let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None);
};
let root_path = {
let maybe_current_exe = std::env::current_exe().ok();
let current_exe_name = maybe_current_exe
.as_ref()
.and_then(|p| p.file_name())
.map(|p| p.to_string_lossy())
// should never happen
.unwrap_or_else(|| Cow::Borrowed("binary"));
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
};
let root_url = deno_path_util::url_from_directory_path(&root_path)?;
let DeserializedDataSection {
mut metadata,
npm_snapshot,
modules_store: remote_modules,
vfs_root_entries,
vfs_files_data,
} = match deserialize_binary_data_section(&root_url, data)? {
Some(data_section) => data_section,
None => return Ok(None),
};
let cli_args = cli_args.into_owned();
metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap());
}
let vfs = {
let fs_root = VfsRoot {
dir: VirtualDirectory {
// align the name of the directory with the root dir
name: root_path.file_name().unwrap().to_string_lossy().to_string(),
entries: vfs_root_entries,
},
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(
Cow::Borrowed(vfs_files_data),
fs_root,
metadata.vfs_case_sensitivity,
))
};
Ok(Some(StandaloneData {
metadata,
modules: Arc::new(StandaloneModules {
modules: remote_modules,
vfs: vfs.clone(),
}),
npm_snapshot,
root_path,
vfs,
}))
}
pub struct DeserializedDataSection {
pub metadata: Metadata,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub modules_store: RemoteModulesStore,
pub vfs_root_entries: VirtualDirectoryEntries,
pub vfs_files_data: &'static [u8],
}
pub fn deserialize_binary_data_section(
root_dir_url: &Url,
data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
if input.len() < MAGIC_BYTES.len() {
bail!("Unexpected end of data. Could not find magic bytes.");
}
let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
if magic_bytes != MAGIC_BYTES {
return Ok((input, false));
}
Ok((input, true))
}
let (input, found) = read_magic_bytes(data)?;
if !found {
return Ok(None);
}
// 1. Metadata
let (input, data) =
read_bytes_with_u64_len(input).context("reading metadata")?;
let metadata: Metadata =
serde_json::from_slice(data).context("deserializing metadata")?;
// 2. Npm snapshot
let (input, data) =
read_bytes_with_u64_len(input).context("reading npm snapshot")?;
let npm_snapshot = if data.is_empty() {
None
} else {
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
};
// 3. Specifiers
let (input, specifiers_store) =
SpecifierStore::deserialize(root_dir_url, input)
.context("deserializing specifiers")?;
// 4. Redirects
let (input, redirects_store) =
SpecifierDataStore::<SpecifierId>::deserialize(input)
.context("deserializing redirects")?;
// 5. Remote modules
let (input, remote_modules_store) =
SpecifierDataStore::<RemoteModuleEntry<'static>>::deserialize(input)
.context("deserializing remote modules")?;
// 6. VFS
let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
let vfs_root_entries: VirtualDirectoryEntries =
serde_json::from_slice(data).context("deserializing vfs data")?;
let (input, vfs_files_data) =
read_bytes_with_u64_len(input).context("reading vfs files data")?;
// finally ensure we read the magic bytes at the end
let (_input, found) = read_magic_bytes(input)?;
if !found {
bail!("Could not find magic bytes at the end of the data.");
}
let modules_store = RemoteModulesStore::new(
specifiers_store,
redirects_store,
remote_modules_store,
);
Ok(Some(DeserializedDataSection {
metadata,
npm_snapshot,
modules_store,
vfs_root_entries,
vfs_files_data,
}))
}
struct SpecifierStore {
data: IndexMap<Arc<Url>, SpecifierId>,
reverse: IndexMap<SpecifierId, Arc<Url>>,
}
impl SpecifierStore {
pub fn deserialize<'a>(
root_dir_url: &Url,
input: &'a [u8],
) -> std::io::Result<(&'a [u8], Self)> {
let (input, len) = read_u32_as_usize(input)?;
let mut data = IndexMap::with_capacity(len);
let mut reverse = IndexMap::with_capacity(len);
let mut input = input;
for _ in 0..len {
let (new_input, specifier_str) = read_string_lossy(input)?;
let specifier = match Url::parse(&specifier_str) {
Ok(url) => url,
Err(err) => match root_dir_url.join(&specifier_str) {
Ok(url) => url,
Err(_) => {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
err,
));
}
},
};
let (new_input, id) = SpecifierId::deserialize(new_input)?;
let specifier = Arc::new(specifier);
data.insert(specifier.clone(), id);
reverse.insert(id, specifier);
input = new_input;
}
Ok((input, Self { data, reverse }))
}
pub fn get_id(&self, specifier: &Url) -> Option<SpecifierId> {
self.data.get(specifier).cloned()
}
pub fn get_specifier(&self, specifier_id: SpecifierId) -> Option<&Url> {
self.reverse.get(&specifier_id).map(|url| url.as_ref())
}
}
pub struct StandaloneModules {
modules: RemoteModulesStore,
vfs: Arc<FileBackedVfs>,
}
impl StandaloneModules {
  /// Resolves `specifier` through any recorded redirects.
  ///
  /// `file:` specifiers are returned unchanged; everything else is looked
  /// up in the remote modules store.
  pub fn resolve_specifier<'a>(
    &'a self,
    specifier: &'a Url,
  ) -> Result<Option<&'a Url>, TooManyRedirectsError> {
    if specifier.scheme() == "file" {
      Ok(Some(specifier))
    } else {
      self.modules.resolve_specifier(specifier)
    }
  }

  /// Returns true when `path` has a file entry in the virtual file system.
  pub fn has_file(&self, path: &Path) -> bool {
    self.vfs.file_entry(path).is_ok()
  }

  /// Reads module data for `specifier`.
  ///
  /// `file:` specifiers are served from the VFS when present; if the VFS
  /// reports not-found, the real file system is tried before returning
  /// `Ok(None)`. Other schemes are read from the remote modules store.
  pub fn read<'a>(
    &'a self,
    specifier: &'a Url,
  ) -> Result<Option<DenoCompileModuleData<'a>>, JsErrorBox> {
    if specifier.scheme() == "file" {
      let path = deno_path_util::url_to_file_path(specifier)
        .map_err(JsErrorBox::from_err)?;
      let mut transpiled = None;
      let mut source_map = None;
      let mut cjs_export_analysis = None;
      let bytes = match self.vfs.file_entry(&path) {
        Ok(entry) => {
          let bytes = self
            .vfs
            .read_file_all(entry)
            .map_err(JsErrorBox::from_err)?;
          // optional companion blobs stored alongside the file entry;
          // read failures degrade to `None` rather than erroring
          transpiled = entry
            .transpiled_offset
            .and_then(|t| self.vfs.read_file_offset_with_len(t).ok());
          source_map = entry
            .source_map_offset
            .and_then(|t| self.vfs.read_file_offset_with_len(t).ok());
          cjs_export_analysis = entry
            .cjs_export_analysis_offset
            .and_then(|t| self.vfs.read_file_offset_with_len(t).ok());
          bytes
        }
        Err(err) if err.kind() == ErrorKind::NotFound => {
          // not embedded in the VFS: fall back to the real file system
          match RealFs.read_file_sync(&path, None) {
            Ok(bytes) => bytes,
            Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
              return Ok(None)
            }
            Err(err) => return Err(JsErrorBox::from_err(err)),
          }
        }
        Err(err) => return Err(JsErrorBox::from_err(err)),
      };
      Ok(Some(DenoCompileModuleData {
        media_type: MediaType::from_specifier(specifier),
        specifier,
        data: bytes,
        transpiled,
        source_map,
        cjs_export_analysis,
      }))
    } else {
      self.modules.read(specifier).map_err(JsErrorBox::from_err)
    }
  }
}
/// A module's bytes plus optional derived artifacts (transpiled output,
/// source map, CJS export analysis) as stored in the standalone binary.
pub struct DenoCompileModuleData<'a> {
  pub specifier: &'a Url,
  pub media_type: MediaType,
  // original module bytes
  pub data: Cow<'static, [u8]>,
  // transpiled source, when the module required transpilation
  pub transpiled: Option<Cow<'static, [u8]>>,
  pub source_map: Option<Cow<'static, [u8]>>,
  pub cjs_export_analysis: Option<Cow<'static, [u8]>>,
}
impl<'a> DenoCompileModuleData<'a> {
  /// Splits this module into `(specifier, module type, source)`, preferring
  /// the transpiled source over the original bytes when present.
  pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
    // Reinterprets borrowed bytes as a `&'static str` without re-validating;
    // owned bytes are passed through untouched.
    fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
      match data {
        Cow::Borrowed(d) => DenoCompileModuleSource::String(
          // SAFETY: we know this is a valid utf8 string
          unsafe { std::str::from_utf8_unchecked(d) },
        ),
        Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
      }
    }
    let data = self.transpiled.unwrap_or(self.data);
    let (media_type, source) = match self.media_type {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx => (ModuleType::JavaScript, into_string_unsafe(data)),
      MediaType::Json => (ModuleType::Json, into_string_unsafe(data)),
      MediaType::Wasm => {
        (ModuleType::Wasm, DenoCompileModuleSource::Bytes(data))
      }
      // just assume javascript if we made it here
      MediaType::Css | MediaType::SourceMap | MediaType::Unknown => {
        (ModuleType::JavaScript, DenoCompileModuleSource::Bytes(data))
      }
    };
    (self.specifier, media_type, source)
  }
}
/// Module source text as either a static string (borrowed from the binary)
/// or raw bytes.
pub enum DenoCompileModuleSource {
  String(&'static str),
  Bytes(Cow<'static, [u8]>),
}
impl DenoCompileModuleSource {
pub fn into_for_v8(self) -> ModuleSourceCode {
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
ModuleSourceCode::Bytes(match data {
Cow::Borrowed(d) => d.into(),
Cow::Owned(d) => d.into_boxed_slice().into(),
})
}
match self {
// todo(https://github.com/denoland/deno_core/pull/943): store whether
// the string is ascii or not ahead of time so we can avoid the is_ascii()
// check in FastString::from_static
Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
Self::Bytes(b) => into_bytes(b),
}
}
}
/// Error returned when resolving a specifier follows more redirect hops
/// than allowed (guards against redirect cycles in the embedded data).
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error("Too many redirects resolving: {0}")]
pub struct TooManyRedirectsError(Url);
/// Store of remote (non-`file:`) modules embedded in the standalone binary:
/// a specifier/id table, a redirect table, and per-id module entries.
pub struct RemoteModulesStore {
  specifiers: SpecifierStore,
  redirects: SpecifierDataStore<SpecifierId>,
  remote_modules: SpecifierDataStore<RemoteModuleEntry<'static>>,
}
impl RemoteModulesStore {
  /// Bundles the deserialized specifier, redirect, and module tables.
  fn new(
    specifiers: SpecifierStore,
    redirects: SpecifierDataStore<SpecifierId>,
    remote_modules: SpecifierDataStore<RemoteModuleEntry<'static>>,
  ) -> Self {
    Self {
      specifiers,
      redirects,
      remote_modules,
    }
  }

  /// Follows redirects for `specifier` and returns the final specifier.
  ///
  /// Returns `Ok(None)` when the specifier is unknown to the store, and
  /// errors after more than 10 redirect hops (cycle guard).
  pub fn resolve_specifier<'a>(
    &'a self,
    specifier: &'a Url,
  ) -> Result<Option<&'a Url>, TooManyRedirectsError> {
    let Some(mut current) = self.specifiers.get_id(specifier) else {
      return Ok(None);
    };
    let mut count = 0;
    loop {
      if count > 10 {
        return Err(TooManyRedirectsError(specifier.clone()));
      }
      match self.redirects.get(current) {
        Some(to) => {
          current = *to;
          count += 1;
        }
        None => {
          if count == 0 {
            // no redirect was followed, so return the caller's reference
            return Ok(Some(specifier));
          } else {
            return Ok(self.specifiers.get_specifier(current));
          }
        }
      }
    }
  }

  /// Reads module data for `original_specifier`, following redirects with
  /// the same 10-hop cycle guard as `resolve_specifier`.
  ///
  /// Returns `Ok(None)` when the (redirect-resolved) specifier has no
  /// stored module entry.
  pub fn read<'a>(
    &'a self,
    original_specifier: &'a Url,
  ) -> Result<Option<DenoCompileModuleData<'a>>, TooManyRedirectsError> {
    #[allow(clippy::ptr_arg)]
    fn handle_cow_ref(data: &Cow<'static, [u8]>) -> Cow<'static, [u8]> {
      match data {
        Cow::Borrowed(data) => Cow::Borrowed(data),
        Cow::Owned(data) => {
          // this variant should never happen because the data
          // should always be borrowed static in denort
          debug_assert!(false);
          Cow::Owned(data.clone())
        }
      }
    }
    let mut count = 0;
    let Some(mut specifier) = self.specifiers.get_id(original_specifier) else {
      return Ok(None);
    };
    loop {
      if count > 10 {
        return Err(TooManyRedirectsError(original_specifier.clone()));
      }
      match self.redirects.get(specifier) {
        Some(to) => {
          specifier = *to;
          count += 1;
        }
        None => {
          let Some(entry) = self.remote_modules.get(specifier) else {
            return Ok(None);
          };
          return Ok(Some(DenoCompileModuleData {
            specifier: if count == 0 {
              original_specifier
            } else {
              self.specifiers.get_specifier(specifier).unwrap()
            },
            media_type: entry.media_type,
            data: handle_cow_ref(&entry.data),
            transpiled: entry.maybe_transpiled.as_ref().map(handle_cow_ref),
            source_map: entry.maybe_source_map.as_ref().map(handle_cow_ref),
            cjs_export_analysis: entry
              .maybe_cjs_export_analysis
              .as_ref()
              .map(handle_cow_ref),
          }));
        }
      }
    }
  }
}
/// Deserializes an npm resolution snapshot from `input`.
///
/// Wire layout: a count-prefixed list of package ids, then count-prefixed
/// root package mappings, then per-package dependency lists, with package
/// references encoded as indexes into the id list. Errors if any bytes
/// remain after parsing.
fn deserialize_npm_snapshot(
  input: &[u8],
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
  // parses one serialized `NpmPackageId`
  fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
    let (input, id) = read_string_lossy(input)?;
    let id = NpmPackageId::from_serialized(&id)?;
    Ok((input, id))
  }
  // returns a parser for one (package requirement -> package id) mapping
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_root_package<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let req = PackageReq::from_str(&req)?;
      let (input, id) = read_u32_as_usize(input)?;
      Ok((input, (req, id_to_npm_id(id)?)))
    }
  }
  // returns a parser for one (dependency name -> package id) mapping
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_package_dep<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let (input, id) = read_u32_as_usize(input)?;
      let req = StackString::from_cow(req);
      Ok((input, (req, id_to_npm_id(id)?)))
    }
  }
  // parses one package's dependency list; only `id` and `dependencies`
  // are stored in the binary, the other fields use defaults
  fn parse_package<'a>(
    input: &'a [u8],
    id: NpmPackageId,
    id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
    let (input, deps_len) = read_u32_as_usize(input)?;
    let (input, dependencies) =
      parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
    Ok((
      input,
      SerializedNpmResolutionSnapshotPackage {
        id,
        system: Default::default(),
        dist: Default::default(),
        dependencies,
        optional_dependencies: Default::default(),
        bin: None,
        scripts: Default::default(),
        deprecated: Default::default(),
      },
    ))
  }
  let (input, packages_len) = read_u32_as_usize(input)?;
  // get a hashmap of all the npm package ids to their serialized ids
  let (input, data_ids_to_npm_ids) =
    parse_vec_n_times(input, packages_len, parse_id)
      .context("deserializing id")?;
  let data_id_to_npm_id = |id: usize| {
    data_ids_to_npm_ids
      .get(id)
      .cloned()
      .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
  };
  let (input, root_packages_len) = read_u32_as_usize(input)?;
  let (input, root_packages) = parse_hashmap_n_times(
    input,
    root_packages_len,
    parse_root_package(&data_id_to_npm_id),
  )
  .context("deserializing root package")?;
  let (input, packages) =
    parse_vec_n_times_with_index(input, packages_len, |input, index| {
      parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
    })
    .context("deserializing package")?;
  if !input.is_empty() {
    bail!("Unexpected data left over");
  }
  Ok(
    SerializedNpmResolutionSnapshot {
      packages,
      root_packages,
    }
    // this is ok because we have already verified that all the
    // identifiers found in the snapshot are valid via the
    // npm package id -> npm package id mapping
    .into_valid_unsafe(),
  )
}
/// Runs `parse` exactly `times` times, collecting the key/value pairs into
/// a `HashMap` and threading the unconsumed input through each call.
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
  let mut entries = HashMap::with_capacity(times);
  for _ in 0..times {
    let (rest, (key, value)) = parse(input)?;
    entries.insert(key, value);
    input = rest;
  }
  Ok((input, entries))
}
/// Runs `parse` exactly `times` times, collecting results into a `Vec`.
/// Thin wrapper over `parse_vec_n_times_with_index` that drops the index.
fn parse_vec_n_times<TResult>(
  input: &[u8],
  times: usize,
  parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
  parse_vec_n_times_with_index(input, times, |data, _| parse(data))
}
/// Runs `parse` exactly `times` times, passing the element index along and
/// threading the unconsumed input through each call.
fn parse_vec_n_times_with_index<TResult>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
  let mut items = Vec::with_capacity(times);
  for index in 0..times {
    let (rest, item) = parse(input, index)?;
    items.push(item);
    input = rest;
  }
  Ok((input, items))
}
/// Reads a little-endian u64 length prefix, then that many bytes.
/// Returns `(remaining_input, data)`.
fn read_bytes_with_u64_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
  let (rest, len) = read_u64(input)?;
  read_bytes(rest, len as usize)
}
/// Reads a little-endian u32 length prefix, then that many bytes.
/// Returns `(remaining_input, data)`.
fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
  let (rest, len) = read_u32_as_usize(input)?;
  read_bytes(rest, len)
}
/// Splits `input` into `(remaining_input, data)` where `data` is the first
/// `len` bytes.
///
/// # Errors
/// Returns an `InvalidData` error when `input` holds fewer than `len` bytes.
fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> {
  // Checked slice access replaces the separate length guard and fixes the
  // misleading `len_bytes` name the data half previously had.
  let data = input.get(..len).ok_or_else(|| {
    std::io::Error::new(std::io::ErrorKind::InvalidData, "Unexpected end of data")
  })?;
  Ok((&input[len..], data))
}
/// Verifies that `input` holds at least `len` bytes, reporting a uniform
/// `InvalidData` error for any short read.
#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> {
  match input.len() >= len {
    true => Ok(()),
    false => Err(std::io::Error::new(
      std::io::ErrorKind::InvalidData,
      "Unexpected end of data",
    )),
  }
}
/// Reads a u32-length-prefixed string, replacing invalid UTF-8 sequences
/// with the replacement character.
fn read_string_lossy(input: &[u8]) -> std::io::Result<(&[u8], Cow<str>)> {
  let (rest, bytes) = read_bytes_with_u32_len(input)?;
  Ok((rest, String::from_utf8_lossy(bytes)))
}
/// Reads a little-endian u32 and widens it to `usize`.
fn read_u32_as_usize(input: &[u8]) -> std::io::Result<(&[u8], usize)> {
  let (rest, raw) = read_bytes(input, 4)?;
  // `raw` is exactly 4 bytes, so the conversion cannot fail
  let value = u32::from_le_bytes(raw.try_into().unwrap());
  Ok((rest, value as usize))
}
/// Reads a little-endian u64.
fn read_u64(input: &[u8]) -> std::io::Result<(&[u8], u64)> {
  let (rest, raw) = read_bytes(input, 8)?;
  // `raw` is exactly 8 bytes, so the conversion cannot fail
  let value = u64::from_le_bytes(raw.try_into().unwrap());
  Ok((rest, value))
}

View file

@ -1,11 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
/// Build script: emits linker flags for native deps, except on docs.rs
/// where no real build happens.
fn main() {
  let building_docs = std::env::var_os("DOCS_RS").is_some();
  if !building_docs {
    deno_runtime::deno_napi::print_linker_flags("denort");
    deno_runtime::deno_webgpu::print_linker_flags("denort");
  }
}

File diff suppressed because it is too large Load diff

View file

@ -1,5 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Stub entrypoint: performs no work; see the comment inside.
pub fn main() {
  // this file exists to cause the executable to be built when running cargo test
}

View file

@ -1,165 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_lib::loader::NpmModuleLoader;
use deno_lib::standalone::binary::CjsExportAnalysisEntry;
use deno_media_type::MediaType;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmReqResolver;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use node_resolver::analyze::CjsAnalysis;
use node_resolver::analyze::CjsAnalysisExports;
use node_resolver::analyze::NodeCodeTranslator;
use crate::binary::StandaloneModules;
use crate::file_system::DenoRtSys;
/// CJS tracker specialized for the denort runtime's system abstraction.
pub type DenoRtCjsTracker =
  deno_resolver::cjs::CjsTracker<DenoInNpmPackageChecker, DenoRtSys>;
/// Npm resolver specialized for denort.
pub type DenoRtNpmResolver = deno_resolver::npm::NpmResolver<DenoRtSys>;
/// Npm module loader wired up with denort's CJS analyzer and resolvers.
pub type DenoRtNpmModuleLoader = NpmModuleLoader<
  CjsCodeAnalyzer,
  DenoInNpmPackageChecker,
  RealIsBuiltInNodeModuleChecker,
  DenoRtNpmResolver,
  DenoRtSys,
>;
/// CJS-to-ESM code translator specialized for denort.
pub type DenoRtNodeCodeTranslator = NodeCodeTranslator<
  CjsCodeAnalyzer,
  DenoInNpmPackageChecker,
  RealIsBuiltInNodeModuleChecker,
  DenoRtNpmResolver,
  DenoRtSys,
>;
/// Node resolver specialized for denort.
pub type DenoRtNodeResolver = deno_runtime::deno_node::NodeResolver<
  DenoInNpmPackageChecker,
  DenoRtNpmResolver,
  DenoRtSys,
>;
/// Resolver for `npm:` package requirements specialized for denort.
pub type DenoRtNpmReqResolver = NpmReqResolver<
  DenoInNpmPackageChecker,
  RealIsBuiltInNodeModuleChecker,
  DenoRtNpmResolver,
  DenoRtSys,
>;
/// CJS analyzer that answers from export analysis precomputed at compile
/// time and embedded in the standalone binary (no swc available at runtime).
pub struct CjsCodeAnalyzer {
  cjs_tracker: Arc<DenoRtCjsTracker>,
  modules: Arc<StandaloneModules>,
  sys: DenoRtSys,
}
impl CjsCodeAnalyzer {
  pub fn new(
    cjs_tracker: Arc<DenoRtCjsTracker>,
    modules: Arc<StandaloneModules>,
    sys: DenoRtSys,
  ) -> Self {
    Self {
      cjs_tracker,
      modules,
      sys,
    }
  }

  /// Classifies `specifier` as ESM or CJS.
  ///
  /// JSON is reported as CJS with no exports. For maybe-CJS modules the
  /// embedded export-analysis entry decides; when no entry was stored,
  /// the module is assumed to be ESM (analysis is unavailable at runtime).
  fn inner_cjs_analysis<'a>(
    &self,
    specifier: &Url,
    source: Cow<'a, str>,
  ) -> Result<CjsAnalysis<'a>, JsErrorBox> {
    let media_type = MediaType::from_specifier(specifier);
    if media_type == MediaType::Json {
      return Ok(CjsAnalysis::Cjs(CjsAnalysisExports {
        exports: vec![],
        reexports: vec![],
      }));
    }
    let cjs_tracker = self.cjs_tracker.clone();
    let is_maybe_cjs = cjs_tracker
      .is_maybe_cjs(specifier, media_type)
      .map_err(JsErrorBox::from_err)?;
    let analysis = if is_maybe_cjs {
      let data = self
        .modules
        .read(specifier)?
        .and_then(|d| d.cjs_export_analysis);
      match data {
        Some(data) => {
          // entry was serialized with bincode at compile time
          let data: CjsExportAnalysisEntry = bincode::deserialize(&data)
            .map_err(|err| JsErrorBox::generic(err.to_string()))?;
          match data {
            CjsExportAnalysisEntry::Esm => {
              // record the verdict so later queries are consistent
              cjs_tracker.set_is_known_script(specifier, false);
              CjsAnalysis::Esm(source)
            }
            CjsExportAnalysisEntry::Cjs(analysis) => {
              cjs_tracker.set_is_known_script(specifier, true);
              CjsAnalysis::Cjs(analysis)
            }
          }
        }
        None => {
          if log::log_enabled!(log::Level::Debug) {
            if self.sys.is_specifier_in_vfs(specifier) {
              log::debug!(
                "No CJS export analysis was stored for '{}'. Assuming ESM. This might indicate a bug in Deno.",
                specifier
              );
            } else {
              log::debug!(
                "Analyzing potentially CommonJS files is not supported at runtime in a compiled executable ({}). Assuming ESM.",
                specifier
              );
            }
          }
          // assume ESM as we don't have access to swc here
          CjsAnalysis::Esm(source)
        }
      }
    } else {
      CjsAnalysis::Esm(source)
    };
    Ok(analysis)
  }
}
#[async_trait::async_trait(?Send)]
impl node_resolver::analyze::CjsCodeAnalyzer for CjsCodeAnalyzer {
  /// Analyzes `specifier`, loading its text from disk when the caller did
  /// not supply it. When the text cannot be obtained, an empty CJS analysis
  /// is returned rather than an error.
  async fn analyze_cjs<'a>(
    &self,
    specifier: &Url,
    source: Option<Cow<'a, str>>,
  ) -> Result<CjsAnalysis<'a>, JsErrorBox> {
    // analysis reported when the module text is unavailable
    fn empty_exports<'s>() -> CjsAnalysis<'s> {
      CjsAnalysis::Cjs(CjsAnalysisExports {
        exports: vec![],
        reexports: vec![],
      })
    }
    let source = match source {
      Some(source) => source,
      None => {
        let Ok(path) = deno_path_util::url_to_file_path(specifier) else {
          return Ok(empty_exports());
        };
        // todo(dsherret): should this use the sync method instead?
        match self.sys.read_text_file_lossy_async(path, None).await {
          Ok(text) => text,
          Err(_) => return Ok(empty_exports()),
        }
      }
    };
    self.inner_cjs_analysis(specifier, source)
  }
}

View file

@ -1,990 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::OnceLock;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError;
use deno_core::error::ModuleLoaderError;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::FutureExt;
use deno_core::url::Url;
use deno_core::v8_set_flags;
use deno_core::FastString;
use deno_core::FeatureChecker;
use deno_core::ModuleLoader;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo;
use deno_error::JsErrorBox;
use deno_lib::args::get_root_cert_store;
use deno_lib::args::npm_pkg_req_ref_to_binary_command;
use deno_lib::args::CaData;
use deno_lib::args::RootCertStoreLoadError;
use deno_lib::loader::NpmModuleLoader;
use deno_lib::npm::create_npm_process_state_provider;
use deno_lib::npm::NpmRegistryReadPermissionChecker;
use deno_lib::npm::NpmRegistryReadPermissionCheckerMode;
use deno_lib::standalone::binary::NodeModules;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lib::util::text_encoding::from_utf8_lossy_cow;
use deno_lib::util::text_encoding::from_utf8_lossy_owned;
use deno_lib::util::v8::construct_v8_flags;
use deno_lib::worker::CreateModuleLoaderResult;
use deno_lib::worker::LibMainWorkerFactory;
use deno_lib::worker::LibMainWorkerOptions;
use deno_lib::worker::ModuleLoaderFactory;
use deno_lib::worker::StorageKeyResolver;
use deno_media_type::MediaType;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::cjs::CjsTracker;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmReqResolver;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::npm::NpmResolver;
use deno_resolver::npm::NpmResolverCreateOptions;
use deno_runtime::code_cache::CodeCache;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolutionKind;
use node_resolver::NodeResolver;
use node_resolver::PackageJsonResolver;
use node_resolver::ResolutionMode;
use crate::binary::DenoCompileModuleSource;
use crate::binary::StandaloneData;
use crate::binary::StandaloneModules;
use crate::code_cache::DenoCompileCodeCache;
use crate::file_system::DenoRtSys;
use crate::file_system::FileBackedVfs;
use crate::node::CjsCodeAnalyzer;
use crate::node::DenoRtCjsTracker;
use crate::node::DenoRtNodeCodeTranslator;
use crate::node::DenoRtNodeResolver;
use crate::node::DenoRtNpmModuleLoader;
use crate::node::DenoRtNpmReqResolver;
/// State shared between all module loaders created for a standalone
/// executable (main worker and web workers alike).
struct SharedModuleLoaderState {
  cjs_tracker: Arc<DenoRtCjsTracker>,
  // `None` when code caching is not configured for this binary
  code_cache: Option<Arc<DenoCompileCodeCache>>,
  modules: Arc<StandaloneModules>,
  node_code_translator: Arc<DenoRtNodeCodeTranslator>,
  node_resolver: Arc<DenoRtNodeResolver>,
  npm_module_loader: Arc<DenoRtNpmModuleLoader>,
  npm_registry_permission_checker: NpmRegistryReadPermissionChecker<DenoRtSys>,
  npm_req_resolver: Arc<DenoRtNpmReqResolver>,
  vfs: Arc<FileBackedVfs>,
  workspace_resolver: WorkspaceResolver,
}
impl SharedModuleLoaderState {
  /// Computes the code-cache info for `specifier`/`source`, returning
  /// `None` when code caching is not configured or disabled.
  fn get_code_cache(
    &self,
    specifier: &Url,
    source: &[u8],
  ) -> Option<SourceCodeCacheInfo> {
    let code_cache = self.code_cache.as_ref()?;
    if !code_cache.enabled() {
      return None;
    }
    // deno version is already included in the root cache key
    let hash = FastInsecureHasher::new_without_deno_version()
      .write_hashable(source)
      .finish();
    let cached = code_cache.get_sync(
      specifier,
      deno_runtime::code_cache::CodeCacheType::EsModule,
      hash,
    );
    Some(SourceCodeCacheInfo {
      hash,
      data: cached.map(Cow::Owned),
    })
  }
}
/// Module loader serving modules embedded in the standalone binary.
/// Cheap to clone: only an `Arc` to the shared state.
#[derive(Clone)]
struct EmbeddedModuleLoader {
  shared: Arc<SharedModuleLoaderState>,
}
impl std::fmt::Debug for EmbeddedModuleLoader {
  /// Opaque debug output; the shared state is intentionally omitted.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.write_str("EmbeddedModuleLoader")
  }
}
impl ModuleLoader for EmbeddedModuleLoader {
  /// Resolves `raw_specifier` against `referrer`.
  ///
  /// A referrer of "." means the main module and resolves against the
  /// current directory. Resolution then delegates in order: node resolver
  /// for referrers inside npm packages, then the workspace resolver
  /// (jsr/npm workspace packages, package.json deps, import maps), with a
  /// bare-specifier npm fallback for `file:` referrers.
  fn resolve(
    &self,
    raw_specifier: &str,
    referrer: &str,
    kind: ResolutionKind,
  ) -> Result<Url, ModuleLoaderError> {
    let referrer = if referrer == "." {
      if kind != ResolutionKind::MainModule {
        return Err(
          JsErrorBox::generic(format!(
            "Expected to resolve main module, got {:?} instead.",
            kind
          ))
          .into(),
        );
      }
      let current_dir = std::env::current_dir().unwrap();
      deno_core::resolve_path(".", &current_dir)?
    } else {
      Url::parse(referrer).map_err(|err| {
        JsErrorBox::type_error(format!(
          "Referrer uses invalid specifier: {}",
          err
        ))
      })?
    };
    // require- vs import-style resolution depends on whether the referrer
    // itself may be CJS
    let referrer_kind = if self
      .shared
      .cjs_tracker
      .is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))
      .map_err(JsErrorBox::from_err)?
    {
      ResolutionMode::Require
    } else {
      ResolutionMode::Import
    };
    if self.shared.node_resolver.in_npm_package(&referrer) {
      return Ok(
        self
          .shared
          .node_resolver
          .resolve(
            raw_specifier,
            &referrer,
            referrer_kind,
            NodeResolutionKind::Execution,
          )
          .map_err(JsErrorBox::from_err)?
          .into_url(),
      );
    }
    let mapped_resolution = self
      .shared
      .workspace_resolver
      .resolve(raw_specifier, &referrer);
    match mapped_resolution {
      Ok(MappedResolution::WorkspaceJsrPackage { specifier, .. }) => {
        Ok(specifier)
      }
      Ok(MappedResolution::WorkspaceNpmPackage {
        target_pkg_json: pkg_json,
        sub_path,
        ..
      }) => Ok(
        self
          .shared
          .node_resolver
          .resolve_package_subpath_from_deno_module(
            pkg_json.dir_path(),
            sub_path.as_deref(),
            Some(&referrer),
            referrer_kind,
            NodeResolutionKind::Execution,
          )
          .map_err(JsErrorBox::from_err)?,
      ),
      Ok(MappedResolution::PackageJson {
        dep_result,
        sub_path,
        alias,
        ..
      }) => match dep_result
        .as_ref()
        .map_err(|e| JsErrorBox::from_err(e.clone()))?
      {
        PackageJsonDepValue::Req(req) => self
          .shared
          .npm_req_resolver
          .resolve_req_with_sub_path(
            req,
            sub_path.as_deref(),
            &referrer,
            referrer_kind,
            NodeResolutionKind::Execution,
          )
          .map_err(|e| JsErrorBox::from_err(e).into()),
        PackageJsonDepValue::Workspace(version_req) => {
          let pkg_folder = self
            .shared
            .workspace_resolver
            .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
              alias,
              version_req,
            )
            .map_err(JsErrorBox::from_err)?;
          Ok(
            self
              .shared
              .node_resolver
              .resolve_package_subpath_from_deno_module(
                pkg_folder,
                sub_path.as_deref(),
                Some(&referrer),
                referrer_kind,
                NodeResolutionKind::Execution,
              )
              .map_err(JsErrorBox::from_err)?,
          )
        }
      },
      Ok(MappedResolution::Normal { specifier, .. })
      | Ok(MappedResolution::ImportMap { specifier, .. }) => {
        if let Ok(reference) =
          NpmPackageReqReference::from_specifier(&specifier)
        {
          return Ok(
            self
              .shared
              .npm_req_resolver
              .resolve_req_reference(
                &reference,
                &referrer,
                referrer_kind,
                NodeResolutionKind::Execution,
              )
              .map_err(JsErrorBox::from_err)?,
          );
        }
        if specifier.scheme() == "jsr" {
          // jsr specifiers may redirect within the embedded module store
          if let Some(specifier) = self
            .shared
            .modules
            .resolve_specifier(&specifier)
            .map_err(JsErrorBox::from_err)?
          {
            return Ok(specifier.clone());
          }
        }
        Ok(
          self
            .shared
            .node_resolver
            .handle_if_in_node_modules(&specifier)
            .unwrap_or(specifier),
        )
      }
      Err(err)
        if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
      {
        // last chance: a bare specifier from a file may still resolve as
        // an npm package dependency
        let maybe_res = self
          .shared
          .npm_req_resolver
          .resolve_if_for_npm_pkg(
            raw_specifier,
            &referrer,
            referrer_kind,
            NodeResolutionKind::Execution,
          )
          .map_err(JsErrorBox::from_err)?;
        if let Some(res) = maybe_res {
          return Ok(res.into_url());
        }
        Err(JsErrorBox::from_err(err).into())
      }
      Err(err) => Err(JsErrorBox::from_err(err).into()),
    }
  }

  /// Supplies host-defined options only for modules inside npm packages.
  fn get_host_defined_options<'s>(
    &self,
    scope: &mut deno_core::v8::HandleScope<'s>,
    name: &str,
  ) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> {
    let name = Url::parse(name).ok()?;
    if self.shared.node_resolver.in_npm_package(&name) {
      Some(create_host_defined_options(scope))
    } else {
      None
    }
  }

  /// Loads module source: `data:` URLs are decoded inline, npm package
  /// modules go through the npm module loader, and everything else is read
  /// from the embedded module store (with async CJS-to-ESM translation for
  /// maybe-CJS modules).
  fn load(
    &self,
    original_specifier: &Url,
    maybe_referrer: Option<&Url>,
    _is_dynamic: bool,
    _requested_module_type: RequestedModuleType,
  ) -> deno_core::ModuleLoadResponse {
    if original_specifier.scheme() == "data" {
      let data_url_text =
        match deno_media_type::data_url::RawDataUrl::parse(original_specifier)
          .and_then(|url| url.decode())
        {
          Ok(response) => response,
          Err(err) => {
            return deno_core::ModuleLoadResponse::Sync(Err(
              JsErrorBox::type_error(format!("{:#}", err)).into(),
            ));
          }
        };
      return deno_core::ModuleLoadResponse::Sync(Ok(
        deno_core::ModuleSource::new(
          deno_core::ModuleType::JavaScript,
          ModuleSourceCode::String(data_url_text.into()),
          original_specifier,
          None,
        ),
      ));
    }
    if self.shared.node_resolver.in_npm_package(original_specifier) {
      let shared = self.shared.clone();
      let original_specifier = original_specifier.clone();
      let maybe_referrer = maybe_referrer.cloned();
      return deno_core::ModuleLoadResponse::Async(
        async move {
          let code_source = shared
            .npm_module_loader
            .load(&original_specifier, maybe_referrer.as_ref())
            .await
            .map_err(JsErrorBox::from_err)?;
          let code_cache_entry = shared.get_code_cache(
            &code_source.found_url,
            code_source.code.as_bytes(),
          );
          Ok(deno_core::ModuleSource::new_with_redirect(
            match code_source.media_type {
              MediaType::Json => ModuleType::Json,
              _ => ModuleType::JavaScript,
            },
            code_source.code,
            &original_specifier,
            &code_source.found_url,
            code_cache_entry,
          ))
        }
        .boxed_local(),
      );
    }
    match self.shared.modules.read(original_specifier) {
      Ok(Some(module)) => {
        let media_type = module.media_type;
        let (module_specifier, module_type, module_source) =
          module.into_parts();
        let is_maybe_cjs = match self
          .shared
          .cjs_tracker
          .is_maybe_cjs(original_specifier, media_type)
        {
          Ok(is_maybe_cjs) => is_maybe_cjs,
          Err(err) => {
            return deno_core::ModuleLoadResponse::Sync(Err(
              JsErrorBox::type_error(format!("{:?}", err)).into(),
            ));
          }
        };
        if is_maybe_cjs {
          // maybe-CJS requires async translation to ESM before execution
          let original_specifier = original_specifier.clone();
          let module_specifier = module_specifier.clone();
          let shared = self.shared.clone();
          deno_core::ModuleLoadResponse::Async(
            async move {
              let source = match module_source {
                DenoCompileModuleSource::String(string) => {
                  Cow::Borrowed(string)
                }
                DenoCompileModuleSource::Bytes(module_code_bytes) => {
                  match module_code_bytes {
                    Cow::Owned(bytes) => {
                      Cow::Owned(from_utf8_lossy_owned(bytes))
                    }
                    Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
                  }
                }
              };
              let source = shared
                .node_code_translator
                .translate_cjs_to_esm(&module_specifier, Some(source))
                .await
                .map_err(JsErrorBox::from_err)?;
              let module_source = match source {
                Cow::Owned(source) => ModuleSourceCode::String(source.into()),
                Cow::Borrowed(source) => {
                  ModuleSourceCode::String(FastString::from_static(source))
                }
              };
              let code_cache_entry = shared
                .get_code_cache(&module_specifier, module_source.as_bytes());
              Ok(deno_core::ModuleSource::new_with_redirect(
                module_type,
                module_source,
                &original_specifier,
                &module_specifier,
                code_cache_entry,
              ))
            }
            .boxed_local(),
          )
        } else {
          let module_source = module_source.into_for_v8();
          let code_cache_entry = self
            .shared
            .get_code_cache(module_specifier, module_source.as_bytes());
          deno_core::ModuleLoadResponse::Sync(Ok(
            deno_core::ModuleSource::new_with_redirect(
              module_type,
              module_source,
              original_specifier,
              module_specifier,
              code_cache_entry,
            ),
          ))
        }
      }
      Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(
        JsErrorBox::type_error(format!(
          "Module not found: {}",
          original_specifier
        ))
        .into(),
      )),
      Err(err) => deno_core::ModuleLoadResponse::Sync(Err(
        JsErrorBox::type_error(format!("{:?}", err)).into(),
      )),
    }
  }

  /// Persists compiled code-cache data; completes synchronously and returns
  /// an already-ready future.
  fn code_cache_ready(
    &self,
    specifier: Url,
    source_hash: u64,
    code_cache_data: &[u8],
  ) -> LocalBoxFuture<'static, ()> {
    if let Some(code_cache) = &self.shared.code_cache {
      code_cache.set_sync(
        specifier,
        deno_runtime::code_cache::CodeCacheType::EsModule,
        source_hash,
        code_cache_data,
      );
    }
    std::future::ready(()).boxed_local()
  }

  /// Returns the stored source map for `file_name`, if any.
  fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> {
    let url = Url::parse(file_name).ok()?;
    let data = self.shared.modules.read(&url).ok()??;
    data.source_map
  }

  /// Returns the text of line `line_number` (0-based) from the stored
  /// original source, for error display; out-of-bounds lines produce a
  /// warning string instead of `None`.
  fn get_source_mapped_source_line(
    &self,
    file_name: &str,
    line_number: usize,
  ) -> Option<String> {
    let specifier = Url::parse(file_name).ok()?;
    let data = self.shared.modules.read(&specifier).ok()??;
    let source = String::from_utf8_lossy(&data.data);
    // Do NOT use .lines(): it skips the terminating empty line.
    // (due to internally using `.split_terminator()` instead of `.split()`)
    let lines: Vec<&str> = source.split('\n').collect();
    if line_number >= lines.len() {
      Some(format!(
        "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
        crate::colors::yellow("Warning"), line_number + 1,
      ))
    } else {
      Some(lines[line_number].to_string())
    }
  }
}
impl NodeRequireLoader for EmbeddedModuleLoader {
  /// Grants read access to files in the embedded snapshot without a
  /// permission prompt; everything else defers to the npm registry
  /// permission checker.
  fn ensure_read_permission<'a>(
    &self,
    permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
    path: &'a std::path::Path,
  ) -> Result<Cow<'a, std::path::Path>, JsErrorBox> {
    if self.shared.modules.has_file(path) {
      // allow reading if the file is in the snapshot
      return Ok(Cow::Borrowed(path));
    }
    self
      .shared
      .npm_registry_permission_checker
      .ensure_read_permission(permissions, path)
      .map_err(JsErrorBox::from_err)
  }

  /// Reads a file's text from the VFS, preferring the transpiled variant
  /// when one was stored for the entry.
  fn load_text_file_lossy(
    &self,
    path: &std::path::Path,
  ) -> Result<Cow<'static, str>, JsErrorBox> {
    let file_entry = self
      .shared
      .vfs
      .file_entry(path)
      .map_err(JsErrorBox::from_err)?;
    let file_bytes = self
      .shared
      .vfs
      .read_file_offset_with_len(
        file_entry.transpiled_offset.unwrap_or(file_entry.offset),
      )
      .map_err(JsErrorBox::from_err)?;
    Ok(from_utf8_lossy_cow(file_bytes))
  }

  /// Delegates the CJS/ESM question to the shared CJS tracker.
  fn is_maybe_cjs(&self, specifier: &Url) -> Result<bool, ClosestPkgJsonError> {
    let media_type = MediaType::from_specifier(specifier);
    self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type)
  }
}
/// Factory producing `EmbeddedModuleLoader`s that all share one state.
struct StandaloneModuleLoaderFactory {
  shared: Arc<SharedModuleLoaderState>,
}
impl StandaloneModuleLoaderFactory {
  /// Builds a module-loader / node-require-loader pair backed by the same
  /// `EmbeddedModuleLoader` instance.
  pub fn create_result(&self) -> CreateModuleLoaderResult {
    let loader = Rc::new(EmbeddedModuleLoader {
      shared: self.shared.clone(),
    });
    let node_require_loader = loader.clone();
    CreateModuleLoaderResult {
      module_loader: loader,
      node_require_loader,
    }
  }
}
impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
  /// Main worker and web workers share the same loader construction;
  /// permissions are unused because the embedded loader does not need them.
  fn create_for_main(
    &self,
    _root_permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult {
    self.create_result()
  }

  fn create_for_worker(
    &self,
    _parent_permissions: PermissionsContainer,
    _permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult {
    self.create_result()
  }
}
/// Lazily-initialized root certificate store built from the CA settings
/// baked into the standalone binary's metadata.
struct StandaloneRootCertStoreProvider {
  ca_stores: Option<Vec<String>>,
  ca_data: Option<CaData>,
  // caches the first (possibly failed) initialization result
  cell: OnceLock<Result<RootCertStore, RootCertStoreLoadError>>,
}
impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
  /// Builds the root cert store on first use and caches the result
  /// (including a failure) for subsequent calls.
  fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> {
    self
      .cell
      // get_or_try_init was not stable yet when this was written
      .get_or_init(|| {
        get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone())
      })
      .as_ref()
      .map_err(|err| JsErrorBox::from_err(err.clone()))
  }
}
/// Entry point for executing a standalone (`deno compile`d) program.
///
/// Unpacks the embedded [`StandaloneData`], wires up npm/node resolution,
/// the workspace resolver, permissions, code cache, and module loading,
/// then creates a main worker, runs the entrypoint module, and returns
/// the program's exit code.
pub async fn run(
  fs: Arc<dyn FileSystem>,
  sys: DenoRtSys,
  data: StandaloneData,
) -> Result<i32, AnyError> {
  let StandaloneData {
    metadata,
    modules,
    npm_snapshot,
    root_path,
    vfs,
  } = data;
  // TLS root certs come from whatever CA configuration was captured
  // into the binary's metadata at compile time.
  let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
    ca_stores: metadata.ca_stores,
    ca_data: metadata.ca_data.map(CaData::Bytes),
    cell: Default::default(),
  });
  // use a dummy npm registry url
  let npm_registry_url = Url::parse("https://localhost/").unwrap();
  let root_dir_url = Arc::new(Url::from_directory_path(&root_path).unwrap());
  let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
  let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
  let pkg_json_resolver = Arc::new(PackageJsonResolver::new(sys.clone()));
  // How reads of npm registry files are permission-checked depends on
  // which node_modules mode was baked into the binary.
  let npm_registry_permission_checker = {
    let mode = match &metadata.node_modules {
      Some(NodeModules::Managed {
        node_modules_dir: Some(path),
      }) => NpmRegistryReadPermissionCheckerMode::Local(PathBuf::from(path)),
      Some(NodeModules::Byonm { .. }) => {
        NpmRegistryReadPermissionCheckerMode::Byonm
      }
      Some(NodeModules::Managed {
        node_modules_dir: None,
      })
      | None => NpmRegistryReadPermissionCheckerMode::Global(
        npm_global_cache_dir.clone(),
      ),
    };
    NpmRegistryReadPermissionChecker::new(sys.clone(), mode)
  };
  // Build the in-npm-package checker and npm resolver for each of the
  // three node_modules modes: managed, byonm, or none.
  let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules {
    Some(NodeModules::Managed { node_modules_dir }) => {
      // create an npmrc that uses the fake npm_registry_url to resolve packages
      let npmrc = Arc::new(ResolvedNpmRc {
        default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
          registry_url: npm_registry_url.clone(),
          config: Default::default(),
        },
        scopes: Default::default(),
        registry_configs: Default::default(),
      });
      let npm_cache_dir = Arc::new(NpmCacheDir::new(
        &sys,
        npm_global_cache_dir,
        npmrc.get_all_known_registries_urls(),
      ));
      // In managed mode a snapshot is always embedded in the binary.
      let snapshot = npm_snapshot.unwrap();
      let maybe_node_modules_path = node_modules_dir
        .map(|node_modules_dir| root_path.join(node_modules_dir));
      let in_npm_pkg_checker =
        DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed(
          ManagedInNpmPkgCheckerCreateOptions {
            root_cache_dir_url: npm_cache_dir.root_dir_url(),
            maybe_node_modules_path: maybe_node_modules_path.as_deref(),
          },
        ));
      let npm_resolution =
        Arc::new(NpmResolutionCell::new(NpmResolutionSnapshot::new(snapshot)));
      let npm_resolver = NpmResolver::<DenoRtSys>::new::<DenoRtSys>(
        NpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions {
          npm_resolution,
          npm_cache_dir,
          sys: sys.clone(),
          maybe_node_modules_path,
          npm_system_info: Default::default(),
          npmrc,
        }),
      );
      (in_npm_pkg_checker, npm_resolver)
    }
    Some(NodeModules::Byonm {
      root_node_modules_dir,
    }) => {
      let root_node_modules_dir =
        root_node_modules_dir.map(|p| vfs.root().join(p));
      let in_npm_pkg_checker =
        DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Byonm);
      let npm_resolver = NpmResolver::<DenoRtSys>::new::<DenoRtSys>(
        NpmResolverCreateOptions::Byonm(ByonmNpmResolverCreateOptions {
          sys: sys.clone(),
          pkg_json_resolver: pkg_json_resolver.clone(),
          root_node_modules_dir,
        }),
      );
      (in_npm_pkg_checker, npm_resolver)
    }
    None => {
      // Packages from different registries are already inlined in the binary,
      // so no need to create actual `.npmrc` configuration.
      let npmrc = create_default_npmrc();
      let npm_cache_dir = Arc::new(NpmCacheDir::new(
        &sys,
        npm_global_cache_dir,
        npmrc.get_all_known_registries_urls(),
      ));
      let in_npm_pkg_checker =
        DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed(
          ManagedInNpmPkgCheckerCreateOptions {
            root_cache_dir_url: npm_cache_dir.root_dir_url(),
            maybe_node_modules_path: None,
          },
        ));
      let npm_resolution = Arc::new(NpmResolutionCell::default());
      let npm_resolver = NpmResolver::<DenoRtSys>::new::<DenoRtSys>(
        NpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions {
          npm_resolution,
          sys: sys.clone(),
          npm_cache_dir,
          maybe_node_modules_path: None,
          npm_system_info: Default::default(),
          npmrc: create_default_npmrc(),
        }),
      );
      (in_npm_pkg_checker, npm_resolver)
    }
  };
  let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
  let node_resolver = Arc::new(NodeResolver::new(
    in_npm_pkg_checker.clone(),
    RealIsBuiltInNodeModuleChecker,
    npm_resolver.clone(),
    pkg_json_resolver.clone(),
    sys.clone(),
    node_resolver::ConditionsFromResolutionMode::default(),
  ));
  // CJS detection mode mirrors the compile-time configuration:
  // implicit detection when `detect_cjs` is set, disabled when there
  // are no package.jsons, otherwise only explicit "type": "commonjs".
  let cjs_tracker = Arc::new(CjsTracker::new(
    in_npm_pkg_checker.clone(),
    pkg_json_resolver.clone(),
    if metadata.unstable_config.detect_cjs {
      IsCjsResolutionMode::ImplicitTypeCommonJs
    } else if metadata.workspace_resolver.package_jsons.is_empty() {
      IsCjsResolutionMode::Disabled
    } else {
      IsCjsResolutionMode::ExplicitTypeCommonJs
    },
  ));
  let npm_req_resolver = Arc::new(NpmReqResolver::new(NpmReqResolverOptions {
    sys: sys.clone(),
    in_npm_pkg_checker: in_npm_pkg_checker.clone(),
    node_resolver: node_resolver.clone(),
    npm_resolver: npm_resolver.clone(),
  }));
  let cjs_esm_code_analyzer =
    CjsCodeAnalyzer::new(cjs_tracker.clone(), modules.clone(), sys.clone());
  let node_code_translator = Arc::new(NodeCodeTranslator::new(
    cjs_esm_code_analyzer,
    in_npm_pkg_checker,
    node_resolver.clone(),
    npm_resolver.clone(),
    pkg_json_resolver.clone(),
    sys.clone(),
  ));
  // Rehydrate the workspace resolver (import map, package.jsons, JSR
  // packages) from the serialized form in the metadata.
  let workspace_resolver = {
    let import_map = match metadata.workspace_resolver.import_map {
      Some(import_map) => Some(
        import_map::parse_from_json_with_options(
          root_dir_url.join(&import_map.specifier).unwrap(),
          &import_map.json,
          import_map::ImportMapOptions {
            address_hook: None,
            expand_imports: true,
          },
        )?
        .import_map,
      ),
      None => None,
    };
    let pkg_jsons = metadata
      .workspace_resolver
      .package_jsons
      .into_iter()
      .map(|(relative_path, json)| {
        let path = root_dir_url
          .join(&relative_path)
          .unwrap()
          .to_file_path()
          .unwrap();
        let pkg_json =
          deno_package_json::PackageJson::load_from_value(path, json);
        Arc::new(pkg_json)
      })
      .collect();
    WorkspaceResolver::new_raw(
      root_dir_url.clone(),
      import_map,
      metadata
        .workspace_resolver
        .jsr_pkgs
        .iter()
        .map(|pkg| ResolverWorkspaceJsrPackage {
          is_patch: false, // only used for enhancing the diagnostic, which isn't shown in deno compile
          base: root_dir_url.join(&pkg.relative_base).unwrap(),
          name: pkg.name.clone(),
          version: pkg.version.clone(),
          exports: pkg.exports.clone(),
        })
        .collect(),
      pkg_jsons,
      metadata.workspace_resolver.pkg_json_resolution,
    )
  };
  // The code cache lives next to the executable as "<exe name>.cache";
  // absence of a cache key means caching was disabled at compile time.
  let code_cache = match metadata.code_cache_key {
    Some(code_cache_key) => Some(Arc::new(DenoCompileCodeCache::new(
      root_path.with_file_name(format!(
        "{}.cache",
        root_path.file_name().unwrap().to_string_lossy()
      )),
      code_cache_key,
    ))),
    None => {
      log::debug!("Code cache disabled.");
      None
    }
  };
  let module_loader_factory = StandaloneModuleLoaderFactory {
    shared: Arc::new(SharedModuleLoaderState {
      cjs_tracker: cjs_tracker.clone(),
      code_cache: code_cache.clone(),
      modules,
      node_code_translator: node_code_translator.clone(),
      node_resolver: node_resolver.clone(),
      npm_module_loader: Arc::new(NpmModuleLoader::new(
        cjs_tracker.clone(),
        node_code_translator,
        sys.clone(),
      )),
      npm_registry_permission_checker,
      npm_req_resolver,
      vfs: vfs.clone(),
      workspace_resolver,
    }),
  };
  let permissions = {
    let mut permissions = metadata.permissions;
    // grant read access to the vfs
    match &mut permissions.allow_read {
      Some(vec) if vec.is_empty() => {
        // do nothing, already granted
      }
      Some(vec) => {
        vec.push(root_path.to_string_lossy().to_string());
      }
      None => {
        permissions.allow_read =
          Some(vec![root_path.to_string_lossy().to_string()]);
      }
    }
    let desc_parser =
      Arc::new(RuntimePermissionDescriptorParser::new(sys.clone()));
    let permissions =
      Permissions::from_options(desc_parser.as_ref(), &permissions)?;
    PermissionsContainer::new(desc_parser, permissions)
  };
  let feature_checker = Arc::new({
    let mut checker = FeatureChecker::default();
    checker.set_exit_cb(Box::new(crate::unstable_exit_cb));
    for feature in metadata.unstable_config.features {
      // `metadata` is valid for the whole lifetime of the program, so we
      // can leak the string here.
      checker.enable_feature(feature.leak());
    }
    checker
  });
  let lib_main_worker_options = LibMainWorkerOptions {
    argv: metadata.argv,
    log_level: WorkerLogLevel::Info,
    enable_op_summary_metrics: false,
    enable_testing_features: false,
    has_node_modules_dir,
    inspect_brk: false,
    inspect_wait: false,
    strace_ops: None,
    is_inspecting: false,
    skip_op_registration: true,
    location: metadata.location,
    // For npm entrypoints, report the binary command name; otherwise
    // fall back to the actual argv[0].
    argv0: NpmPackageReqReference::from_specifier(&main_module)
      .ok()
      .map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref))
      .or(std::env::args().next()),
    node_debug: std::env::var("NODE_DEBUG").ok(),
    origin_data_folder_path: None,
    seed: metadata.seed,
    unsafely_ignore_certificate_errors: metadata
      .unsafely_ignore_certificate_errors,
    node_ipc: None,
    serve_port: None,
    serve_host: None,
    otel_config: metadata.otel_config,
    startup_snapshot: deno_snapshots::CLI_SNAPSHOT,
  };
  let worker_factory = LibMainWorkerFactory::new(
    Arc::new(BlobStore::default()),
    code_cache.map(|c| c.for_deno_core()),
    feature_checker,
    fs,
    None,
    Box::new(module_loader_factory),
    node_resolver.clone(),
    create_npm_process_state_provider(&npm_resolver),
    pkg_json_resolver,
    root_cert_store_provider,
    StorageKeyResolver::empty(),
    sys.clone(),
    lib_main_worker_options,
  );
  // Initialize v8 once from the main thread.
  v8_set_flags(construct_v8_flags(&[], &metadata.v8_flags, vec![]));
  // TODO(bartlomieju): remove last argument once Deploy no longer needs it
  deno_core::JsRuntime::init_platform(None, true);
  // If the entrypoint is an npm package reference, resolve it to the
  // package's binary entrypoint inside the vfs.
  let main_module = match NpmPackageReqReference::from_specifier(&main_module) {
    Ok(package_ref) => {
      let pkg_folder = npm_resolver.resolve_pkg_folder_from_deno_module_req(
        package_ref.req(),
        &deno_path_util::url_from_file_path(&vfs.root().join("package.json"))?,
      )?;
      worker_factory
        .resolve_npm_binary_entrypoint(&pkg_folder, package_ref.sub_path())?
    }
    Err(_) => main_module,
  };
  let mut worker = worker_factory.create_main_worker(
    WorkerExecutionMode::Run,
    permissions,
    main_module,
  )?;
  let exit_code = worker.run().await?;
  Ok(exit_code)
}
/// Builds a minimal resolved npmrc pointing at the public npm registry,
/// with no scopes or per-registry configuration.
///
/// Using a single default registry is fine here because packages from
/// multiple registries are combined into one when compiling the binary.
fn create_default_npmrc() -> Arc<ResolvedNpmRc> {
  let registry_url = Url::parse("https://registry.npmjs.org").unwrap();
  let default_config = deno_npm::npm_rc::RegistryConfigWithUrl {
    registry_url,
    config: Default::default(),
  };
  Arc::new(ResolvedNpmRc {
    default_config,
    scopes: Default::default(),
    registry_configs: Default::default(),
  })
}

View file

@ -1,11 +1,8 @@
// Copyright 2018-2025 the Deno authors. MIT license.
/// This module is shared between build script and the binaries. Use it sparsely.
use thiserror::Error;
#[derive(Debug, Error)]
#[error("Unrecognized release channel: {0}")]
pub struct UnrecognizedReleaseChannelError(pub String);
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ReleaseChannel {
@ -53,17 +50,13 @@ impl ReleaseChannel {
// NOTE(bartlomieju): do not ever change these values, tools like `patchver`
// rely on them.
#[allow(unused)]
pub fn deserialize(
str_: &str,
) -> Result<Self, UnrecognizedReleaseChannelError> {
pub fn deserialize(str_: &str) -> Result<Self, AnyError> {
Ok(match str_ {
"stable" => Self::Stable,
"canary" => Self::Canary,
"rc" => Self::Rc,
"lts" => Self::Lts,
unknown => {
return Err(UnrecognizedReleaseChannelError(unknown.to_string()))
}
unknown => bail!("Unrecognized release channel: {}", unknown),
})
}
}

View file

@ -1,20 +0,0 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "deno_snapshots"
version = "0.1.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "v8 snapshot used by the Deno CLI"
[lib]
path = "lib.rs"
[features]
disable = []
[build-dependencies]
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources", "snapshotting"] }

View file

@ -1,3 +0,0 @@
# deno_snapshots
v8 snapshot used in the Deno CLI.

View file

@ -1,30 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
#[cfg(not(feature = "disable"))]
mod shared;
fn main() {
#[cfg(not(feature = "disable"))]
{
let o = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
create_cli_snapshot(cli_snapshot_path);
}
}
#[cfg(not(feature = "disable"))]
fn create_cli_snapshot(snapshot_path: std::path::PathBuf) {
use deno_runtime::ops::bootstrap::SnapshotOptions;
let snapshot_options = SnapshotOptions {
ts_version: shared::TS_VERSION.to_string(),
v8_version: deno_runtime::deno_core::v8::VERSION_STRING,
target: std::env::var("TARGET").unwrap(),
};
deno_runtime::snapshot::create_runtime_snapshot(
snapshot_path,
snapshot_options,
vec![],
);
}

View file

@ -1,13 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
#[cfg(not(feature = "disable"))]
pub static CLI_SNAPSHOT: Option<&[u8]> = Some(include_bytes!(concat!(
env!("OUT_DIR"),
"/CLI_SNAPSHOT.bin"
)));
#[cfg(feature = "disable")]
pub static CLI_SNAPSHOT: Option<&[u8]> = None;
mod shared;
pub use shared::TS_VERSION;

View file

@ -1,3 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub static TS_VERSION: &str = "5.6.2";

View file

@ -1,70 +1,110 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::env;
use std::env::current_exe;
use std::ffi::OsString;
use std::fs;
use std::fs::File;
use std::future::Future;
use std::io::ErrorKind;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::io::Write;
use std::ops::Range;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::sync::Arc;
use capacity_builder::BytesAppendable;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::io::AllowStdIo;
use deno_core::futures::AsyncReadExt;
use deno_core::futures::AsyncSeekExt;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::ModuleGraph;
use deno_lib::args::CaData;
use deno_lib::args::UnstableConfig;
use deno_lib::shared::ReleaseChannel;
use deno_lib::standalone::binary::CjsExportAnalysisEntry;
use deno_lib::standalone::binary::Metadata;
use deno_lib::standalone::binary::NodeModules;
use deno_lib::standalone::binary::RemoteModuleEntry;
use deno_lib::standalone::binary::SerializedResolverWorkspaceJsrPackage;
use deno_lib::standalone::binary::SerializedWorkspaceResolver;
use deno_lib::standalone::binary::SerializedWorkspaceResolverImportMap;
use deno_lib::standalone::binary::SpecifierDataStore;
use deno_lib::standalone::binary::SpecifierId;
use deno_lib::standalone::binary::MAGIC_BYTES;
use deno_lib::standalone::virtual_fs::BuiltVfs;
use deno_lib::standalone::virtual_fs::VfsBuilder;
use deno_lib::standalone::virtual_fs::VfsEntry;
use deno_lib::standalone::virtual_fs::VirtualDirectory;
use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries;
use deno_lib::standalone::virtual_fs::WindowsSystemRootablePath;
use deno_lib::standalone::virtual_fs::DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError;
use deno_telemetry::OtelConfig;
use indexmap::IndexMap;
use node_resolver::analyze::CjsAnalysis;
use node_resolver::analyze::CjsCodeAnalyzer;
use log::Level;
use serde::Deserialize;
use serde::Serialize;
use super::file_system::DenoCompileFileSystem;
use super::serialization::deserialize_binary_data_section;
use super::serialization::serialize_binary_data_section;
use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder;
use super::serialization::SourceMapStore;
use super::virtual_fs::output_vfs;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::FileSystemCaseSensitivity;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsFileSubDataKind;
use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory;
use super::virtual_fs::VirtualDirectoryEntries;
use super::virtual_fs::WindowsSystemRootablePath;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::CompileFlags;
use crate::args::NpmInstallDepsProvider;
use crate::args::PermissionFlags;
use crate::args::UnstableConfig;
use crate::cache::DenoDir;
use crate::cache::FastInsecureHasher;
use crate::emit::Emitter;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::node::CliCjsCodeAnalyzer;
use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::npm::InnerCliNpmResolverRef;
use crate::resolver::CjsTracker;
use crate::shared::ReleaseChannel;
use crate::standalone::virtual_fs::VfsEntry;
use crate::util::archive;
use crate::util::fs::canonicalize_path;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
".deno_compile_node_modules";
/// A URL that can be designated as the base for relative URLs.
///
/// After creation, this URL may be used to get the key for a
@ -110,60 +150,104 @@ impl<'a> StandaloneRelativeFileBaseUrl<'a> {
}
}
struct SpecifierStore<'a> {
data: IndexMap<&'a Url, SpecifierId>,
#[derive(Deserialize, Serialize)]
pub enum NodeModules {
Managed {
/// Relative path for the node_modules directory in the vfs.
node_modules_dir: Option<String>,
},
Byonm {
root_node_modules_dir: Option<String>,
},
}
impl<'a> SpecifierStore<'a> {
pub fn with_capacity(capacity: usize) -> Self {
Self {
data: IndexMap::with_capacity(capacity),
}
}
pub fn get_or_add(&mut self, specifier: &'a Url) -> SpecifierId {
let len = self.data.len();
let entry = self.data.entry(specifier);
match entry {
indexmap::map::Entry::Occupied(occupied_entry) => *occupied_entry.get(),
indexmap::map::Entry::Vacant(vacant_entry) => {
let new_id = SpecifierId::new(len as u32);
vacant_entry.insert(new_id);
new_id
}
}
}
pub fn for_serialization(
self,
base_url: &StandaloneRelativeFileBaseUrl<'a>,
) -> SpecifierStoreForSerialization<'a> {
SpecifierStoreForSerialization {
data: self
.data
.into_iter()
.map(|(specifier, id)| (base_url.specifier_key(specifier), id))
.collect(),
}
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolverImportMap {
pub specifier: String,
pub json: String,
}
struct SpecifierStoreForSerialization<'a> {
data: Vec<(Cow<'a, str>, SpecifierId)>,
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SerializedResolverWorkspaceJsrPackage {
pub relative_base: String,
pub name: String,
pub version: Option<Version>,
pub exports: IndexMap<String, String>,
}
impl<'a> BytesAppendable<'a> for &'a SpecifierStoreForSerialization<'a> {
fn append_to_builder<TBytes: capacity_builder::BytesType>(
self,
builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
) {
builder.append_le(self.data.len() as u32);
for (specifier_str, id) in &self.data {
builder.append_le(specifier_str.len() as u32);
builder.append(specifier_str.as_ref());
builder.append(*id);
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
pub package_jsons: BTreeMap<String, serde_json::Value>,
pub pkg_json_resolution: PackageJsonDepResolution,
}
// Note: Don't use hashmaps/hashsets. Ensure the serialization
// is deterministic.
#[derive(Deserialize, Serialize)]
pub struct Metadata {
pub argv: Vec<String>,
pub seed: Option<u64>,
pub code_cache_key: Option<u64>,
pub permissions: PermissionsOptions,
pub location: Option<Url>,
pub v8_flags: Vec<String>,
pub log_level: Option<Level>,
pub ca_stores: Option<Vec<String>>,
pub ca_data: Option<Vec<u8>>,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub env_vars_from_env_file: IndexMap<String, String>,
pub workspace_resolver: SerializedWorkspaceResolver,
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig,
pub otel_config: OtelConfig,
pub vfs_case_sensitivity: FileSystemCaseSensitivity,
}
#[allow(clippy::too_many_arguments)]
fn write_binary_bytes(
mut file_writer: File,
original_bin: Vec<u8>,
metadata: &Metadata,
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
remote_modules: &RemoteModulesStoreBuilder,
source_map_store: &SourceMapStore,
vfs: &BuiltVfs,
compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
let data_section_bytes = serialize_binary_data_section(
metadata,
npm_snapshot,
remote_modules,
source_map_store,
vfs,
)
.context("Serializing binary data section.")?;
let target = compile_flags.resolve_target();
if target.contains("linux") {
libsui::Elf::new(&original_bin).append(
"d3n0l4nd",
&data_section_bytes,
&mut file_writer,
)?;
} else if target.contains("windows") {
let mut pe = libsui::PortableExecutable::from(&original_bin)?;
if let Some(icon) = compile_flags.icon.as_ref() {
let icon = std::fs::read(icon)?;
pe = pe.set_icon(&icon)?;
}
pe.write_resource("d3n0l4nd", data_section_bytes)?
.build(&mut file_writer)?;
} else if target.contains("darwin") {
libsui::Macho::from(original_bin)?
.write_section("d3n0l4nd", data_section_bytes)?
.build_and_sign(&mut file_writer)?;
}
Ok(())
}
pub fn is_standalone_binary(exe_path: &Path) -> bool {
@ -176,6 +260,146 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
|| libsui::utils::is_macho(&data)
}
pub struct StandaloneData {
pub metadata: Metadata,
pub modules: StandaloneModules,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub source_maps: SourceMapStore,
pub vfs: Arc<FileBackedVfs>,
}
pub struct StandaloneModules {
remote_modules: RemoteModulesStore,
vfs: Arc<FileBackedVfs>,
}
impl StandaloneModules {
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<&'a ModuleSpecifier>, AnyError> {
if specifier.scheme() == "file" {
Ok(Some(specifier))
} else {
self.remote_modules.resolve_specifier(specifier)
}
}
pub fn has_file(&self, path: &Path) -> bool {
self.vfs.file_entry(path).is_ok()
}
pub fn read<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
kind: VfsFileSubDataKind,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?;
let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry, kind)?,
Err(err) if err.kind() == ErrorKind::NotFound => {
match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes,
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
return Ok(None)
}
Err(err) => return Err(err.into()),
}
}
Err(err) => return Err(err.into()),
};
Ok(Some(DenoCompileModuleData {
media_type: MediaType::from_specifier(specifier),
specifier,
data: bytes,
}))
} else {
self.remote_modules.read(specifier).map(|maybe_entry| {
maybe_entry.map(|entry| DenoCompileModuleData {
media_type: entry.media_type,
specifier: entry.specifier,
data: match kind {
VfsFileSubDataKind::Raw => entry.data,
VfsFileSubDataKind::ModuleGraph => {
entry.transpiled_data.unwrap_or(entry.data)
}
},
})
})
}
}
}
/// This function will try to run this binary as a standalone binary
/// produced by `deno compile`. It determines if this is a standalone
/// binary by skipping over the trailer width at the end of the file,
/// then checking for the magic trailer string `d3n0l4nd`. If found,
/// the bundle is executed. If not, this function exits with `Ok(None)`.
pub fn extract_standalone(
cli_args: Cow<Vec<OsString>>,
) -> Result<Option<StandaloneData>, AnyError> {
let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None);
};
let DeserializedDataSection {
mut metadata,
npm_snapshot,
remote_modules,
source_maps,
vfs_root_entries,
vfs_files_data,
} = match deserialize_binary_data_section(data)? {
Some(data_section) => data_section,
None => return Ok(None),
};
let root_path = {
let maybe_current_exe = std::env::current_exe().ok();
let current_exe_name = maybe_current_exe
.as_ref()
.and_then(|p| p.file_name())
.map(|p| p.to_string_lossy())
// should never happen
.unwrap_or_else(|| Cow::Borrowed("binary"));
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
};
let cli_args = cli_args.into_owned();
metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap());
}
let vfs = {
let fs_root = VfsRoot {
dir: VirtualDirectory {
// align the name of the directory with the root dir
name: root_path.file_name().unwrap().to_string_lossy().to_string(),
entries: vfs_root_entries,
},
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(
Cow::Borrowed(vfs_files_data),
fs_root,
metadata.vfs_case_sensitivity,
))
};
Ok(Some(StandaloneData {
metadata,
modules: StandaloneModules {
remote_modules,
vfs: vfs.clone(),
},
npm_snapshot,
root_path,
source_maps,
vfs,
}))
}
pub struct WriteBinOptions<'a> {
pub writer: File,
pub display_output_filename: &'a str,
@ -186,13 +410,13 @@ pub struct WriteBinOptions<'a> {
}
pub struct DenoCompileBinaryWriter<'a> {
cjs_code_analyzer: CliCjsCodeAnalyzer,
cjs_tracker: &'a CliCjsTracker,
cjs_tracker: &'a CjsTracker,
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a CliNpmResolver,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
npm_system_info: NpmSystemInfo,
}
@ -200,22 +424,22 @@ pub struct DenoCompileBinaryWriter<'a> {
impl<'a> DenoCompileBinaryWriter<'a> {
#[allow(clippy::too_many_arguments)]
pub fn new(
cjs_code_analyzer: CliCjsCodeAnalyzer,
cjs_tracker: &'a CliCjsTracker,
cjs_tracker: &'a CjsTracker,
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a CliNpmResolver,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
npm_system_info: NpmSystemInfo,
) -> Self {
Self {
cjs_code_analyzer,
cjs_tracker,
cli_options,
deno_dir,
emitter,
file_fetcher,
http_client_provider,
npm_resolver,
workspace_resolver,
@ -251,7 +475,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
)
}
}
self.write_standalone_binary(options, original_binary).await
self.write_standalone_binary(options, original_binary)
}
async fn get_base_binary(
@ -271,14 +495,19 @@ impl<'a> DenoCompileBinaryWriter<'a> {
let target = compile_flags.resolve_target();
let binary_name = format!("denort-{target}.zip");
let binary_path_suffix = match DENO_VERSION_INFO.release_channel {
ReleaseChannel::Canary => {
format!("canary/{}/{}", DENO_VERSION_INFO.git_hash, binary_name)
}
_ => {
format!("release/v{}/{}", DENO_VERSION_INFO.deno, binary_name)
}
};
let binary_path_suffix =
match crate::version::DENO_VERSION_INFO.release_channel {
ReleaseChannel::Canary => {
format!(
"canary/{}/{}",
crate::version::DENO_VERSION_INFO.git_hash,
binary_name
)
}
_ => {
format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name)
}
};
let download_directory = self.deno_dir.dl_folder_path();
let binary_path = download_directory.join(&binary_path_suffix);
@ -349,7 +578,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
/// This functions creates a standalone deno binary by appending a bundle
/// and magic trailer to the currently executing binary.
#[allow(clippy::too_many_arguments)]
async fn write_standalone_binary(
fn write_standalone_binary(
&self,
options: WriteBinOptions<'_>,
original_bin: Vec<u8>,
@ -370,11 +599,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None => None,
};
let mut vfs = VfsBuilder::new();
let npm_snapshot = match &self.npm_resolver {
CliNpmResolver::Managed(managed) => {
let snapshot = managed
.resolution()
.serialized_valid_snapshot_for_system(&self.npm_system_info);
let npm_snapshot = match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(managed) => {
let snapshot =
managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
if !snapshot.as_serialized().packages.is_empty() {
self.fill_npm_vfs(&mut vfs).context("Building npm vfs.")?;
Some(snapshot)
@ -382,7 +610,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None
}
}
CliNpmResolver::Byonm(_) => {
InnerCliNpmResolverRef::Byonm(_) => {
self.fill_npm_vfs(&mut vfs)?;
None
}
@ -393,50 +621,23 @@ impl<'a> DenoCompileBinaryWriter<'a> {
.add_file_at_path(&path)
.with_context(|| format!("Including {}", path.display()))?;
}
let specifiers_count = graph.specifiers_count();
let mut specifier_store = SpecifierStore::with_capacity(specifiers_count);
let mut remote_modules_store =
SpecifierDataStore::with_capacity(specifiers_count);
// todo(dsherret): transpile and analyze CJS in parallel
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
let mut source_maps = Vec::with_capacity(graph.specifiers_count());
// todo(dsherret): transpile in parallel
for module in graph.modules() {
if module.specifier().scheme() == "data" {
continue; // don't store data urls as an entry as they're in the code
}
let mut maybe_source_map = None;
let mut maybe_transpiled = None;
let mut maybe_cjs_analysis = None;
let (maybe_original_source, media_type) = match module {
let (maybe_original_source, maybe_transpiled, media_type) = match module {
deno_graph::Module::Js(m) => {
let specifier = &m.specifier;
let original_bytes = m.source.as_bytes();
if self.cjs_tracker.is_maybe_cjs(specifier, m.media_type)? {
if self.cjs_tracker.is_cjs_with_known_is_script(
specifier,
let original_bytes = m.source.as_bytes().to_vec();
let maybe_transpiled = if m.media_type.is_emittable() {
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
&m.specifier,
m.media_type,
m.is_script,
)? {
let cjs_analysis = self
.cjs_code_analyzer
.analyze_cjs(
module.specifier(),
Some(Cow::Borrowed(m.source.as_ref())),
)
.await?;
maybe_cjs_analysis = Some(match cjs_analysis {
CjsAnalysis::Esm(_) => CjsExportAnalysisEntry::Esm,
CjsAnalysis::Cjs(exports) => {
CjsExportAnalysisEntry::Cjs(exports)
}
});
} else {
maybe_cjs_analysis = Some(CjsExportAnalysisEntry::Esm);
}
}
if m.media_type.is_emittable() {
let module_kind = match maybe_cjs_analysis.as_ref() {
Some(CjsExportAnalysisEntry::Cjs(_)) => ModuleKind::Cjs,
_ => ModuleKind::Esm,
};
)?;
let module_kind = ModuleKind::from_is_cjs(is_cjs);
let (source, source_map) =
self.emitter.emit_parsed_source_for_deno_compile(
&m.specifier,
@ -445,67 +646,60 @@ impl<'a> DenoCompileBinaryWriter<'a> {
&m.source,
)?;
if source != m.source.as_ref() {
maybe_source_map = Some(source_map.into_bytes());
maybe_transpiled = Some(source.into_bytes());
source_maps.push((&m.specifier, source_map));
Some(source.into_bytes())
} else {
None
}
}
(Some(original_bytes), m.media_type)
} else {
None
};
(Some(original_bytes), maybe_transpiled, m.media_type)
}
deno_graph::Module::Json(m) => {
(Some(m.source.as_bytes()), m.media_type)
(Some(m.source.as_bytes().to_vec()), None, m.media_type)
}
deno_graph::Module::Wasm(m) => {
(Some(m.source.as_ref()), MediaType::Wasm)
(Some(m.source.to_vec()), None, MediaType::Wasm)
}
deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => (None, MediaType::Unknown),
| deno_graph::Module::External(_) => (None, None, MediaType::Unknown),
};
if let Some(original_source) = maybe_original_source {
let maybe_cjs_export_analysis = maybe_cjs_analysis
.as_ref()
.map(bincode::serialize)
.transpose()?;
if module.specifier().scheme() == "file" {
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
vfs
.add_file_with_data(
&file_path,
deno_lib::standalone::virtual_fs::AddFileDataOptions {
data: original_source.to_vec(),
maybe_transpiled,
maybe_source_map,
maybe_cjs_export_analysis,
},
original_source,
VfsFileSubDataKind::Raw,
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
if let Some(transpiled_source) = maybe_transpiled {
vfs
.add_file_with_data(
&file_path,
transpiled_source,
VfsFileSubDataKind::ModuleGraph,
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
}
} else {
let specifier_id = specifier_store.get_or_add(module.specifier());
remote_modules_store.add(
specifier_id,
RemoteModuleEntry {
media_type,
data: Cow::Borrowed(original_source),
maybe_transpiled: maybe_transpiled.map(Cow::Owned),
maybe_source_map: maybe_source_map.map(Cow::Owned),
maybe_cjs_export_analysis: maybe_cjs_export_analysis
.map(Cow::Owned),
},
module.specifier(),
media_type,
original_source,
maybe_transpiled,
);
}
}
}
let mut redirects_store =
SpecifierDataStore::with_capacity(graph.redirects.len());
for (from, to) in &graph.redirects {
redirects_store.add(
specifier_store.get_or_add(from),
specifier_store.get_or_add(to),
);
}
remote_modules_store.add_redirects(&graph.redirects);
if let Some(import_map) = self.workspace_resolver.maybe_import_map() {
if let Ok(file_path) = url_to_file_path(import_map.base_url()) {
@ -523,48 +717,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}
}
// do CJS export analysis on all the files in the VFS
// todo(dsherret): analyze cjs in parallel
let mut to_add = Vec::new();
for (file_path, file) in vfs.iter_files() {
if file.cjs_export_analysis_offset.is_some() {
continue; // already analyzed
}
let specifier = deno_path_util::url_from_file_path(&file_path)?;
let media_type = MediaType::from_specifier(&specifier);
if self.cjs_tracker.is_maybe_cjs(&specifier, media_type)? {
let maybe_source = vfs
.file_bytes(file.offset)
.map(|text| String::from_utf8_lossy(text));
let cjs_analysis_result = self
.cjs_code_analyzer
.analyze_cjs(&specifier, maybe_source)
.await;
let maybe_analysis = match cjs_analysis_result {
Ok(CjsAnalysis::Esm(_)) => Some(CjsExportAnalysisEntry::Esm),
Ok(CjsAnalysis::Cjs(exports)) => {
Some(CjsExportAnalysisEntry::Cjs(exports))
}
Err(err) => {
log::debug!(
"Ignoring cjs export analysis for '{}': {}",
specifier,
err
);
None
}
};
if let Some(analysis) = &maybe_analysis {
to_add.push((file_path, bincode::serialize(analysis)?));
}
}
}
for (file_path, analysis) in to_add {
vfs.add_cjs_export_analysis(&file_path, analysis);
}
let vfs = self.build_vfs_consolidating_global_npm_cache(vfs);
let root_dir_url = match &vfs.root_path {
WindowsSystemRootablePath::Path(dir) => {
Some(url_from_directory_path(dir)?)
@ -590,8 +743,16 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None
};
let node_modules = match &self.npm_resolver {
CliNpmResolver::Managed(_) => {
let mut source_map_store = SourceMapStore::with_capacity(source_maps.len());
for (specifier, source_map) in source_maps {
source_map_store.add(
Cow::Owned(root_dir_url.specifier_key(specifier).into_owned()),
Cow::Owned(source_map.into_bytes()),
);
}
let node_modules = match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(_) => {
npm_snapshot.as_ref().map(|_| NodeModules::Managed {
node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|path| {
@ -604,7 +765,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
),
})
}
CliNpmResolver::Byonm(resolver) => Some(NodeModules::Byonm {
InnerCliNpmResolverRef::Byonm(resolver) => Some(NodeModules::Byonm {
root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| {
root_dir_url
@ -699,18 +860,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
vfs_case_sensitivity: vfs.case_sensitivity,
};
let data_section_bytes = serialize_binary_data_section(
write_binary_bytes(
writer,
original_bin,
&metadata,
npm_snapshot.map(|s| s.into_serialized()),
&specifier_store.for_serialization(&root_dir_url),
&redirects_store,
&remote_modules_store,
&source_map_store,
&vfs,
compile_flags,
)
.context("Serializing binary data section.")?;
write_binary_bytes(writer, original_bin, data_section_bytes, compile_flags)
.context("Writing binary bytes")
.context("Writing binary bytes")
}
fn fill_npm_vfs(&self, builder: &mut VfsBuilder) -> Result<(), AnyError> {
@ -720,17 +880,16 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}
}
match &self.npm_resolver {
CliNpmResolver::Managed(npm_resolver) => {
match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(npm_resolver) => {
if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
maybe_warn_different_system(&self.npm_system_info);
builder.add_dir_recursive(node_modules_path)?;
Ok(())
} else {
// we'll flatten to remove any custom registries later
let mut packages = npm_resolver
.resolution()
.all_system_packages(&self.npm_system_info);
let mut packages =
npm_resolver.all_system_packages(&self.npm_system_info);
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
for package in packages {
let folder =
@ -740,7 +899,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
Ok(())
}
}
CliNpmResolver::Byonm(_) => {
InnerCliNpmResolverRef::Byonm(_) => {
maybe_warn_different_system(&self.npm_system_info);
for pkg_json in self.cli_options.workspace().package_jsons() {
builder.add_file_at_path(&pkg_json.path)?;
@ -783,8 +942,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
&self,
mut vfs: VfsBuilder,
) -> BuiltVfs {
match &self.npm_resolver {
CliNpmResolver::Managed(npm_resolver) => {
match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(npm_resolver) => {
if npm_resolver.root_node_modules_path().is_some() {
return vfs.build();
}
@ -876,151 +1035,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
.insert(npm_global_cache_dir_entry, case_sensitivity);
built_vfs
}
CliNpmResolver::Byonm(_) => vfs.build(),
InnerCliNpmResolverRef::Byonm(_) => vfs.build(),
}
}
}
#[allow(clippy::too_many_arguments)]
/// Embeds the serialized data section into a copy of the current executable
/// and writes the result to `file_writer`, using the target-appropriate
/// binary container format (ELF / PE / Mach-O) via `libsui`.
///
/// The payload is stored under the `"d3n0l4nd"` section/resource name so the
/// standalone runtime can locate it at startup.
fn write_binary_bytes(
  mut file_writer: File,
  original_bin: Vec<u8>,
  data_section_bytes: Vec<u8>,
  compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
  let target = compile_flags.resolve_target();
  if target.contains("linux") {
    // ELF: append the payload as a new section named "d3n0l4nd".
    libsui::Elf::new(&original_bin).append(
      "d3n0l4nd",
      &data_section_bytes,
      &mut file_writer,
    )?;
  } else if target.contains("windows") {
    let mut pe = libsui::PortableExecutable::from(&original_bin)?;
    // Optionally embed a user-provided .ico before attaching the payload.
    if let Some(icon) = compile_flags.icon.as_ref() {
      let icon = std::fs::read(icon)?;
      pe = pe.set_icon(&icon)?;
    }
    pe.write_resource("d3n0l4nd", data_section_bytes)?
      .build(&mut file_writer)?;
  } else if target.contains("darwin") {
    // Mach-O binaries must be re-signed after modification.
    libsui::Macho::from(original_bin)?
      .write_section("d3n0l4nd", data_section_bytes)?
      .build_and_sign(&mut file_writer)?;
  }
  // NOTE(review): if `target` matches none of linux/windows/darwin this
  // silently returns Ok(()) without writing anything — confirm whether an
  // explicit error for unknown targets is intended upstream.
  Ok(())
}
/// Serializes the standalone binary's data section into a single byte buffer.
///
/// Binary format (order is load-bearing — the deserializer reads it back in
/// exactly this sequence):
/// * d3n0l4nd
/// * <metadata_len><metadata>
/// * <npm_snapshot_len><npm_snapshot>
/// * <specifiers>
/// * <redirects>
/// * <remote_modules>
/// * <vfs_headers_len><vfs_headers>
/// * <vfs_file_data_len><vfs_file_data>
/// * d3n0l4nd
#[allow(clippy::too_many_arguments)]
fn serialize_binary_data_section(
  metadata: &Metadata,
  npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
  specifiers: &SpecifierStoreForSerialization,
  redirects: &SpecifierDataStore<SpecifierId>,
  remote_modules: &SpecifierDataStore<RemoteModuleEntry<'_>>,
  vfs: &BuiltVfs,
) -> Result<Vec<u8>, AnyError> {
  // Metadata and VFS headers are JSON; the npm snapshot uses its own
  // compact custom format (see `serialize_npm_snapshot`).
  let metadata = serde_json::to_string(metadata)?;
  let npm_snapshot =
    npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
  let serialized_vfs = serde_json::to_string(&vfs.entries)?;
  let bytes = capacity_builder::BytesBuilder::build(|builder| {
    builder.append(MAGIC_BYTES);
    // 1. Metadata
    {
      builder.append_le(metadata.len() as u64);
      builder.append(&metadata);
    }
    // 2. Npm snapshot
    {
      builder.append_le(npm_snapshot.len() as u64);
      builder.append(&npm_snapshot);
    }
    // 3. Specifiers
    builder.append(specifiers);
    // 4. Redirects
    redirects.serialize(builder);
    // 5. Remote modules
    remote_modules.serialize(builder);
    // 6. VFS
    {
      builder.append_le(serialized_vfs.len() as u64);
      builder.append(&serialized_vfs);
      // File contents follow the headers as one contiguous run, prefixed
      // by the total byte length of all files combined.
      let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
      builder.append_le(vfs_bytes_len);
      for file in &vfs.files {
        builder.append(file);
      }
    }
    // write the magic bytes at the end so we can use it
    // to make sure we've deserialized correctly
    builder.append(MAGIC_BYTES);
  })?;
  Ok(bytes)
}
/// Serializes an npm resolution snapshot into a compact, deterministic
/// little-endian byte format:
/// * u32 package count, then each package id as a length-prefixed string
/// * u32 root-package count, then (req string, u32 package index) pairs
/// * per package: u32 dep count, then (req string, u32 package index) pairs
///
/// Packages, root packages, and dependency lists are all sorted so the
/// output is byte-for-byte reproducible for identical inputs.
fn serialize_npm_snapshot(
  mut snapshot: SerializedNpmResolutionSnapshot,
) -> Vec<u8> {
  // Writes a u32 little-endian length prefix followed by the raw bytes.
  fn append_string(bytes: &mut Vec<u8>, string: &str) {
    let len = string.len() as u32;
    bytes.extend_from_slice(&len.to_le_bytes());
    bytes.extend_from_slice(string.as_bytes());
  }

  snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
  // Map each package id to its (post-sort) index; ids are stored as these
  // u32 indices everywhere below instead of repeating the full strings.
  let ids_to_stored_ids = snapshot
    .packages
    .iter()
    .enumerate()
    .map(|(i, pkg)| (&pkg.id, i as u32))
    .collect::<HashMap<_, _>>();

  let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
  root_packages.sort();
  let mut bytes = Vec::new();
  bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
  for pkg in &snapshot.packages {
    append_string(&mut bytes, &pkg.id.as_serialized());
  }
  bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
  for (req, id) in root_packages {
    append_string(&mut bytes, &req.to_string());
    // Safe: every root package id originates from `snapshot.packages`.
    let id = ids_to_stored_ids.get(&id).unwrap();
    bytes.extend_from_slice(&id.to_le_bytes());
  }
  for pkg in &snapshot.packages {
    let deps_len = pkg.dependencies.len() as u32;
    bytes.extend_from_slice(&deps_len.to_le_bytes());
    let mut deps: Vec<_> = pkg.dependencies.iter().collect();
    deps.sort();
    for (req, id) in deps {
      append_string(&mut bytes, req);
      let id = ids_to_stored_ids.get(&id).unwrap();
      bytes.extend_from_slice(&id.to_le_bytes());
    }
  }
  bytes
}
fn get_denort_path(deno_exe: PathBuf) -> Option<OsString> {
let mut denort = deno_exe;
denort.set_file_name(if cfg!(windows) {

View file

@ -1,5 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::BufReader;
use std::io::BufWriter;
@ -9,15 +10,17 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::sync::AtomicFlag;
use deno_lib::util::hash::FastInsecureHasher;
use deno_path_util::get_atomic_path;
use deno_runtime::code_cache::CodeCache;
use deno_runtime::code_cache::CodeCacheType;
use url::Url;
use crate::cache::FastInsecureHasher;
use crate::worker::CliCodeCache;
enum CodeCacheStrategy {
FirstRun(FirstRunCodeCacheStrategy),
@ -73,27 +76,12 @@ impl DenoCompileCodeCache {
}
}
}
pub fn for_deno_core(self: Arc<Self>) -> Arc<dyn CodeCache> {
self.clone()
}
pub fn enabled(&self) -> bool {
match &self.strategy {
CodeCacheStrategy::FirstRun(strategy) => {
!strategy.is_finished.is_raised()
}
CodeCacheStrategy::SubsequentRun(strategy) => {
!strategy.is_finished.is_raised()
}
}
}
}
impl CodeCache for DenoCompileCodeCache {
fn get_sync(
&self,
specifier: &Url,
specifier: &ModuleSpecifier,
code_cache_type: CodeCacheType,
source_hash: u64,
) -> Option<Vec<u8>> {
@ -118,7 +106,7 @@ impl CodeCache for DenoCompileCodeCache {
fn set_sync(
&self,
specifier: Url,
specifier: ModuleSpecifier,
code_cache_type: CodeCacheType,
source_hash: u64,
bytes: &[u8],
@ -164,6 +152,23 @@ impl CodeCache for DenoCompileCodeCache {
}
}
// CLI-side extensions over the core `CodeCache` trait.
impl CliCodeCache for DenoCompileCodeCache {
  // The cache is considered enabled only while the active strategy has not
  // yet raised its `is_finished` flag.
  fn enabled(&self) -> bool {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
      CodeCacheStrategy::SubsequentRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
    }
  }

  // Upcast to the `deno_core` trait object; just moves the Arc, no clone.
  fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache> {
    self
  }
}
type CodeCacheKey = (String, CodeCacheType);
struct FirstRunCodeCacheData {
@ -211,7 +216,7 @@ struct SubsequentRunCodeCacheStrategy {
impl SubsequentRunCodeCacheStrategy {
fn take_from_cache(
&self,
specifier: &Url,
specifier: &ModuleSpecifier,
code_cache_type: CodeCacheType,
source_hash: u64,
) -> Option<Vec<u8>> {
@ -390,6 +395,8 @@ fn deserialize_with_reader<T: Read>(
#[cfg(test)]
mod test {
use std::fs::File;
use test_util::TempDir;
use super::*;
@ -456,8 +463,8 @@ mod test {
fn code_cache() {
let temp_dir = TempDir::new();
let file_path = temp_dir.path().join("cache.bin").to_path_buf();
let url1 = Url::parse("https://deno.land/example1.js").unwrap();
let url2 = Url::parse("https://deno.land/example2.js").unwrap();
let url1 = ModuleSpecifier::parse("https://deno.land/example1.js").unwrap();
let url2 = ModuleSpecifier::parse("https://deno.land/example2.js").unwrap();
// first run
{
let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234);

View file

@ -0,0 +1,884 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use std::time::Duration;
use std::time::SystemTime;
use deno_runtime::deno_fs::AccessCheckCb;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::FsDirEntry;
use deno_runtime::deno_fs::FsFileType;
use deno_runtime::deno_fs::OpenOptions;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::File;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_io::fs::FsResult;
use deno_runtime::deno_io::fs::FsStat;
use sys_traits::boxed::BoxedFsDirEntry;
use sys_traits::boxed::BoxedFsMetadataValue;
use sys_traits::boxed::FsMetadataBoxed;
use sys_traits::boxed::FsReadDirBoxed;
use sys_traits::FsCopy;
use sys_traits::FsMetadata;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::FileBackedVfsDirEntry;
use super::virtual_fs::FileBackedVfsFile;
use super::virtual_fs::FileBackedVfsMetadata;
use super::virtual_fs::VfsFileSubDataKind;
/// File system implementation for `deno compile` binaries that layers an
/// embedded, read-only virtual file system (VFS) over the real OS file
/// system. Cloning is cheap: it only bumps the inner `Arc`.
#[derive(Debug, Clone)]
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
impl DenoCompileFileSystem {
  /// Wraps the given file-backed VFS.
  pub fn new(vfs: Arc<FileBackedVfs>) -> Self {
    Self(vfs)
  }

  // Guard used by mutating operations: the embedded VFS is read-only, so
  // any write-style operation targeting a path inside it is rejected with
  // `FsError::NotSupported`.
  fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {
    if self.0.is_path_within(path) {
      Err(FsError::NotSupported)
    } else {
      Ok(())
    }
  }

  // Copies a file that lives inside the VFS out to a real path on disk,
  // returning the number of bytes written. Reads the raw (untranspiled)
  // sub-data of the VFS entry and writes it with create+truncate semantics.
  fn copy_to_real_path(
    &self,
    oldpath: &Path,
    newpath: &Path,
  ) -> std::io::Result<u64> {
    let old_file = self.0.file_entry(oldpath)?;
    let old_file_bytes =
      self.0.read_file_all(old_file, VfsFileSubDataKind::Raw)?;
    let len = old_file_bytes.len() as u64;
    RealFs
      .write_file_sync(
        newpath,
        OpenOptions {
          read: false,
          write: true,
          create: true,
          truncate: true,
          append: false,
          create_new: false,
          mode: None,
        },
        None,
        &old_file_bytes,
      )
      .map_err(|err| err.into_io_error())?;
    Ok(len)
  }
}
// Hybrid dispatch: paths inside the embedded VFS are served from the
// in-binary data via `FileBackedVfs`; all other paths delegate to `RealFs`.
// Mutating operations that target the VFS fail up front through
// `error_if_in_vfs` because the embedded data is read-only. The async
// variants mirror the sync ones and mostly delegate to `RealFs`'s async
// methods.
#[async_trait::async_trait(?Send)]
impl FileSystem for DenoCompileFileSystem {
  fn cwd(&self) -> FsResult<PathBuf> {
    RealFs.cwd()
  }

  fn tmp_dir(&self) -> FsResult<PathBuf> {
    RealFs.tmp_dir()
  }

  fn chdir(&self, path: &Path) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.chdir(path)
  }

  fn umask(&self, mask: Option<u32>) -> FsResult<u32> {
    RealFs.umask(mask)
  }

  // Opening a VFS path ignores `options` and `access_check`: VFS files are
  // read-only and fully embedded, so the VFS decides how they are served.
  fn open_sync(
    &self,
    path: &Path,
    options: OpenOptions,
    access_check: Option<AccessCheckCb>,
  ) -> FsResult<Rc<dyn File>> {
    if self.0.is_path_within(path) {
      Ok(Rc::new(self.0.open_file(path)?))
    } else {
      RealFs.open_sync(path, options, access_check)
    }
  }
  async fn open_async<'a>(
    &'a self,
    path: PathBuf,
    options: OpenOptions,
    access_check: Option<AccessCheckCb<'a>>,
  ) -> FsResult<Rc<dyn File>> {
    if self.0.is_path_within(&path) {
      Ok(Rc::new(self.0.open_file(&path)?))
    } else {
      RealFs.open_async(path, options, access_check).await
    }
  }

  fn mkdir_sync(
    &self,
    path: &Path,
    recursive: bool,
    mode: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.mkdir_sync(path, recursive, mode)
  }
  async fn mkdir_async(
    &self,
    path: PathBuf,
    recursive: bool,
    mode: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.mkdir_async(path, recursive, mode).await
  }

  fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.chmod_sync(path, mode)
  }
  async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.chmod_async(path, mode).await
  }

  fn chown_sync(
    &self,
    path: &Path,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.chown_sync(path, uid, gid)
  }
  async fn chown_async(
    &self,
    path: PathBuf,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.chown_async(path, uid, gid).await
  }

  fn lchown_sync(
    &self,
    path: &Path,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.lchown_sync(path, uid, gid)
  }

  async fn lchown_async(
    &self,
    path: PathBuf,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.lchown_async(path, uid, gid).await
  }

  fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.remove_sync(path, recursive)
  }
  async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.remove_async(path, recursive).await
  }

  // Copying *out of* the VFS onto the real disk is allowed; copying *into*
  // the VFS is not (checked via the destination path).
  fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
    self.error_if_in_vfs(newpath)?;
    if self.0.is_path_within(oldpath) {
      self
        .copy_to_real_path(oldpath, newpath)
        .map(|_| ())
        .map_err(FsError::Io)
    } else {
      RealFs.copy_file_sync(oldpath, newpath)
    }
  }
  async fn copy_file_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&newpath)?;
    if self.0.is_path_within(&oldpath) {
      // The VFS copy is synchronous work, so run it on a blocking task.
      let fs = self.clone();
      tokio::task::spawn_blocking(move || {
        fs.copy_to_real_path(&oldpath, &newpath)
          .map(|_| ())
          .map_err(FsError::Io)
      })
      .await?
    } else {
      RealFs.copy_file_async(oldpath, newpath).await
    }
  }

  fn cp_sync(&self, from: &Path, to: &Path) -> FsResult<()> {
    self.error_if_in_vfs(to)?;

    RealFs.cp_sync(from, to)
  }
  async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> {
    self.error_if_in_vfs(&to)?;

    RealFs.cp_async(from, to).await
  }

  fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
    if self.0.is_path_within(path) {
      Ok(self.0.stat(path)?.as_fs_stat())
    } else {
      RealFs.stat_sync(path)
    }
  }
  async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
    if self.0.is_path_within(&path) {
      Ok(self.0.stat(&path)?.as_fs_stat())
    } else {
      RealFs.stat_async(path).await
    }
  }

  fn lstat_sync(&self, path: &Path) -> FsResult<FsStat> {
    if self.0.is_path_within(path) {
      Ok(self.0.lstat(path)?.as_fs_stat())
    } else {
      RealFs.lstat_sync(path)
    }
  }
  async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
    if self.0.is_path_within(&path) {
      Ok(self.0.lstat(&path)?.as_fs_stat())
    } else {
      RealFs.lstat_async(path).await
    }
  }

  fn realpath_sync(&self, path: &Path) -> FsResult<PathBuf> {
    if self.0.is_path_within(path) {
      Ok(self.0.canonicalize(path)?)
    } else {
      RealFs.realpath_sync(path)
    }
  }
  async fn realpath_async(&self, path: PathBuf) -> FsResult<PathBuf> {
    if self.0.is_path_within(&path) {
      Ok(self.0.canonicalize(&path)?)
    } else {
      RealFs.realpath_async(path).await
    }
  }

  fn read_dir_sync(&self, path: &Path) -> FsResult<Vec<FsDirEntry>> {
    if self.0.is_path_within(path) {
      Ok(self.0.read_dir(path)?)
    } else {
      RealFs.read_dir_sync(path)
    }
  }
  async fn read_dir_async(&self, path: PathBuf) -> FsResult<Vec<FsDirEntry>> {
    if self.0.is_path_within(&path) {
      Ok(self.0.read_dir(&path)?)
    } else {
      RealFs.read_dir_async(path).await
    }
  }

  // rename/link/symlink reject a VFS path on *either* side since both would
  // mutate it (or create links into read-only data).
  fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
    self.error_if_in_vfs(oldpath)?;
    self.error_if_in_vfs(newpath)?;
    RealFs.rename_sync(oldpath, newpath)
  }
  async fn rename_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&oldpath)?;
    self.error_if_in_vfs(&newpath)?;
    RealFs.rename_async(oldpath, newpath).await
  }

  fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
    self.error_if_in_vfs(oldpath)?;
    self.error_if_in_vfs(newpath)?;
    RealFs.link_sync(oldpath, newpath)
  }
  async fn link_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&oldpath)?;
    self.error_if_in_vfs(&newpath)?;
    RealFs.link_async(oldpath, newpath).await
  }

  fn symlink_sync(
    &self,
    oldpath: &Path,
    newpath: &Path,
    file_type: Option<FsFileType>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(oldpath)?;
    self.error_if_in_vfs(newpath)?;
    RealFs.symlink_sync(oldpath, newpath, file_type)
  }
  async fn symlink_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
    file_type: Option<FsFileType>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&oldpath)?;
    self.error_if_in_vfs(&newpath)?;
    RealFs.symlink_async(oldpath, newpath, file_type).await
  }

  fn read_link_sync(&self, path: &Path) -> FsResult<PathBuf> {
    if self.0.is_path_within(path) {
      Ok(self.0.read_link(path)?)
    } else {
      RealFs.read_link_sync(path)
    }
  }
  async fn read_link_async(&self, path: PathBuf) -> FsResult<PathBuf> {
    if self.0.is_path_within(&path) {
      Ok(self.0.read_link(&path)?)
    } else {
      RealFs.read_link_async(path).await
    }
  }

  fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.truncate_sync(path, len)
  }
  async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.truncate_async(path, len).await
  }

  fn utime_sync(
    &self,
    path: &Path,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.utime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
  }
  async fn utime_async(
    &self,
    path: PathBuf,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs
      .utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
      .await
  }

  fn lutime_sync(
    &self,
    path: &Path,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.lutime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
  }
  async fn lutime_async(
    &self,
    path: PathBuf,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs
      .lutime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
      .await
  }
}
// `sys_traits` adapters: bridge this file system into the generic
// `sys_traits` API by delegating to the `FileSystem` methods above and
// converting `FsError` into `std::io::Error`.

impl sys_traits::BaseFsHardLink for DenoCompileFileSystem {
  #[inline]
  fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
    self.link_sync(src, dst).map_err(|err| err.into_io_error())
  }
}

impl sys_traits::BaseFsRead for DenoCompileFileSystem {
  #[inline]
  fn base_fs_read(&self, path: &Path) -> std::io::Result<Cow<'static, [u8]>> {
    self
      .read_file_sync(path, None)
      .map_err(|err| err.into_io_error())
  }
}
// Metadata for embedded VFS entries. Only the file type and length are real;
// timestamps are not recorded in the VFS (they error), and the unix-style
// fields (dev/ino/mode/uid/...) are stubbed to 0/false.
impl sys_traits::FsMetadataValue for FileBackedVfsMetadata {
  fn file_type(&self) -> sys_traits::FileType {
    self.file_type
  }

  fn len(&self) -> u64 {
    self.len
  }

  fn accessed(&self) -> std::io::Result<SystemTime> {
    Err(not_supported("accessed time"))
  }

  fn created(&self) -> std::io::Result<SystemTime> {
    Err(not_supported("created time"))
  }

  fn changed(&self) -> std::io::Result<SystemTime> {
    Err(not_supported("changed time"))
  }

  fn modified(&self) -> std::io::Result<SystemTime> {
    Err(not_supported("modified time"))
  }

  fn dev(&self) -> std::io::Result<u64> {
    Ok(0)
  }

  fn ino(&self) -> std::io::Result<u64> {
    Ok(0)
  }

  fn mode(&self) -> std::io::Result<u32> {
    Ok(0)
  }

  fn nlink(&self) -> std::io::Result<u64> {
    Ok(0)
  }

  fn uid(&self) -> std::io::Result<u32> {
    Ok(0)
  }

  fn gid(&self) -> std::io::Result<u32> {
    Ok(0)
  }

  fn rdev(&self) -> std::io::Result<u64> {
    Ok(0)
  }

  fn blksize(&self) -> std::io::Result<u64> {
    Ok(0)
  }

  fn blocks(&self) -> std::io::Result<u64> {
    Ok(0)
  }

  fn is_block_device(&self) -> std::io::Result<bool> {
    Ok(false)
  }

  fn is_char_device(&self) -> std::io::Result<bool> {
    Ok(false)
  }

  fn is_fifo(&self) -> std::io::Result<bool> {
    Ok(false)
  }

  fn is_socket(&self) -> std::io::Result<bool> {
    Ok(false)
  }

  fn file_attributes(&self) -> std::io::Result<u32> {
    Ok(0)
  }
}
/// Builds the `Unsupported` I/O error returned for capabilities that an
/// embedded deno compile file (VFS entry) cannot provide.
fn not_supported(name: &str) -> std::io::Error {
  let message =
    format!("{name} is not supported for an embedded deno compile file");
  std::io::Error::new(ErrorKind::Unsupported, message)
}
// Directory-entry adapter for VFS listings: name and type come straight from
// the stored metadata; the full path is reconstructed from the parent path.
impl sys_traits::FsDirEntry for FileBackedVfsDirEntry {
  type Metadata = BoxedFsMetadataValue;

  fn file_name(&self) -> Cow<std::ffi::OsStr> {
    Cow::Borrowed(self.metadata.name.as_ref())
  }

  fn file_type(&self) -> std::io::Result<sys_traits::FileType> {
    Ok(self.metadata.file_type)
  }

  // Metadata is cloned into a boxed value so the entry stays independent of
  // the iterator that produced it.
  fn metadata(&self) -> std::io::Result<Self::Metadata> {
    Ok(BoxedFsMetadataValue(Box::new(self.metadata.clone())))
  }

  fn path(&self) -> Cow<Path> {
    Cow::Owned(self.parent_path.join(&self.metadata.name))
  }
}
impl sys_traits::BaseFsReadDir for DenoCompileFileSystem {
  type ReadDirEntry = BoxedFsDirEntry;

  // Lists a directory: VFS paths iterate the embedded entries (with
  // metadata), real paths fall through to the OS via `RealSys`.
  fn base_fs_read_dir(
    &self,
    path: &Path,
  ) -> std::io::Result<
    Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
  > {
    if self.0.is_path_within(path) {
      let entries = self.0.read_dir_with_metadata(path)?;
      Ok(Box::new(
        entries.map(|entry| Ok(BoxedFsDirEntry::new(entry))),
      ))
    } else {
      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
      sys_traits::impls::RealSys.fs_read_dir_boxed(path)
    }
  }
}
// Thin `sys_traits` delegations: reads dispatch to the VFS when the path is
// embedded, mutations go through the guarded `FileSystem` methods above (and
// therefore fail with `NotSupported` inside the VFS).

impl sys_traits::BaseFsCanonicalize for DenoCompileFileSystem {
  #[inline]
  fn base_fs_canonicalize(&self, path: &Path) -> std::io::Result<PathBuf> {
    self.realpath_sync(path).map_err(|err| err.into_io_error())
  }
}

impl sys_traits::BaseFsMetadata for DenoCompileFileSystem {
  type Metadata = BoxedFsMetadataValue;

  // stat (follows symlinks)
  #[inline]
  fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
    if self.0.is_path_within(path) {
      Ok(BoxedFsMetadataValue::new(self.0.stat(path)?))
    } else {
      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
      sys_traits::impls::RealSys.fs_metadata_boxed(path)
    }
  }

  // lstat (does not follow symlinks)
  #[inline]
  fn base_fs_symlink_metadata(
    &self,
    path: &Path,
  ) -> std::io::Result<Self::Metadata> {
    if self.0.is_path_within(path) {
      Ok(BoxedFsMetadataValue::new(self.0.lstat(path)?))
    } else {
      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
      sys_traits::impls::RealSys.fs_symlink_metadata_boxed(path)
    }
  }
}

impl sys_traits::BaseFsCopy for DenoCompileFileSystem {
  #[inline]
  fn base_fs_copy(&self, from: &Path, to: &Path) -> std::io::Result<u64> {
    // Destination must be outside the VFS; a VFS source is copied out to disk.
    self
      .error_if_in_vfs(to)
      .map_err(|err| err.into_io_error())?;
    if self.0.is_path_within(from) {
      self.copy_to_real_path(from, to)
    } else {
      #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
      sys_traits::impls::RealSys.fs_copy(from, to)
    }
  }
}

impl sys_traits::BaseFsCloneFile for DenoCompileFileSystem {
  fn base_fs_clone_file(
    &self,
    _from: &Path,
    _to: &Path,
  ) -> std::io::Result<()> {
    // will cause a fallback in the code that uses this
    Err(not_supported("cloning files"))
  }
}

impl sys_traits::BaseFsCreateDir for DenoCompileFileSystem {
  #[inline]
  fn base_fs_create_dir(
    &self,
    path: &Path,
    options: &sys_traits::CreateDirOptions,
  ) -> std::io::Result<()> {
    self
      .mkdir_sync(path, options.recursive, options.mode)
      .map_err(|err| err.into_io_error())
  }
}

impl sys_traits::BaseFsRemoveFile for DenoCompileFileSystem {
  #[inline]
  fn base_fs_remove_file(&self, path: &Path) -> std::io::Result<()> {
    self
      .remove_sync(path, false)
      .map_err(|err| err.into_io_error())
  }
}

impl sys_traits::BaseFsRename for DenoCompileFileSystem {
  #[inline]
  fn base_fs_rename(&self, from: &Path, to: &Path) -> std::io::Result<()> {
    self
      .rename_sync(from, to)
      .map_err(|err| err.into_io_error())
  }
}
/// An open-file handle that is either a real OS file or a read-only embedded
/// VFS file. The trait impls below delegate to the real file and degrade
/// gracefully for VFS files: raw handles are unavailable (`None`), sync and
/// permission changes are no-ops, and write/lock/len/time mutations return
/// the `not_supported` error.
pub enum FsFileAdapter {
  Real(sys_traits::impls::RealFsFile),
  Vfs(FileBackedVfsFile),
}

impl sys_traits::FsFile for FsFileAdapter {}

impl sys_traits::FsFileAsRaw for FsFileAdapter {
  #[cfg(windows)]
  fn fs_file_as_raw_handle(&self) -> Option<std::os::windows::io::RawHandle> {
    match self {
      Self::Real(file) => file.fs_file_as_raw_handle(),
      Self::Vfs(_) => None,
    }
  }

  #[cfg(unix)]
  fn fs_file_as_raw_fd(&self) -> Option<std::os::fd::RawFd> {
    match self {
      Self::Real(file) => file.fs_file_as_raw_fd(),
      Self::Vfs(_) => None,
    }
  }
}

impl sys_traits::FsFileSyncData for FsFileAdapter {
  fn fs_file_sync_data(&mut self) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_sync_data(),
      // Nothing to flush for an in-binary file.
      Self::Vfs(_) => Ok(()),
    }
  }
}

impl sys_traits::FsFileSyncAll for FsFileAdapter {
  fn fs_file_sync_all(&mut self) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_sync_all(),
      Self::Vfs(_) => Ok(()),
    }
  }
}

impl sys_traits::FsFileSetPermissions for FsFileAdapter {
  #[inline]
  fn fs_file_set_permissions(&mut self, mode: u32) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_set_permissions(mode),
      Self::Vfs(_) => Ok(()),
    }
  }
}

impl std::io::Read for FsFileAdapter {
  #[inline]
  fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
    match self {
      Self::Real(file) => file.read(buf),
      Self::Vfs(file) => file.read_to_buf(buf),
    }
  }
}

impl std::io::Seek for FsFileAdapter {
  fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
    match self {
      Self::Real(file) => file.seek(pos),
      Self::Vfs(file) => file.seek(pos),
    }
  }
}

impl std::io::Write for FsFileAdapter {
  #[inline]
  fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
    match self {
      Self::Real(file) => file.write(buf),
      Self::Vfs(_) => Err(not_supported("writing files")),
    }
  }

  #[inline]
  fn flush(&mut self) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.flush(),
      Self::Vfs(_) => Err(not_supported("writing files")),
    }
  }
}

impl sys_traits::FsFileSetLen for FsFileAdapter {
  #[inline]
  fn fs_file_set_len(&mut self, len: u64) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_set_len(len),
      Self::Vfs(_) => Err(not_supported("setting file length")),
    }
  }
}

impl sys_traits::FsFileSetTimes for FsFileAdapter {
  fn fs_file_set_times(
    &mut self,
    times: sys_traits::FsFileTimes,
  ) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_set_times(times),
      Self::Vfs(_) => Err(not_supported("setting file times")),
    }
  }
}

impl sys_traits::FsFileLock for FsFileAdapter {
  fn fs_file_lock(
    &mut self,
    mode: sys_traits::FsFileLockMode,
  ) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_lock(mode),
      Self::Vfs(_) => Err(not_supported("locking files")),
    }
  }

  fn fs_file_try_lock(
    &mut self,
    mode: sys_traits::FsFileLockMode,
  ) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_try_lock(mode),
      Self::Vfs(_) => Err(not_supported("locking files")),
    }
  }

  fn fs_file_unlock(&mut self) -> std::io::Result<()> {
    match self {
      Self::Real(file) => file.fs_file_unlock(),
      Self::Vfs(_) => Err(not_supported("unlocking files")),
    }
  }
}

impl sys_traits::FsFileIsTerminal for FsFileAdapter {
  #[inline]
  fn fs_file_is_terminal(&self) -> bool {
    match self {
      Self::Real(file) => file.fs_file_is_terminal(),
      // An embedded file is never a TTY.
      Self::Vfs(_) => false,
    }
  }
}
impl sys_traits::BaseFsOpen for DenoCompileFileSystem {
type File = FsFileAdapter;
fn base_fs_open(
&self,
path: &Path,
options: &sys_traits::OpenOptions,
) -> std::io::Result<Self::File> {
if self.0.is_path_within(path) {
Ok(FsFileAdapter::Vfs(self.0.open_file(path)?))
} else {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
Ok(FsFileAdapter::Real(
sys_traits::impls::RealSys.base_fs_open(path, options)?,
))
}
}
}
impl sys_traits::BaseFsSymlinkDir for DenoCompileFileSystem {
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
self
.symlink_sync(src, dst, Some(FsFileType::Directory))
.map_err(|err| err.into_io_error())
}
}
impl sys_traits::SystemRandom for DenoCompileFileSystem {
#[inline]
fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.sys_random(buf)
}
}
impl sys_traits::SystemTimeNow for DenoCompileFileSystem {
#[inline]
fn sys_time_now(&self) -> SystemTime {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.sys_time_now()
}
}
impl sys_traits::ThreadSleep for DenoCompileFileSystem {
  /// Blocks the current thread for `dur` using the host system clock.
  #[inline]
  fn thread_sleep(&self, dur: Duration) {
    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
    let real = sys_traits::impls::RealSys;
    real.thread_sleep(dur)
  }
}
impl sys_traits::EnvCurrentDir for DenoCompileFileSystem {
  /// Returns the process's real current working directory.
  fn env_current_dir(&self) -> std::io::Result<PathBuf> {
    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
    let real = sys_traits::impls::RealSys;
    real.env_current_dir()
  }
}
impl sys_traits::BaseEnvVar for DenoCompileFileSystem {
  /// Reads an environment variable from the real process environment.
  fn base_env_var_os(
    &self,
    key: &std::ffi::OsStr,
  ) -> Option<std::ffi::OsString> {
    #[allow(clippy::disallowed_types)] // ok because we're implementing the fs
    let real = sys_traits::impls::RealSys;
    real.base_env_var_os(key)
  }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,786 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;
use capacity_builder::BytesAppendable;
use deno_ast::swc::common::source_map;
use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use indexmap::IndexMap;
use super::binary::Metadata;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileSystemCaseSensitivity;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VirtualDirectoryEntries;
use crate::standalone::virtual_fs::VirtualDirectory;
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
/// Serializes the embedded data section of a `deno compile` binary.
///
/// Binary format:
/// * d3n0l4nd
/// * <metadata_len><metadata>
/// * <npm_snapshot_len><npm_snapshot>
/// * <remote_modules>
/// * <vfs_headers_len><vfs_headers>
/// * <vfs_file_data_len><vfs_file_data>
/// * <source_map_data>
/// * d3n0l4nd
///
/// The section order here must be kept in sync with
/// `deserialize_binary_data_section`, which reads the sections back
/// in the same order.
pub fn serialize_binary_data_section(
  metadata: &Metadata,
  npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
  remote_modules: &RemoteModulesStoreBuilder,
  source_map_store: &SourceMapStore,
  vfs: &BuiltVfs,
) -> Result<Vec<u8>, AnyError> {
  let metadata = serde_json::to_string(metadata)?;
  // an absent snapshot is stored as a zero-length payload
  let npm_snapshot =
    npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
  let serialized_vfs = serde_json::to_string(&vfs.entries)?;
  let bytes = capacity_builder::BytesBuilder::build(|builder| {
    builder.append(MAGIC_BYTES);
    // 1. Metadata (JSON, u64 length-prefixed)
    {
      builder.append_le(metadata.len() as u64);
      builder.append(&metadata);
    }
    // 2. Npm snapshot (custom binary format, u64 length-prefixed)
    {
      builder.append_le(npm_snapshot.len() as u64);
      builder.append(&npm_snapshot);
    }
    // 3. Remote modules (layout defined by RemoteModulesStoreBuilder::write)
    {
      remote_modules.write(builder);
    }
    // 4. VFS: JSON directory headers, then the concatenated file contents
    {
      builder.append_le(serialized_vfs.len() as u64);
      builder.append(&serialized_vfs);
      let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
      builder.append_le(vfs_bytes_len);
      for file in &vfs.files {
        builder.append(file);
      }
    }
    // 5. Source maps: entry count, then u32 length-prefixed
    // (specifier, source map) pairs
    {
      builder.append_le(source_map_store.data.len() as u32);
      for (specifier, source_map) in &source_map_store.data {
        builder.append_le(specifier.len() as u32);
        builder.append(specifier);
        builder.append_le(source_map.len() as u32);
        builder.append(source_map.as_ref());
      }
    }
    // write the magic bytes at the end so we can use it
    // to make sure we've deserialized correctly
    builder.append(MAGIC_BYTES);
  })?;
  Ok(bytes)
}
/// The fully parsed contents of a binary's embedded data section,
/// borrowing file data directly from the `'static` input buffer.
pub struct DeserializedDataSection {
  pub metadata: Metadata,
  /// `None` when no npm snapshot was embedded in the binary.
  pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
  pub remote_modules: RemoteModulesStore,
  pub source_maps: SourceMapStore,
  pub vfs_root_entries: VirtualDirectoryEntries,
  /// Concatenated contents of all VFS files; entries index into this.
  pub vfs_files_data: &'static [u8],
}
/// Parses the data section written by `serialize_binary_data_section`.
///
/// Returns `Ok(None)` when `data` does not start with the magic bytes
/// (i.e. there is no embedded section). Sections are read in the same
/// order they were written; data is borrowed, not copied.
pub fn deserialize_binary_data_section(
  data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
  // Consumes the magic-bytes prefix, reporting whether it matched.
  fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
    if input.len() < MAGIC_BYTES.len() {
      bail!("Unexpected end of data. Could not find magic bytes.");
    }
    let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
    if magic_bytes != MAGIC_BYTES {
      return Ok((input, false));
    }
    Ok((input, true))
  }

  // Reads one (specifier, source map) pair.
  #[allow(clippy::type_complexity)]
  fn read_source_map_entry(
    input: &[u8],
  ) -> Result<(&[u8], (Cow<str>, &[u8])), AnyError> {
    let (input, specifier) = read_string_lossy(input)?;
    let (input, source_map) = read_bytes_with_u32_len(input)?;
    Ok((input, (specifier, source_map)))
  }

  let (input, found) = read_magic_bytes(data)?;
  if !found {
    return Ok(None);
  }
  // 1. Metadata
  let (input, data) =
    read_bytes_with_u64_len(input).context("reading metadata")?;
  let metadata: Metadata =
    serde_json::from_slice(data).context("deserializing metadata")?;
  // 2. Npm snapshot (zero-length payload means none was embedded)
  let (input, data) =
    read_bytes_with_u64_len(input).context("reading npm snapshot")?;
  let npm_snapshot = if data.is_empty() {
    None
  } else {
    Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
  };
  // 3. Remote modules
  let (input, remote_modules) =
    RemoteModulesStore::build(input).context("deserializing remote modules")?;
  // 4. VFS: JSON directory headers, then raw file contents
  let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
  let vfs_root_entries: VirtualDirectoryEntries =
    serde_json::from_slice(data).context("deserializing vfs data")?;
  let (input, vfs_files_data) =
    read_bytes_with_u64_len(input).context("reading vfs files data")?;
  // 5. Source maps
  let (mut input, source_map_data_len) = read_u32_as_usize(input)?;
  let mut source_maps = SourceMapStore::with_capacity(source_map_data_len);
  for _ in 0..source_map_data_len {
    let (current_input, (specifier, source_map)) =
      read_source_map_entry(input)?;
    input = current_input;
    source_maps.add(specifier, Cow::Borrowed(source_map));
  }
  // finally ensure we read the magic bytes at the end
  let (_input, found) = read_magic_bytes(input)?;
  if !found {
    bail!("Could not find magic bytes at the end of the data.");
  }
  Ok(Some(DeserializedDataSection {
    metadata,
    npm_snapshot,
    remote_modules,
    source_maps,
    vfs_root_entries,
    vfs_files_data,
  }))
}
/// Accumulates remote modules and redirects for serialization,
/// tracking byte lengths as entries are added.
#[derive(Default)]
pub struct RemoteModulesStoreBuilder {
  /// (specifier, byte offset of this module's entry in the data section)
  specifiers: Vec<(String, u64)>,
  /// (media type, module bytes, optional transpiled bytes)
  data: Vec<(MediaType, Vec<u8>, Option<Vec<u8>>)>,
  /// Running byte length of the serialized `data` entries.
  data_byte_len: u64,
  /// (from url, to url) redirect pairs.
  redirects: Vec<(String, String)>,
  /// Running byte length of the serialized redirect entries.
  redirects_len: u64,
}
impl RemoteModulesStoreBuilder {
  /// Queues a remote module's bytes (and optional transpiled bytes),
  /// recording the byte offset its entry will have in the data section.
  pub fn add(
    &mut self,
    specifier: &Url,
    media_type: MediaType,
    data: Vec<u8>,
    maybe_transpiled: Option<Vec<u8>>,
  ) {
    log::debug!("Adding '{}' ({})", specifier, media_type);
    let specifier = specifier.to_string();
    self.specifiers.push((specifier, self.data_byte_len));
    let maybe_transpiled_len = match &maybe_transpiled {
      // data length (4 bytes), data
      Some(data) => 4 + data.len() as u64,
      None => 0,
    };
    // media type (1 byte), data length (4 bytes), data, has transpiled (1 byte), transpiled length
    self.data_byte_len += 1 + 4 + data.len() as u64 + 1 + maybe_transpiled_len;
    self.data.push((media_type, data, maybe_transpiled));
  }

  /// Queues redirect pairs, keeping the map's sorted iteration order.
  pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
    self.redirects.reserve(redirects.len());
    for (from, to) in redirects {
      log::debug!("Adding redirect '{}' -> '{}'", from, to);
      let from = from.to_string();
      let to = to.to_string();
      // u32 length prefix + bytes, for each of `from` and `to`
      self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
      self.redirects.push((from, to));
    }
  }

  /// Writes the remote modules section: counts, the specifier->offset
  /// table, the redirect table, a u64 total data length, then each
  /// module's media type, data, and optional transpiled data. Read
  /// back by `RemoteModulesStore::build`.
  fn write<'a, TBytes: capacity_builder::BytesType>(
    &'a self,
    builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
  ) {
    builder.append_le(self.specifiers.len() as u32);
    builder.append_le(self.redirects.len() as u32);
    for (specifier, offset) in &self.specifiers {
      builder.append_le(specifier.len() as u32);
      builder.append(specifier);
      builder.append_le(*offset);
    }
    for (from, to) in &self.redirects {
      builder.append_le(from.len() as u32);
      builder.append(from);
      builder.append_le(to.len() as u32);
      builder.append(to);
    }
    // total byte length of the entries that follow; this recomputes
    // the same per-entry sizes `add` accumulates in `data_byte_len`
    builder.append_le(
      self
        .data
        .iter()
        .map(|(_, data, maybe_transpiled)| {
          1 + 4
            + (data.len() as u64)
            + 1
            + match maybe_transpiled {
              Some(transpiled) => 4 + (transpiled.len() as u64),
              None => 0,
            }
        })
        .sum::<u64>(),
    );
    for (media_type, data, maybe_transpiled) in &self.data {
      builder.append(serialize_media_type(*media_type));
      builder.append_le(data.len() as u32);
      builder.append(data);
      // 1/0 flag byte indicating presence of transpiled data
      if let Some(transpiled) = maybe_transpiled {
        builder.append(1);
        builder.append_le(transpiled.len() as u32);
        builder.append(transpiled);
      } else {
        builder.append(0);
      }
    }
  }
}
/// Source for a module embedded in the binary: either text already
/// known to be valid UTF-8, or raw bytes.
pub enum DenoCompileModuleSource {
  String(&'static str),
  Bytes(Cow<'static, [u8]>),
}
impl DenoCompileModuleSource {
pub fn into_for_v8(self) -> ModuleSourceCode {
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
ModuleSourceCode::Bytes(match data {
Cow::Borrowed(d) => d.into(),
Cow::Owned(d) => d.into_boxed_slice().into(),
})
}
match self {
// todo(https://github.com/denoland/deno_core/pull/943): store whether
// the string is ascii or not ahead of time so we can avoid the is_ascii()
// check in FastString::from_static
Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
Self::Bytes(b) => into_bytes(b),
}
}
}
/// Maps module specifiers to their raw source map bytes,
/// preserving insertion order.
pub struct SourceMapStore {
  data: IndexMap<Cow<'static, str>, Cow<'static, [u8]>>,
}
impl SourceMapStore {
  /// Creates an empty store preallocated for `capacity` entries.
  pub fn with_capacity(capacity: usize) -> Self {
    let data = IndexMap::with_capacity(capacity);
    Self { data }
  }

  /// Records (or replaces) the source map for `specifier`.
  pub fn add(
    &mut self,
    specifier: Cow<'static, str>,
    source_map: Cow<'static, [u8]>,
  ) {
    self.data.insert(specifier, source_map);
  }

  /// Looks up the raw source map bytes for `specifier`, if present.
  pub fn get(&self, specifier: &str) -> Option<&[u8]> {
    Some(self.data.get(specifier)?.as_ref())
  }
}
/// A module's specifier, media type, and raw bytes as stored in the
/// compiled binary.
pub struct DenoCompileModuleData<'a> {
  pub specifier: &'a Url,
  pub media_type: MediaType,
  pub data: Cow<'static, [u8]>,
}
impl<'a> DenoCompileModuleData<'a> {
  /// Resolves the v8 module type and source representation from the
  /// media type: text media types become strings, JSON becomes a JSON
  /// module, wasm stays bytes, and anything else is treated as
  /// JavaScript bytes.
  pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
    // Reinterprets the bytes as a string without re-validating.
    // NOTE(review): relies on the embedded data having been written as
    // valid UTF-8 at compile time — only called for text media types.
    fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
      match data {
        Cow::Borrowed(d) => DenoCompileModuleSource::String(
          // SAFETY: we know this is a valid utf8 string
          unsafe { std::str::from_utf8_unchecked(d) },
        ),
        Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
      }
    }

    let (media_type, source) = match self.media_type {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx => {
        (ModuleType::JavaScript, into_string_unsafe(self.data))
      }
      MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
      MediaType::Wasm => {
        (ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data))
      }
      // just assume javascript if we made it here
      MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
        ModuleType::JavaScript,
        DenoCompileModuleSource::Bytes(self.data),
      ),
    };
    (self.specifier, media_type, source)
  }
}
/// A remote module decoded from the store, including the optional
/// transpiled form of its source.
pub struct RemoteModuleEntry<'a> {
  pub specifier: &'a Url,
  pub media_type: MediaType,
  pub data: Cow<'static, [u8]>,
  pub transpiled_data: Option<Cow<'static, [u8]>>,
}
/// A specifier either points at module data (a byte offset into the
/// files data) or redirects to another specifier.
enum RemoteModulesStoreSpecifierValue {
  Data(usize),
  Redirect(Url),
}
/// Read-only store of the remote modules embedded in the binary.
pub struct RemoteModulesStore {
  specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
  /// Concatenated module entries; `Data` offsets index into this.
  files_data: &'static [u8],
}
impl RemoteModulesStore {
  /// Parses the remote modules section written by
  /// `RemoteModulesStoreBuilder::write`, returning the remaining input
  /// and the populated store.
  fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> {
    // (specifier, byte offset into the files data)
    fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
      let (input, specifier) = read_string_lossy(input)?;
      let specifier = Url::parse(&specifier)?;
      let (input, offset) = read_u64(input)?;
      Ok((input, (specifier, offset)))
    }

    // (from url, to url) redirect pair
    fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
      let (input, from) = read_string_lossy(input)?;
      let from = Url::parse(&from)?;
      let (input, to) = read_string_lossy(input)?;
      let to = Url::parse(&to)?;
      Ok((input, (from, to)))
    }

    // Reads the header table mapping every specifier to either a data
    // offset or a redirect target.
    fn read_headers(
      input: &[u8],
    ) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
    {
      let (input, specifiers_len) = read_u32_as_usize(input)?;
      let (mut input, redirects_len) = read_u32_as_usize(input)?;
      let mut specifiers =
        HashMap::with_capacity(specifiers_len + redirects_len);
      for _ in 0..specifiers_len {
        let (current_input, (specifier, offset)) =
          read_specifier(input).context("reading specifier")?;
        input = current_input;
        specifiers.insert(
          specifier,
          RemoteModulesStoreSpecifierValue::Data(offset as usize),
        );
      }
      for _ in 0..redirects_len {
        let (current_input, (from, to)) = read_redirect(input)?;
        input = current_input;
        specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
      }
      Ok((input, specifiers))
    }

    let (input, specifiers) = read_headers(input)?;
    let (input, files_data) = read_bytes_with_u64_len(input)?;
    Ok((
      input,
      Self {
        specifiers,
        files_data,
      },
    ))
  }

  /// Follows redirects from `specifier` until a specifier with actual
  /// module data is found. Returns `Ok(None)` for unknown specifiers
  /// and errors after more than 10 redirect hops.
  pub fn resolve_specifier<'a>(
    &'a self,
    specifier: &'a Url,
  ) -> Result<Option<&'a Url>, AnyError> {
    let mut count = 0;
    let mut current = specifier;
    loop {
      if count > 10 {
        bail!("Too many redirects resolving '{}'", specifier);
      }
      match self.specifiers.get(current) {
        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
          current = to;
          count += 1;
        }
        Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
          return Ok(Some(current));
        }
        None => {
          return Ok(None);
        }
      }
    }
  }

  /// Looks up the module entry for `original_specifier`, following
  /// redirects (same 10-hop limit as `resolve_specifier`) and decoding
  /// the entry header at the recorded offset. Returns `Ok(None)` for
  /// unknown specifiers.
  pub fn read<'a>(
    &'a self,
    original_specifier: &'a Url,
  ) -> Result<Option<RemoteModuleEntry<'a>>, AnyError> {
    let mut count = 0;
    let mut specifier = original_specifier;
    loop {
      if count > 10 {
        bail!("Too many redirects resolving '{}'", original_specifier);
      }
      match self.specifiers.get(specifier) {
        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
          specifier = to;
          count += 1;
        }
        Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
          // entry layout: media type byte, u32-len data,
          // transpiled flag byte, optional u32-len transpiled data
          let input = &self.files_data[*offset..];
          let (input, media_type_byte) = read_bytes(input, 1)?;
          let media_type = deserialize_media_type(media_type_byte[0])?;
          let (input, data) = read_bytes_with_u32_len(input)?;
          check_has_len(input, 1)?;
          let (input, has_transpiled) = (&input[1..], input[0]);
          let (_, transpiled_data) = match has_transpiled {
            0 => (input, None),
            1 => {
              let (input, data) = read_bytes_with_u32_len(input)?;
              (input, Some(data))
            }
            value => bail!(
              "Invalid transpiled data flag: {}. Compiled data is corrupt.",
              value
            ),
          };
          return Ok(Some(RemoteModuleEntry {
            specifier,
            media_type,
            data: Cow::Borrowed(data),
            transpiled_data: transpiled_data.map(Cow::Borrowed),
          }));
        }
        None => {
          return Ok(None);
        }
      }
    }
  }
}
/// Serializes an npm resolution snapshot to the custom binary format
/// read back by `deserialize_npm_snapshot`.
///
/// Layout: package count + serialized package ids; root package count
/// + (requirement, package index) pairs; then per package a dependency
/// count + (name, package index) pairs. Package indexes refer to the
/// sorted package list. Inputs are sorted first so output is
/// deterministic.
fn serialize_npm_snapshot(
  mut snapshot: SerializedNpmResolutionSnapshot,
) -> Vec<u8> {
  // u32 length-prefixed UTF-8 string
  fn append_string(bytes: &mut Vec<u8>, string: &str) {
    let len = string.len() as u32;
    bytes.extend_from_slice(&len.to_le_bytes());
    bytes.extend_from_slice(string.as_bytes());
  }

  snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
  // map each package id to its index in the sorted list
  let ids_to_stored_ids = snapshot
    .packages
    .iter()
    .enumerate()
    .map(|(i, pkg)| (&pkg.id, i as u32))
    .collect::<HashMap<_, _>>();

  let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
  root_packages.sort();
  let mut bytes = Vec::new();

  bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
  for pkg in &snapshot.packages {
    append_string(&mut bytes, &pkg.id.as_serialized());
  }

  bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
  for (req, id) in root_packages {
    append_string(&mut bytes, &req.to_string());
    let id = ids_to_stored_ids.get(&id).unwrap();
    bytes.extend_from_slice(&id.to_le_bytes());
  }

  // dependency tables, in the same package order as above
  for pkg in &snapshot.packages {
    let deps_len = pkg.dependencies.len() as u32;
    bytes.extend_from_slice(&deps_len.to_le_bytes());
    let mut deps: Vec<_> = pkg.dependencies.iter().collect();
    deps.sort();
    for (req, id) in deps {
      append_string(&mut bytes, req);
      let id = ids_to_stored_ids.get(&id).unwrap();
      bytes.extend_from_slice(&id.to_le_bytes());
    }
  }

  bytes
}
/// Parses the binary format produced by `serialize_npm_snapshot`,
/// rebuilding a validated snapshot. Errors if any bytes remain after
/// the final package.
fn deserialize_npm_snapshot(
  input: &[u8],
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
  // one u32 length-prefixed serialized package id
  fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
    let (input, id) = read_string_lossy(input)?;
    let id = NpmPackageId::from_serialized(&id)?;
    Ok((input, id))
  }

  // (requirement string, package index) -> (PackageReq, NpmPackageId)
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_root_package<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let req = PackageReq::from_str(&req)?;
      let (input, id) = read_u32_as_usize(input)?;
      Ok((input, (req, id_to_npm_id(id)?)))
    }
  }

  // (dependency name, package index) -> (StackString, NpmPackageId)
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_package_dep<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let (input, id) = read_u32_as_usize(input)?;
      let req = StackString::from_cow(req);
      Ok((input, (req, id_to_npm_id(id)?)))
    }
  }

  // Parses one package's dependency table; all non-dependency fields
  // are defaulted since they are not stored in the binary.
  fn parse_package<'a>(
    input: &'a [u8],
    id: NpmPackageId,
    id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
    let (input, deps_len) = read_u32_as_usize(input)?;
    let (input, dependencies) =
      parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
    Ok((
      input,
      SerializedNpmResolutionSnapshotPackage {
        id,
        system: Default::default(),
        dist: Default::default(),
        dependencies,
        optional_dependencies: Default::default(),
        bin: None,
        scripts: Default::default(),
        deprecated: Default::default(),
      },
    ))
  }

  let (input, packages_len) = read_u32_as_usize(input)?;
  // get a hashmap of all the npm package ids to their serialized ids
  let (input, data_ids_to_npm_ids) =
    parse_vec_n_times(input, packages_len, parse_id)
      .context("deserializing id")?;
  let data_id_to_npm_id = |id: usize| {
    data_ids_to_npm_ids
      .get(id)
      .cloned()
      .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
  };

  let (input, root_packages_len) = read_u32_as_usize(input)?;
  let (input, root_packages) = parse_hashmap_n_times(
    input,
    root_packages_len,
    parse_root_package(&data_id_to_npm_id),
  )
  .context("deserializing root package")?;
  let (input, packages) =
    parse_vec_n_times_with_index(input, packages_len, |input, index| {
      parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
    })
    .context("deserializing package")?;

  if !input.is_empty() {
    bail!("Unexpected data left over");
  }

  Ok(
    SerializedNpmResolutionSnapshot {
      packages,
      root_packages,
    }
    // this is ok because we have already verified that all the
    // identifiers found in the snapshot are valid via the
    // npm package id -> npm package id mapping
    .into_valid_unsafe(),
  )
}
/// Encodes a media type as a single byte for the module data section.
/// Must stay in sync with `deserialize_media_type`.
fn serialize_media_type(media_type: MediaType) -> u8 {
  match media_type {
    MediaType::JavaScript => 0,
    MediaType::Jsx => 1,
    MediaType::Mjs => 2,
    MediaType::Cjs => 3,
    MediaType::TypeScript => 4,
    MediaType::Mts => 5,
    MediaType::Cts => 6,
    MediaType::Dts => 7,
    MediaType::Dmts => 8,
    MediaType::Dcts => 9,
    MediaType::Tsx => 10,
    MediaType::Json => 11,
    MediaType::Wasm => 12,
    MediaType::Css => 13,
    MediaType::SourceMap => 14,
    MediaType::Unknown => 15,
  }
}
/// Decodes a media type byte written by `serialize_media_type`,
/// erroring on values outside the known range.
fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
  match value {
    0 => Ok(MediaType::JavaScript),
    1 => Ok(MediaType::Jsx),
    2 => Ok(MediaType::Mjs),
    3 => Ok(MediaType::Cjs),
    4 => Ok(MediaType::TypeScript),
    5 => Ok(MediaType::Mts),
    6 => Ok(MediaType::Cts),
    7 => Ok(MediaType::Dts),
    8 => Ok(MediaType::Dmts),
    9 => Ok(MediaType::Dcts),
    10 => Ok(MediaType::Tsx),
    11 => Ok(MediaType::Json),
    12 => Ok(MediaType::Wasm),
    13 => Ok(MediaType::Css),
    14 => Ok(MediaType::SourceMap),
    15 => Ok(MediaType::Unknown),
    _ => bail!("Unknown media type value: {}", value),
  }
}
/// Runs `parse` exactly `times` times, collecting the key/value pairs
/// into a `HashMap` and returning the remaining input.
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
  let mut entries = HashMap::with_capacity(times);
  for _ in 0..times {
    let (rest, (key, value)) = parse(input)?;
    entries.insert(key, value);
    input = rest;
  }
  Ok((input, entries))
}
/// Runs `parse` exactly `times` times, collecting results into a
/// `Vec` and returning the remaining input.
fn parse_vec_n_times<TResult>(
  input: &[u8],
  times: usize,
  parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
  // delegate to the indexed variant, discarding the index
  parse_vec_n_times_with_index(input, times, |input, _| parse(input))
}
/// Runs `parse` exactly `times` times, passing each iteration's index,
/// collecting results into a `Vec` and returning the remaining input.
fn parse_vec_n_times_with_index<TResult>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
  let mut items = Vec::with_capacity(times);
  for index in 0..times {
    let (rest, item) = parse(input, index)?;
    items.push(item);
    input = rest;
  }
  Ok((input, items))
}
/// Reads a little-endian u64 length prefix, then that many bytes.
/// Returns `(remaining, data)`.
fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
  let (rest, len) = read_u64(input)?;
  read_bytes(rest, len as usize)
}
/// Reads a little-endian u32 length prefix, then that many bytes.
/// Returns `(remaining, data)`.
fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
  let (rest, len) = read_u32_as_usize(input)?;
  read_bytes(rest, len)
}
/// Splits off the first `len` bytes of `input`, returning
/// `(remaining, data)`. Errors if `input` is shorter than `len`.
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
  check_has_len(input, len)?;
  let (data, rest) = input.split_at(len);
  Ok((rest, data))
}
/// Errors when `input` holds fewer than `len` bytes.
#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> {
  if input.len() >= len {
    Ok(())
  } else {
    bail!("Unexpected end of data.")
  }
}
/// Reads a u32 length-prefixed string, substituting U+FFFD for any
/// invalid UTF-8 sequences. Returns `(remaining, string)`.
fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
  let (rest, raw) = read_bytes_with_u32_len(input)?;
  Ok((rest, String::from_utf8_lossy(raw)))
}
/// Reads a little-endian u32 and widens it to `usize`.
/// Returns `(remaining, value)`.
fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
  let (rest, raw) = read_bytes(input, 4)?;
  let value = u32::from_le_bytes(raw.try_into()?);
  Ok((rest, value as usize))
}
/// Reads a little-endian u64. Returns `(remaining, value)`.
fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
  let (rest, raw) = read_bytes(input, 8)?;
  let value = u64::from_le_bytes(raw.try_into()?);
  Ok((rest, value))
}

File diff suppressed because it is too large Load diff

230
cli/sys.rs Normal file
View file

@ -0,0 +1,230 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// todo(dsherret): this should instead use conditional compilation and directly
// surface the underlying implementation.
//
// The problem atm is that there's no way to have conditional compilation for
// denort or the deno binary. We should extract out denort to a separate binary.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use sys_traits::boxed::BoxedFsDirEntry;
use sys_traits::boxed::BoxedFsFile;
use sys_traits::boxed::BoxedFsMetadataValue;
use sys_traits::boxed::FsMetadataBoxed;
use sys_traits::boxed::FsOpenBoxed;
use sys_traits::boxed::FsReadDirBoxed;
use sys_traits::CreateDirOptions;
use crate::standalone::DenoCompileFileSystem;
/// The system implementation used by the CLI: either the real OS
/// (for the `deno` binary) or the embedded compile file system
/// (for `denort`-built binaries).
#[derive(Debug, Clone)]
pub enum CliSys {
  #[allow(dead_code)] // will be dead code for denort
  #[allow(clippy::disallowed_types)] // ok because sys impl
  Real(sys_traits::impls::RealSys),
  #[allow(dead_code)] // will be dead code for deno
  DenoCompile(DenoCompileFileSystem),
}
impl Default for CliSys {
  /// Defaults to the real system implementation.
  fn default() -> Self {
    Self::Real(sys_traits::impls::RealSys)
  }
}
// NOTE(review): empty marker impl — ExtNodeSys appears to be satisfied
// by the other trait impls on CliSys in this file; confirm against
// deno_runtime::deno_node.
impl deno_runtime::deno_node::ExtNodeSys for CliSys {}
impl sys_traits::BaseFsCloneFile for CliSys {
fn base_fs_clone_file(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_clone_file(src, dst),
Self::DenoCompile(sys) => sys.base_fs_clone_file(src, dst),
}
}
}
impl sys_traits::BaseFsSymlinkDir for CliSys {
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_symlink_dir(src, dst),
Self::DenoCompile(sys) => sys.base_fs_symlink_dir(src, dst),
}
}
}
impl sys_traits::BaseFsCopy for CliSys {
fn base_fs_copy(&self, src: &Path, dst: &Path) -> std::io::Result<u64> {
match self {
Self::Real(sys) => sys.base_fs_copy(src, dst),
Self::DenoCompile(sys) => sys.base_fs_copy(src, dst),
}
}
}
impl sys_traits::BaseFsHardLink for CliSys {
fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_hard_link(src, dst),
Self::DenoCompile(sys) => sys.base_fs_hard_link(src, dst),
}
}
}
impl sys_traits::BaseFsRead for CliSys {
fn base_fs_read(&self, p: &Path) -> std::io::Result<Cow<'static, [u8]>> {
match self {
Self::Real(sys) => sys.base_fs_read(p),
Self::DenoCompile(sys) => sys.base_fs_read(p),
}
}
}
impl sys_traits::BaseFsReadDir for CliSys {
  // Boxed so both underlying implementations can share one entry type.
  type ReadDirEntry = BoxedFsDirEntry;

  /// Lists the entries of the directory at `p` via the active system
  /// implementation.
  fn base_fs_read_dir(
    &self,
    p: &Path,
  ) -> std::io::Result<
    Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
  > {
    match self {
      Self::Real(sys) => sys.fs_read_dir_boxed(p),
      Self::DenoCompile(sys) => sys.fs_read_dir_boxed(p),
    }
  }
}
impl sys_traits::BaseFsCanonicalize for CliSys {
fn base_fs_canonicalize(&self, p: &Path) -> std::io::Result<PathBuf> {
match self {
Self::Real(sys) => sys.base_fs_canonicalize(p),
Self::DenoCompile(sys) => sys.base_fs_canonicalize(p),
}
}
}
impl sys_traits::BaseFsMetadata for CliSys {
  // Boxed so both underlying implementations can share one metadata type.
  type Metadata = BoxedFsMetadataValue;

  /// Stats `path`, following symlinks, via the active system
  /// implementation.
  fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
    match self {
      Self::Real(sys) => sys.fs_metadata_boxed(path),
      Self::DenoCompile(sys) => sys.fs_metadata_boxed(path),
    }
  }

  /// Stats `path` without following symlinks, via the active system
  /// implementation.
  fn base_fs_symlink_metadata(
    &self,
    path: &Path,
  ) -> std::io::Result<Self::Metadata> {
    match self {
      Self::Real(sys) => sys.fs_symlink_metadata_boxed(path),
      Self::DenoCompile(sys) => sys.fs_symlink_metadata_boxed(path),
    }
  }
}
impl sys_traits::BaseFsCreateDir for CliSys {
fn base_fs_create_dir(
&self,
p: &Path,
options: &CreateDirOptions,
) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_create_dir(p, options),
Self::DenoCompile(sys) => sys.base_fs_create_dir(p, options),
}
}
}
impl sys_traits::BaseFsOpen for CliSys {
type File = BoxedFsFile;
fn base_fs_open(
&self,
path: &Path,
options: &sys_traits::OpenOptions,
) -> std::io::Result<Self::File> {
match self {
Self::Real(sys) => sys.fs_open_boxed(path, options),
Self::DenoCompile(sys) => sys.fs_open_boxed(path, options),
}
}
}
impl sys_traits::BaseFsRemoveFile for CliSys {
fn base_fs_remove_file(&self, p: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_remove_file(p),
Self::DenoCompile(sys) => sys.base_fs_remove_file(p),
}
}
}
impl sys_traits::BaseFsRename for CliSys {
fn base_fs_rename(&self, old: &Path, new: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_rename(old, new),
Self::DenoCompile(sys) => sys.base_fs_rename(old, new),
}
}
}
impl sys_traits::SystemRandom for CliSys {
fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.sys_random(buf),
Self::DenoCompile(sys) => sys.sys_random(buf),
}
}
}
impl sys_traits::SystemTimeNow for CliSys {
fn sys_time_now(&self) -> std::time::SystemTime {
match self {
Self::Real(sys) => sys.sys_time_now(),
Self::DenoCompile(sys) => sys.sys_time_now(),
}
}
}
impl sys_traits::ThreadSleep for CliSys {
fn thread_sleep(&self, dur: std::time::Duration) {
match self {
Self::Real(sys) => sys.thread_sleep(dur),
Self::DenoCompile(sys) => sys.thread_sleep(dur),
}
}
}
impl sys_traits::EnvCurrentDir for CliSys {
fn env_current_dir(&self) -> std::io::Result<PathBuf> {
match self {
Self::Real(sys) => sys.env_current_dir(),
Self::DenoCompile(sys) => sys.env_current_dir(),
}
}
}
impl sys_traits::BaseEnvVar for CliSys {
fn base_env_var_os(
&self,
key: &std::ffi::OsStr,
) -> Option<std::ffi::OsString> {
match self {
Self::Real(sys) => sys.base_env_var_os(key),
Self::DenoCompile(sys) => sys.base_env_var_os(key),
}
}
}
impl sys_traits::EnvHomeDir for CliSys {
  /// Returns the user's home directory. Always delegates to the real
  /// system, regardless of which variant is active.
  fn env_home_dir(&self) -> Option<PathBuf> {
    #[allow(clippy::disallowed_types)] // ok because sys impl
    sys_traits::impls::RealSys.env_home_dir()
  }
}

View file

@ -25,8 +25,9 @@ use tokio::task::LocalSet;
use tokio_util::sync::CancellationToken;
use crate::node::CliNodeResolver;
use crate::npm::CliManagedNpmResolver;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::npm::ManagedCliNpmResolver;
pub fn get_script_with_args(script: &str, argv: &[String]) -> String {
let additional_args = argv
@ -413,15 +414,15 @@ impl ShellCommand for NodeModulesFileRunCommand {
}
pub fn resolve_custom_commands(
npm_resolver: &CliNpmResolver,
npm_resolver: &dyn CliNpmResolver,
node_resolver: &CliNodeResolver,
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
let mut commands = match npm_resolver {
CliNpmResolver::Byonm(npm_resolver) => {
let mut commands = match npm_resolver.as_inner() {
InnerCliNpmResolverRef::Byonm(npm_resolver) => {
let node_modules_dir = npm_resolver.root_node_modules_path().unwrap();
resolve_npm_commands_from_bin_dir(node_modules_dir)
}
CliNpmResolver::Managed(npm_resolver) => {
InnerCliNpmResolverRef::Managed(npm_resolver) => {
resolve_managed_npm_commands(npm_resolver, node_resolver)?
}
};
@ -520,12 +521,13 @@ fn resolve_execution_path_from_npx_shim(
}
fn resolve_managed_npm_commands(
npm_resolver: &CliManagedNpmResolver,
npm_resolver: &ManagedCliNpmResolver,
node_resolver: &CliNodeResolver,
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
let mut result = HashMap::new();
for id in npm_resolver.resolution().top_level_packages() {
let package_folder = npm_resolver.resolve_pkg_folder_from_pkg_id(&id)?;
let snapshot = npm_resolver.snapshot();
for id in snapshot.top_level_packages() {
let package_folder = npm_resolver.resolve_pkg_folder_from_pkg_id(id)?;
let bin_commands =
node_resolver.resolve_binary_commands(&package_folder)?;
for bin_command in bin_commands {
@ -596,7 +598,7 @@ async fn listen_ctrl_c(kill_signal: KillSignal) {
#[cfg(unix)]
async fn listen_and_forward_all_signals(kill_signal: KillSignal) {
use deno_core::futures::FutureExt;
use deno_runtime::deno_os::signal::SIGNAL_NUMS;
use deno_runtime::signal::SIGNAL_NUMS;
// listen and forward every signal we support
let mut futures = Vec::with_capacity(SIGNAL_NUMS.len());

View file

@ -48,7 +48,6 @@ use crate::util::fs::collect_specifiers;
use crate::util::path::is_script_ext;
use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CreateCustomWorkerError;
mod mitata;
mod reporters;
@ -165,7 +164,7 @@ async fn bench_specifier(
.await
{
Ok(()) => Ok(()),
Err(CreateCustomWorkerError::Core(CoreError::Js(error))) => {
Err(CoreError::Js(error)) => {
sender.send(BenchEvent::UncaughtError(
specifier.to_string(),
Box::new(error),
@ -183,7 +182,7 @@ async fn bench_specifier_inner(
specifier: ModuleSpecifier,
sender: &UnboundedSender<BenchEvent>,
filter: TestFilter,
) -> Result<(), CreateCustomWorkerError> {
) -> Result<(), CoreError> {
let mut worker = worker_factory
.create_custom_worker(
WorkerExecutionMode::Bench,
@ -202,7 +201,7 @@ async fn bench_specifier_inner(
// Ensure that there are no pending exceptions before we start running tests
worker.run_up_to_duration(Duration::from_millis(0)).await?;
worker.dispatch_load_event().map_err(CoreError::Js)?;
worker.dispatch_load_event()?;
let benchmarks = {
let state_rc = worker.js_runtime.op_state();
@ -237,13 +236,11 @@ async fn bench_specifier_inner(
used_only,
names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(),
}))
.map_err(JsErrorBox::from_err)
.map_err(CoreError::JsBox)?;
.map_err(JsErrorBox::from_err)?;
for (desc, function) in benchmarks {
sender
.send(BenchEvent::Wait(desc.id))
.map_err(JsErrorBox::from_err)
.map_err(CoreError::JsBox)?;
.map_err(JsErrorBox::from_err)?;
let call = worker.js_runtime.call(&function);
let result = worker
.js_runtime
@ -252,26 +249,18 @@ async fn bench_specifier_inner(
let scope = &mut worker.js_runtime.handle_scope();
let result = v8::Local::new(scope, result);
let result = serde_v8::from_v8::<BenchResult>(scope, result)
.map_err(JsErrorBox::from_err)
.map_err(CoreError::JsBox)?;
.map_err(JsErrorBox::from_err)?;
sender
.send(BenchEvent::Result(desc.id, result))
.map_err(JsErrorBox::from_err)
.map_err(CoreError::JsBox)?;
.map_err(JsErrorBox::from_err)?;
}
// Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
// event loop to continue beyond what's needed to await results.
worker
.dispatch_beforeunload_event()
.map_err(CoreError::Js)?;
worker
.dispatch_process_beforeexit_event()
.map_err(CoreError::Js)?;
worker.dispatch_unload_event().map_err(CoreError::Js)?;
worker
.dispatch_process_exit_event()
.map_err(CoreError::Js)?;
worker.dispatch_beforeunload_event()?;
worker.dispatch_process_beforeexit_event()?;
worker.dispatch_unload_event()?;
worker.dispatch_process_exit_event()?;
// Ensure the worker has settled so we can catch any remaining unhandled rejections. We don't
// want to wait forever here.

View file

@ -1,10 +1,10 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_lib::version::DENO_VERSION_INFO;
use serde::Serialize;
use super::*;
use crate::tools::test::TestFailureFormatOptions;
use crate::version;
pub trait BenchReporter {
fn report_group_summary(&mut self);
@ -31,7 +31,11 @@ impl Default for JsonReporterOutput {
fn default() -> Self {
Self {
version: JSON_SCHEMA_VERSION,
runtime: format!("{} {}", DENO_VERSION_INFO.user_agent, env!("TARGET")),
runtime: format!(
"{} {}",
version::DENO_VERSION_INFO.user_agent,
env!("TARGET")
),
cpu: mitata::cpu::name(),
benches: vec![],
}
@ -159,7 +163,7 @@ impl BenchReporter for ConsoleReporter {
"{}\n",
colors::gray(format!(
"Runtime | Deno {} ({})",
DENO_VERSION_INFO.deno,
crate::version::DENO_VERSION_INFO.deno,
env!("TARGET")
))
);

View file

@ -13,8 +13,6 @@ use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleLoadError;
use deno_lib::util::hash::FastInsecureHasher;
use deno_semver::npm::NpmPackageNvReference;
use deno_terminal::colors;
use once_cell::sync::Lazy;
use regex::Regex;
@ -29,6 +27,7 @@ use crate::args::TsTypeLib;
use crate::args::TypeCheckMode;
use crate::cache::CacheDBHash;
use crate::cache::Caches;
use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache;
use crate::factory::CliFactory;
use crate::graph_util::maybe_additional_sloppy_imports_message;
@ -113,7 +112,7 @@ pub struct TypeChecker {
module_graph_builder: Arc<ModuleGraphBuilder>,
npm_installer: Option<Arc<NpmInstaller>>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: CliNpmResolver,
npm_resolver: Arc<dyn CliNpmResolver>,
sys: CliSys,
}
@ -147,7 +146,7 @@ impl TypeChecker {
module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<CliNodeResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: CliNpmResolver,
npm_resolver: Arc<dyn CliNpmResolver>,
sys: CliSys,
) -> Self {
Self {
@ -190,29 +189,6 @@ impl TypeChecker {
mut graph: ModuleGraph,
options: CheckOptions,
) -> Result<(Arc<ModuleGraph>, Diagnostics), CheckError> {
fn check_state_hash(resolver: &CliNpmResolver) -> Option<u64> {
match resolver {
CliNpmResolver::Byonm(_) => {
// not feasible and probably slower to compute
None
}
CliNpmResolver::Managed(resolver) => {
// we should probably go further and check all the individual npm packages
let mut package_reqs = resolver.resolution().package_reqs();
package_reqs.sort_by(|a, b| a.0.cmp(&b.0)); // determinism
let mut hasher = FastInsecureHasher::new_without_deno_version();
// ensure the cache gets busted when turning nodeModulesDir on or off
// as this could cause changes in resolution
hasher.write_hashable(resolver.root_node_modules_path().is_some());
for (pkg_req, pkg_nv) in package_reqs {
hasher.write_hashable(&pkg_req);
hasher.write_hashable(&pkg_nv);
}
Some(hasher.finish())
}
}
}
if !options.type_check_mode.is_true() || graph.roots.is_empty() {
return Ok((graph.into(), Default::default()));
}
@ -262,11 +238,9 @@ impl TypeChecker {
maybe_check_hash,
} = get_tsc_roots(
&self.sys,
&self.npm_resolver,
&self.node_resolver,
&graph,
check_js,
check_state_hash(&self.npm_resolver),
self.npm_resolver.check_state_hash(),
type_check_mode,
&ts_config,
);
@ -376,11 +350,8 @@ struct TscRoots {
/// redirects resolved. We need to include all the emittable files in
/// the roots, so they get type checked and optionally emitted,
/// otherwise they would be ignored if only imported into JavaScript.
#[allow(clippy::too_many_arguments)]
fn get_tsc_roots(
sys: &CliSys,
npm_resolver: &CliNpmResolver,
node_resolver: &CliNodeResolver,
graph: &ModuleGraph,
check_js: bool,
npm_cache_state_hash: Option<u64>,
@ -463,7 +434,6 @@ fn get_tsc_roots(
if let Some(hasher) = hasher {
hasher.write_str(module.specifier.as_str());
}
None
}
}
@ -500,33 +470,17 @@ fn get_tsc_roots(
let mut pending = VecDeque::new();
// put in the global types first so that they're resolved before anything else
for (referrer, import) in graph.imports.iter() {
for specifier in import
.dependencies
let get_import_specifiers = || {
graph
.imports
.values()
.flat_map(|i| i.dependencies.values())
.filter_map(|dep| dep.get_type().or_else(|| dep.get_code()))
{
let specifier = graph.resolve(specifier);
if seen.insert(specifier) {
if let Ok(nv_ref) = NpmPackageNvReference::from_specifier(specifier) {
let Some(resolved) =
resolve_npm_nv_ref(npm_resolver, node_resolver, &nv_ref, referrer)
else {
result.missing_diagnostics.push(
tsc::Diagnostic::from_missing_error(
specifier,
None,
maybe_additional_sloppy_imports_message(sys, specifier),
),
);
continue;
};
let mt = MediaType::from_specifier(&resolved);
result.roots.push((resolved, mt));
} else {
pending.push_back((specifier, false));
}
}
};
for specifier in get_import_specifiers() {
let specifier = graph.resolve(specifier);
if seen.insert(specifier) {
pending.push_back((specifier, false));
}
}
@ -647,29 +601,6 @@ fn get_tsc_roots(
result
}
fn resolve_npm_nv_ref(
npm_resolver: &CliNpmResolver,
node_resolver: &CliNodeResolver,
nv_ref: &NpmPackageNvReference,
referrer: &ModuleSpecifier,
) -> Option<ModuleSpecifier> {
let pkg_dir = npm_resolver
.as_managed()
.unwrap()
.resolve_pkg_folder_from_deno_module(nv_ref.nv())
.ok()?;
let resolved = node_resolver
.resolve_package_subpath_from_deno_module(
&pkg_dir,
nv_ref.sub_path(),
Some(referrer),
node_resolver::ResolutionMode::Import,
node_resolver::NodeResolutionKind::Types,
)
.ok()?;
Some(resolved)
}
/// Matches the `@ts-check` pragma.
static TS_CHECK_RE: Lazy<Regex> =
lazy_regex::lazy_regex!(r#"(?i)^\s*@ts-check(?:\s+|$)"#);

View file

@ -18,13 +18,10 @@ use deno_config::glob::PathOrPatternSet;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::serde_json;
use deno_core::sourcemap::SourceMap;
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_error::JsErrorBox;
use deno_resolver::npm::DenoInNpmPackageChecker;
use node_resolver::InNpmPackageChecker;
use regex::Regex;
use text_lines::TextLines;
@ -55,7 +52,7 @@ pub struct CoverageCollector {
#[async_trait::async_trait(?Send)]
impl crate::worker::CoverageCollector for CoverageCollector {
async fn start_collecting(&mut self) -> Result<(), CoreError> {
async fn start_collecting(&mut self) -> Result<(), AnyError> {
self.enable_debugger().await?;
self.enable_profiler().await?;
self
@ -69,7 +66,7 @@ impl crate::worker::CoverageCollector for CoverageCollector {
Ok(())
}
async fn stop_collecting(&mut self) -> Result<(), CoreError> {
async fn stop_collecting(&mut self) -> Result<(), AnyError> {
fs::create_dir_all(&self.dir)?;
let script_coverages = self.take_precise_coverage().await?.result;
@ -90,8 +87,7 @@ impl crate::worker::CoverageCollector for CoverageCollector {
let filepath = self.dir.join(filename);
let mut out = BufWriter::new(File::create(&filepath)?);
let coverage = serde_json::to_string(&script_coverage)
.map_err(JsErrorBox::from_err)?;
let coverage = serde_json::to_string(&script_coverage)?;
let formatted_coverage =
format_json(&filepath, &coverage, &Default::default())
.ok()
@ -114,7 +110,7 @@ impl CoverageCollector {
Self { dir, session }
}
async fn enable_debugger(&mut self) -> Result<(), CoreError> {
async fn enable_debugger(&mut self) -> Result<(), AnyError> {
self
.session
.post_message::<()>("Debugger.enable", None)
@ -122,7 +118,7 @@ impl CoverageCollector {
Ok(())
}
async fn enable_profiler(&mut self) -> Result<(), CoreError> {
async fn enable_profiler(&mut self) -> Result<(), AnyError> {
self
.session
.post_message::<()>("Profiler.enable", None)
@ -130,7 +126,7 @@ impl CoverageCollector {
Ok(())
}
async fn disable_debugger(&mut self) -> Result<(), CoreError> {
async fn disable_debugger(&mut self) -> Result<(), AnyError> {
self
.session
.post_message::<()>("Debugger.disable", None)
@ -138,7 +134,7 @@ impl CoverageCollector {
Ok(())
}
async fn disable_profiler(&mut self) -> Result<(), CoreError> {
async fn disable_profiler(&mut self) -> Result<(), AnyError> {
self
.session
.post_message::<()>("Profiler.disable", None)
@ -149,28 +145,26 @@ impl CoverageCollector {
async fn start_precise_coverage(
&mut self,
parameters: cdp::StartPreciseCoverageArgs,
) -> Result<cdp::StartPreciseCoverageResponse, CoreError> {
) -> Result<cdp::StartPreciseCoverageResponse, AnyError> {
let return_value = self
.session
.post_message("Profiler.startPreciseCoverage", Some(parameters))
.await?;
let return_object =
serde_json::from_value(return_value).map_err(JsErrorBox::from_err)?;
let return_object = serde_json::from_value(return_value)?;
Ok(return_object)
}
async fn take_precise_coverage(
&mut self,
) -> Result<cdp::TakePreciseCoverageResponse, CoreError> {
) -> Result<cdp::TakePreciseCoverageResponse, AnyError> {
let return_value = self
.session
.post_message::<()>("Profiler.takePreciseCoverage", None)
.await?;
let return_object =
serde_json::from_value(return_value).map_err(JsErrorBox::from_err)?;
let return_object = serde_json::from_value(return_value)?;
Ok(return_object)
}
@ -470,7 +464,7 @@ fn filter_coverages(
coverages: Vec<cdp::ScriptCoverage>,
include: Vec<String>,
exclude: Vec<String>,
in_npm_pkg_checker: &DenoInNpmPackageChecker,
in_npm_pkg_checker: &dyn InNpmPackageChecker,
) -> Vec<cdp::ScriptCoverage> {
let include: Vec<Regex> =
include.iter().map(|e| Regex::new(e).unwrap()).collect();
@ -538,7 +532,7 @@ pub fn cover_files(
script_coverages,
coverage_flags.include,
coverage_flags.exclude,
in_npm_pkg_checker,
in_npm_pkg_checker.as_ref(),
);
if script_coverages.is_empty() {
return Err(anyhow!("No covered files included in the report"));

View file

@ -11,7 +11,6 @@ use std::path::PathBuf;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_lib::version::DENO_VERSION_INFO;
use super::util;
use super::CoverageReport;
@ -560,7 +559,7 @@ impl HtmlCoverageReporter {
/// Creates footer part of the contents for html report.
pub fn create_html_footer(&self, now: &str) -> String {
let version = DENO_VERSION_INFO.deno;
let version = env!("CARGO_PKG_VERSION");
format!(
"
<div class='footer quiet pad2 space-top1 center small'>

View file

@ -20,7 +20,6 @@ use deno_graph::EsParser;
use deno_graph::GraphKind;
use deno_graph::ModuleAnalyzer;
use deno_graph::ModuleSpecifier;
use deno_lib::version::DENO_VERSION_INFO;
use doc::html::ShortPath;
use doc::DocDiagnostic;
use indexmap::IndexMap;
@ -281,7 +280,7 @@ impl deno_doc::html::HrefResolver for DocResolver {
if self.deno_ns.contains_key(symbol) {
Some(format!(
"https://deno.land/api@v{}?s={}",
DENO_VERSION_INFO.deno,
env!("CARGO_PKG_VERSION"),
symbol.join(".")
))
} else {

View file

@ -18,7 +18,6 @@ use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::Resolution;
use deno_lib::util::checksum;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;
@ -33,7 +32,9 @@ use crate::args::InfoFlags;
use crate::display;
use crate::factory::CliFactory;
use crate::graph_util::graph_exit_integrity_errors;
use crate::npm::CliManagedNpmResolver;
use crate::npm::CliNpmResolver;
use crate::npm::ManagedCliNpmResolver;
use crate::util::checksum;
use crate::util::display::DisplayTreeNode;
const JSON_SCHEMA_VERSION: u8 = 1;
@ -137,10 +138,6 @@ pub async fn info(
lockfile.write_if_changed()?;
}
let maybe_npm_info = npm_resolver
.as_managed()
.map(|r| (r, r.resolution().snapshot()));
if info_flags.json {
let mut json_graph = serde_json::json!(graph);
if let Some(output) = json_graph.as_object_mut() {
@ -151,19 +148,11 @@ pub async fn info(
);
}
add_npm_packages_to_json(
&mut json_graph,
maybe_npm_info.as_ref().map(|(_, s)| s),
npmrc,
);
add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref(), npmrc);
display::write_json_to_stdout(&json_graph)?;
} else {
let mut output = String::new();
GraphDisplayContext::write(
&graph,
maybe_npm_info.as_ref().map(|(r, s)| (*r, s)),
&mut output,
)?;
GraphDisplayContext::write(&graph, npm_resolver.as_ref(), &mut output)?;
display::write_to_stdout_ignore_sigpipe(output.as_bytes())?;
}
} else {
@ -191,7 +180,7 @@ fn print_cache_info(
let registry_cache = dir.registries_folder_path();
let mut origin_dir = dir.origin_data_folder_path();
let deno_dir = dir.root_path_for_display().to_string();
let web_cache_dir = deno_lib::worker::get_cache_storage_dir();
let web_cache_dir = crate::worker::get_cache_storage_dir();
if let Some(location) = &location {
origin_dir =
@ -262,14 +251,15 @@ fn print_cache_info(
fn add_npm_packages_to_json(
json: &mut serde_json::Value,
npm_snapshot: Option<&NpmResolutionSnapshot>,
npm_resolver: &dyn CliNpmResolver,
npmrc: &ResolvedNpmRc,
) {
let Some(npm_snapshot) = npm_snapshot else {
let Some(npm_resolver) = npm_resolver.as_managed() else {
return; // does not include byonm to deno info's output
};
// ideally deno_graph could handle this, but for now we just modify the json here
let snapshot = npm_resolver.snapshot();
let json = json.as_object_mut().unwrap();
let modules = json.get_mut("modules").and_then(|m| m.as_array_mut());
if let Some(modules) = modules {
@ -283,7 +273,7 @@ fn add_npm_packages_to_json(
.and_then(|k| k.as_str())
.and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok())
.and_then(|package_ref| {
npm_snapshot
snapshot
.resolve_package_from_deno_module(package_ref.nv())
.ok()
});
@ -305,8 +295,7 @@ fn add_npm_packages_to_json(
if let Some(specifier) = dep.get("specifier").and_then(|s| s.as_str())
{
if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) {
if let Ok(pkg) =
npm_snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
if let Ok(pkg) = snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
{
dep.insert(
"npmPackage".to_string(),
@ -332,9 +321,8 @@ fn add_npm_packages_to_json(
}
}
let mut sorted_packages = npm_snapshot
.all_packages_for_every_system()
.collect::<Vec<_>>();
let mut sorted_packages =
snapshot.all_packages_for_every_system().collect::<Vec<_>>();
sorted_packages.sort_by(|a, b| a.id.cmp(&b.id));
let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
for pkg in sorted_packages {
@ -368,7 +356,7 @@ struct NpmInfo {
impl NpmInfo {
pub fn build<'a>(
graph: &'a ModuleGraph,
npm_resolver: &'a CliManagedNpmResolver,
npm_resolver: &'a ManagedCliNpmResolver,
npm_snapshot: &'a NpmResolutionSnapshot,
) -> Self {
let mut info = NpmInfo::default();
@ -394,7 +382,7 @@ impl NpmInfo {
fn fill_package_info<'a>(
&mut self,
package: &NpmResolutionPackage,
npm_resolver: &'a CliManagedNpmResolver,
npm_resolver: &'a ManagedCliNpmResolver,
npm_snapshot: &'a NpmResolutionSnapshot,
) {
self.packages.insert(package.id.clone(), package.clone());
@ -431,15 +419,13 @@ struct GraphDisplayContext<'a> {
impl<'a> GraphDisplayContext<'a> {
pub fn write<TWrite: Write>(
graph: &'a ModuleGraph,
managed_npm_info: Option<(
&'a CliManagedNpmResolver,
&'a NpmResolutionSnapshot,
)>,
npm_resolver: &'a dyn CliNpmResolver,
writer: &mut TWrite,
) -> Result<(), AnyError> {
let npm_info = match managed_npm_info {
Some((npm_resolver, npm_snapshot)) => {
NpmInfo::build(graph, npm_resolver, npm_snapshot)
let npm_info = match npm_resolver.as_managed() {
Some(npm_resolver) => {
let npm_snapshot = npm_resolver.snapshot();
NpmInfo::build(graph, npm_resolver, &npm_snapshot)
}
None => NpmInfo::default(),
};

View file

@ -18,7 +18,6 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_lib::args::CaData;
use deno_semver::npm::NpmPackageReqReference;
use log::Level;
use once_cell::sync::Lazy;
@ -27,6 +26,7 @@ use regex::RegexBuilder;
use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;
@ -657,7 +657,6 @@ fn is_in_path(dir: &Path) -> bool {
mod tests {
use std::process::Command;
use deno_lib::args::UnstableConfig;
use test_util::testdata_path;
use test_util::TempDir;
@ -665,6 +664,7 @@ mod tests {
use crate::args::ConfigFlag;
use crate::args::PermissionFlags;
use crate::args::UninstallFlagsGlobal;
use crate::args::UnstableConfig;
use crate::util::fs::canonicalize_path;
#[tokio::test]

View file

@ -18,7 +18,6 @@ use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::CancelFuture;
use deno_core::CancelHandle;
use deno_lib::version::DENO_VERSION_INFO;
use jupyter_runtime::messaging;
use jupyter_runtime::ConnectionInfo;
use jupyter_runtime::ExecutionCount;
@ -680,10 +679,10 @@ fn kernel_info() -> messaging::KernelInfoReply {
status: ReplyStatus::Ok,
protocol_version: "5.3".to_string(),
implementation: "Deno kernel".to_string(),
implementation_version: DENO_VERSION_INFO.deno.to_string(),
implementation_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
language_info: messaging::LanguageInfo {
name: "typescript".to_string(),
version: DENO_VERSION_INFO.typescript.to_string(),
version: crate::version::DENO_VERSION_INFO.typescript.to_string(),
mimetype: "text/x.typescript".to_string(),
file_extension: ".ts".to_string(),
pygments_lexer: "typescript".to_string(),

View file

@ -451,7 +451,7 @@ pub struct DepManager {
// TODO(nathanwhit): probably shouldn't be pub
pub(crate) jsr_fetch_resolver: Arc<JsrFetchResolver>,
pub(crate) npm_fetch_resolver: Arc<NpmFetchResolver>,
npm_resolver: CliNpmResolver,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_installer: Arc<NpmInstaller>,
permissions_container: PermissionsContainer,
main_module_graph_container: Arc<MainModuleGraphContainer>,
@ -463,7 +463,7 @@ pub struct DepManagerArgs {
pub jsr_fetch_resolver: Arc<JsrFetchResolver>,
pub npm_fetch_resolver: Arc<NpmFetchResolver>,
pub npm_installer: Arc<NpmInstaller>,
pub npm_resolver: CliNpmResolver,
pub npm_resolver: Arc<dyn CliNpmResolver>,
pub permissions_container: PermissionsContainer,
pub main_module_graph_container: Arc<MainModuleGraphContainer>,
pub lockfile: Option<Arc<CliLockfile>>,
@ -551,10 +551,9 @@ impl DepManager {
let npm_resolver = self.npm_resolver.as_managed().unwrap();
if self.deps.iter().all(|dep| match dep.kind {
DepKind::Npm => npm_resolver
.resolution()
.resolve_pkg_id_from_pkg_req(&dep.req)
.is_ok(),
DepKind::Npm => {
npm_resolver.resolve_pkg_id_from_pkg_req(&dep.req).is_ok()
}
DepKind::Jsr => graph.packages.mappings().contains_key(&dep.req),
}) {
self.dependencies_resolved.raise();
@ -631,12 +630,7 @@ impl DepManager {
let graph = self.main_module_graph_container.graph();
let mut resolved = Vec::with_capacity(self.deps.len());
let snapshot = self
.npm_resolver
.as_managed()
.unwrap()
.resolution()
.snapshot();
let snapshot = self.npm_resolver.as_managed().unwrap().snapshot();
let resolved_npm = snapshot.package_reqs();
let resolved_jsr = graph.packages.mappings();
for dep in &self.deps {
@ -683,21 +677,10 @@ impl DepManager {
.and_then(|info| {
let latest_tag = info.dist_tags.get("latest")?;
let lower_bound = &semver_compatible.as_ref()?.version;
if latest_tag >= lower_bound {
if latest_tag > lower_bound {
Some(latest_tag.clone())
} else {
latest_version(
Some(latest_tag),
info.versions.iter().filter_map(
|(version, version_info)| {
if version_info.deprecated.is_none() {
Some(version)
} else {
None
}
},
),
)
latest_version(Some(latest_tag), info.versions.keys())
}
})
.map(|version| PackageNv {

View file

@ -280,15 +280,9 @@ fn choose_new_version_req(
if preferred.version <= resolved?.version {
return None;
}
let exact = if let Some(range) = dep.req.version_req.range() {
range.0[0].start == range.0[0].end
} else {
false
};
Some(
VersionReq::parse_from_specifier(
format!("{}{}", if exact { "" } else { "^" }, preferred.version)
.as_str(),
format!("^{}", preferred.version).as_str(),
)
.unwrap(),
)

View file

@ -8,7 +8,6 @@ use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::unsync::spawn_blocking;
use deno_lib::version::DENO_VERSION_INFO;
use deno_runtime::WorkerExecutionMode;
use rustyline::error::ReadlineError;
@ -245,7 +244,7 @@ pub async fn run(
if !cli_options.is_quiet() {
let mut handle = io::stdout().lock();
writeln!(handle, "Deno {}", DENO_VERSION_INFO.deno)?;
writeln!(handle, "Deno {}", crate::version::DENO_VERSION_INFO.deno)?;
writeln!(handle, "exit using ctrl+d, ctrl+c, or close()")?;
if repl_flags.is_default_command {

View file

@ -32,7 +32,6 @@ use deno_error::JsErrorBox;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeResolutionKind;
@ -403,16 +402,18 @@ impl ReplSession {
}
Err(err) => {
// handle a parsing diagnostic
match any_and_jserrorbox_downcast_ref::<deno_ast::ParseDiagnostic>(
&err,
) {
match crate::util::result::any_and_jserrorbox_downcast_ref::<
deno_ast::ParseDiagnostic,
>(&err)
{
Some(diagnostic) => {
Ok(EvaluationOutput::Error(format_diagnostic(diagnostic)))
}
None => {
match any_and_jserrorbox_downcast_ref::<ParseDiagnosticsError>(
&err,
) {
match crate::util::result::any_and_jserrorbox_downcast_ref::<
ParseDiagnosticsError,
>(&err)
{
Some(diagnostics) => Ok(EvaluationOutput::Error(
diagnostics
.0

View file

@ -43,8 +43,7 @@ pub async fn serve(
maybe_npm_install(&factory).await?;
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
let worker_factory = factory.create_cli_main_worker_factory().await?;
let hmr = serve_flags
.watch
.map(|watch_flags| watch_flags.hmr)
@ -59,7 +58,7 @@ pub async fn serve(
}
async fn do_serve(
worker_factory: Arc<CliMainWorkerFactory>,
worker_factory: CliMainWorkerFactory,
main_module: ModuleSpecifier,
worker_count: Option<usize>,
hmr: bool,
@ -117,7 +116,7 @@ async fn do_serve(
async fn run_worker(
worker_count: usize,
worker_factory: Arc<CliMainWorkerFactory>,
worker_factory: CliMainWorkerFactory,
main_module: ModuleSpecifier,
hmr: bool,
) -> Result<i32, AnyError> {
@ -165,8 +164,7 @@ async fn serve_with_watch(
maybe_npm_install(&factory).await?;
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
let worker_factory = factory.create_cli_main_worker_factory().await?;
do_serve(worker_factory, main_module.clone(), worker_count, hmr)
.await?;

View file

@ -220,7 +220,7 @@ pub async fn execute_script(
let task_runner = TaskRunner {
task_flags: &task_flags,
npm_installer: npm_installer.map(|n| n.as_ref()),
npm_resolver,
npm_resolver: npm_resolver.as_ref(),
node_resolver: node_resolver.as_ref(),
env_vars,
cli_options,
@ -271,7 +271,7 @@ struct RunSingleOptions<'a> {
struct TaskRunner<'a> {
task_flags: &'a TaskFlags,
npm_installer: Option<&'a NpmInstaller>,
npm_resolver: &'a CliNpmResolver,
npm_resolver: &'a dyn CliNpmResolver,
node_resolver: &'a CliNodeResolver,
env_vars: HashMap<String, String>,
cli_options: &'a CliOptions,

Some files were not shown because too many files have changed in this diff Show more