mirror of
https://github.com/denoland/deno.git
synced 2025-01-21 04:52:26 -05:00
Compare commits
24 commits
2b1fb7dc86
...
ad87d6f4b0
Author | SHA1 | Date | |
---|---|---|---|
|
ad87d6f4b0 | ||
|
4f27d7cdc0 | ||
|
e4a16e91fa | ||
|
9aa02769c8 | ||
|
b962b87cfe | ||
|
57dd66ec3d | ||
|
054075730c | ||
|
b55451b178 | ||
|
342ccbb99d | ||
|
0050857f51 | ||
|
339bc44c58 | ||
|
94dc5b16f5 | ||
|
a5ba198b9a | ||
|
256950ddb6 | ||
|
464ee9155e | ||
|
2debe9c8dd | ||
|
17d6e66ee3 | ||
|
8d2f76ae36 | ||
|
e54d467812 | ||
|
e49d6f2d45 | ||
|
32708213d5 | ||
|
a02ee7adf9 | ||
|
05dc69932d | ||
|
836a623d99 |
236 changed files with 11961 additions and 7628 deletions
16
.github/workflows/ci.generate.ts
vendored
16
.github/workflows/ci.generate.ts
vendored
|
@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
|
|||
// Bump this number when you want to purge the cache.
|
||||
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
|
||||
// automatically via regex, so ensure that this line maintains this format.
|
||||
const cacheVersion = 33;
|
||||
const cacheVersion = 36;
|
||||
|
||||
const ubuntuX86Runner = "ubuntu-24.04";
|
||||
const ubuntuX86XlRunner = "ubuntu-24.04-xl";
|
||||
|
@ -14,7 +14,7 @@ const windowsX86Runner = "windows-2022";
|
|||
const windowsX86XlRunner = "windows-2022-xl";
|
||||
const macosX86Runner = "macos-13";
|
||||
const macosArmRunner = "macos-14";
|
||||
const selfHostedMacosArmRunner = "self-hosted";
|
||||
const selfHostedMacosArmRunner = "ghcr.io/cirruslabs/macos-runner:sonoma";
|
||||
|
||||
const Runners = {
|
||||
linuxX86: {
|
||||
|
@ -41,8 +41,14 @@ const Runners = {
|
|||
macosArm: {
|
||||
os: "macos",
|
||||
arch: "aarch64",
|
||||
runner: macosArmRunner,
|
||||
},
|
||||
macosArmSelfHosted: {
|
||||
os: "macos",
|
||||
arch: "aarch64",
|
||||
// Actually use self-hosted runner only in denoland/deno on `main` branch and for tags (release) builds.
|
||||
runner:
|
||||
`\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
|
||||
`\${{ github.repository == 'denoland/deno' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
|
||||
},
|
||||
windowsX86: {
|
||||
os: "windows",
|
||||
|
@ -384,7 +390,7 @@ const ci = {
|
|||
job: "test",
|
||||
profile: "debug",
|
||||
}, {
|
||||
...Runners.macosArm,
|
||||
...Runners.macosArmSelfHosted,
|
||||
job: "test",
|
||||
profile: "release",
|
||||
skip_pr: true,
|
||||
|
@ -486,7 +492,7 @@ const ci = {
|
|||
},
|
||||
{
|
||||
name: "Cache Cargo home",
|
||||
uses: "actions/cache@v4",
|
||||
uses: "cirruslabs/cache@v4",
|
||||
with: {
|
||||
// See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
|
||||
// Note that with the new sparse registry format, we no longer have to cache a `.git` dir
|
||||
|
|
14
.github/workflows/ci.yml
vendored
14
.github/workflows/ci.yml
vendored
|
@ -68,12 +68,12 @@ jobs:
|
|||
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
|
||||
- os: macos
|
||||
arch: aarch64
|
||||
runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
|
||||
runner: macos-14
|
||||
job: test
|
||||
profile: debug
|
||||
- os: macos
|
||||
arch: aarch64
|
||||
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
|
||||
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && (github.ref == ''refs/heads/main'' || startsWith(github.ref, ''refs/tags/'')) && ''ghcr.io/cirruslabs/macos-runner:sonoma'' || ''macos-14'' }}'
|
||||
job: test
|
||||
profile: release
|
||||
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
|
||||
|
@ -175,7 +175,7 @@ jobs:
|
|||
tar --exclude=".git*" --exclude=target --exclude=third_party/prebuilt \
|
||||
-czvf target/release/deno_src.tar.gz -C .. deno
|
||||
- name: Cache Cargo home
|
||||
uses: actions/cache@v4
|
||||
uses: cirruslabs/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cargo/.crates.toml
|
||||
|
@ -184,8 +184,8 @@ jobs:
|
|||
~/.cargo/registry/index
|
||||
~/.cargo/registry/cache
|
||||
~/.cargo/git/db
|
||||
key: '33-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
|
||||
restore-keys: '33-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
|
||||
key: '36-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
|
||||
restore-keys: '36-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
|
||||
if: '!(matrix.skip)'
|
||||
- uses: dsherret/rust-toolchain-file@v1
|
||||
if: '!(matrix.skip)'
|
||||
|
@ -379,7 +379,7 @@ jobs:
|
|||
!./target/*/*.zip
|
||||
!./target/*/*.tar.gz
|
||||
key: never_saved
|
||||
restore-keys: '33-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
|
||||
restore-keys: '36-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
|
||||
- name: Apply and update mtime cache
|
||||
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
|
||||
uses: ./.github/mtime_cache
|
||||
|
@ -689,7 +689,7 @@ jobs:
|
|||
!./target/*/gn_root
|
||||
!./target/*/*.zip
|
||||
!./target/*/*.tar.gz
|
||||
key: '33-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
key: '36-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
publish-canary:
|
||||
name: publish canary
|
||||
runs-on: ubuntu-24.04
|
||||
|
|
254
Cargo.lock
generated
254
Cargo.lock
generated
|
@ -838,18 +838,16 @@ checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
|
|||
name = "cli_tests"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bytes",
|
||||
"chrono",
|
||||
"deno_ast",
|
||||
"deno_bench_util",
|
||||
"deno_cache_dir",
|
||||
"deno_core",
|
||||
"deno_fetch",
|
||||
"deno_lockfile",
|
||||
"deno_semver",
|
||||
"deno_terminal 0.2.0",
|
||||
"deno_tls",
|
||||
"deno_tower_lsp",
|
||||
"deno_unsync",
|
||||
"fastwebsockets",
|
||||
"file_test_runner",
|
||||
"flaky_test",
|
||||
|
@ -866,7 +864,11 @@ dependencies = [
|
|||
"pretty_assertions",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"rustls",
|
||||
"rustls-pemfile",
|
||||
"rustls-tokio-stream",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sys_traits",
|
||||
"test_server",
|
||||
"tokio",
|
||||
|
@ -1244,7 +1246,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno"
|
||||
version = "2.1.5"
|
||||
version = "2.1.6"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"async-trait",
|
||||
|
@ -1270,6 +1272,7 @@ dependencies = [
|
|||
"deno_doc",
|
||||
"deno_error",
|
||||
"deno_graph",
|
||||
"deno_lib",
|
||||
"deno_lint",
|
||||
"deno_lockfile",
|
||||
"deno_npm",
|
||||
|
@ -1279,6 +1282,7 @@ dependencies = [
|
|||
"deno_resolver",
|
||||
"deno_runtime",
|
||||
"deno_semver",
|
||||
"deno_snapshots",
|
||||
"deno_task_shell",
|
||||
"deno_telemetry",
|
||||
"deno_terminal 0.2.0",
|
||||
|
@ -1290,7 +1294,6 @@ dependencies = [
|
|||
"dprint-plugin-jupyter",
|
||||
"dprint-plugin-markdown",
|
||||
"dprint-plugin-typescript",
|
||||
"env_logger",
|
||||
"fancy-regex",
|
||||
"faster-hex",
|
||||
"flate2",
|
||||
|
@ -1421,7 +1424,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_bench_util"
|
||||
version = "0.179.0"
|
||||
version = "0.180.0"
|
||||
dependencies = [
|
||||
"bencher",
|
||||
"deno_core",
|
||||
|
@ -1430,7 +1433,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_broadcast_channel"
|
||||
version = "0.179.0"
|
||||
version = "0.180.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1442,7 +1445,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_cache"
|
||||
version = "0.117.0"
|
||||
version = "0.118.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1485,7 +1488,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_canvas"
|
||||
version = "0.54.0"
|
||||
version = "0.55.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -1524,16 +1527,16 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_console"
|
||||
version = "0.185.0"
|
||||
version = "0.186.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_core"
|
||||
version = "0.330.0"
|
||||
version = "0.331.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fd38bbbd68ed873165ccb630322704b44140d3a8c8d50f898beac4d1a8a3358c"
|
||||
checksum = "ce2d1779358cad2bc56d71176298767be628d707bb75585f6f8a4be2da8ccda1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"az",
|
||||
|
@ -1575,7 +1578,7 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695"
|
|||
|
||||
[[package]]
|
||||
name = "deno_cron"
|
||||
version = "0.65.0"
|
||||
version = "0.66.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -1589,7 +1592,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_crypto"
|
||||
version = "0.199.0"
|
||||
version = "0.200.0"
|
||||
dependencies = [
|
||||
"aes",
|
||||
"aes-gcm",
|
||||
|
@ -1657,9 +1660,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_error"
|
||||
version = "0.5.3"
|
||||
version = "0.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c4da6a58de6932a96f84e133c072fd3b525966ee122a71f3efd48bbff2eed5ac"
|
||||
checksum = "9c23dbc46d5804814b08b4675838f9884e3a52916987ec5105af36d42f9911b5"
|
||||
dependencies = [
|
||||
"deno_error_macro",
|
||||
"libc",
|
||||
|
@ -1671,9 +1674,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_error_macro"
|
||||
version = "0.5.3"
|
||||
version = "0.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46351dff93aed2039407c91e2ded2a5591e42d2795ab3d111288625bb710d3d2"
|
||||
checksum = "babccedee31ce7e57c3e6dff2cb3ab8d68c49d0df8222fe0d11d628e65192790"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -1682,7 +1685,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_fetch"
|
||||
version = "0.209.0"
|
||||
version = "0.210.0"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"bytes",
|
||||
|
@ -1719,7 +1722,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_ffi"
|
||||
version = "0.172.0"
|
||||
version = "0.173.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -1740,7 +1743,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_fs"
|
||||
version = "0.95.0"
|
||||
version = "0.96.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base32",
|
||||
|
@ -1798,7 +1801,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_http"
|
||||
version = "0.183.0"
|
||||
version = "0.184.0"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"async-trait",
|
||||
|
@ -1838,7 +1841,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_io"
|
||||
version = "0.95.0"
|
||||
version = "0.96.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1860,7 +1863,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_kv"
|
||||
version = "0.93.0"
|
||||
version = "0.94.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -1891,6 +1894,40 @@ dependencies = [
|
|||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_lib"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"capacity_builder 0.5.0",
|
||||
"deno_config",
|
||||
"deno_error",
|
||||
"deno_fs",
|
||||
"deno_media_type",
|
||||
"deno_node",
|
||||
"deno_npm",
|
||||
"deno_path_util",
|
||||
"deno_resolver",
|
||||
"deno_runtime",
|
||||
"deno_semver",
|
||||
"deno_terminal 0.2.0",
|
||||
"env_logger",
|
||||
"faster-hex",
|
||||
"indexmap 2.3.0",
|
||||
"libsui",
|
||||
"log",
|
||||
"node_resolver",
|
||||
"parking_lot",
|
||||
"ring",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sys_traits",
|
||||
"test_server",
|
||||
"thiserror 2.0.3",
|
||||
"tokio",
|
||||
"twox-hash",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_lint"
|
||||
version = "0.68.2"
|
||||
|
@ -1923,18 +1960,19 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_media_type"
|
||||
version = "0.2.3"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a417f8bd3f1074185c4c8ccb6ea6261ae173781596cc358e68ad07aaac11009d"
|
||||
checksum = "577fe2bbe04f3e9b1b7c6fac6a75101a9fbd611c50a6b68789e69f4d63dcb2b4"
|
||||
dependencies = [
|
||||
"data-url",
|
||||
"encoding_rs",
|
||||
"serde",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_napi"
|
||||
version = "0.116.0"
|
||||
version = "0.117.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -1963,7 +2001,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_net"
|
||||
version = "0.177.0"
|
||||
version = "0.178.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -1975,14 +2013,17 @@ dependencies = [
|
|||
"quinn",
|
||||
"rustls-tokio-stream",
|
||||
"serde",
|
||||
"sha2",
|
||||
"socket2",
|
||||
"thiserror 2.0.3",
|
||||
"tokio",
|
||||
"url",
|
||||
"web-transport-proto",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_node"
|
||||
version = "0.123.0"
|
||||
version = "0.124.0"
|
||||
dependencies = [
|
||||
"aead-gcm-stream",
|
||||
"aes",
|
||||
|
@ -2000,11 +2041,11 @@ dependencies = [
|
|||
"deno_fetch",
|
||||
"deno_fs",
|
||||
"deno_io",
|
||||
"deno_media_type",
|
||||
"deno_net",
|
||||
"deno_package_json",
|
||||
"deno_path_util",
|
||||
"deno_permissions",
|
||||
"deno_process",
|
||||
"deno_whoami",
|
||||
"der",
|
||||
"digest",
|
||||
|
@ -2013,7 +2054,7 @@ dependencies = [
|
|||
"ecdsa",
|
||||
"ed25519-dalek",
|
||||
"elliptic-curve",
|
||||
"errno 0.2.8",
|
||||
"errno",
|
||||
"faster-hex",
|
||||
"h2 0.4.4",
|
||||
"hkdf",
|
||||
|
@ -2042,7 +2083,6 @@ dependencies = [
|
|||
"p384",
|
||||
"path-clean",
|
||||
"pbkdf2",
|
||||
"pin-project-lite",
|
||||
"pkcs8",
|
||||
"rand",
|
||||
"regex",
|
||||
|
@ -2095,7 +2135,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_npm_cache"
|
||||
version = "0.4.0"
|
||||
version = "0.5.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base64 0.21.7",
|
||||
|
@ -2125,9 +2165,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_ops"
|
||||
version = "0.206.0"
|
||||
version = "0.207.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c25ffa9d088ea00748dbef870bba110ac22ebf8cf7b2e9eb288409c5d852af3"
|
||||
checksum = "96f000a21f6969b4c945bc8e9e785aa439f11ca4fd3fbddcd5bebc102167eb37"
|
||||
dependencies = [
|
||||
"indexmap 2.3.0",
|
||||
"proc-macro-rules",
|
||||
|
@ -2142,7 +2182,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_os"
|
||||
version = "0.1.0"
|
||||
version = "0.3.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -2194,7 +2234,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_permissions"
|
||||
version = "0.44.0"
|
||||
version = "0.45.0"
|
||||
dependencies = [
|
||||
"capacity_builder 0.5.0",
|
||||
"deno_core",
|
||||
|
@ -2212,9 +2252,36 @@ dependencies = [
|
|||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_process"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
"deno_fs",
|
||||
"deno_io",
|
||||
"deno_os",
|
||||
"deno_path_util",
|
||||
"deno_permissions",
|
||||
"libc",
|
||||
"log",
|
||||
"memchr",
|
||||
"nix",
|
||||
"pin-project-lite",
|
||||
"rand",
|
||||
"serde",
|
||||
"simd-json",
|
||||
"tempfile",
|
||||
"thiserror 2.0.3",
|
||||
"tokio",
|
||||
"which",
|
||||
"winapi",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_resolver"
|
||||
version = "0.16.0"
|
||||
version = "0.17.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -2240,7 +2307,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_runtime"
|
||||
version = "0.193.0"
|
||||
version = "0.194.0"
|
||||
dependencies = [
|
||||
"color-print",
|
||||
"deno_ast",
|
||||
|
@ -2264,6 +2331,7 @@ dependencies = [
|
|||
"deno_os",
|
||||
"deno_path_util",
|
||||
"deno_permissions",
|
||||
"deno_process",
|
||||
"deno_resolver",
|
||||
"deno_telemetry",
|
||||
"deno_terminal 0.2.0",
|
||||
|
@ -2277,7 +2345,6 @@ dependencies = [
|
|||
"dlopen2",
|
||||
"encoding_rs",
|
||||
"fastwebsockets",
|
||||
"flate2",
|
||||
"http 1.1.0",
|
||||
"http-body-util",
|
||||
"hyper 0.14.28",
|
||||
|
@ -2325,6 +2392,13 @@ dependencies = [
|
|||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_snapshots"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"deno_runtime",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_task_shell"
|
||||
version = "0.20.2"
|
||||
|
@ -2345,7 +2419,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_telemetry"
|
||||
version = "0.7.0"
|
||||
version = "0.8.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -2388,7 +2462,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_tls"
|
||||
version = "0.172.0"
|
||||
version = "0.173.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -2439,7 +2513,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_url"
|
||||
version = "0.185.0"
|
||||
version = "0.186.0"
|
||||
dependencies = [
|
||||
"deno_bench_util",
|
||||
"deno_console",
|
||||
|
@ -2452,7 +2526,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_web"
|
||||
version = "0.216.0"
|
||||
version = "0.217.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base64-simd 0.8.0",
|
||||
|
@ -2475,7 +2549,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webgpu"
|
||||
version = "0.152.0"
|
||||
version = "0.153.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -2489,7 +2563,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webidl"
|
||||
version = "0.185.0"
|
||||
version = "0.186.0"
|
||||
dependencies = [
|
||||
"deno_bench_util",
|
||||
"deno_core",
|
||||
|
@ -2497,7 +2571,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_websocket"
|
||||
version = "0.190.0"
|
||||
version = "0.191.0"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"deno_core",
|
||||
|
@ -2520,7 +2594,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webstorage"
|
||||
version = "0.180.0"
|
||||
version = "0.181.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
|
@ -2606,6 +2680,43 @@ dependencies = [
|
|||
"v8_valueserializer",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "denort"
|
||||
version = "2.1.5"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bincode",
|
||||
"deno_cache_dir",
|
||||
"deno_config",
|
||||
"deno_core",
|
||||
"deno_error",
|
||||
"deno_lib",
|
||||
"deno_media_type",
|
||||
"deno_npm",
|
||||
"deno_package_json",
|
||||
"deno_path_util",
|
||||
"deno_resolver",
|
||||
"deno_runtime",
|
||||
"deno_semver",
|
||||
"deno_snapshots",
|
||||
"deno_terminal 0.2.0",
|
||||
"import_map",
|
||||
"indexmap 2.3.0",
|
||||
"libsui",
|
||||
"log",
|
||||
"node_resolver",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sys_traits",
|
||||
"test_server",
|
||||
"thiserror 2.0.3",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"twox-hash",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "der"
|
||||
version = "0.7.9"
|
||||
|
@ -3135,17 +3246,6 @@ version = "1.0.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
|
||||
|
||||
[[package]]
|
||||
name = "errno"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
|
||||
dependencies = [
|
||||
"errno-dragonfly",
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "errno"
|
||||
version = "0.3.8"
|
||||
|
@ -3156,16 +3256,6 @@ dependencies = [
|
|||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "errno-dragonfly"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "error-code"
|
||||
version = "3.2.0"
|
||||
|
@ -5092,7 +5182,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "napi_sym"
|
||||
version = "0.115.0"
|
||||
version = "0.116.0"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"serde",
|
||||
|
@ -5147,13 +5237,12 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "node_resolver"
|
||||
version = "0.23.0"
|
||||
version = "0.24.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"boxed_error",
|
||||
"deno_error",
|
||||
"deno_media_type",
|
||||
"deno_package_json",
|
||||
"deno_path_util",
|
||||
"futures",
|
||||
|
@ -5161,6 +5250,7 @@ dependencies = [
|
|||
"once_cell",
|
||||
"path-clean",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sys_traits",
|
||||
"thiserror 2.0.3",
|
||||
|
@ -6558,7 +6648,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"errno 0.3.8",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.52.0",
|
||||
|
@ -6893,9 +6983,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_v8"
|
||||
version = "0.239.0"
|
||||
version = "0.240.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3caa6d882827148e5d9052d9d8d6d1c9d6ad426ed00cab46cafb8c07a0e7126a"
|
||||
checksum = "cd0494d74c40ab94f53a19485de359ea6a55f05341b817b93440b673c1ce8ec6"
|
||||
dependencies = [
|
||||
"deno_error",
|
||||
"num-bigint",
|
||||
|
@ -8782,6 +8872,18 @@ dependencies = [
|
|||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "web-transport-proto"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a3806ea43df5817f0d90618c842d28db5946bc18a5db0659b2275c2be48d472"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"http 1.1.0",
|
||||
"thiserror 1.0.64",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-root-certs"
|
||||
version = "0.26.6"
|
||||
|
|
77
Cargo.toml
77
Cargo.toml
|
@ -5,6 +5,9 @@ resolver = "2"
|
|||
members = [
|
||||
"bench_util",
|
||||
"cli",
|
||||
"cli/lib",
|
||||
"cli/rt",
|
||||
"cli/snapshot",
|
||||
"ext/broadcast_channel",
|
||||
"ext/cache",
|
||||
"ext/canvas",
|
||||
|
@ -48,19 +51,19 @@ repository = "https://github.com/denoland/deno"
|
|||
|
||||
[workspace.dependencies]
|
||||
deno_ast = { version = "=0.44.0", features = ["transpiling"] }
|
||||
deno_core = { version = "0.330.0" }
|
||||
deno_core = { version = "0.331.0" }
|
||||
|
||||
deno_bench_util = { version = "0.179.0", path = "./bench_util" }
|
||||
deno_bench_util = { version = "0.180.0", path = "./bench_util" }
|
||||
deno_config = { version = "=0.45.0", features = ["workspace", "sync"] }
|
||||
deno_lockfile = "=0.24.0"
|
||||
deno_media_type = { version = "0.2.3", features = ["module_specifier"] }
|
||||
deno_media_type = { version = "0.2.4", features = ["module_specifier"] }
|
||||
deno_npm = "=0.27.2"
|
||||
deno_path_util = "=0.3.0"
|
||||
deno_permissions = { version = "0.44.0", path = "./runtime/permissions" }
|
||||
deno_runtime = { version = "0.193.0", path = "./runtime" }
|
||||
deno_permissions = { version = "0.45.0", path = "./runtime/permissions" }
|
||||
deno_runtime = { version = "0.194.0", path = "./runtime" }
|
||||
deno_semver = "=0.7.1"
|
||||
deno_terminal = "0.2.0"
|
||||
napi_sym = { version = "0.115.0", path = "./ext/napi/sym" }
|
||||
napi_sym = { version = "0.116.0", path = "./ext/napi/sym" }
|
||||
test_util = { package = "test_server", path = "./tests/util/server" }
|
||||
|
||||
denokv_proto = "0.9.0"
|
||||
|
@ -69,35 +72,38 @@ denokv_remote = "0.9.0"
|
|||
denokv_sqlite = { default-features = false, version = "0.9.0" }
|
||||
|
||||
# exts
|
||||
deno_broadcast_channel = { version = "0.179.0", path = "./ext/broadcast_channel" }
|
||||
deno_cache = { version = "0.117.0", path = "./ext/cache" }
|
||||
deno_canvas = { version = "0.54.0", path = "./ext/canvas" }
|
||||
deno_console = { version = "0.185.0", path = "./ext/console" }
|
||||
deno_cron = { version = "0.65.0", path = "./ext/cron" }
|
||||
deno_crypto = { version = "0.199.0", path = "./ext/crypto" }
|
||||
deno_fetch = { version = "0.209.0", path = "./ext/fetch" }
|
||||
deno_ffi = { version = "0.172.0", path = "./ext/ffi" }
|
||||
deno_fs = { version = "0.95.0", path = "./ext/fs" }
|
||||
deno_http = { version = "0.183.0", path = "./ext/http" }
|
||||
deno_io = { version = "0.95.0", path = "./ext/io" }
|
||||
deno_kv = { version = "0.93.0", path = "./ext/kv" }
|
||||
deno_napi = { version = "0.116.0", path = "./ext/napi" }
|
||||
deno_net = { version = "0.177.0", path = "./ext/net" }
|
||||
deno_node = { version = "0.123.0", path = "./ext/node" }
|
||||
deno_os = { version = "0.1.0", path = "./ext/os" }
|
||||
deno_telemetry = { version = "0.7.0", path = "./ext/telemetry" }
|
||||
deno_tls = { version = "0.172.0", path = "./ext/tls" }
|
||||
deno_url = { version = "0.185.0", path = "./ext/url" }
|
||||
deno_web = { version = "0.216.0", path = "./ext/web" }
|
||||
deno_webgpu = { version = "0.152.0", path = "./ext/webgpu" }
|
||||
deno_webidl = { version = "0.185.0", path = "./ext/webidl" }
|
||||
deno_websocket = { version = "0.190.0", path = "./ext/websocket" }
|
||||
deno_webstorage = { version = "0.180.0", path = "./ext/webstorage" }
|
||||
deno_broadcast_channel = { version = "0.180.0", path = "./ext/broadcast_channel" }
|
||||
deno_cache = { version = "0.118.0", path = "./ext/cache" }
|
||||
deno_canvas = { version = "0.55.0", path = "./ext/canvas" }
|
||||
deno_console = { version = "0.186.0", path = "./ext/console" }
|
||||
deno_cron = { version = "0.66.0", path = "./ext/cron" }
|
||||
deno_crypto = { version = "0.200.0", path = "./ext/crypto" }
|
||||
deno_fetch = { version = "0.210.0", path = "./ext/fetch" }
|
||||
deno_ffi = { version = "0.173.0", path = "./ext/ffi" }
|
||||
deno_fs = { version = "0.96.0", path = "./ext/fs" }
|
||||
deno_http = { version = "0.184.0", path = "./ext/http" }
|
||||
deno_io = { version = "0.96.0", path = "./ext/io" }
|
||||
deno_kv = { version = "0.94.0", path = "./ext/kv" }
|
||||
deno_napi = { version = "0.117.0", path = "./ext/napi" }
|
||||
deno_net = { version = "0.178.0", path = "./ext/net" }
|
||||
deno_node = { version = "0.124.0", path = "./ext/node" }
|
||||
deno_os = { version = "0.3.0", path = "./ext/os" }
|
||||
deno_process = { version = "0.1.0", path = "./ext/process" }
|
||||
deno_telemetry = { version = "0.8.0", path = "./ext/telemetry" }
|
||||
deno_tls = { version = "0.173.0", path = "./ext/tls" }
|
||||
deno_url = { version = "0.186.0", path = "./ext/url" }
|
||||
deno_web = { version = "0.217.0", path = "./ext/web" }
|
||||
deno_webgpu = { version = "0.153.0", path = "./ext/webgpu" }
|
||||
deno_webidl = { version = "0.186.0", path = "./ext/webidl" }
|
||||
deno_websocket = { version = "0.191.0", path = "./ext/websocket" }
|
||||
deno_webstorage = { version = "0.181.0", path = "./ext/webstorage" }
|
||||
|
||||
# resolvers
|
||||
deno_npm_cache = { version = "0.4.0", path = "./resolvers/npm_cache" }
|
||||
deno_resolver = { version = "0.16.0", path = "./resolvers/deno" }
|
||||
node_resolver = { version = "0.23.0", path = "./resolvers/node" }
|
||||
# workspace libraries
|
||||
deno_lib = { version = "0.2.0", path = "./cli/lib" }
|
||||
deno_npm_cache = { version = "0.5.0", path = "./resolvers/npm_cache" }
|
||||
deno_resolver = { version = "0.17.0", path = "./resolvers/deno" }
|
||||
deno_snapshots = { version = "0.1.0", path = "./cli/snapshot" }
|
||||
node_resolver = { version = "0.24.0", path = "./resolvers/node" }
|
||||
|
||||
aes = "=0.8.3"
|
||||
anyhow = "1.0.57"
|
||||
|
@ -120,7 +126,7 @@ dashmap = "5.5.3"
|
|||
data-encoding = "2.3.3"
|
||||
data-url = "=0.3.1"
|
||||
deno_cache_dir = "=0.16.0"
|
||||
deno_error = "=0.5.3"
|
||||
deno_error = "=0.5.5"
|
||||
deno_package_json = { version = "0.4.0", default-features = false }
|
||||
deno_unsync = "0.4.2"
|
||||
dlopen2 = "0.6.1"
|
||||
|
@ -151,6 +157,7 @@ ipnet = "2.3"
|
|||
jsonc-parser = { version = "=0.26.2", features = ["serde"] }
|
||||
lazy-regex = "3"
|
||||
libc = "0.2.168"
|
||||
libsui = "0.5.0"
|
||||
libz-sys = { version = "1.1.20", default-features = false }
|
||||
log = { version = "0.4.20", features = ["kv"] }
|
||||
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
|
||||
|
|
|
@ -6,8 +6,8 @@
|
|||
|
||||
<img align="right" src="https://deno.land/logo.svg" height="150px" alt="the deno mascot dinosaur standing in the rain">
|
||||
|
||||
[Deno](https://www.deno.com)
|
||||
([/ˈdiːnoʊ/](http://ipa-reader.xyz/?text=%CB%88di%CB%90no%CA%8A), pronounced
|
||||
[Deno](https://deno.com)
|
||||
([/ˈdiːnoʊ/](https://ipa-reader.com/?text=%CB%88di%CB%90no%CA%8A), pronounced
|
||||
`dee-no`) is a JavaScript, TypeScript, and WebAssembly runtime with secure
|
||||
defaults and a great developer experience. It's built on [V8](https://v8.dev/),
|
||||
[Rust](https://www.rust-lang.org/), and [Tokio](https://tokio.rs/).
|
||||
|
|
26
Releases.md
26
Releases.md
|
@ -6,6 +6,32 @@ https://github.com/denoland/deno/releases
|
|||
We also have one-line install commands at:
|
||||
https://github.com/denoland/deno_install
|
||||
|
||||
### 2.1.6 / 2025.01.16
|
||||
|
||||
- fix(check/lsp): correctly resolve compilerOptions.types (#27686)
|
||||
- fix(check/lsp): fix bugs with tsc type resolution, allow npm packages to
|
||||
augment `ImportMeta` (#27690)
|
||||
- fix(compile): store embedded fs case sensitivity (#27653)
|
||||
- fix(compile/windows): better handling of deno_dir on different drive letter
|
||||
than code (#27654)
|
||||
- fix(ext/console): change Temporal color (#27684)
|
||||
- fix(ext/node): add `writev` method to `FileHandle` (#27563)
|
||||
- fix(ext/node): add chown method to FileHandle class (#27638)
|
||||
- fix(ext/node): apply `@npmcli/agent` workaround to `npm-check-updates`
|
||||
(#27639)
|
||||
- fix(ext/node): fix playwright http client (#27662)
|
||||
- fix(ext/node): show bare-node-builtin hint when using an import map (#27632)
|
||||
- fix(ext/node): use primordials in `ext/node/polyfills/_fs_common.ts` (#27589)
|
||||
- fix(lsp): handle pathless untitled URIs (#27637)
|
||||
- fix(lsp/check): don't resolve unknown media types to a `.js` extension
|
||||
(#27631)
|
||||
- fix(node): Prevent node:child_process from always inheriting the parent
|
||||
environment (#27343) (#27340)
|
||||
- fix(node/fs): add utimes method to the FileHandle class (#27582)
|
||||
- fix(outdated): Use `latest` tag even when it's the same as the current version
|
||||
(#27699)
|
||||
- fix(outdated): retain strict semver specifier when updating (#27701)
|
||||
|
||||
### 2.1.5 / 2025.01.09
|
||||
|
||||
- feat(unstable): implement QUIC (#21942)
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_bench_util"
|
||||
version = "0.179.0"
|
||||
version = "0.180.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno"
|
||||
version = "2.1.5"
|
||||
version = "2.1.6"
|
||||
authors.workspace = true
|
||||
default-run = "deno"
|
||||
edition.workspace = true
|
||||
|
@ -16,11 +16,6 @@ name = "deno"
|
|||
path = "main.rs"
|
||||
doc = false
|
||||
|
||||
[[bin]]
|
||||
name = "denort"
|
||||
path = "mainrt.rs"
|
||||
doc = false
|
||||
|
||||
[[test]]
|
||||
name = "integration"
|
||||
path = "integration_tests_runner.rs"
|
||||
|
@ -49,7 +44,7 @@ dhat-heap = ["dhat"]
|
|||
upgrade = []
|
||||
# A dev feature to disable creations and loading of snapshots in favor of
|
||||
# loading JS sources at runtime.
|
||||
hmr = ["deno_runtime/hmr"]
|
||||
hmr = ["deno_runtime/hmr", "deno_snapshots/disable"]
|
||||
# Vendor zlib as zlib-ng
|
||||
__vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"]
|
||||
|
||||
|
@ -60,10 +55,12 @@ lazy-regex.workspace = true
|
|||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
zstd.workspace = true
|
||||
glibc_version = "0.1.2"
|
||||
flate2 = { workspace = true, features = ["default"] }
|
||||
deno_error.workspace = true
|
||||
|
||||
[target.'cfg(unix)'.build-dependencies]
|
||||
glibc_version = "0.1.2"
|
||||
|
||||
[target.'cfg(windows)'.build-dependencies]
|
||||
winapi.workspace = true
|
||||
winres.workspace = true
|
||||
|
@ -76,6 +73,7 @@ deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"]
|
|||
deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] }
|
||||
deno_error.workspace = true
|
||||
deno_graph = { version = "=0.87.0" }
|
||||
deno_lib.workspace = true
|
||||
deno_lint = { version = "=0.68.2", features = ["docs"] }
|
||||
deno_lockfile.workspace = true
|
||||
deno_npm.workspace = true
|
||||
|
@ -85,10 +83,11 @@ deno_path_util.workspace = true
|
|||
deno_resolver = { workspace = true, features = ["sync"] }
|
||||
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
|
||||
deno_semver.workspace = true
|
||||
deno_snapshots = { workspace = true }
|
||||
deno_task_shell = "=0.20.2"
|
||||
deno_telemetry.workspace = true
|
||||
deno_terminal.workspace = true
|
||||
libsui = "0.5.0"
|
||||
libsui.workspace = true
|
||||
node_resolver.workspace = true
|
||||
|
||||
anstream = "0.6.14"
|
||||
|
@ -114,7 +113,6 @@ dprint-plugin-json = "=0.19.4"
|
|||
dprint-plugin-jupyter = "=0.1.5"
|
||||
dprint-plugin-markdown = "=0.17.8"
|
||||
dprint-plugin-typescript = "=0.93.3"
|
||||
env_logger = "=0.10.0"
|
||||
fancy-regex = "=0.10.0"
|
||||
faster-hex.workspace = true
|
||||
# If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`.
|
||||
|
@ -155,7 +153,6 @@ rustyline-derive = "=0.7.0"
|
|||
serde.workspace = true
|
||||
serde_repr.workspace = true
|
||||
sha2.workspace = true
|
||||
shell-escape = "=0.1.5"
|
||||
spki = { version = "0.7", features = ["pem"] }
|
||||
sqlformat = "=0.3.2"
|
||||
strsim = "0.11.1"
|
||||
|
@ -184,6 +181,7 @@ winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "
|
|||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
nix.workspace = true
|
||||
shell-escape = "=0.1.5"
|
||||
|
||||
[dev-dependencies]
|
||||
deno_bench_util.workspace = true
|
||||
|
|
|
@ -31,6 +31,9 @@ use deno_core::error::AnyError;
|
|||
use deno_core::resolve_url_or_path;
|
||||
use deno_core::url::Url;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_lib::args::CaData;
|
||||
use deno_lib::args::UnstableConfig;
|
||||
use deno_lib::version::DENO_VERSION_INFO;
|
||||
use deno_path_util::normalize_path;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::deno_permissions::SysDescriptor;
|
||||
|
@ -546,15 +549,6 @@ impl Default for TypeCheckMode {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub enum CaData {
|
||||
/// The string is a file path
|
||||
File(String),
|
||||
/// This variant is not exposed as an option in the CLI, it is used internally
|
||||
/// for standalone binaries.
|
||||
Bytes(Vec<u8>),
|
||||
}
|
||||
|
||||
// Info needed to run NPM lifecycle scripts
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Default)]
|
||||
pub struct LifecycleScriptsConfig {
|
||||
|
@ -582,19 +576,6 @@ fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize,
|
||||
)]
|
||||
pub struct UnstableConfig {
|
||||
// TODO(bartlomieju): remove in Deno 2.5
|
||||
pub legacy_flag_enabled: bool, // --unstable
|
||||
pub bare_node_builtins: bool,
|
||||
pub detect_cjs: bool,
|
||||
pub sloppy_imports: bool,
|
||||
pub npm_lazy_caching: bool,
|
||||
pub features: Vec<String>, // --unstabe-kv --unstable-cron
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Default)]
|
||||
pub struct InternalFlags {
|
||||
/// Used when the language server is configured with an
|
||||
|
@ -1484,14 +1465,15 @@ fn handle_repl_flags(flags: &mut Flags, repl_flags: ReplFlags) {
|
|||
}
|
||||
|
||||
pub fn clap_root() -> Command {
|
||||
debug_assert_eq!(DENO_VERSION_INFO.typescript, deno_snapshots::TS_VERSION);
|
||||
let long_version = format!(
|
||||
"{} ({}, {}, {})\nv8 {}\ntypescript {}",
|
||||
crate::version::DENO_VERSION_INFO.deno,
|
||||
crate::version::DENO_VERSION_INFO.release_channel.name(),
|
||||
DENO_VERSION_INFO.deno,
|
||||
DENO_VERSION_INFO.release_channel.name(),
|
||||
env!("PROFILE"),
|
||||
env!("TARGET"),
|
||||
deno_core::v8::VERSION_STRING,
|
||||
crate::version::DENO_VERSION_INFO.typescript
|
||||
DENO_VERSION_INFO.typescript
|
||||
);
|
||||
|
||||
run_args(Command::new("deno"), true)
|
||||
|
@ -1507,7 +1489,7 @@ pub fn clap_root() -> Command {
|
|||
)
|
||||
.color(ColorChoice::Auto)
|
||||
.term_width(800)
|
||||
.version(crate::version::DENO_VERSION_INFO.deno)
|
||||
.version(DENO_VERSION_INFO.deno)
|
||||
.long_version(long_version)
|
||||
.disable_version_flag(true)
|
||||
.disable_help_flag(true)
|
||||
|
|
|
@ -61,11 +61,13 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
|
|||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
|
||||
#[error("Failed writing lockfile")]
|
||||
#[class(inherit)]
|
||||
struct AtomicWriteFileWithRetriesError {
|
||||
#[source]
|
||||
source: std::io::Error,
|
||||
pub enum AtomicWriteFileWithRetriesError {
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
Changed(JsErrorBox),
|
||||
#[class(inherit)]
|
||||
#[error("Failed writing lockfile")]
|
||||
Io(#[source] std::io::Error),
|
||||
}
|
||||
|
||||
impl CliLockfile {
|
||||
|
@ -87,12 +89,16 @@ impl CliLockfile {
|
|||
self.lockfile.lock().overwrite
|
||||
}
|
||||
|
||||
pub fn write_if_changed(&self) -> Result<(), JsErrorBox> {
|
||||
pub fn write_if_changed(
|
||||
&self,
|
||||
) -> Result<(), AtomicWriteFileWithRetriesError> {
|
||||
if self.skip_write {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.error_if_changed()?;
|
||||
self
|
||||
.error_if_changed()
|
||||
.map_err(AtomicWriteFileWithRetriesError::Changed)?;
|
||||
let mut lockfile = self.lockfile.lock();
|
||||
let Some(bytes) = lockfile.resolve_write_bytes() else {
|
||||
return Ok(()); // nothing to do
|
||||
|
@ -105,9 +111,7 @@ impl CliLockfile {
|
|||
&bytes,
|
||||
cache::CACHE_PERM,
|
||||
)
|
||||
.map_err(|source| {
|
||||
JsErrorBox::from_err(AtomicWriteFileWithRetriesError { source })
|
||||
})?;
|
||||
.map_err(AtomicWriteFileWithRetriesError::Io)?;
|
||||
lockfile.has_content_changed = false;
|
||||
Ok(())
|
||||
}
|
||||
|
|
263
cli/args/mod.rs
263
cli/args/mod.rs
|
@ -10,10 +10,6 @@ mod package_json;
|
|||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::io::BufReader;
|
||||
use std::io::Cursor;
|
||||
use std::io::Read;
|
||||
use std::io::Seek;
|
||||
use std::net::SocketAddr;
|
||||
use std::num::NonZeroUsize;
|
||||
use std::path::Path;
|
||||
|
@ -58,6 +54,13 @@ use deno_core::serde_json;
|
|||
use deno_core::url::Url;
|
||||
use deno_graph::GraphKind;
|
||||
pub use deno_json::check_warn_tsconfig;
|
||||
use deno_lib::args::has_flag_env_var;
|
||||
use deno_lib::args::npm_pkg_req_ref_to_binary_command;
|
||||
use deno_lib::args::CaData;
|
||||
use deno_lib::args::NpmProcessStateKind;
|
||||
use deno_lib::args::NPM_PROCESS_STATE;
|
||||
use deno_lib::version::DENO_VERSION_INFO;
|
||||
use deno_lib::worker::StorageKeyResolver;
|
||||
use deno_lint::linter::LintConfig as DenoLintConfig;
|
||||
use deno_npm::npm_rc::NpmRc;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
|
@ -65,27 +68,20 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
|||
use deno_npm::NpmSystemInfo;
|
||||
use deno_path_util::normalize_path;
|
||||
use deno_runtime::deno_permissions::PermissionsOptions;
|
||||
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
|
||||
use deno_runtime::deno_tls::rustls;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_tls::rustls_pemfile;
|
||||
use deno_runtime::deno_tls::webpki_roots;
|
||||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use deno_semver::StackString;
|
||||
use deno_telemetry::OtelConfig;
|
||||
use deno_telemetry::OtelRuntimeConfig;
|
||||
use deno_terminal::colors;
|
||||
use dotenvy::from_filename;
|
||||
pub use flags::*;
|
||||
use import_map::resolve_import_map_value_from_specifier;
|
||||
pub use lockfile::AtomicWriteFileWithRetriesError;
|
||||
pub use lockfile::CliLockfile;
|
||||
pub use lockfile::CliLockfileReadFromPathOptions;
|
||||
use once_cell::sync::Lazy;
|
||||
pub use package_json::NpmInstallDepsProvider;
|
||||
pub use package_json::PackageJsonDepValueParseWithLocationError;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use sys_traits::EnvHomeDir;
|
||||
use thiserror::Error;
|
||||
|
||||
|
@ -93,7 +89,6 @@ use crate::cache::DenoDirProvider;
|
|||
use crate::file_fetcher::CliFileFetcher;
|
||||
use crate::sys::CliSys;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
use crate::version;
|
||||
|
||||
pub fn npm_registry_url() -> &'static Url {
|
||||
static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
|
||||
|
@ -605,147 +600,6 @@ pub fn create_default_npmrc() -> Arc<ResolvedNpmRc> {
|
|||
})
|
||||
}
|
||||
|
||||
#[derive(Error, Debug, Clone, deno_error::JsError)]
|
||||
#[class(generic)]
|
||||
pub enum RootCertStoreLoadError {
|
||||
#[error(
|
||||
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
|
||||
)]
|
||||
UnknownStore(String),
|
||||
#[error("Unable to add pem file to certificate store: {0}")]
|
||||
FailedAddPemFile(String),
|
||||
#[error("Failed opening CA file: {0}")]
|
||||
CaFileOpenError(String),
|
||||
}
|
||||
|
||||
/// Create and populate a root cert store based on the passed options and
|
||||
/// environment.
|
||||
pub fn get_root_cert_store(
|
||||
maybe_root_path: Option<PathBuf>,
|
||||
maybe_ca_stores: Option<Vec<String>>,
|
||||
maybe_ca_data: Option<CaData>,
|
||||
) -> Result<RootCertStore, RootCertStoreLoadError> {
|
||||
let mut root_cert_store = RootCertStore::empty();
|
||||
let ca_stores: Vec<String> = maybe_ca_stores
|
||||
.or_else(|| {
|
||||
let env_ca_store = env::var("DENO_TLS_CA_STORE").ok()?;
|
||||
Some(
|
||||
env_ca_store
|
||||
.split(',')
|
||||
.map(|s| s.trim().to_string())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.unwrap_or_else(|| vec!["mozilla".to_string()]);
|
||||
|
||||
for store in ca_stores.iter() {
|
||||
match store.as_str() {
|
||||
"mozilla" => {
|
||||
root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
|
||||
}
|
||||
"system" => {
|
||||
let roots = load_native_certs().expect("could not load platform certs");
|
||||
for root in roots {
|
||||
if let Err(err) = root_cert_store
|
||||
.add(rustls::pki_types::CertificateDer::from(root.0.clone()))
|
||||
{
|
||||
log::error!(
|
||||
"{}",
|
||||
colors::yellow(&format!(
|
||||
"Unable to add system certificate to certificate store: {:?}",
|
||||
err
|
||||
))
|
||||
);
|
||||
let hex_encoded_root = faster_hex::hex_string(&root.0);
|
||||
log::error!("{}", colors::gray(&hex_encoded_root));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let ca_data =
|
||||
maybe_ca_data.or_else(|| env::var("DENO_CERT").ok().map(CaData::File));
|
||||
if let Some(ca_data) = ca_data {
|
||||
let result = match ca_data {
|
||||
CaData::File(ca_file) => {
|
||||
let ca_file = if let Some(root) = &maybe_root_path {
|
||||
root.join(&ca_file)
|
||||
} else {
|
||||
PathBuf::from(ca_file)
|
||||
};
|
||||
let certfile = std::fs::File::open(ca_file).map_err(|err| {
|
||||
RootCertStoreLoadError::CaFileOpenError(err.to_string())
|
||||
})?;
|
||||
let mut reader = BufReader::new(certfile);
|
||||
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
|
||||
}
|
||||
CaData::Bytes(data) => {
|
||||
let mut reader = BufReader::new(Cursor::new(data));
|
||||
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
|
||||
}
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(certs) => {
|
||||
root_cert_store.add_parsable_certificates(certs);
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(root_cert_store)
|
||||
}
|
||||
|
||||
/// State provided to the process via an environment variable.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct NpmProcessState {
|
||||
pub kind: NpmProcessStateKind,
|
||||
pub local_node_modules_path: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum NpmProcessStateKind {
|
||||
Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
|
||||
Byonm,
|
||||
}
|
||||
|
||||
static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| {
|
||||
use deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
|
||||
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
|
||||
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
|
||||
let fd = fd.parse::<usize>().ok()?;
|
||||
let mut file = {
|
||||
use deno_runtime::deno_io::FromRawIoHandle;
|
||||
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
|
||||
};
|
||||
let mut buf = Vec::new();
|
||||
// seek to beginning. after the file is written the position will be inherited by this subprocess,
|
||||
// and also this file might have been read before
|
||||
file.seek(std::io::SeekFrom::Start(0)).unwrap();
|
||||
file
|
||||
.read_to_end(&mut buf)
|
||||
.inspect_err(|e| {
|
||||
log::error!("failed to read npm process state from fd {fd}: {e}");
|
||||
})
|
||||
.ok()?;
|
||||
let state: NpmProcessState = serde_json::from_slice(&buf)
|
||||
.inspect_err(|e| {
|
||||
log::error!(
|
||||
"failed to deserialize npm process state: {e} {}",
|
||||
String::from_utf8_lossy(&buf)
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
Some(state)
|
||||
});
|
||||
|
||||
/// Overrides for the options below that when set will
|
||||
/// use these values over the values derived from the
|
||||
/// CLI flags or config file.
|
||||
|
@ -1227,6 +1081,16 @@ impl CliOptions {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn resolve_storage_key_resolver(&self) -> StorageKeyResolver {
|
||||
if let Some(location) = &self.flags.location {
|
||||
StorageKeyResolver::from_flag(location)
|
||||
} else if let Some(deno_json) = self.start_dir.maybe_deno_json() {
|
||||
StorageKeyResolver::from_config_file_url(&deno_json.specifier)
|
||||
} else {
|
||||
StorageKeyResolver::new_use_main_module()
|
||||
}
|
||||
}
|
||||
|
||||
// If the main module should be treated as being in an npm package.
|
||||
// This is triggered via a secret environment variable which is used
|
||||
// for functionality like child_process.fork. Users should NOT depend
|
||||
|
@ -1281,7 +1145,7 @@ impl CliOptions {
|
|||
|
||||
Ok(Some(InspectorServer::new(
|
||||
host,
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
)?))
|
||||
}
|
||||
|
||||
|
@ -1975,72 +1839,11 @@ fn resolve_import_map_specifier(
|
|||
}
|
||||
}
|
||||
|
||||
pub struct StorageKeyResolver(Option<Option<String>>);
|
||||
|
||||
impl StorageKeyResolver {
|
||||
pub fn from_options(options: &CliOptions) -> Self {
|
||||
Self(if let Some(location) = &options.flags.location {
|
||||
// if a location is set, then the ascii serialization of the location is
|
||||
// used, unless the origin is opaque, and then no storage origin is set, as
|
||||
// we can't expect the origin to be reproducible
|
||||
let storage_origin = location.origin();
|
||||
if storage_origin.is_tuple() {
|
||||
Some(Some(storage_origin.ascii_serialization()))
|
||||
} else {
|
||||
Some(None)
|
||||
}
|
||||
} else {
|
||||
// otherwise we will use the path to the config file or None to
|
||||
// fall back to using the main module's path
|
||||
options
|
||||
.start_dir
|
||||
.maybe_deno_json()
|
||||
.map(|config_file| Some(config_file.specifier.to_string()))
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a storage key resolver that will always resolve to being empty.
|
||||
pub fn empty() -> Self {
|
||||
Self(Some(None))
|
||||
}
|
||||
|
||||
/// Resolves the storage key to use based on the current flags, config, or main module.
|
||||
pub fn resolve_storage_key(
|
||||
&self,
|
||||
main_module: &ModuleSpecifier,
|
||||
) -> Option<String> {
|
||||
// use the stored value or fall back to using the path of the main module.
|
||||
if let Some(maybe_value) = &self.0 {
|
||||
maybe_value.clone()
|
||||
} else {
|
||||
Some(main_module.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolves the no_prompt value based on the cli flags and environment.
|
||||
pub fn resolve_no_prompt(flags: &PermissionFlags) -> bool {
|
||||
flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT")
|
||||
}
|
||||
|
||||
pub fn has_trace_permissions_enabled() -> bool {
|
||||
has_flag_env_var("DENO_TRACE_PERMISSIONS")
|
||||
}
|
||||
|
||||
pub fn has_flag_env_var(name: &str) -> bool {
|
||||
let value = env::var(name);
|
||||
matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
|
||||
}
|
||||
|
||||
pub fn npm_pkg_req_ref_to_binary_command(
|
||||
req_ref: &NpmPackageReqReference,
|
||||
) -> String {
|
||||
req_ref
|
||||
.sub_path()
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| req_ref.req().name.to_string())
|
||||
}
|
||||
|
||||
pub fn config_to_deno_graph_workspace_member(
|
||||
config: &ConfigFile,
|
||||
) -> Result<deno_graph::WorkspaceMember, AnyError> {
|
||||
|
@ -2101,13 +1904,6 @@ pub enum NpmCachingStrategy {
|
|||
Manual,
|
||||
}
|
||||
|
||||
pub fn otel_runtime_config() -> OtelRuntimeConfig {
|
||||
OtelRuntimeConfig {
|
||||
runtime_name: Cow::Borrowed("deno"),
|
||||
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
@ -2160,27 +1956,6 @@ mod test {
|
|||
assert_eq!(actual, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn storage_key_resolver_test() {
|
||||
let resolver = StorageKeyResolver(None);
|
||||
let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap();
|
||||
assert_eq!(
|
||||
resolver.resolve_storage_key(&specifier),
|
||||
Some(specifier.to_string())
|
||||
);
|
||||
let resolver = StorageKeyResolver(Some(None));
|
||||
assert_eq!(resolver.resolve_storage_key(&specifier), None);
|
||||
let resolver = StorageKeyResolver(Some(Some("value".to_string())));
|
||||
assert_eq!(
|
||||
resolver.resolve_storage_key(&specifier),
|
||||
Some("value".to_string())
|
||||
);
|
||||
|
||||
// test empty
|
||||
let resolver = StorageKeyResolver::empty();
|
||||
assert_eq!(resolver.resolve_storage_key(&specifier), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn jsr_urls() {
|
||||
let reg_url = jsr_url();
|
||||
|
|
90
cli/build.rs
90
cli/build.rs
|
@ -5,7 +5,6 @@ use std::path::PathBuf;
|
|||
|
||||
use deno_core::snapshot::*;
|
||||
use deno_runtime::*;
|
||||
mod shared;
|
||||
|
||||
mod ts {
|
||||
use std::collections::HashMap;
|
||||
|
@ -310,57 +309,6 @@ mod ts {
|
|||
println!("cargo:rerun-if-changed={}", path.display());
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn version() -> String {
|
||||
let file_text = std::fs::read_to_string("tsc/00_typescript.js").unwrap();
|
||||
let version_text = " version = \"";
|
||||
for line in file_text.lines() {
|
||||
if let Some(index) = line.find(version_text) {
|
||||
let remaining_line = &line[index + version_text.len()..];
|
||||
return remaining_line[..remaining_line.find('"').unwrap()].to_string();
|
||||
}
|
||||
}
|
||||
panic!("Could not find ts version.")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "hmr"))]
|
||||
fn create_cli_snapshot(snapshot_path: PathBuf) {
|
||||
use deno_runtime::ops::bootstrap::SnapshotOptions;
|
||||
|
||||
let snapshot_options = SnapshotOptions {
|
||||
ts_version: ts::version(),
|
||||
v8_version: deno_core::v8::VERSION_STRING,
|
||||
target: std::env::var("TARGET").unwrap(),
|
||||
};
|
||||
|
||||
deno_runtime::snapshot::create_runtime_snapshot(
|
||||
snapshot_path,
|
||||
snapshot_options,
|
||||
vec![],
|
||||
);
|
||||
}
|
||||
|
||||
fn git_commit_hash() -> String {
|
||||
if let Ok(output) = std::process::Command::new("git")
|
||||
.arg("rev-list")
|
||||
.arg("-1")
|
||||
.arg("HEAD")
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
std::str::from_utf8(&output.stdout[..40])
|
||||
.unwrap()
|
||||
.to_string()
|
||||
} else {
|
||||
// When not in git repository
|
||||
// (e.g. when the user install by `cargo install deno`)
|
||||
"UNKNOWN".to_string()
|
||||
}
|
||||
} else {
|
||||
// When there is no git command for some reason
|
||||
"UNKNOWN".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
|
@ -370,7 +318,7 @@ fn main() {
|
|||
}
|
||||
|
||||
deno_napi::print_linker_flags("deno");
|
||||
deno_napi::print_linker_flags("denort");
|
||||
deno_webgpu::print_linker_flags("deno");
|
||||
|
||||
// Host snapshots won't work when cross compiling.
|
||||
let target = env::var("TARGET").unwrap();
|
||||
|
@ -389,51 +337,15 @@ fn main() {
|
|||
}
|
||||
println!("cargo:rerun-if-env-changed=DENO_CANARY");
|
||||
|
||||
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash());
|
||||
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
|
||||
println!(
|
||||
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
|
||||
&git_commit_hash()[..7]
|
||||
);
|
||||
|
||||
let ts_version = ts::version();
|
||||
debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
|
||||
println!("cargo:rustc-env=TS_VERSION={}", ts_version);
|
||||
println!("cargo:rerun-if-env-changed=TS_VERSION");
|
||||
|
||||
println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
|
||||
println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
|
||||
|
||||
if cfg!(windows) {
|
||||
// these dls load slowly, so delay loading them
|
||||
let dlls = [
|
||||
// webgpu
|
||||
"d3dcompiler_47",
|
||||
"OPENGL32",
|
||||
// network related functions
|
||||
"iphlpapi",
|
||||
];
|
||||
for dll in dlls {
|
||||
println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
|
||||
println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
|
||||
}
|
||||
// enable delay loading
|
||||
println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
|
||||
println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
|
||||
}
|
||||
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());
|
||||
|
||||
let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin");
|
||||
ts::create_compiler_snapshot(compiler_snapshot_path, &c);
|
||||
|
||||
#[cfg(not(feature = "hmr"))]
|
||||
{
|
||||
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
|
||||
create_cli_snapshot(cli_snapshot_path);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
let mut res = winres::WindowsResource::new();
|
||||
|
|
3 cli/cache/cache_db.rs (vendored)
|
@@ -9,14 +9,13 @@ use deno_core::error::AnyError;
|
|||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::parking_lot::MutexGuard;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_lib::util::hash::FastInsecureHasher;
|
||||
use deno_runtime::deno_webstorage::rusqlite;
|
||||
use deno_runtime::deno_webstorage::rusqlite::Connection;
|
||||
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
|
||||
use deno_runtime::deno_webstorage::rusqlite::Params;
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
use super::FastInsecureHasher;
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct CacheDBHash(u64);
|
||||
|
||||
|
|
11 cli/cache/caches.rs (vendored)
|
@@ -3,17 +3,18 @@
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_lib::version::DENO_VERSION_INFO;
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
use super::cache_db::CacheDB;
|
||||
use super::cache_db::CacheDBConfiguration;
|
||||
use super::check::TYPE_CHECK_CACHE_DB;
|
||||
use super::code_cache::CODE_CACHE_DB;
|
||||
use super::deno_dir::DenoDirProvider;
|
||||
use super::fast_check::FAST_CHECK_CACHE_DB;
|
||||
use super::incremental::INCREMENTAL_CACHE_DB;
|
||||
use super::module_info::MODULE_INFO_CACHE_DB;
|
||||
use super::node::NODE_ANALYSIS_CACHE_DB;
|
||||
use crate::cache::DenoDirProvider;
|
||||
|
||||
pub struct Caches {
|
||||
dir_provider: Arc<DenoDirProvider>,
|
||||
|
@@ -48,13 +49,9 @@ impl Caches {
|
|||
cell
|
||||
.get_or_init(|| {
|
||||
if let Some(path) = path {
|
||||
CacheDB::from_path(
|
||||
config,
|
||||
path,
|
||||
crate::version::DENO_VERSION_INFO.deno,
|
||||
)
|
||||
CacheDB::from_path(config, path, DENO_VERSION_INFO.deno)
|
||||
} else {
|
||||
CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno)
|
||||
CacheDB::in_memory(config, DENO_VERSION_INFO.deno)
|
||||
}
|
||||
})
|
||||
.clone()
|
||||
|
|
9 cli/cache/code_cache.rs (vendored)
|
@@ -1,7 +1,5 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_runtime::code_cache;
|
||||
|
@@ -11,7 +9,6 @@ use super::cache_db::CacheDB;
|
|||
use super::cache_db::CacheDBConfiguration;
|
||||
use super::cache_db::CacheDBHash;
|
||||
use super::cache_db::CacheFailure;
|
||||
use crate::worker::CliCodeCache;
|
||||
|
||||
pub static CODE_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
|
||||
table_initializer: concat!(
|
||||
|
@@ -85,12 +82,6 @@ impl CodeCache {
|
|||
}
|
||||
}
|
||||
|
||||
impl CliCodeCache for CodeCache {
|
||||
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl code_cache::CodeCache for CodeCache {
|
||||
fn get_sync(
|
||||
&self,
|
||||
|
|
3 cli/cache/deno_dir.rs (vendored)
|
@@ -4,7 +4,6 @@ use std::env;
|
|||
use std::path::PathBuf;
|
||||
|
||||
use deno_cache_dir::DenoDirResolutionError;
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
use super::DiskCache;
|
||||
use crate::sys::CliSys;
|
||||
|
@@ -14,7 +13,7 @@ use crate::sys::CliSys;
|
|||
pub struct DenoDirProvider {
|
||||
sys: CliSys,
|
||||
maybe_custom_root: Option<PathBuf>,
|
||||
deno_dir: OnceCell<Result<DenoDir, DenoDirResolutionError>>,
|
||||
deno_dir: std::sync::OnceLock<Result<DenoDir, DenoDirResolutionError>>,
|
||||
}
|
||||
|
||||
impl DenoDirProvider {
|
||||
|
|
13 cli/cache/disk_cache.rs (vendored)
|
@@ -9,11 +9,11 @@ use std::path::Prefix;
|
|||
use std::str;
|
||||
|
||||
use deno_cache_dir::url_to_filename;
|
||||
use deno_cache_dir::CACHE_PERM;
|
||||
use deno_core::url::Host;
|
||||
use deno_core::url::Url;
|
||||
use deno_path_util::fs::atomic_write_file_with_retries;
|
||||
|
||||
use super::CACHE_PERM;
|
||||
use crate::sys::CliSys;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@@ -130,6 +130,9 @@ impl DiskCache {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
// ok, testing
|
||||
#[allow(clippy::disallowed_types)]
|
||||
use sys_traits::impls::RealSys;
|
||||
use test_util::TempDir;
|
||||
|
||||
use super::*;
|
||||
|
@@ -138,7 +141,7 @@ mod tests {
|
|||
fn test_set_get_cache_file() {
|
||||
let temp_dir = TempDir::new();
|
||||
let sub_dir = temp_dir.path().join("sub_dir");
|
||||
let cache = DiskCache::new(CliSys::default(), &sub_dir.to_path_buf());
|
||||
let cache = DiskCache::new(RealSys, &sub_dir.to_path_buf());
|
||||
let path = PathBuf::from("foo/bar.txt");
|
||||
cache.set(&path, b"hello").unwrap();
|
||||
assert_eq!(cache.get(&path).unwrap(), b"hello");
|
||||
|
@@ -152,7 +155,7 @@ mod tests {
|
|||
PathBuf::from("/deno_dir/")
|
||||
};
|
||||
|
||||
let cache = DiskCache::new(CliSys::default(), &cache_location);
|
||||
let cache = DiskCache::new(RealSys, &cache_location);
|
||||
|
||||
let mut test_cases = vec![
|
||||
(
|
||||
|
@@ -208,7 +211,7 @@ mod tests {
|
|||
} else {
|
||||
"/foo"
|
||||
};
|
||||
let cache = DiskCache::new(CliSys::default(), &PathBuf::from(p));
|
||||
let cache = DiskCache::new(RealSys, &PathBuf::from(p));
|
||||
|
||||
let mut test_cases = vec![
|
||||
(
|
||||
|
@@ -256,7 +259,7 @@ mod tests {
|
|||
PathBuf::from("/deno_dir/")
|
||||
};
|
||||
|
||||
let cache = DiskCache::new(CliSys::default(), &cache_location);
|
||||
let cache = DiskCache::new(RealSys, &cache_location);
|
||||
|
||||
let mut test_cases = vec!["unknown://localhost/test.ts"];
|
||||
|
||||
|
|
5 cli/cache/emit.rs (vendored)
|
@@ -6,6 +6,7 @@ use deno_ast::ModuleSpecifier;
|
|||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::unsync::sync::AtomicFlag;
|
||||
use deno_lib::version::DENO_VERSION_INFO;
|
||||
|
||||
use super::DiskCache;
|
||||
|
||||
|
@@ -23,7 +24,7 @@ impl EmitCache {
|
|||
disk_cache,
|
||||
emit_failed_flag: Default::default(),
|
||||
file_serializer: EmitFileSerializer {
|
||||
cli_version: crate::version::DENO_VERSION_INFO.deno,
|
||||
cli_version: DENO_VERSION_INFO.deno,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@@ -147,7 +148,7 @@ impl EmitFileSerializer {
|
|||
// it's ok to use an insecure hash here because
|
||||
// if someone can change the emit source then they
|
||||
// can also change the version hash
|
||||
crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
|
||||
deno_lib::util::hash::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
|
||||
.write(bytes)
|
||||
// emit should not be re-used between cli versions
|
||||
.write_str(self.cli_version)
|
||||
|
|
2 cli/cache/mod.rs (vendored)
|
@@ -30,7 +30,6 @@ mod cache_db;
|
|||
mod caches;
|
||||
mod check;
|
||||
mod code_cache;
|
||||
mod common;
|
||||
mod deno_dir;
|
||||
mod disk_cache;
|
||||
mod emit;
|
||||
|
@@ -44,7 +43,6 @@ pub use cache_db::CacheDBHash;
|
|||
pub use caches::Caches;
|
||||
pub use check::TypeCheckCache;
|
||||
pub use code_cache::CodeCache;
|
||||
pub use common::FastInsecureHasher;
|
||||
/// Permissions used to save a file in the disk caches.
|
||||
pub use deno_cache_dir::CACHE_PERM;
|
||||
pub use deno_dir::DenoDir;
|
||||
|
|
|
@@ -20,9 +20,9 @@ use deno_error::JsErrorBox;
|
|||
use deno_graph::MediaType;
|
||||
use deno_graph::Module;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_lib::util::hash::FastInsecureHasher;
|
||||
|
||||
use crate::cache::EmitCache;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::resolver::CliCjsTracker;
|
||||
|
||||
|
@@ -112,9 +112,9 @@ impl Emitter {
|
|||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
module_kind: deno_ast::ModuleKind,
|
||||
module_kind: ModuleKind,
|
||||
source: &Arc<str>,
|
||||
) -> Result<String, AnyError> {
|
||||
) -> Result<String, EmitParsedSourceHelperError> {
|
||||
// Note: keep this in sync with the sync version below
|
||||
let helper = EmitParsedSourceHelper(self);
|
||||
match helper.pre_emit_parsed_source(specifier, module_kind, source) {
|
||||
|
|
186 cli/factory.rs
|
@@ -11,6 +11,14 @@ use deno_core::error::AnyError;
|
|||
use deno_core::futures::FutureExt;
|
||||
use deno_core::FeatureChecker;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_lib::args::get_root_cert_store;
|
||||
use deno_lib::args::CaData;
|
||||
use deno_lib::loader::NpmModuleLoader;
|
||||
use deno_lib::npm::create_npm_process_state_provider;
|
||||
use deno_lib::npm::NpmRegistryReadPermissionChecker;
|
||||
use deno_lib::npm::NpmRegistryReadPermissionCheckerMode;
|
||||
use deno_lib::worker::LibMainWorkerFactory;
|
||||
use deno_lib::worker::LibMainWorkerOptions;
|
||||
use deno_npm_cache::NpmCacheSetting;
|
||||
use deno_resolver::cjs::IsCjsResolutionMode;
|
||||
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
|
||||
|
@@ -36,13 +44,10 @@ use node_resolver::analyze::NodeCodeTranslator;
|
|||
use once_cell::sync::OnceCell;
|
||||
|
||||
use crate::args::check_warn_tsconfig;
|
||||
use crate::args::get_root_cert_store;
|
||||
use crate::args::CaData;
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::args::Flags;
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::args::StorageKeyResolver;
|
||||
use crate::args::TsConfigType;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::CodeCache;
|
||||
|
@@ -79,8 +84,6 @@ use crate::npm::CliNpmResolver;
|
|||
use crate::npm::CliNpmResolverCreateOptions;
|
||||
use crate::npm::CliNpmResolverManagedSnapshotOption;
|
||||
use crate::npm::CliNpmTarballCache;
|
||||
use crate::npm::NpmRegistryReadPermissionChecker;
|
||||
use crate::npm::NpmRegistryReadPermissionCheckerMode;
|
||||
use crate::npm::NpmResolutionInitializer;
|
||||
use crate::resolver::CliCjsTracker;
|
||||
use crate::resolver::CliDenoResolver;
|
||||
|
@@ -89,7 +92,6 @@ use crate::resolver::CliNpmReqResolver;
|
|||
use crate::resolver::CliResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
use crate::resolver::FoundPackageJsonDepFlag;
|
||||
use crate::resolver::NpmModuleLoader;
|
||||
use crate::standalone::binary::DenoCompileBinaryWriter;
|
||||
use crate::sys::CliSys;
|
||||
use crate::tools::check::TypeChecker;
|
||||
|
@@ -811,19 +813,11 @@ impl CliFactory {
|
|||
.services
|
||||
.node_code_translator
|
||||
.get_or_try_init_async(async {
|
||||
let caches = self.caches()?;
|
||||
let node_analysis_cache =
|
||||
NodeAnalysisCache::new(caches.node_analysis_db());
|
||||
let node_resolver = self.node_resolver().await?.clone();
|
||||
let cjs_esm_analyzer = CliCjsCodeAnalyzer::new(
|
||||
node_analysis_cache,
|
||||
self.cjs_tracker()?.clone(),
|
||||
self.fs().clone(),
|
||||
Some(self.parsed_source_cache().clone()),
|
||||
);
|
||||
let cjs_code_analyzer = self.create_cjs_code_analyzer()?;
|
||||
|
||||
Ok(Arc::new(NodeCodeTranslator::new(
|
||||
cjs_esm_analyzer,
|
||||
cjs_code_analyzer,
|
||||
self.in_npm_pkg_checker()?.clone(),
|
||||
node_resolver,
|
||||
self.npm_resolver().await?.clone(),
|
||||
|
@@ -834,6 +828,17 @@ impl CliFactory {
|
|||
.await
|
||||
}
|
||||
|
||||
fn create_cjs_code_analyzer(&self) -> Result<CliCjsCodeAnalyzer, AnyError> {
|
||||
let caches = self.caches()?;
|
||||
let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db());
|
||||
Ok(CliCjsCodeAnalyzer::new(
|
||||
node_analysis_cache,
|
||||
self.cjs_tracker()?.clone(),
|
||||
self.fs().clone(),
|
||||
Some(self.parsed_source_cache().clone()),
|
||||
))
|
||||
}
|
||||
|
||||
pub async fn npm_req_resolver(
|
||||
&self,
|
||||
) -> Result<&Arc<CliNpmReqResolver>, AnyError> {
|
||||
|
@@ -1023,11 +1028,11 @@ impl CliFactory {
|
|||
) -> Result<DenoCompileBinaryWriter, AnyError> {
|
||||
let cli_options = self.cli_options()?;
|
||||
Ok(DenoCompileBinaryWriter::new(
|
||||
self.create_cjs_code_analyzer()?,
|
||||
self.cjs_tracker()?,
|
||||
self.cli_options()?,
|
||||
self.deno_dir()?,
|
||||
self.emitter()?,
|
||||
self.file_fetcher()?,
|
||||
self.http_client_provider(),
|
||||
self.npm_resolver().await?,
|
||||
self.workspace_resolver().await?.as_ref(),
|
||||
|
@@ -1083,7 +1088,34 @@ impl CliFactory {
|
|||
Arc::new(NpmRegistryReadPermissionChecker::new(self.sys(), mode))
|
||||
};
|
||||
|
||||
Ok(CliMainWorkerFactory::new(
|
||||
let module_loader_factory = CliModuleLoaderFactory::new(
|
||||
cli_options,
|
||||
cjs_tracker,
|
||||
if cli_options.code_cache_enabled() {
|
||||
Some(self.code_cache()?.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
self.emitter()?.clone(),
|
||||
in_npm_pkg_checker.clone(),
|
||||
self.main_module_graph_container().await?.clone(),
|
||||
self.module_load_preparer().await?.clone(),
|
||||
node_code_translator.clone(),
|
||||
node_resolver.clone(),
|
||||
NpmModuleLoader::new(
|
||||
self.cjs_tracker()?.clone(),
|
||||
node_code_translator.clone(),
|
||||
self.sys(),
|
||||
),
|
||||
npm_registry_permission_checker,
|
||||
npm_req_resolver.clone(),
|
||||
cli_npm_resolver.clone(),
|
||||
self.parsed_source_cache().clone(),
|
||||
self.resolver().await?.clone(),
|
||||
self.sys(),
|
||||
);
|
||||
|
||||
let lib_main_worker_factory = LibMainWorkerFactory::new(
|
||||
self.blob_store().clone(),
|
||||
if cli_options.code_cache_enabled() {
|
||||
Some(self.code_cache()?.clone())
|
||||
|
@@ -1092,50 +1124,67 @@ impl CliFactory {
|
|||
},
|
||||
self.feature_checker()?.clone(),
|
||||
fs.clone(),
|
||||
maybe_file_watcher_communicator,
|
||||
self.maybe_inspector_server()?.clone(),
|
||||
cli_options.maybe_lockfile().cloned(),
|
||||
Box::new(CliModuleLoaderFactory::new(
|
||||
cli_options,
|
||||
cjs_tracker,
|
||||
if cli_options.code_cache_enabled() {
|
||||
Some(self.code_cache()?.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
self.emitter()?.clone(),
|
||||
in_npm_pkg_checker.clone(),
|
||||
self.main_module_graph_container().await?.clone(),
|
||||
self.module_load_preparer().await?.clone(),
|
||||
node_code_translator.clone(),
|
||||
node_resolver.clone(),
|
||||
NpmModuleLoader::new(
|
||||
self.cjs_tracker()?.clone(),
|
||||
fs.clone(),
|
||||
node_code_translator.clone(),
|
||||
),
|
||||
npm_registry_permission_checker,
|
||||
npm_req_resolver.clone(),
|
||||
cli_npm_resolver.clone(),
|
||||
self.parsed_source_cache().clone(),
|
||||
self.resolver().await?.clone(),
|
||||
self.sys(),
|
||||
)),
|
||||
Box::new(module_loader_factory),
|
||||
node_resolver.clone(),
|
||||
self.npm_installer_if_managed()?.cloned(),
|
||||
npm_resolver.clone(),
|
||||
create_npm_process_state_provider(npm_resolver),
|
||||
pkg_json_resolver,
|
||||
self.root_cert_store_provider().clone(),
|
||||
self.root_permissions_container()?.clone(),
|
||||
StorageKeyResolver::from_options(cli_options),
|
||||
cli_options.resolve_storage_key_resolver(),
|
||||
self.sys(),
|
||||
self.create_lib_main_worker_options()?,
|
||||
);
|
||||
|
||||
Ok(CliMainWorkerFactory::new(
|
||||
lib_main_worker_factory,
|
||||
maybe_file_watcher_communicator,
|
||||
cli_options.maybe_lockfile().cloned(),
|
||||
self.npm_installer_if_managed()?.cloned(),
|
||||
npm_resolver.clone(),
|
||||
self.sys(),
|
||||
cli_options.sub_command().clone(),
|
||||
self.create_cli_main_worker_options()?,
|
||||
self.cli_options()?.otel_config(),
|
||||
self.cli_options()?.default_npm_caching_strategy(),
|
||||
self.root_permissions_container()?.clone(),
|
||||
))
|
||||
}
|
||||
|
||||
fn create_lib_main_worker_options(
|
||||
&self,
|
||||
) -> Result<LibMainWorkerOptions, AnyError> {
|
||||
let cli_options = self.cli_options()?;
|
||||
Ok(LibMainWorkerOptions {
|
||||
argv: cli_options.argv().clone(),
|
||||
// This optimization is only available for "run" subcommand
|
||||
// because we need to register new ops for testing and jupyter
|
||||
// integration.
|
||||
skip_op_registration: cli_options.sub_command().is_run(),
|
||||
log_level: cli_options.log_level().unwrap_or(log::Level::Info).into(),
|
||||
enable_op_summary_metrics: cli_options.enable_op_summary_metrics(),
|
||||
enable_testing_features: cli_options.enable_testing_features(),
|
||||
has_node_modules_dir: cli_options.has_node_modules_dir(),
|
||||
inspect_brk: cli_options.inspect_brk().is_some(),
|
||||
inspect_wait: cli_options.inspect_wait().is_some(),
|
||||
strace_ops: cli_options.strace_ops().clone(),
|
||||
is_inspecting: cli_options.is_inspecting(),
|
||||
location: cli_options.location_flag().clone(),
|
||||
// if the user ran a binary command, we'll need to set process.argv[0]
|
||||
// to be the name of the binary command instead of deno
|
||||
argv0: cli_options
|
||||
.take_binary_npm_command_name()
|
||||
.or(std::env::args().next()),
|
||||
node_debug: std::env::var("NODE_DEBUG").ok(),
|
||||
origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()),
|
||||
seed: cli_options.seed(),
|
||||
unsafely_ignore_certificate_errors: cli_options
|
||||
.unsafely_ignore_certificate_errors()
|
||||
.clone(),
|
||||
node_ipc: cli_options.node_ipc_fd(),
|
||||
serve_port: cli_options.serve_port(),
|
||||
serve_host: cli_options.serve_host(),
|
||||
otel_config: self.cli_options()?.otel_config(),
|
||||
startup_snapshot: crate::js::deno_isolate_init(),
|
||||
})
|
||||
}
|
||||
|
||||
fn create_cli_main_worker_options(
|
||||
&self,
|
||||
) -> Result<CliMainWorkerOptions, AnyError> {
|
||||
|
@@ -1167,37 +1216,10 @@ impl CliFactory {
|
|||
};
|
||||
|
||||
Ok(CliMainWorkerOptions {
|
||||
argv: cli_options.argv().clone(),
|
||||
// This optimization is only available for "run" subcommand
|
||||
// because we need to register new ops for testing and jupyter
|
||||
// integration.
|
||||
skip_op_registration: cli_options.sub_command().is_run(),
|
||||
log_level: cli_options.log_level().unwrap_or(log::Level::Info).into(),
|
||||
enable_op_summary_metrics: cli_options.enable_op_summary_metrics(),
|
||||
enable_testing_features: cli_options.enable_testing_features(),
|
||||
has_node_modules_dir: cli_options.has_node_modules_dir(),
|
||||
hmr: cli_options.has_hmr(),
|
||||
inspect_brk: cli_options.inspect_brk().is_some(),
|
||||
inspect_wait: cli_options.inspect_wait().is_some(),
|
||||
strace_ops: cli_options.strace_ops().clone(),
|
||||
is_inspecting: cli_options.is_inspecting(),
|
||||
location: cli_options.location_flag().clone(),
|
||||
// if the user ran a binary command, we'll need to set process.argv[0]
|
||||
// to be the name of the binary command instead of deno
|
||||
argv0: cli_options
|
||||
.take_binary_npm_command_name()
|
||||
.or(std::env::args().next()),
|
||||
node_debug: std::env::var("NODE_DEBUG").ok(),
|
||||
origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()),
|
||||
seed: cli_options.seed(),
|
||||
unsafely_ignore_certificate_errors: cli_options
|
||||
.unsafely_ignore_certificate_errors()
|
||||
.clone(),
|
||||
needs_test_modules: cli_options.sub_command().needs_test(),
|
||||
create_hmr_runner,
|
||||
create_coverage_collector,
|
||||
node_ipc: cli_options.node_ipc_fd(),
|
||||
serve_port: cli_options.serve_port(),
|
||||
serve_host: cli_options.serve_host(),
|
||||
default_npm_caching_strategy: cli_options.default_npm_caching_strategy(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -14,6 +14,7 @@ use deno_core::serde_json;
|
|||
use deno_core::url::Url;
|
||||
use deno_error::JsError;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_lib::version::DENO_VERSION_INFO;
|
||||
use deno_runtime::deno_fetch;
|
||||
use deno_runtime::deno_fetch::create_http_client;
|
||||
use deno_runtime::deno_fetch::CreateHttpClientOptions;
|
||||
|
@@ -28,7 +29,6 @@ use http_body_util::BodyExt;
|
|||
use thiserror::Error;
|
||||
|
||||
use crate::util::progress_bar::UpdateGuard;
|
||||
use crate::version;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum SendError {
|
||||
|
@@ -79,7 +79,7 @@ impl HttpClientProvider {
|
|||
Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())),
|
||||
Entry::Vacant(entry) => {
|
||||
let client = create_http_client(
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
CreateHttpClientOptions {
|
||||
root_cert_store: match &self.root_cert_store_provider {
|
||||
Some(provider) => Some(provider.get_or_try_init()?.clone()),
|
||||
|
@@ -481,7 +481,7 @@ mod test {
|
|||
|
||||
let client = HttpClient::new(
|
||||
create_http_client(
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![std::fs::read(
|
||||
test_util::testdata_path().join("tls/RootCA.pem"),
|
||||
|
@@ -525,7 +525,7 @@ mod test {
|
|||
|
||||
let client = HttpClient::new(
|
||||
create_http_client(
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
CreateHttpClientOptions::default(),
|
||||
)
|
||||
.unwrap(),
|
||||
|
@@ -566,7 +566,7 @@ mod test {
|
|||
|
||||
let client = HttpClient::new(
|
||||
create_http_client(
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
CreateHttpClientOptions {
|
||||
root_cert_store: Some(root_cert_store),
|
||||
..Default::default()
|
||||
|
@@ -587,7 +587,7 @@ mod test {
|
|||
.unwrap();
|
||||
let client = HttpClient::new(
|
||||
create_http_client(
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![std::fs::read(
|
||||
test_util::testdata_path()
|
||||
|
@@ -620,7 +620,7 @@ mod test {
|
|||
let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap();
|
||||
let client = HttpClient::new(
|
||||
create_http_client(
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![std::fs::read(
|
||||
test_util::testdata_path()
|
||||
|
@@ -661,7 +661,7 @@ mod test {
|
|||
.unwrap();
|
||||
let client = HttpClient::new(
|
||||
create_http_client(
|
||||
version::DENO_VERSION_INFO.user_agent,
|
||||
DENO_VERSION_INFO.user_agent,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![std::fs::read(
|
||||
test_util::testdata_path()
|
||||
|
|
|
@@ -1,18 +1,5 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
pub fn main() {
|
||||
let mut args = vec!["cargo", "test", "-p", "cli_tests", "--features", "run"];
|
||||
|
||||
if !cfg!(debug_assertions) {
|
||||
args.push("--release");
|
||||
}
|
||||
|
||||
args.push("--");
|
||||
|
||||
// If any args were passed to this process, pass them through to the child
|
||||
let orig_args = std::env::args().skip(1).collect::<Vec<_>>();
|
||||
let orig_args: Vec<&str> =
|
||||
orig_args.iter().map(|x| x.as_ref()).collect::<Vec<_>>();
|
||||
args.extend(orig_args);
|
||||
|
||||
test_util::spawn::exec_replace("cargo", &args).unwrap();
|
||||
// this file exists to cause the executable to be built when running cargo test
|
||||
}
|
||||
|
|
13 cli/js.rs
|
@@ -2,18 +2,7 @@
|
|||
|
||||
use log::debug;
|
||||
|
||||
#[cfg(not(feature = "hmr"))]
|
||||
static CLI_SNAPSHOT: &[u8] =
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
|
||||
|
||||
pub fn deno_isolate_init() -> Option<&'static [u8]> {
|
||||
debug!("Deno isolate init with snapshots.");
|
||||
#[cfg(not(feature = "hmr"))]
|
||||
{
|
||||
Some(CLI_SNAPSHOT)
|
||||
}
|
||||
#[cfg(feature = "hmr")]
|
||||
{
|
||||
None
|
||||
}
|
||||
deno_snapshots::CLI_SNAPSHOT
|
||||
}
|
||||
|
|
46 cli/lib/Cargo.toml (Normal file)
|
@@ -0,0 +1,46 @@
|
|||
# Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
[package]
|
||||
name = "deno_lib"
|
||||
version = "0.2.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
readme = "README.md"
|
||||
repository.workspace = true
|
||||
description = "Shared code between the Deno CLI and denort"
|
||||
|
||||
[lib]
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
capacity_builder.workspace = true
|
||||
deno_config.workspace = true
|
||||
deno_error.workspace = true
|
||||
deno_fs = { workspace = true, features = ["sync_fs"] }
|
||||
deno_media_type.workspace = true
|
||||
deno_node = { workspace = true, features = ["sync_fs"] }
|
||||
deno_npm.workspace = true
|
||||
deno_path_util.workspace = true
|
||||
deno_resolver = { workspace = true, features = ["sync"] }
|
||||
deno_runtime.workspace = true
|
||||
deno_semver.workspace = true
|
||||
deno_terminal.workspace = true
|
||||
env_logger = "=0.10.0"
|
||||
faster-hex.workspace = true
|
||||
indexmap.workspace = true
|
||||
libsui.workspace = true
|
||||
log.workspace = true
|
||||
node_resolver = { workspace = true, features = ["sync"] }
|
||||
parking_lot.workspace = true
|
||||
ring.workspace = true
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json.workspace = true
|
||||
sys_traits = { workspace = true, features = ["getrandom"] }
|
||||
thiserror.workspace = true
|
||||
tokio.workspace = true
|
||||
twox-hash.workspace = true
|
||||
url.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
test_util.workspace = true
|
4 cli/lib/README.md (Normal file)
|
@@ -0,0 +1,4 @@
|
|||
# deno_lib
|
||||
|
||||
This crate contains the shared code between the Deno CLI and denort. It is
|
||||
highly unstable.
|
199 cli/lib/args.rs (Normal file)
|
@@ -0,0 +1,199 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::io::BufReader;
|
||||
use std::io::Cursor;
|
||||
use std::io::Read;
|
||||
use std::io::Seek;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
|
||||
use deno_runtime::deno_tls::rustls;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_tls::rustls_pemfile;
|
||||
use deno_runtime::deno_tls::webpki_roots;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use thiserror::Error;
|
||||
|
||||
pub fn npm_pkg_req_ref_to_binary_command(
|
||||
req_ref: &NpmPackageReqReference,
|
||||
) -> String {
|
||||
req_ref
|
||||
.sub_path()
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| req_ref.req().name.to_string())
|
||||
}
|
||||
|
||||
pub fn has_trace_permissions_enabled() -> bool {
|
||||
has_flag_env_var("DENO_TRACE_PERMISSIONS")
|
||||
}
|
||||
|
||||
pub fn has_flag_env_var(name: &str) -> bool {
|
||||
let value = std::env::var(name);
|
||||
matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub enum CaData {
|
||||
/// The string is a file path
|
||||
File(String),
|
||||
/// This variant is not exposed as an option in the CLI, it is used internally
|
||||
/// for standalone binaries.
|
||||
Bytes(Vec<u8>),
|
||||
}
|
||||
|
||||
#[derive(Error, Debug, Clone, deno_error::JsError)]
|
||||
#[class(generic)]
|
||||
pub enum RootCertStoreLoadError {
|
||||
#[error(
|
||||
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
|
||||
)]
|
||||
UnknownStore(String),
|
||||
#[error("Unable to add pem file to certificate store: {0}")]
|
||||
FailedAddPemFile(String),
|
||||
#[error("Failed opening CA file: {0}")]
|
||||
CaFileOpenError(String),
|
||||
}
|
||||
|
||||
/// Create and populate a root cert store based on the passed options and
|
||||
/// environment.
|
||||
pub fn get_root_cert_store(
|
||||
maybe_root_path: Option<PathBuf>,
|
||||
maybe_ca_stores: Option<Vec<String>>,
|
||||
maybe_ca_data: Option<CaData>,
|
||||
) -> Result<RootCertStore, RootCertStoreLoadError> {
|
||||
let mut root_cert_store = RootCertStore::empty();
|
||||
let ca_stores: Vec<String> = maybe_ca_stores
|
||||
.or_else(|| {
|
||||
let env_ca_store = std::env::var("DENO_TLS_CA_STORE").ok()?;
|
||||
Some(
|
||||
env_ca_store
|
||||
.split(',')
|
||||
.map(|s| s.trim().to_string())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.unwrap_or_else(|| vec!["mozilla".to_string()]);
|
||||
|
||||
for store in ca_stores.iter() {
|
||||
match store.as_str() {
|
||||
"mozilla" => {
|
||||
root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
|
||||
}
|
||||
"system" => {
|
||||
let roots = load_native_certs().expect("could not load platform certs");
|
||||
for root in roots {
|
||||
if let Err(err) = root_cert_store
|
||||
.add(rustls::pki_types::CertificateDer::from(root.0.clone()))
|
||||
{
|
||||
log::error!(
|
||||
"{}",
|
||||
colors::yellow(&format!(
|
||||
"Unable to add system certificate to certificate store: {:?}",
|
||||
err
|
||||
))
|
||||
);
|
||||
let hex_encoded_root = faster_hex::hex_string(&root.0);
|
||||
log::error!("{}", colors::gray(&hex_encoded_root));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let ca_data =
|
||||
maybe_ca_data.or_else(|| std::env::var("DENO_CERT").ok().map(CaData::File));
|
||||
if let Some(ca_data) = ca_data {
|
||||
let result = match ca_data {
|
||||
CaData::File(ca_file) => {
|
||||
let ca_file = if let Some(root) = &maybe_root_path {
|
||||
root.join(&ca_file)
|
||||
} else {
|
||||
PathBuf::from(ca_file)
|
||||
};
|
||||
let certfile = std::fs::File::open(ca_file).map_err(|err| {
|
||||
RootCertStoreLoadError::CaFileOpenError(err.to_string())
|
||||
})?;
|
||||
let mut reader = BufReader::new(certfile);
|
||||
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
|
||||
}
|
||||
CaData::Bytes(data) => {
|
||||
let mut reader = BufReader::new(Cursor::new(data));
|
||||
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
|
||||
}
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(certs) => {
|
||||
root_cert_store.add_parsable_certificates(certs);
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(root_cert_store)
|
||||
}
|
||||
|
||||
/// State provided to the process via an environment variable.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct NpmProcessState {
|
||||
pub kind: NpmProcessStateKind,
|
||||
pub local_node_modules_path: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum NpmProcessStateKind {
|
||||
Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
|
||||
Byonm,
|
||||
}
|
||||
|
||||
pub static NPM_PROCESS_STATE: LazyLock<Option<NpmProcessState>> =
|
||||
LazyLock::new(|| {
|
||||
use deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
|
||||
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
|
||||
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
|
||||
let fd = fd.parse::<usize>().ok()?;
|
||||
let mut file = {
|
||||
use deno_runtime::deno_io::FromRawIoHandle;
|
||||
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
|
||||
};
|
||||
let mut buf = Vec::new();
|
||||
// seek to beginning. after the file is written the position will be inherited by this subprocess,
|
||||
// and also this file might have been read before
|
||||
file.seek(std::io::SeekFrom::Start(0)).unwrap();
|
||||
file
|
||||
.read_to_end(&mut buf)
|
||||
.inspect_err(|e| {
|
||||
log::error!("failed to read npm process state from fd {fd}: {e}");
|
||||
})
|
||||
.ok()?;
|
||||
let state: NpmProcessState = serde_json::from_slice(&buf)
|
||||
.inspect_err(|e| {
|
||||
log::error!(
|
||||
"failed to deserialize npm process state: {e} {}",
|
||||
String::from_utf8_lossy(&buf)
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
Some(state)
|
||||
});
|
||||
|
||||
#[derive(Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize)]
|
||||
pub struct UnstableConfig {
|
||||
// TODO(bartlomieju): remove in Deno 2.5
|
||||
pub legacy_flag_enabled: bool, // --unstable
|
||||
pub bare_node_builtins: bool,
|
||||
pub detect_cjs: bool,
|
||||
pub sloppy_imports: bool,
|
||||
pub npm_lazy_caching: bool,
|
||||
pub features: Vec<String>, // --unstabe-kv --unstable-cron
|
||||
}
|
42 cli/lib/build.rs (Normal file)
|
@@ -0,0 +1,42 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
fn main() {
|
||||
// todo(dsherret): remove this after Deno 2.2.0 is published and then
|
||||
// align the version of this crate with Deno then. We need to wait because
|
||||
// there was previously a deno_lib 2.2.0 published (https://crates.io/crates/deno_lib/versions)
|
||||
let version_path = std::path::Path::new(".").join("version.txt");
|
||||
println!("cargo:rerun-if-changed={}", version_path.display());
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let text = std::fs::read_to_string(version_path).unwrap();
|
||||
println!("cargo:rustc-env=DENO_VERSION={}", text);
|
||||
|
||||
let commit_hash = git_commit_hash();
|
||||
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", commit_hash);
|
||||
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
|
||||
println!(
|
||||
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
|
||||
&commit_hash[..7]
|
||||
);
|
||||
}
|
||||
|
||||
fn git_commit_hash() -> String {
|
||||
if let Ok(output) = std::process::Command::new("git")
|
||||
.arg("rev-list")
|
||||
.arg("-1")
|
||||
.arg("HEAD")
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
std::str::from_utf8(&output.stdout[..40])
|
||||
.unwrap()
|
||||
.to_string()
|
||||
} else {
|
||||
// When not in git repository
|
||||
// (e.g. when the user install by `cargo install deno`)
|
||||
"UNKNOWN".to_string()
|
||||
}
|
||||
} else {
|
||||
// When there is no git command for some reason
|
||||
"UNKNOWN".to_string()
|
||||
}
|
||||
}
|
48 cli/lib/clippy.toml (Normal file)
|
@@ -0,0 +1,48 @@
|
|||
disallowed-methods = [
|
||||
{ path = "std::env::current_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::is_file", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::read_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::env::set_current_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::env::temp_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::copy", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::create_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::DirBuilder::new", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::hard_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::OpenOptions::new", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read_link", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read_to_string", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::read", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::remove_dir", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::remove_file", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::rename", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::set_permissions", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::fs::write", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "std::path::Path::exists", reason = "File system operations should be done using DenoLibSys" },
|
||||
{ path = "url::Url::to_file_path", reason = "Use deno_path_util instead" },
|
||||
{ path = "url::Url::from_file_path", reason = "Use deno_path_util instead" },
|
||||
{ path = "url::Url::from_directory_path", reason = "Use deno_path_util instead" },
|
||||
]
|
11 cli/lib/lib.rs (Normal file)
|
@@ -0,0 +1,11 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
pub mod args;
|
||||
pub mod loader;
|
||||
pub mod npm;
|
||||
pub mod shared;
|
||||
pub mod standalone;
|
||||
pub mod sys;
|
||||
pub mod util;
|
||||
pub mod version;
|
||||
pub mod worker;
|
213 cli/lib/loader.rs (Normal file)
|
@@ -0,0 +1,213 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_media_type::MediaType;
|
||||
use deno_resolver::cjs::CjsTracker;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_runtime::deno_core::ModuleSourceCode;
|
||||
use node_resolver::analyze::CjsCodeAnalyzer;
|
||||
use node_resolver::analyze::NodeCodeTranslator;
|
||||
use node_resolver::InNpmPackageChecker;
|
||||
use node_resolver::IsBuiltInNodeModuleChecker;
|
||||
use node_resolver::NpmPackageFolderResolver;
|
||||
use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
use crate::sys::DenoLibSys;
|
||||
use crate::util::text_encoding::from_utf8_lossy_cow;
|
||||
|
||||
pub struct ModuleCodeStringSource {
|
||||
pub code: ModuleSourceCode,
|
||||
pub found_url: Url,
|
||||
pub media_type: MediaType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, deno_error::JsError)]
|
||||
#[class(type)]
|
||||
#[error("{media_type} files are not supported in npm packages: {specifier}")]
|
||||
pub struct NotSupportedKindInNpmError {
|
||||
pub media_type: MediaType,
|
||||
pub specifier: Url,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, deno_error::JsError)]
|
||||
pub enum NpmModuleLoadError {
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
NotSupportedKindInNpm(#[from] NotSupportedKindInNpmError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
|
||||
#[class(inherit)]
|
||||
#[error("Unable to load {}{}", file_path.display(), maybe_referrer.as_ref().map(|r| format!(" imported from {}", r)).unwrap_or_default())]
|
||||
UnableToLoad {
|
||||
file_path: PathBuf,
|
||||
maybe_referrer: Option<Url>,
|
||||
#[source]
|
||||
#[inherit]
|
||||
source: std::io::Error,
|
||||
},
|
||||
#[class(inherit)]
|
||||
#[error(
|
||||
"{}",
|
||||
format_dir_import_message(file_path, maybe_referrer, suggestion)
|
||||
)]
|
||||
DirImport {
|
||||
file_path: PathBuf,
|
||||
maybe_referrer: Option<Url>,
|
||||
suggestion: Option<&'static str>,
|
||||
#[source]
|
||||
#[inherit]
|
||||
source: std::io::Error,
|
||||
},
|
||||
}
|
||||
|
||||
fn format_dir_import_message(
|
||||
file_path: &std::path::Path,
|
||||
maybe_referrer: &Option<Url>,
|
||||
suggestion: &Option<&'static str>,
|
||||
) -> String {
|
||||
// directory imports are not allowed when importing from an
|
||||
// ES module, so provide the user with a helpful error message
|
||||
let dir_path = file_path;
|
||||
let mut msg = "Directory import ".to_string();
|
||||
msg.push_str(&dir_path.to_string_lossy());
|
||||
if let Some(referrer) = maybe_referrer {
|
||||
msg.push_str(" is not supported resolving import from ");
|
||||
msg.push_str(referrer.as_str());
|
||||
if let Some(entrypoint_name) = suggestion {
|
||||
msg.push_str("\nDid you mean to import ");
|
||||
msg.push_str(entrypoint_name);
|
||||
msg.push_str(" within the directory?");
|
||||
}
|
||||
}
|
||||
msg
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct NpmModuleLoader<
|
||||
TCjsCodeAnalyzer: CjsCodeAnalyzer,
|
||||
TInNpmPackageChecker: InNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver: NpmPackageFolderResolver,
|
||||
TSys: DenoLibSys,
|
||||
> {
|
||||
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
|
||||
sys: TSys,
|
||||
node_code_translator: Arc<
|
||||
NodeCodeTranslator<
|
||||
TCjsCodeAnalyzer,
|
||||
TInNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver,
|
||||
TSys,
|
||||
>,
|
||||
>,
|
||||
}
|
||||
|
||||
impl<
|
||||
TCjsCodeAnalyzer: CjsCodeAnalyzer,
|
||||
TInNpmPackageChecker: InNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver: NpmPackageFolderResolver,
|
||||
TSys: DenoLibSys,
|
||||
>
|
||||
NpmModuleLoader<
|
||||
TCjsCodeAnalyzer,
|
||||
TInNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver,
|
||||
TSys,
|
||||
>
|
||||
{
|
||||
pub fn new(
|
||||
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
|
||||
node_code_translator: Arc<
|
||||
NodeCodeTranslator<
|
||||
TCjsCodeAnalyzer,
|
||||
TInNpmPackageChecker,
|
||||
TIsBuiltInNodeModuleChecker,
|
||||
TNpmPackageFolderResolver,
|
||||
TSys,
|
||||
>,
|
||||
>,
|
||||
sys: TSys,
|
||||
) -> Self {
|
||||
Self {
|
||||
cjs_tracker,
|
||||
node_code_translator,
|
||||
sys,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn load(
|
||||
&self,
|
||||
specifier: &Url,
|
||||
maybe_referrer: Option<&Url>,
|
||||
) -> Result<ModuleCodeStringSource, NpmModuleLoadError> {
|
||||
let file_path = deno_path_util::url_to_file_path(specifier)?;
|
||||
let code = self.sys.fs_read(&file_path).map_err(|source| {
|
||||
if self.sys.fs_is_dir_no_err(&file_path) {
|
||||
let suggestion = ["index.mjs", "index.js", "index.cjs"]
|
||||
.into_iter()
|
||||
.find(|e| self.sys.fs_is_file_no_err(file_path.join(e)));
|
||||
NpmModuleLoadError::DirImport {
|
||||
file_path,
|
||||
maybe_referrer: maybe_referrer.cloned(),
|
||||
suggestion,
|
||||
source,
|
||||
}
|
||||
} else {
|
||||
NpmModuleLoadError::UnableToLoad {
|
||||
file_path,
|
||||
maybe_referrer: maybe_referrer.cloned(),
|
||||
source,
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
if media_type.is_emittable() {
|
||||
return Err(NpmModuleLoadError::NotSupportedKindInNpm(
|
||||
NotSupportedKindInNpmError {
|
||||
media_type,
|
||||
specifier: specifier.clone(),
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
|
||||
// translate cjs to esm if it's cjs and inject node globals
|
||||
let code = from_utf8_lossy_cow(code);
|
||||
ModuleSourceCode::String(
|
||||
self
|
||||
.node_code_translator
|
||||
.translate_cjs_to_esm(specifier, Some(code))
|
||||
.await?
|
||||
.into_owned()
|
||||
.into(),
|
||||
)
|
||||
} else {
|
||||
// esm and json code is untouched
|
||||
ModuleSourceCode::Bytes(match code {
|
||||
Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
|
||||
Cow::Borrowed(bytes) => bytes.into(),
|
||||
})
|
||||
};
|
||||
|
||||
Ok(ModuleCodeStringSource {
|
||||
code,
|
||||
found_url: specifier.clone(),
|
||||
media_type: MediaType::from_specifier(specifier),
|
||||
})
|
||||
}
|
||||
}
|
80 cli/lib/npm/mod.rs (Normal file)
|
@@ -0,0 +1,80 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
mod permission_checker;
|
||||
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_resolver::npm::ByonmNpmResolver;
|
||||
use deno_resolver::npm::ManagedNpmResolverRc;
|
||||
use deno_resolver::npm::NpmResolver;
|
||||
use deno_runtime::deno_process::NpmProcessStateProvider;
|
||||
use deno_runtime::deno_process::NpmProcessStateProviderRc;
|
||||
pub use permission_checker::NpmRegistryReadPermissionChecker;
|
||||
pub use permission_checker::NpmRegistryReadPermissionCheckerMode;
|
||||
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::sys::DenoLibSys;
|
||||
|
||||
pub fn create_npm_process_state_provider<TSys: DenoLibSys>(
|
||||
npm_resolver: &NpmResolver<TSys>,
|
||||
) -> NpmProcessStateProviderRc {
|
||||
match npm_resolver {
|
||||
NpmResolver::Byonm(byonm_npm_resolver) => {
|
||||
Arc::new(ByonmNpmProcessStateProvider(byonm_npm_resolver.clone()))
|
||||
}
|
||||
NpmResolver::Managed(managed_npm_resolver) => {
|
||||
Arc::new(ManagedNpmProcessStateProvider(managed_npm_resolver.clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn npm_process_state(
|
||||
snapshot: ValidSerializedNpmResolutionSnapshot,
|
||||
node_modules_path: Option<&Path>,
|
||||
) -> String {
|
||||
serde_json::to_string(&NpmProcessState {
|
||||
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
|
||||
local_node_modules_path: node_modules_path
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ManagedNpmProcessStateProvider<TSys: DenoLibSys>(
|
||||
pub ManagedNpmResolverRc<TSys>,
|
||||
);
|
||||
|
||||
impl<TSys: DenoLibSys> NpmProcessStateProvider
|
||||
for ManagedNpmProcessStateProvider<TSys>
|
||||
{
|
||||
fn get_npm_process_state(&self) -> String {
|
||||
npm_process_state(
|
||||
self.0.resolution().serialized_valid_snapshot(),
|
||||
self.0.root_node_modules_path(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ByonmNpmProcessStateProvider<TSys: DenoLibSys>(
|
||||
pub Arc<ByonmNpmResolver<TSys>>,
|
||||
);
|
||||
|
||||
impl<TSys: DenoLibSys> NpmProcessStateProvider
|
||||
for ByonmNpmProcessStateProvider<TSys>
|
||||
{
|
||||
fn get_npm_process_state(&self) -> String {
|
||||
serde_json::to_string(&NpmProcessState {
|
||||
kind: NpmProcessStateKind::Byonm,
|
||||
local_node_modules_path: self
|
||||
.0
|
||||
.root_node_modules_path()
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
}
|
|
@ -6,12 +6,11 @@ use std::io::ErrorKind;
|
|||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use sys_traits::FsCanonicalize;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use crate::sys::CliSys;
|
||||
use crate::sys::DenoLibSys;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum NpmRegistryReadPermissionCheckerMode {
|
||||
|
@@ -21,8 +20,8 @@ pub enum NpmRegistryReadPermissionCheckerMode {
|
|||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct NpmRegistryReadPermissionChecker {
|
||||
sys: CliSys,
|
||||
pub struct NpmRegistryReadPermissionChecker<TSys: DenoLibSys> {
|
||||
sys: TSys,
|
||||
cache: Mutex<HashMap<PathBuf, PathBuf>>,
|
||||
mode: NpmRegistryReadPermissionCheckerMode,
|
||||
}
|
||||
|
@@ -37,8 +36,8 @@ struct EnsureRegistryReadPermissionError {
|
|||
source: std::io::Error,
|
||||
}
|
||||
|
||||
impl NpmRegistryReadPermissionChecker {
|
||||
pub fn new(sys: CliSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
|
||||
impl<TSys: DenoLibSys> NpmRegistryReadPermissionChecker<TSys> {
|
||||
pub fn new(sys: TSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
|
||||
Self {
|
||||
sys,
|
||||
cache: Default::default(),
|
|
@@ -1,8 +1,11 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
/// This module is shared between build script and the binaries. Use it sparsely.
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::error::AnyError;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
#[error("Unrecognized release channel: {0}")]
|
||||
pub struct UnrecognizedReleaseChannelError(pub String);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum ReleaseChannel {
|
||||
|
@@ -50,13 +53,17 @@ impl ReleaseChannel {
|
|||
// NOTE(bartlomieju): do not ever change these values, tools like `patchver`
|
||||
// rely on them.
|
||||
#[allow(unused)]
|
||||
pub fn deserialize(str_: &str) -> Result<Self, AnyError> {
|
||||
pub fn deserialize(
|
||||
str_: &str,
|
||||
) -> Result<Self, UnrecognizedReleaseChannelError> {
|
||||
Ok(match str_ {
|
||||
"stable" => Self::Stable,
|
||||
"canary" => Self::Canary,
|
||||
"rc" => Self::Rc,
|
||||
"lts" => Self::Lts,
|
||||
unknown => bail!("Unrecognized release channel: {}", unknown),
|
||||
unknown => {
|
||||
return Err(UnrecognizedReleaseChannelError(unknown.to_string()))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
389 cli/lib/standalone/binary.rs (Normal file)
|
@@ -0,0 +1,389 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use deno_config::workspace::PackageJsonDepResolution;
|
||||
use deno_media_type::MediaType;
|
||||
use deno_runtime::deno_permissions::PermissionsOptions;
|
||||
use deno_runtime::deno_telemetry::OtelConfig;
|
||||
use deno_semver::Version;
|
||||
use indexmap::IndexMap;
|
||||
use node_resolver::analyze::CjsAnalysisExports;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use url::Url;
|
||||
|
||||
use super::virtual_fs::FileSystemCaseSensitivity;
|
||||
use crate::args::UnstableConfig;
|
||||
|
||||
pub const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
|
||||
|
||||
pub trait DenoRtDeserializable<'a>: Sized {
|
||||
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)>;
|
||||
}
|
||||
|
||||
impl<'a> DenoRtDeserializable<'a> for Cow<'a, [u8]> {
|
||||
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
|
||||
let (input, data) = read_bytes_with_u32_len(input)?;
|
||||
Ok((input, Cow::Borrowed(data)))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait DenoRtSerializable<'a> {
|
||||
fn serialize(
|
||||
&'a self,
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub enum NodeModules {
|
||||
Managed {
|
||||
/// Relative path for the node_modules directory in the vfs.
|
||||
node_modules_dir: Option<String>,
|
||||
},
|
||||
Byonm {
|
||||
root_node_modules_dir: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct SerializedWorkspaceResolverImportMap {
|
||||
pub specifier: String,
|
||||
pub json: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct SerializedResolverWorkspaceJsrPackage {
|
||||
pub relative_base: String,
|
||||
pub name: String,
|
||||
pub version: Option<Version>,
|
||||
pub exports: IndexMap<String, String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct SerializedWorkspaceResolver {
|
||||
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
|
||||
pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
|
||||
pub package_jsons: BTreeMap<String, serde_json::Value>,
|
||||
pub pkg_json_resolution: PackageJsonDepResolution,
|
||||
}
|
||||
|
||||
// Note: Don't use hashmaps/hashsets. Ensure the serialization
|
||||
// is deterministic.
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct Metadata {
|
||||
pub argv: Vec<String>,
|
||||
pub seed: Option<u64>,
|
||||
pub code_cache_key: Option<u64>,
|
||||
pub permissions: PermissionsOptions,
|
||||
pub location: Option<Url>,
|
||||
pub v8_flags: Vec<String>,
|
||||
pub log_level: Option<log::Level>,
|
||||
pub ca_stores: Option<Vec<String>>,
|
||||
pub ca_data: Option<Vec<u8>>,
|
||||
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
|
||||
pub env_vars_from_env_file: IndexMap<String, String>,
|
||||
pub workspace_resolver: SerializedWorkspaceResolver,
|
||||
pub entrypoint_key: String,
|
||||
pub node_modules: Option<NodeModules>,
|
||||
pub unstable_config: UnstableConfig,
|
||||
pub otel_config: OtelConfig,
|
||||
pub vfs_case_sensitivity: FileSystemCaseSensitivity,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct SpecifierId(u32);
|
||||
|
||||
impl SpecifierId {
|
||||
pub fn new(id: u32) -> Self {
|
||||
Self(id)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> capacity_builder::BytesAppendable<'a> for SpecifierId {
|
||||
fn append_to_builder<TBytes: capacity_builder::BytesType>(
|
||||
self,
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
|
||||
) {
|
||||
builder.append_le(self.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DenoRtSerializable<'a> for SpecifierId {
|
||||
fn serialize(
|
||||
&'a self,
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
|
||||
) {
|
||||
builder.append_le(self.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DenoRtDeserializable<'a> for SpecifierId {
|
||||
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
|
||||
let (input, id) = read_u32(input)?;
|
||||
Ok((input, Self(id)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub enum CjsExportAnalysisEntry {
|
||||
Esm,
|
||||
Cjs(CjsAnalysisExports),
|
||||
}
|
||||
|
||||
const HAS_TRANSPILED_FLAG: u8 = 1 << 0;
|
||||
const HAS_SOURCE_MAP_FLAG: u8 = 1 << 1;
|
||||
const HAS_CJS_EXPORT_ANALYSIS_FLAG: u8 = 1 << 2;
|
||||
|
||||
pub struct RemoteModuleEntry<'a> {
|
||||
pub media_type: MediaType,
|
||||
pub data: Cow<'a, [u8]>,
|
||||
pub maybe_transpiled: Option<Cow<'a, [u8]>>,
|
||||
pub maybe_source_map: Option<Cow<'a, [u8]>>,
|
||||
pub maybe_cjs_export_analysis: Option<Cow<'a, [u8]>>,
|
||||
}
|
||||
|
||||
impl<'a> DenoRtSerializable<'a> for RemoteModuleEntry<'a> {
|
||||
fn serialize(
|
||||
&'a self,
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
|
||||
) {
|
||||
fn append_maybe_data<'a>(
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
|
||||
maybe_data: Option<&'a [u8]>,
|
||||
) {
|
||||
if let Some(data) = maybe_data {
|
||||
builder.append_le(data.len() as u32);
|
||||
builder.append(data);
|
||||
}
|
||||
}
|
||||
|
||||
let mut has_data_flags = 0;
|
||||
if self.maybe_transpiled.is_some() {
|
||||
has_data_flags |= HAS_TRANSPILED_FLAG;
|
||||
}
|
||||
if self.maybe_source_map.is_some() {
|
||||
has_data_flags |= HAS_SOURCE_MAP_FLAG;
|
||||
}
|
||||
if self.maybe_cjs_export_analysis.is_some() {
|
||||
has_data_flags |= HAS_CJS_EXPORT_ANALYSIS_FLAG;
|
||||
}
|
||||
builder.append(serialize_media_type(self.media_type));
|
||||
builder.append_le(self.data.len() as u32);
|
||||
builder.append(self.data.as_ref());
|
||||
builder.append(has_data_flags);
|
||||
append_maybe_data(builder, self.maybe_transpiled.as_deref());
|
||||
append_maybe_data(builder, self.maybe_source_map.as_deref());
|
||||
append_maybe_data(builder, self.maybe_cjs_export_analysis.as_deref());
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DenoRtDeserializable<'a> for RemoteModuleEntry<'a> {
|
||||
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn deserialize_data_if_has_flag(
|
||||
input: &[u8],
|
||||
has_data_flags: u8,
|
||||
flag: u8,
|
||||
) -> std::io::Result<(&[u8], Option<Cow<[u8]>>)> {
|
||||
if has_data_flags & flag != 0 {
|
||||
let (input, bytes) = read_bytes_with_u32_len(input)?;
|
||||
Ok((input, Some(Cow::Borrowed(bytes))))
|
||||
} else {
|
||||
Ok((input, None))
|
||||
}
|
||||
}
|
||||
|
||||
let (input, media_type) = MediaType::deserialize(input)?;
|
||||
let (input, data) = read_bytes_with_u32_len(input)?;
|
||||
let (input, has_data_flags) = read_u8(input)?;
|
||||
let (input, maybe_transpiled) =
|
||||
deserialize_data_if_has_flag(input, has_data_flags, HAS_TRANSPILED_FLAG)?;
|
||||
let (input, maybe_source_map) =
|
||||
deserialize_data_if_has_flag(input, has_data_flags, HAS_SOURCE_MAP_FLAG)?;
|
||||
let (input, maybe_cjs_export_analysis) = deserialize_data_if_has_flag(
|
||||
input,
|
||||
has_data_flags,
|
||||
HAS_CJS_EXPORT_ANALYSIS_FLAG,
|
||||
)?;
|
||||
Ok((
|
||||
input,
|
||||
Self {
|
||||
media_type,
|
||||
data: Cow::Borrowed(data),
|
||||
maybe_transpiled,
|
||||
maybe_source_map,
|
||||
maybe_cjs_export_analysis,
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_media_type(media_type: MediaType) -> u8 {
|
||||
match media_type {
|
||||
MediaType::JavaScript => 0,
|
||||
MediaType::Jsx => 1,
|
||||
MediaType::Mjs => 2,
|
||||
MediaType::Cjs => 3,
|
||||
MediaType::TypeScript => 4,
|
||||
MediaType::Mts => 5,
|
||||
MediaType::Cts => 6,
|
||||
MediaType::Dts => 7,
|
||||
MediaType::Dmts => 8,
|
||||
MediaType::Dcts => 9,
|
||||
MediaType::Tsx => 10,
|
||||
MediaType::Json => 11,
|
||||
MediaType::Wasm => 12,
|
||||
MediaType::Css => 13,
|
||||
MediaType::SourceMap => 14,
|
||||
MediaType::Unknown => 15,
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DenoRtDeserializable<'a> for MediaType {
|
||||
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
|
||||
let (input, value) = read_u8(input)?;
|
||||
let value = match value {
|
||||
0 => MediaType::JavaScript,
|
||||
1 => MediaType::Jsx,
|
||||
2 => MediaType::Mjs,
|
||||
3 => MediaType::Cjs,
|
||||
4 => MediaType::TypeScript,
|
||||
5 => MediaType::Mts,
|
||||
6 => MediaType::Cts,
|
||||
7 => MediaType::Dts,
|
||||
8 => MediaType::Dmts,
|
||||
9 => MediaType::Dcts,
|
||||
10 => MediaType::Tsx,
|
||||
11 => MediaType::Json,
|
||||
12 => MediaType::Wasm,
|
||||
13 => MediaType::Css,
|
||||
14 => MediaType::SourceMap,
|
||||
15 => MediaType::Unknown,
|
||||
value => {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
format!("Unknown media type value: {value}"),
|
||||
))
|
||||
}
|
||||
};
|
||||
Ok((input, value))
|
||||
}
|
||||
}
|
||||
|
||||
/// Data stored keyed by specifier.
|
||||
pub struct SpecifierDataStore<TData> {
|
||||
data: IndexMap<SpecifierId, TData>,
|
||||
}
|
||||
|
||||
impl<TData> Default for SpecifierDataStore<TData> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
data: IndexMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<TData> SpecifierDataStore<TData> {
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
Self {
|
||||
data: IndexMap::with_capacity(capacity),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = (SpecifierId, &TData)> {
|
||||
self.data.iter().map(|(k, v)| (*k, v))
|
||||
}
|
||||
|
||||
#[allow(clippy::len_without_is_empty)]
|
||||
pub fn len(&self) -> usize {
|
||||
self.data.len()
|
||||
}
|
||||
|
||||
pub fn contains(&self, specifier: SpecifierId) -> bool {
|
||||
self.data.contains_key(&specifier)
|
||||
}
|
||||
|
||||
pub fn add(&mut self, specifier: SpecifierId, value: TData) {
|
||||
self.data.insert(specifier, value);
|
||||
}
|
||||
|
||||
pub fn get(&self, specifier: SpecifierId) -> Option<&TData> {
|
||||
self.data.get(&specifier)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, TData> SpecifierDataStore<TData>
|
||||
where
|
||||
TData: DenoRtSerializable<'a> + 'a,
|
||||
{
|
||||
pub fn serialize(
|
||||
&'a self,
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, Vec<u8>>,
|
||||
) {
|
||||
builder.append_le(self.len() as u32);
|
||||
for (specifier, value) in self.iter() {
|
||||
builder.append(specifier);
|
||||
value.serialize(builder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, TData> DenoRtDeserializable<'a> for SpecifierDataStore<TData>
|
||||
where
|
||||
TData: DenoRtDeserializable<'a>,
|
||||
{
|
||||
fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> {
|
||||
let (input, len) = read_u32_as_usize(input)?;
|
||||
let mut data = IndexMap::with_capacity(len);
|
||||
let mut input = input;
|
||||
for _ in 0..len {
|
||||
let (new_input, specifier) = SpecifierId::deserialize(input)?;
|
||||
let (new_input, value) = TData::deserialize(new_input)?;
|
||||
data.insert(specifier, value);
|
||||
input = new_input;
|
||||
}
|
||||
Ok((input, Self { data }))
|
||||
}
|
||||
}
|
||||
|
||||
fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
|
||||
let (input, len) = read_u32_as_usize(input)?;
|
||||
let (input, data) = read_bytes(input, len)?;
|
||||
Ok((input, data))
|
||||
}
|
||||
|
||||
fn read_u32_as_usize(input: &[u8]) -> std::io::Result<(&[u8], usize)> {
|
||||
read_u32(input).map(|(input, len)| (input, len as usize))
|
||||
}
|
||||
|
||||
fn read_u32(input: &[u8]) -> std::io::Result<(&[u8], u32)> {
|
||||
let (input, len_bytes) = read_bytes(input, 4)?;
|
||||
let len = u32::from_le_bytes(len_bytes.try_into().unwrap());
|
||||
Ok((input, len))
|
||||
}
|
||||
|
||||
fn read_u8(input: &[u8]) -> std::io::Result<(&[u8], u8)> {
|
||||
check_has_len(input, 1)?;
|
||||
Ok((&input[1..], input[0]))
|
||||
}
|
||||
|
||||
fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> {
|
||||
check_has_len(input, len)?;
|
||||
let (len_bytes, input) = input.split_at(len);
|
||||
Ok((input, len_bytes))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> {
|
||||
if input.len() < len {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"Unexpected end of data",
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
4
cli/lib/standalone/mod.rs
Normal file
4
cli/lib/standalone/mod.rs
Normal file
|
@ -0,0 +1,4 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
pub mod binary;
|
||||
pub mod virtual_fs;
|
999
cli/lib/standalone/virtual_fs.rs
Normal file
999
cli/lib/standalone/virtual_fs.rs
Normal file
|
@ -0,0 +1,999 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::VecDeque;
|
||||
use std::fmt;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_path_util::normalize_path;
|
||||
use deno_path_util::strip_unc_prefix;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_core::anyhow::bail;
|
||||
use deno_runtime::deno_core::anyhow::Context;
|
||||
use deno_runtime::deno_core::error::AnyError;
|
||||
use indexmap::IndexSet;
|
||||
use serde::de;
|
||||
use serde::de::SeqAccess;
|
||||
use serde::de::Visitor;
|
||||
use serde::Deserialize;
|
||||
use serde::Deserializer;
|
||||
use serde::Serialize;
|
||||
use serde::Serializer;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum WindowsSystemRootablePath {
|
||||
/// The root of the system above any drive letters.
|
||||
WindowSystemRoot,
|
||||
Path(PathBuf),
|
||||
}
|
||||
|
||||
impl WindowsSystemRootablePath {
|
||||
pub fn root_for_current_os() -> Self {
|
||||
if cfg!(windows) {
|
||||
WindowsSystemRootablePath::WindowSystemRoot
|
||||
} else {
|
||||
WindowsSystemRootablePath::Path(PathBuf::from("/"))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn join(&self, name_component: &str) -> PathBuf {
|
||||
// this method doesn't handle multiple components
|
||||
debug_assert!(
|
||||
!name_component.contains('\\'),
|
||||
"Invalid component: {}",
|
||||
name_component
|
||||
);
|
||||
debug_assert!(
|
||||
!name_component.contains('/'),
|
||||
"Invalid component: {}",
|
||||
name_component
|
||||
);
|
||||
|
||||
match self {
|
||||
WindowsSystemRootablePath::WindowSystemRoot => {
|
||||
// windows drive letter
|
||||
PathBuf::from(&format!("{}\\", name_component))
|
||||
}
|
||||
WindowsSystemRootablePath::Path(path) => path.join(name_component),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
||||
pub enum FileSystemCaseSensitivity {
|
||||
#[serde(rename = "s")]
|
||||
Sensitive,
|
||||
#[serde(rename = "i")]
|
||||
Insensitive,
|
||||
}
|
||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||
pub struct VirtualDirectoryEntries(Vec<VfsEntry>);
|
||||
|
||||
impl VirtualDirectoryEntries {
|
||||
pub fn new(mut entries: Vec<VfsEntry>) -> Self {
|
||||
// needs to be sorted by name
|
||||
entries.sort_by(|a, b| a.name().cmp(b.name()));
|
||||
Self(entries)
|
||||
}
|
||||
|
||||
pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, VfsEntry> {
|
||||
self.0.iter_mut()
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> std::slice::Iter<'_, VfsEntry> {
|
||||
self.0.iter()
|
||||
}
|
||||
|
||||
pub fn take_inner(&mut self) -> Vec<VfsEntry> {
|
||||
std::mem::take(&mut self.0)
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
pub fn get_by_name(
|
||||
&self,
|
||||
name: &str,
|
||||
case_sensitivity: FileSystemCaseSensitivity,
|
||||
) -> Option<&VfsEntry> {
|
||||
self
|
||||
.binary_search(name, case_sensitivity)
|
||||
.ok()
|
||||
.map(|index| &self.0[index])
|
||||
}
|
||||
|
||||
pub fn get_mut_by_name(
|
||||
&mut self,
|
||||
name: &str,
|
||||
case_sensitivity: FileSystemCaseSensitivity,
|
||||
) -> Option<&mut VfsEntry> {
|
||||
self
|
||||
.binary_search(name, case_sensitivity)
|
||||
.ok()
|
||||
.map(|index| &mut self.0[index])
|
||||
}
|
||||
|
||||
pub fn get_mut_by_index(&mut self, index: usize) -> Option<&mut VfsEntry> {
|
||||
self.0.get_mut(index)
|
||||
}
|
||||
|
||||
pub fn get_by_index(&self, index: usize) -> Option<&VfsEntry> {
|
||||
self.0.get(index)
|
||||
}
|
||||
|
||||
pub fn binary_search(
|
||||
&self,
|
||||
name: &str,
|
||||
case_sensitivity: FileSystemCaseSensitivity,
|
||||
) -> Result<usize, usize> {
|
||||
match case_sensitivity {
|
||||
FileSystemCaseSensitivity::Sensitive => {
|
||||
self.0.binary_search_by(|e| e.name().cmp(name))
|
||||
}
|
||||
FileSystemCaseSensitivity::Insensitive => self.0.binary_search_by(|e| {
|
||||
e.name()
|
||||
.chars()
|
||||
.zip(name.chars())
|
||||
.map(|(a, b)| a.to_ascii_lowercase().cmp(&b.to_ascii_lowercase()))
|
||||
.find(|&ord| ord != Ordering::Equal)
|
||||
.unwrap_or_else(|| e.name().len().cmp(&name.len()))
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert(
|
||||
&mut self,
|
||||
entry: VfsEntry,
|
||||
case_sensitivity: FileSystemCaseSensitivity,
|
||||
) -> usize {
|
||||
match self.binary_search(entry.name(), case_sensitivity) {
|
||||
Ok(index) => {
|
||||
self.0[index] = entry;
|
||||
index
|
||||
}
|
||||
Err(insert_index) => {
|
||||
self.0.insert(insert_index, entry);
|
||||
insert_index
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_or_modify(
|
||||
&mut self,
|
||||
name: &str,
|
||||
case_sensitivity: FileSystemCaseSensitivity,
|
||||
on_insert: impl FnOnce() -> VfsEntry,
|
||||
on_modify: impl FnOnce(&mut VfsEntry),
|
||||
) -> usize {
|
||||
match self.binary_search(name, case_sensitivity) {
|
||||
Ok(index) => {
|
||||
on_modify(&mut self.0[index]);
|
||||
index
|
||||
}
|
||||
Err(insert_index) => {
|
||||
self.0.insert(insert_index, on_insert());
|
||||
insert_index
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, index: usize) -> VfsEntry {
|
||||
self.0.remove(index)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct VirtualDirectory {
|
||||
#[serde(rename = "n")]
|
||||
pub name: String,
|
||||
// should be sorted by name
|
||||
#[serde(rename = "e")]
|
||||
pub entries: VirtualDirectoryEntries,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct OffsetWithLength {
|
||||
pub offset: u64,
|
||||
pub len: u64,
|
||||
}
|
||||
|
||||
// serialize as an array in order to save space
|
||||
impl Serialize for OffsetWithLength {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let array = [self.offset, self.len];
|
||||
array.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for OffsetWithLength {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct OffsetWithLengthVisitor;
|
||||
|
||||
impl<'de> Visitor<'de> for OffsetWithLengthVisitor {
|
||||
type Value = OffsetWithLength;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("an array with two elements: [offset, len]")
|
||||
}
|
||||
|
||||
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
|
||||
where
|
||||
A: SeqAccess<'de>,
|
||||
{
|
||||
let offset = seq
|
||||
.next_element()?
|
||||
.ok_or_else(|| de::Error::invalid_length(0, &self))?;
|
||||
let len = seq
|
||||
.next_element()?
|
||||
.ok_or_else(|| de::Error::invalid_length(1, &self))?;
|
||||
Ok(OffsetWithLength { offset, len })
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_seq(OffsetWithLengthVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct VirtualFile {
|
||||
#[serde(rename = "n")]
|
||||
pub name: String,
|
||||
#[serde(rename = "o")]
|
||||
pub offset: OffsetWithLength,
|
||||
#[serde(rename = "m", skip_serializing_if = "Option::is_none")]
|
||||
pub transpiled_offset: Option<OffsetWithLength>,
|
||||
#[serde(rename = "c", skip_serializing_if = "Option::is_none")]
|
||||
pub cjs_export_analysis_offset: Option<OffsetWithLength>,
|
||||
#[serde(rename = "s", skip_serializing_if = "Option::is_none")]
|
||||
pub source_map_offset: Option<OffsetWithLength>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct VirtualSymlinkParts(Vec<String>);
|
||||
|
||||
impl VirtualSymlinkParts {
|
||||
pub fn from_path(path: &Path) -> Self {
|
||||
Self(
|
||||
path
|
||||
.components()
|
||||
.filter(|c| !matches!(c, std::path::Component::RootDir))
|
||||
.map(|c| c.as_os_str().to_string_lossy().to_string())
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn take_parts(&mut self) -> Vec<String> {
|
||||
std::mem::take(&mut self.0)
|
||||
}
|
||||
|
||||
pub fn parts(&self) -> &[String] {
|
||||
&self.0
|
||||
}
|
||||
|
||||
pub fn set_parts(&mut self, parts: Vec<String>) {
|
||||
self.0 = parts;
|
||||
}
|
||||
|
||||
pub fn display(&self) -> String {
|
||||
self.0.join("/")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct VirtualSymlink {
|
||||
#[serde(rename = "n")]
|
||||
pub name: String,
|
||||
#[serde(rename = "p")]
|
||||
pub dest_parts: VirtualSymlinkParts,
|
||||
}
|
||||
|
||||
impl VirtualSymlink {
|
||||
pub fn resolve_dest_from_root(&self, root: &Path) -> PathBuf {
|
||||
let mut dest = root.to_path_buf();
|
||||
for part in &self.dest_parts.0 {
|
||||
dest.push(part);
|
||||
}
|
||||
dest
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum VfsEntryRef<'a> {
|
||||
Dir(&'a VirtualDirectory),
|
||||
File(&'a VirtualFile),
|
||||
Symlink(&'a VirtualSymlink),
|
||||
}
|
||||
|
||||
impl VfsEntryRef<'_> {
|
||||
pub fn name(&self) -> &str {
|
||||
match self {
|
||||
Self::Dir(dir) => &dir.name,
|
||||
Self::File(file) => &file.name,
|
||||
Self::Symlink(symlink) => &symlink.name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// todo(dsherret): we should store this more efficiently in the binary
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub enum VfsEntry {
|
||||
Dir(VirtualDirectory),
|
||||
File(VirtualFile),
|
||||
Symlink(VirtualSymlink),
|
||||
}
|
||||
|
||||
impl VfsEntry {
|
||||
pub fn name(&self) -> &str {
|
||||
match self {
|
||||
Self::Dir(dir) => &dir.name,
|
||||
Self::File(file) => &file.name,
|
||||
Self::Symlink(symlink) => &symlink.name,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_ref(&self) -> VfsEntryRef {
|
||||
match self {
|
||||
VfsEntry::Dir(dir) => VfsEntryRef::Dir(dir),
|
||||
VfsEntry::File(file) => VfsEntryRef::File(file),
|
||||
VfsEntry::Symlink(symlink) => VfsEntryRef::Symlink(symlink),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
|
||||
".deno_compile_node_modules";
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BuiltVfs {
|
||||
pub root_path: WindowsSystemRootablePath,
|
||||
pub case_sensitivity: FileSystemCaseSensitivity,
|
||||
pub entries: VirtualDirectoryEntries,
|
||||
pub files: Vec<Vec<u8>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct FilesData {
|
||||
files: Vec<Vec<u8>>,
|
||||
current_offset: u64,
|
||||
file_offsets: HashMap<(String, usize), OffsetWithLength>,
|
||||
}
|
||||
|
||||
impl FilesData {
|
||||
pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
|
||||
if offset.len == 0 {
|
||||
return Some(&[]);
|
||||
}
|
||||
|
||||
// the debug assertions in this method should never happen
|
||||
// because it would indicate providing an offset not in the vfs
|
||||
let mut count: u64 = 0;
|
||||
for file in &self.files {
|
||||
// clippy wanted a match
|
||||
match count.cmp(&offset.offset) {
|
||||
Ordering::Equal => {
|
||||
debug_assert_eq!(offset.len, file.len() as u64);
|
||||
if offset.len == file.len() as u64 {
|
||||
return Some(file);
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
Ordering::Less => {
|
||||
count += file.len() as u64;
|
||||
}
|
||||
Ordering::Greater => {
|
||||
debug_assert!(false);
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
debug_assert!(false);
|
||||
None
|
||||
}
|
||||
|
||||
pub fn add_data(&mut self, data: Vec<u8>) -> OffsetWithLength {
|
||||
if data.is_empty() {
|
||||
return OffsetWithLength { offset: 0, len: 0 };
|
||||
}
|
||||
let checksum = crate::util::checksum::gen(&[&data]);
|
||||
match self.file_offsets.entry((checksum, data.len())) {
|
||||
Entry::Occupied(occupied_entry) => {
|
||||
let offset_and_len = *occupied_entry.get();
|
||||
debug_assert_eq!(data.len() as u64, offset_and_len.len);
|
||||
offset_and_len
|
||||
}
|
||||
Entry::Vacant(vacant_entry) => {
|
||||
let offset_and_len = OffsetWithLength {
|
||||
offset: self.current_offset,
|
||||
len: data.len() as u64,
|
||||
};
|
||||
vacant_entry.insert(offset_and_len);
|
||||
self.current_offset += offset_and_len.len;
|
||||
self.files.push(data);
|
||||
offset_and_len
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AddFileDataOptions {
|
||||
pub data: Vec<u8>,
|
||||
pub maybe_transpiled: Option<Vec<u8>>,
|
||||
pub maybe_source_map: Option<Vec<u8>>,
|
||||
pub maybe_cjs_export_analysis: Option<Vec<u8>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct VfsBuilder {
|
||||
executable_root: VirtualDirectory,
|
||||
files: FilesData,
|
||||
/// The minimum root directory that should be included in the VFS.
|
||||
min_root_dir: Option<WindowsSystemRootablePath>,
|
||||
case_sensitivity: FileSystemCaseSensitivity,
|
||||
}
|
||||
|
||||
impl Default for VfsBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl VfsBuilder {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
executable_root: VirtualDirectory {
|
||||
name: "/".to_string(),
|
||||
entries: Default::default(),
|
||||
},
|
||||
files: Default::default(),
|
||||
min_root_dir: Default::default(),
|
||||
// This is not exactly correct because file systems on these OSes
|
||||
// may be case-sensitive or not based on the directory, but this
|
||||
// is a good enough approximation and limitation. In the future,
|
||||
// we may want to store this information per directory instead
|
||||
// depending on the feedback we get.
|
||||
case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") {
|
||||
FileSystemCaseSensitivity::Insensitive
|
||||
} else {
|
||||
FileSystemCaseSensitivity::Sensitive
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity {
|
||||
self.case_sensitivity
|
||||
}
|
||||
|
||||
pub fn files_len(&self) -> usize {
|
||||
self.files.files.len()
|
||||
}
|
||||
|
||||
pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> {
|
||||
self.files.file_bytes(offset)
|
||||
}
|
||||
|
||||
/// Add a directory that might be the minimum root directory
|
||||
/// of the VFS.
|
||||
///
|
||||
/// For example, say the user has a deno.json and specifies an
|
||||
/// import map in a parent directory. The import map won't be
|
||||
/// included in the VFS, but its base will meaning we need to
|
||||
/// tell the VFS builder to include the base of the import map
|
||||
/// by calling this method.
|
||||
pub fn add_possible_min_root_dir(&mut self, path: &Path) {
|
||||
self.add_dir_raw(path);
|
||||
|
||||
match &self.min_root_dir {
|
||||
Some(WindowsSystemRootablePath::WindowSystemRoot) => {
|
||||
// already the root dir
|
||||
}
|
||||
Some(WindowsSystemRootablePath::Path(current_path)) => {
|
||||
let mut common_components = Vec::new();
|
||||
for (a, b) in current_path.components().zip(path.components()) {
|
||||
if a != b {
|
||||
break;
|
||||
}
|
||||
common_components.push(a);
|
||||
}
|
||||
if common_components.is_empty() {
|
||||
self.min_root_dir =
|
||||
Some(WindowsSystemRootablePath::root_for_current_os());
|
||||
} else {
|
||||
self.min_root_dir = Some(WindowsSystemRootablePath::Path(
|
||||
common_components.iter().collect(),
|
||||
));
|
||||
}
|
||||
}
|
||||
None => {
|
||||
self.min_root_dir =
|
||||
Some(WindowsSystemRootablePath::Path(path.to_path_buf()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
|
||||
let target_path = self.resolve_target_path(path)?;
|
||||
self.add_dir_recursive_not_symlink(&target_path)
|
||||
}
|
||||
|
||||
fn add_dir_recursive_not_symlink(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
self.add_dir_raw(path);
|
||||
// ok, building fs implementation
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let read_dir = std::fs::read_dir(path)
|
||||
.with_context(|| format!("Reading {}", path.display()))?;
|
||||
|
||||
let mut dir_entries =
|
||||
read_dir.into_iter().collect::<Result<Vec<_>, _>>()?;
|
||||
dir_entries.sort_by_cached_key(|entry| entry.file_name()); // determinism
|
||||
|
||||
for entry in dir_entries {
|
||||
let file_type = entry.file_type()?;
|
||||
let path = entry.path();
|
||||
|
||||
if file_type.is_dir() {
|
||||
self.add_dir_recursive_not_symlink(&path)?;
|
||||
} else if file_type.is_file() {
|
||||
self.add_file_at_path_not_symlink(&path)?;
|
||||
} else if file_type.is_symlink() {
|
||||
match self.add_symlink(&path) {
|
||||
Ok(target) => match target {
|
||||
SymlinkTarget::File(target) => {
|
||||
self.add_file_at_path_not_symlink(&target)?
|
||||
}
|
||||
SymlinkTarget::Dir(target) => {
|
||||
self.add_dir_recursive_not_symlink(&target)?;
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
log::warn!(
|
||||
"{} Failed resolving symlink. Ignoring.\n Path: {}\n Message: {:#}",
|
||||
colors::yellow("Warning"),
|
||||
path.display(),
|
||||
err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn add_dir_raw(&mut self, path: &Path) -> &mut VirtualDirectory {
|
||||
log::debug!("Ensuring directory '{}'", path.display());
|
||||
debug_assert!(path.is_absolute());
|
||||
let mut current_dir = &mut self.executable_root;
|
||||
|
||||
for component in path.components() {
|
||||
if matches!(component, std::path::Component::RootDir) {
|
||||
continue;
|
||||
}
|
||||
let name = component.as_os_str().to_string_lossy();
|
||||
let index = current_dir.entries.insert_or_modify(
|
||||
&name,
|
||||
self.case_sensitivity,
|
||||
|| {
|
||||
VfsEntry::Dir(VirtualDirectory {
|
||||
name: name.to_string(),
|
||||
entries: Default::default(),
|
||||
})
|
||||
},
|
||||
|_| {
|
||||
// ignore
|
||||
},
|
||||
);
|
||||
match current_dir.entries.get_mut_by_index(index) {
|
||||
Some(VfsEntry::Dir(dir)) => {
|
||||
current_dir = dir;
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
}
|
||||
|
||||
current_dir
|
||||
}
|
||||
|
||||
pub fn get_system_root_dir_mut(&mut self) -> &mut VirtualDirectory {
|
||||
&mut self.executable_root
|
||||
}
|
||||
|
||||
pub fn get_dir_mut(&mut self, path: &Path) -> Option<&mut VirtualDirectory> {
|
||||
debug_assert!(path.is_absolute());
|
||||
let mut current_dir = &mut self.executable_root;
|
||||
|
||||
for component in path.components() {
|
||||
if matches!(component, std::path::Component::RootDir) {
|
||||
continue;
|
||||
}
|
||||
let name = component.as_os_str().to_string_lossy();
|
||||
let entry = current_dir
|
||||
.entries
|
||||
.get_mut_by_name(&name, self.case_sensitivity)?;
|
||||
match entry {
|
||||
VfsEntry::Dir(dir) => {
|
||||
current_dir = dir;
|
||||
}
|
||||
_ => unreachable!("{}", path.display()),
|
||||
};
|
||||
}
|
||||
|
||||
Some(current_dir)
|
||||
}
|
||||
|
||||
pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
|
||||
// ok, building fs implementation
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let file_bytes = std::fs::read(path)
|
||||
.with_context(|| format!("Reading {}", path.display()))?;
|
||||
self.add_file_with_data(
|
||||
path,
|
||||
AddFileDataOptions {
|
||||
data: file_bytes,
|
||||
maybe_cjs_export_analysis: None,
|
||||
maybe_transpiled: None,
|
||||
maybe_source_map: None,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn add_file_at_path_not_symlink(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
// ok, building fs implementation
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let file_bytes = std::fs::read(path)
|
||||
.with_context(|| format!("Reading {}", path.display()))?;
|
||||
self.add_file_with_data_raw(path, file_bytes)
|
||||
}
|
||||
|
||||
pub fn add_file_with_data(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
options: AddFileDataOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
// ok, fs implementation
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let metadata = std::fs::symlink_metadata(path).with_context(|| {
|
||||
format!("Resolving target path for '{}'", path.display())
|
||||
})?;
|
||||
if metadata.is_symlink() {
|
||||
let target = self.add_symlink(path)?.into_path_buf();
|
||||
self.add_file_with_data_raw_options(&target, options)
|
||||
} else {
|
||||
self.add_file_with_data_raw_options(path, options)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_file_with_data_raw(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
data: Vec<u8>,
|
||||
) -> Result<(), AnyError> {
|
||||
self.add_file_with_data_raw_options(
|
||||
path,
|
||||
AddFileDataOptions {
|
||||
data,
|
||||
maybe_transpiled: None,
|
||||
maybe_cjs_export_analysis: None,
|
||||
maybe_source_map: None,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn add_file_with_data_raw_options(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
options: AddFileDataOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
log::debug!("Adding file '{}'", path.display());
|
||||
let case_sensitivity = self.case_sensitivity;
|
||||
|
||||
let offset_and_len = self.files.add_data(options.data);
|
||||
let transpiled_offset = options
|
||||
.maybe_transpiled
|
||||
.map(|data| self.files.add_data(data));
|
||||
let source_map_offset = options
|
||||
.maybe_source_map
|
||||
.map(|data| self.files.add_data(data));
|
||||
let cjs_export_analysis_offset = options
|
||||
.maybe_cjs_export_analysis
|
||||
.map(|data| self.files.add_data(data));
|
||||
let dir = self.add_dir_raw(path.parent().unwrap());
|
||||
let name = path.file_name().unwrap().to_string_lossy();
|
||||
|
||||
dir.entries.insert_or_modify(
|
||||
&name,
|
||||
case_sensitivity,
|
||||
|| {
|
||||
VfsEntry::File(VirtualFile {
|
||||
name: name.to_string(),
|
||||
offset: offset_and_len,
|
||||
transpiled_offset,
|
||||
cjs_export_analysis_offset,
|
||||
source_map_offset,
|
||||
})
|
||||
},
|
||||
|entry| match entry {
|
||||
VfsEntry::File(virtual_file) => {
|
||||
virtual_file.offset = offset_and_len;
|
||||
// doesn't overwrite to None
|
||||
if transpiled_offset.is_some() {
|
||||
virtual_file.transpiled_offset = transpiled_offset;
|
||||
}
|
||||
if source_map_offset.is_some() {
|
||||
virtual_file.source_map_offset = source_map_offset;
|
||||
}
|
||||
if cjs_export_analysis_offset.is_some() {
|
||||
virtual_file.cjs_export_analysis_offset =
|
||||
cjs_export_analysis_offset;
|
||||
}
|
||||
}
|
||||
VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(),
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn resolve_target_path(&mut self, path: &Path) -> Result<PathBuf, AnyError> {
|
||||
// ok, fs implementation
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let metadata = std::fs::symlink_metadata(path).with_context(|| {
|
||||
format!("Resolving target path for '{}'", path.display())
|
||||
})?;
|
||||
if metadata.is_symlink() {
|
||||
Ok(self.add_symlink(path)?.into_path_buf())
|
||||
} else {
|
||||
Ok(path.to_path_buf())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_symlink(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
) -> Result<SymlinkTarget, AnyError> {
|
||||
self.add_symlink_inner(path, &mut IndexSet::new())
|
||||
}
|
||||
|
||||
fn add_symlink_inner(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
visited: &mut IndexSet<PathBuf>,
|
||||
) -> Result<SymlinkTarget, AnyError> {
|
||||
log::debug!("Adding symlink '{}'", path.display());
|
||||
let target = strip_unc_prefix(
|
||||
// ok, fs implementation
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
std::fs::read_link(path)
|
||||
.with_context(|| format!("Reading symlink '{}'", path.display()))?,
|
||||
);
|
||||
let case_sensitivity = self.case_sensitivity;
|
||||
let target = normalize_path(path.parent().unwrap().join(&target));
|
||||
let dir = self.add_dir_raw(path.parent().unwrap());
|
||||
let name = path.file_name().unwrap().to_string_lossy();
|
||||
dir.entries.insert_or_modify(
|
||||
&name,
|
||||
case_sensitivity,
|
||||
|| {
|
||||
VfsEntry::Symlink(VirtualSymlink {
|
||||
name: name.to_string(),
|
||||
dest_parts: VirtualSymlinkParts::from_path(&target),
|
||||
})
|
||||
},
|
||||
|_| {
|
||||
// ignore previously inserted
|
||||
},
|
||||
);
|
||||
// ok, fs implementation
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let target_metadata =
|
||||
std::fs::symlink_metadata(&target).with_context(|| {
|
||||
format!("Reading symlink target '{}'", target.display())
|
||||
})?;
|
||||
if target_metadata.is_symlink() {
|
||||
if !visited.insert(target.clone()) {
|
||||
// todo: probably don't error in this scenario
|
||||
bail!(
|
||||
"Circular symlink detected: {} -> {}",
|
||||
visited
|
||||
.iter()
|
||||
.map(|p| p.display().to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(" -> "),
|
||||
target.display()
|
||||
);
|
||||
}
|
||||
self.add_symlink_inner(&target, visited)
|
||||
} else if target_metadata.is_dir() {
|
||||
Ok(SymlinkTarget::Dir(target))
|
||||
} else {
|
||||
Ok(SymlinkTarget::File(target))
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds the CJS export analysis to the provided file.
|
||||
///
|
||||
/// Warning: This will panic if the file wasn't properly
|
||||
/// setup before calling this.
|
||||
pub fn add_cjs_export_analysis(&mut self, path: &Path, data: Vec<u8>) {
|
||||
self.add_data_for_file_or_panic(path, data, |file, offset_with_length| {
|
||||
file.cjs_export_analysis_offset = Some(offset_with_length);
|
||||
})
|
||||
}
|
||||
|
||||
fn add_data_for_file_or_panic(
|
||||
&mut self,
|
||||
path: &Path,
|
||||
data: Vec<u8>,
|
||||
update_file: impl FnOnce(&mut VirtualFile, OffsetWithLength),
|
||||
) {
|
||||
let offset_with_length = self.files.add_data(data);
|
||||
let case_sensitivity = self.case_sensitivity;
|
||||
let dir = self.get_dir_mut(path.parent().unwrap()).unwrap();
|
||||
let name = path.file_name().unwrap().to_string_lossy();
|
||||
let file = dir
|
||||
.entries
|
||||
.get_mut_by_name(&name, case_sensitivity)
|
||||
.unwrap();
|
||||
match file {
|
||||
VfsEntry::File(virtual_file) => {
|
||||
update_file(virtual_file, offset_with_length);
|
||||
}
|
||||
VfsEntry::Dir(_) | VfsEntry::Symlink(_) => {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterates through all the files in the virtual file system.
|
||||
pub fn iter_files(
|
||||
&self,
|
||||
) -> impl Iterator<Item = (PathBuf, &VirtualFile)> + '_ {
|
||||
FileIterator {
|
||||
pending_dirs: VecDeque::from([(
|
||||
WindowsSystemRootablePath::root_for_current_os(),
|
||||
&self.executable_root,
|
||||
)]),
|
||||
current_dir_index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(self) -> BuiltVfs {
|
||||
fn strip_prefix_from_symlinks(
|
||||
dir: &mut VirtualDirectory,
|
||||
parts: &[String],
|
||||
) {
|
||||
for entry in dir.entries.iter_mut() {
|
||||
match entry {
|
||||
VfsEntry::Dir(dir) => {
|
||||
strip_prefix_from_symlinks(dir, parts);
|
||||
}
|
||||
VfsEntry::File(_) => {}
|
||||
VfsEntry::Symlink(symlink) => {
|
||||
let parts = symlink
|
||||
.dest_parts
|
||||
.take_parts()
|
||||
.into_iter()
|
||||
.skip(parts.len())
|
||||
.collect();
|
||||
symlink.dest_parts.set_parts(parts);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut current_dir = self.executable_root;
|
||||
let mut current_path = WindowsSystemRootablePath::root_for_current_os();
|
||||
loop {
|
||||
if current_dir.entries.len() != 1 {
|
||||
break;
|
||||
}
|
||||
if self.min_root_dir.as_ref() == Some(¤t_path) {
|
||||
break;
|
||||
}
|
||||
match current_dir.entries.iter().next().unwrap() {
|
||||
VfsEntry::Dir(dir) => {
|
||||
if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME {
|
||||
// special directory we want to maintain
|
||||
break;
|
||||
}
|
||||
match current_dir.entries.remove(0) {
|
||||
VfsEntry::Dir(dir) => {
|
||||
current_path =
|
||||
WindowsSystemRootablePath::Path(current_path.join(&dir.name));
|
||||
current_dir = dir;
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
}
|
||||
VfsEntry::File(_) | VfsEntry::Symlink(_) => break,
|
||||
}
|
||||
}
|
||||
if let WindowsSystemRootablePath::Path(path) = ¤t_path {
|
||||
strip_prefix_from_symlinks(
|
||||
&mut current_dir,
|
||||
VirtualSymlinkParts::from_path(path).parts(),
|
||||
);
|
||||
}
|
||||
BuiltVfs {
|
||||
root_path: current_path,
|
||||
case_sensitivity: self.case_sensitivity,
|
||||
entries: current_dir.entries,
|
||||
files: self.files.files,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct FileIterator<'a> {
|
||||
pending_dirs: VecDeque<(WindowsSystemRootablePath, &'a VirtualDirectory)>,
|
||||
current_dir_index: usize,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for FileIterator<'a> {
|
||||
type Item = (PathBuf, &'a VirtualFile);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while !self.pending_dirs.is_empty() {
|
||||
let (dir_path, current_dir) = self.pending_dirs.front()?;
|
||||
if let Some(entry) =
|
||||
current_dir.entries.get_by_index(self.current_dir_index)
|
||||
{
|
||||
self.current_dir_index += 1;
|
||||
match entry {
|
||||
VfsEntry::Dir(virtual_directory) => {
|
||||
self.pending_dirs.push_back((
|
||||
WindowsSystemRootablePath::Path(
|
||||
dir_path.join(&virtual_directory.name),
|
||||
),
|
||||
virtual_directory,
|
||||
));
|
||||
}
|
||||
VfsEntry::File(virtual_file) => {
|
||||
return Some((dir_path.join(&virtual_file.name), virtual_file));
|
||||
}
|
||||
VfsEntry::Symlink(_) => {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.pending_dirs.pop_front();
|
||||
self.current_dir_index = 0;
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SymlinkTarget {
|
||||
File(PathBuf),
|
||||
Dir(PathBuf),
|
||||
}
|
||||
|
||||
impl SymlinkTarget {
|
||||
pub fn into_path_buf(self) -> PathBuf {
|
||||
match self {
|
||||
Self::File(path) => path,
|
||||
Self::Dir(path) => path,
|
||||
}
|
||||
}
|
||||
}
|
37
cli/lib/sys.rs
Normal file
37
cli/lib/sys.rs
Normal file
|
@ -0,0 +1,37 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use deno_node::ExtNodeSys;
|
||||
use sys_traits::FsCanonicalize;
|
||||
use sys_traits::FsCreateDirAll;
|
||||
use sys_traits::FsMetadata;
|
||||
use sys_traits::FsOpen;
|
||||
use sys_traits::FsRead;
|
||||
use sys_traits::FsReadDir;
|
||||
use sys_traits::FsRemoveFile;
|
||||
use sys_traits::FsRename;
|
||||
use sys_traits::SystemRandom;
|
||||
use sys_traits::ThreadSleep;
|
||||
|
||||
pub trait DenoLibSys:
|
||||
FsCanonicalize
|
||||
+ FsCreateDirAll
|
||||
+ FsReadDir
|
||||
+ FsMetadata
|
||||
+ FsOpen
|
||||
+ FsRemoveFile
|
||||
+ FsRename
|
||||
+ FsRead
|
||||
+ ThreadSleep
|
||||
+ SystemRandom
|
||||
+ ExtNodeSys
|
||||
+ Clone
|
||||
+ Send
|
||||
+ Sync
|
||||
+ std::fmt::Debug
|
||||
+ 'static
|
||||
{
|
||||
}
|
||||
|
||||
// ok, implementation
|
||||
#[allow(clippy::disallowed_types)]
|
||||
impl DenoLibSys for sys_traits::impls::RealSys {}
|
|
@ -2,44 +2,33 @@
|
|||
|
||||
use std::io::Write;
|
||||
|
||||
use deno_telemetry::OtelConfig;
|
||||
use deno_telemetry::OtelConsoleConfig;
|
||||
use deno_runtime::deno_telemetry;
|
||||
use deno_runtime::deno_telemetry::OtelConfig;
|
||||
use deno_runtime::deno_telemetry::OtelConsoleConfig;
|
||||
|
||||
use super::draw_thread::DrawThread;
|
||||
|
||||
struct CliLogger {
|
||||
struct CliLogger<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> {
|
||||
otel_console_config: OtelConsoleConfig,
|
||||
logger: env_logger::Logger,
|
||||
on_log_start: FnOnLogStart,
|
||||
on_log_end: FnOnLogEnd,
|
||||
}
|
||||
|
||||
impl CliLogger {
|
||||
pub fn new(
|
||||
logger: env_logger::Logger,
|
||||
otel_console_config: OtelConsoleConfig,
|
||||
) -> Self {
|
||||
Self {
|
||||
logger,
|
||||
otel_console_config,
|
||||
}
|
||||
}
|
||||
|
||||
impl<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> CliLogger<FnOnLogStart, FnOnLogEnd> {
|
||||
pub fn filter(&self) -> log::LevelFilter {
|
||||
self.logger.filter()
|
||||
}
|
||||
}
|
||||
|
||||
impl log::Log for CliLogger {
|
||||
impl<FnOnLogStart: Fn() + Send + Sync, FnOnLogEnd: Fn() + Send + Sync> log::Log
|
||||
for CliLogger<FnOnLogStart, FnOnLogEnd>
|
||||
{
|
||||
fn enabled(&self, metadata: &log::Metadata) -> bool {
|
||||
self.logger.enabled(metadata)
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
// it was considered to hold the draw thread's internal lock
|
||||
// across logging, but if outputting to stderr blocks then that
|
||||
// could potentially block other threads that access the draw
|
||||
// thread's state
|
||||
DrawThread::hide();
|
||||
(self.on_log_start)();
|
||||
|
||||
match self.otel_console_config {
|
||||
OtelConsoleConfig::Ignore => {
|
||||
|
@ -54,7 +43,7 @@ impl log::Log for CliLogger {
|
|||
}
|
||||
}
|
||||
|
||||
DrawThread::show();
|
||||
(self.on_log_end)();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -63,8 +52,20 @@ impl log::Log for CliLogger {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn init(maybe_level: Option<log::Level>, otel_config: Option<OtelConfig>) {
|
||||
let log_level = maybe_level.unwrap_or(log::Level::Info);
|
||||
pub struct InitLoggingOptions<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> {
|
||||
pub on_log_start: FnOnLogStart,
|
||||
pub on_log_end: FnOnLogEnd,
|
||||
pub maybe_level: Option<log::Level>,
|
||||
pub otel_config: Option<OtelConfig>,
|
||||
}
|
||||
|
||||
pub fn init<
|
||||
FOnLogStart: Fn() + Send + Sync + 'static,
|
||||
FnOnLogEnd: Fn() + Send + Sync + 'static,
|
||||
>(
|
||||
options: InitLoggingOptions<FOnLogStart, FnOnLogEnd>,
|
||||
) {
|
||||
let log_level = options.maybe_level.unwrap_or(log::Level::Info);
|
||||
let logger = env_logger::Builder::from_env(
|
||||
env_logger::Env::new()
|
||||
// Use `DENO_LOG` and `DENO_LOG_STYLE` instead of `RUST_` prefix
|
||||
|
@ -117,12 +118,15 @@ pub fn init(maybe_level: Option<log::Level>, otel_config: Option<OtelConfig>) {
|
|||
})
|
||||
.build();
|
||||
|
||||
let cli_logger = CliLogger::new(
|
||||
let cli_logger = CliLogger {
|
||||
on_log_start: options.on_log_start,
|
||||
on_log_end: options.on_log_end,
|
||||
logger,
|
||||
otel_config
|
||||
otel_console_config: options
|
||||
.otel_config
|
||||
.map(|c| c.console)
|
||||
.unwrap_or(OtelConsoleConfig::Ignore),
|
||||
);
|
||||
};
|
||||
let max_level = cli_logger.filter();
|
||||
let r = log::set_boxed_logger(Box::new(cli_logger));
|
||||
if r.is_ok() {
|
8
cli/lib/util/mod.rs
Normal file
8
cli/lib/util/mod.rs
Normal file
|
@ -0,0 +1,8 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
pub mod checksum;
|
||||
pub mod hash;
|
||||
pub mod logger;
|
||||
pub mod result;
|
||||
pub mod text_encoding;
|
||||
pub mod v8;
|
|
@ -4,10 +4,10 @@ use std::convert::Infallible;
|
|||
use std::fmt::Debug;
|
||||
use std::fmt::Display;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::error::CoreError;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_error::JsErrorClass;
|
||||
use deno_runtime::deno_core::error::AnyError;
|
||||
use deno_runtime::deno_core::error::CoreError;
|
||||
|
||||
pub trait InfallibleResultExt<T> {
|
||||
fn unwrap_infallible(self) -> T;
|
||||
|
@ -36,7 +36,7 @@ pub fn any_and_jserrorbox_downcast_ref<
|
|||
})
|
||||
.or_else(|| {
|
||||
err.downcast_ref::<CoreError>().and_then(|e| match e {
|
||||
CoreError::JsNative(e) => e.as_any().downcast_ref::<E>(),
|
||||
CoreError::JsBox(e) => e.as_any().downcast_ref::<E>(),
|
||||
_ => None,
|
||||
})
|
||||
})
|
45
cli/lib/util/text_encoding.rs
Normal file
45
cli/lib/util/text_encoding.rs
Normal file
|
@ -0,0 +1,45 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[inline(always)]
|
||||
pub fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String {
|
||||
match String::from_utf8_lossy(&bytes) {
|
||||
Cow::Owned(code) => code,
|
||||
// SAFETY: `String::from_utf8_lossy` guarantees that the result is valid
|
||||
// UTF-8 if `Cow::Borrowed` is returned.
|
||||
Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) },
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn from_utf8_lossy_cow(bytes: Cow<[u8]>) -> Cow<str> {
|
||||
match bytes {
|
||||
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
|
||||
Cow::Owned(bytes) => Cow::Owned(from_utf8_lossy_owned(bytes)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an `Arc<str>` to an `Arc<[u8]>`.
|
||||
#[allow(dead_code)]
|
||||
pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
|
||||
let raw = Arc::into_raw(arc_str);
|
||||
// SAFETY: This is safe because they have the same memory layout.
|
||||
unsafe { Arc::from_raw(raw as *const [u8]) }
|
||||
}
|
||||
|
||||
/// Converts an `Arc<u8>` to an `Arc<str>` if able.
|
||||
#[allow(dead_code)]
|
||||
pub fn arc_u8_to_arc_str(
|
||||
arc_u8: Arc<[u8]>,
|
||||
) -> Result<Arc<str>, std::str::Utf8Error> {
|
||||
// Check that the string is valid UTF-8.
|
||||
std::str::from_utf8(&arc_u8)?;
|
||||
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
|
||||
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
|
||||
// standard library.
|
||||
Ok(unsafe {
|
||||
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
|
||||
})
|
||||
}
|
14
cli/lib/util/v8.rs
Normal file
14
cli/lib/util/v8.rs
Normal file
|
@ -0,0 +1,14 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
#[inline(always)]
|
||||
pub fn construct_v8_flags(
|
||||
default_v8_flags: &[String],
|
||||
v8_flags: &[String],
|
||||
env_v8_flags: Vec<String>,
|
||||
) -> Vec<String> {
|
||||
std::iter::once("UNUSED_BUT_NECESSARY_ARG0".to_owned())
|
||||
.chain(default_v8_flags.iter().cloned())
|
||||
.chain(env_v8_flags)
|
||||
.chain(v8_flags.iter().cloned())
|
||||
.collect::<Vec<_>>()
|
||||
}
|
94
cli/lib/version.rs
Normal file
94
cli/lib/version.rs
Normal file
|
@ -0,0 +1,94 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
use deno_runtime::deno_telemetry::OtelRuntimeConfig;
|
||||
|
||||
use crate::shared::ReleaseChannel;
|
||||
|
||||
pub fn otel_runtime_config() -> OtelRuntimeConfig {
|
||||
OtelRuntimeConfig {
|
||||
runtime_name: Cow::Borrowed("deno"),
|
||||
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
|
||||
}
|
||||
}
|
||||
|
||||
const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
|
||||
const TYPESCRIPT: &str = "5.6.2";
|
||||
const DENO_VERSION: &str = env!("DENO_VERSION");
|
||||
// TODO(bartlomieju): ideally we could remove this const.
|
||||
const IS_CANARY: bool = option_env!("DENO_CANARY").is_some();
|
||||
// TODO(bartlomieju): this is temporary, to allow Homebrew to cut RC releases as well
|
||||
const IS_RC: bool = option_env!("DENO_RC").is_some();
|
||||
|
||||
pub static DENO_VERSION_INFO: std::sync::LazyLock<DenoVersionInfo> =
|
||||
std::sync::LazyLock::new(|| {
|
||||
let release_channel = libsui::find_section("denover")
|
||||
.and_then(|buf| std::str::from_utf8(buf).ok())
|
||||
.and_then(|str_| ReleaseChannel::deserialize(str_).ok())
|
||||
.unwrap_or({
|
||||
if IS_CANARY {
|
||||
ReleaseChannel::Canary
|
||||
} else if IS_RC {
|
||||
ReleaseChannel::Rc
|
||||
} else {
|
||||
ReleaseChannel::Stable
|
||||
}
|
||||
});
|
||||
|
||||
DenoVersionInfo {
|
||||
deno: if release_channel == ReleaseChannel::Canary {
|
||||
concat!(env!("DENO_VERSION"), "+", env!("GIT_COMMIT_HASH_SHORT"))
|
||||
} else {
|
||||
env!("DENO_VERSION")
|
||||
},
|
||||
|
||||
release_channel,
|
||||
|
||||
git_hash: GIT_COMMIT_HASH,
|
||||
|
||||
// Keep in sync with `deno` field.
|
||||
user_agent: if release_channel == ReleaseChannel::Canary {
|
||||
concat!(
|
||||
"Deno/",
|
||||
env!("DENO_VERSION"),
|
||||
"+",
|
||||
env!("GIT_COMMIT_HASH_SHORT")
|
||||
)
|
||||
} else {
|
||||
concat!("Deno/", env!("DENO_VERSION"))
|
||||
},
|
||||
|
||||
typescript: TYPESCRIPT,
|
||||
}
|
||||
});
|
||||
|
||||
pub struct DenoVersionInfo {
|
||||
/// Human-readable version of the current Deno binary.
|
||||
///
|
||||
/// For stable release, a semver, eg. `v1.46.2`.
|
||||
/// For canary release, a semver + 7-char git hash, eg. `v1.46.3+asdfqwq`.
|
||||
pub deno: &'static str,
|
||||
|
||||
pub release_channel: ReleaseChannel,
|
||||
|
||||
/// A full git hash.
|
||||
pub git_hash: &'static str,
|
||||
|
||||
/// A user-agent header that will be used in HTTP client.
|
||||
pub user_agent: &'static str,
|
||||
|
||||
pub typescript: &'static str,
|
||||
}
|
||||
|
||||
impl DenoVersionInfo {
|
||||
/// For stable release, a semver like, eg. `v1.46.2`.
|
||||
/// For canary release a full git hash, eg. `9bdab6fb6b93eb43b1930f40987fa4997287f9c8`.
|
||||
pub fn version_or_git_hash(&self) -> &'static str {
|
||||
if self.release_channel == ReleaseChannel::Canary {
|
||||
self.git_hash
|
||||
} else {
|
||||
DENO_VERSION
|
||||
}
|
||||
}
|
||||
}
|
1
cli/lib/version.txt
Normal file
1
cli/lib/version.txt
Normal file
|
@ -0,0 +1 @@
|
|||
2.1.6
|
716
cli/lib/worker.rs
Normal file
716
cli/lib/worker.rs
Normal file
|
@ -0,0 +1,716 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::error::JsError;
|
||||
use deno_node::NodeRequireLoaderRc;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_resolver::npm::NpmResolver;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
|
||||
use deno_runtime::deno_core;
|
||||
use deno_runtime::deno_core::error::CoreError;
|
||||
use deno_runtime::deno_core::v8;
|
||||
use deno_runtime::deno_core::CompiledWasmModuleStore;
|
||||
use deno_runtime::deno_core::Extension;
|
||||
use deno_runtime::deno_core::FeatureChecker;
|
||||
use deno_runtime::deno_core::JsRuntime;
|
||||
use deno_runtime::deno_core::LocalInspectorSession;
|
||||
use deno_runtime::deno_core::ModuleLoader;
|
||||
use deno_runtime::deno_core::SharedArrayBufferStore;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::NodeExtInitServices;
|
||||
use deno_runtime::deno_node::NodeRequireLoader;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_runtime::deno_process::NpmProcessStateProviderRc;
|
||||
use deno_runtime::deno_telemetry::OtelConfig;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
use deno_runtime::deno_web::BlobStore;
|
||||
use deno_runtime::fmt_errors::format_js_error;
|
||||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use deno_runtime::ops::worker_host::CreateWebWorkerCb;
|
||||
use deno_runtime::web_worker::WebWorker;
|
||||
use deno_runtime::web_worker::WebWorkerOptions;
|
||||
use deno_runtime::web_worker::WebWorkerServiceOptions;
|
||||
use deno_runtime::worker::MainWorker;
|
||||
use deno_runtime::worker::WorkerOptions;
|
||||
use deno_runtime::worker::WorkerServiceOptions;
|
||||
use deno_runtime::BootstrapOptions;
|
||||
use deno_runtime::WorkerExecutionMode;
|
||||
use deno_runtime::WorkerLogLevel;
|
||||
use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
|
||||
use node_resolver::errors::ResolvePkgJsonBinExportError;
|
||||
use url::Url;
|
||||
|
||||
use crate::args::has_trace_permissions_enabled;
|
||||
use crate::sys::DenoLibSys;
|
||||
use crate::util::checksum;
|
||||
|
||||
pub struct CreateModuleLoaderResult {
|
||||
pub module_loader: Rc<dyn ModuleLoader>,
|
||||
pub node_require_loader: Rc<dyn NodeRequireLoader>,
|
||||
}
|
||||
|
||||
pub trait ModuleLoaderFactory: Send + Sync {
|
||||
fn create_for_main(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
) -> CreateModuleLoaderResult;
|
||||
|
||||
fn create_for_worker(
|
||||
&self,
|
||||
parent_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
) -> CreateModuleLoaderResult;
|
||||
}
|
||||
|
||||
enum StorageKeyResolverStrategy {
|
||||
Specified(Option<String>),
|
||||
UseMainModule,
|
||||
}
|
||||
|
||||
pub struct StorageKeyResolver(StorageKeyResolverStrategy);
|
||||
|
||||
impl StorageKeyResolver {
|
||||
pub fn from_flag(location: &Url) -> Self {
|
||||
// if a location is set, then the ascii serialization of the location is
|
||||
// used, unless the origin is opaque, and then no storage origin is set, as
|
||||
// we can't expect the origin to be reproducible
|
||||
let storage_origin = location.origin();
|
||||
Self(StorageKeyResolverStrategy::Specified(
|
||||
if storage_origin.is_tuple() {
|
||||
Some(storage_origin.ascii_serialization())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
pub fn from_config_file_url(url: &Url) -> Self {
|
||||
Self(StorageKeyResolverStrategy::Specified(Some(url.to_string())))
|
||||
}
|
||||
|
||||
pub fn new_use_main_module() -> Self {
|
||||
Self(StorageKeyResolverStrategy::UseMainModule)
|
||||
}
|
||||
|
||||
/// Creates a storage key resolver that will always resolve to being empty.
|
||||
pub fn empty() -> Self {
|
||||
Self(StorageKeyResolverStrategy::Specified(None))
|
||||
}
|
||||
|
||||
/// Resolves the storage key to use based on the current flags, config, or main module.
|
||||
pub fn resolve_storage_key(&self, main_module: &Url) -> Option<String> {
|
||||
// use the stored value or fall back to using the path of the main module.
|
||||
match &self.0 {
|
||||
StorageKeyResolverStrategy::Specified(value) => value.clone(),
|
||||
StorageKeyResolverStrategy::UseMainModule => {
|
||||
Some(main_module.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_cache_storage_dir() -> PathBuf {
|
||||
// ok because this won't ever be used by the js runtime
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
// Note: we currently use temp_dir() to avoid managing storage size.
|
||||
std::env::temp_dir().join("deno_cache")
|
||||
}
|
||||
|
||||
/// By default V8 uses 1.4Gb heap limit which is meant for browser tabs.
|
||||
/// Instead probe for the total memory on the system and use it instead
|
||||
/// as a default.
|
||||
pub fn create_isolate_create_params() -> Option<v8::CreateParams> {
|
||||
let maybe_mem_info = deno_runtime::deno_os::sys_info::mem_info();
|
||||
maybe_mem_info.map(|mem_info| {
|
||||
v8::CreateParams::default()
|
||||
.heap_limits_from_system_memory(mem_info.total, 0)
|
||||
})
|
||||
}
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveNpmBinaryEntrypointError {
  #[class(inherit)]
  #[error(transparent)]
  ResolvePkgJsonBinExport(ResolvePkgJsonBinExportError),
  #[class(generic)]
  #[error("{original:#}\n\nFallback failed: {fallback:#}")]
  Fallback {
    fallback: ResolveNpmBinaryEntrypointFallbackError,
    original: ResolvePkgJsonBinExportError,
  },
}

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveNpmBinaryEntrypointFallbackError {
  #[class(inherit)]
  #[error(transparent)]
  PackageSubpathResolve(node_resolver::errors::PackageSubpathResolveError),
  #[class(generic)]
  #[error("Cannot find module '{0}'")]
  ModuleNotFound(Url),
}

pub struct LibMainWorkerOptions {
  pub argv: Vec<String>,
  pub log_level: WorkerLogLevel,
  pub enable_op_summary_metrics: bool,
  pub enable_testing_features: bool,
  pub has_node_modules_dir: bool,
  pub inspect_brk: bool,
  pub inspect_wait: bool,
  pub strace_ops: Option<Vec<String>>,
  pub is_inspecting: bool,
  pub location: Option<Url>,
  pub argv0: Option<String>,
  pub node_debug: Option<String>,
  pub otel_config: OtelConfig,
  pub origin_data_folder_path: Option<PathBuf>,
  pub seed: Option<u64>,
  pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
  pub skip_op_registration: bool,
  pub node_ipc: Option<i64>,
  pub startup_snapshot: Option<&'static [u8]>,
  pub serve_port: Option<u16>,
  pub serve_host: Option<String>,
}

struct LibWorkerFactorySharedState<TSys: DenoLibSys> {
  blob_store: Arc<BlobStore>,
  broadcast_channel: InMemoryBroadcastChannel,
  code_cache: Option<Arc<dyn deno_runtime::code_cache::CodeCache>>,
  compiled_wasm_module_store: CompiledWasmModuleStore,
  feature_checker: Arc<FeatureChecker>,
  fs: Arc<dyn deno_fs::FileSystem>,
  maybe_inspector_server: Option<Arc<InspectorServer>>,
  module_loader_factory: Box<dyn ModuleLoaderFactory>,
  node_resolver:
    Arc<NodeResolver<DenoInNpmPackageChecker, NpmResolver<TSys>, TSys>>,
  npm_process_state_provider: NpmProcessStateProviderRc,
  pkg_json_resolver: Arc<node_resolver::PackageJsonResolver<TSys>>,
  root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
  shared_array_buffer_store: SharedArrayBufferStore,
  storage_key_resolver: StorageKeyResolver,
  sys: TSys,
  options: LibMainWorkerOptions,
}

impl<TSys: DenoLibSys> LibWorkerFactorySharedState<TSys> {
  fn resolve_unstable_features(
    &self,
    feature_checker: &FeatureChecker,
  ) -> Vec<i32> {
    let mut unstable_features =
      Vec::with_capacity(UNSTABLE_GRANULAR_FLAGS.len());
    for granular_flag in UNSTABLE_GRANULAR_FLAGS {
      if feature_checker.check(granular_flag.name) {
        unstable_features.push(granular_flag.id);
      }
    }
    unstable_features
  }

  fn create_node_init_services(
    &self,
    node_require_loader: NodeRequireLoaderRc,
  ) -> NodeExtInitServices<DenoInNpmPackageChecker, NpmResolver<TSys>, TSys> {
    NodeExtInitServices {
      node_require_loader,
      node_resolver: self.node_resolver.clone(),
      pkg_json_resolver: self.pkg_json_resolver.clone(),
      sys: self.sys.clone(),
    }
  }

  fn create_web_worker_callback(
    self: &Arc<Self>,
    stdio: deno_runtime::deno_io::Stdio,
  ) -> Arc<CreateWebWorkerCb> {
    let shared = self.clone();
    Arc::new(move |args| {
      let maybe_inspector_server = shared.maybe_inspector_server.clone();

      let CreateModuleLoaderResult {
        module_loader,
        node_require_loader,
      } = shared.module_loader_factory.create_for_worker(
        args.parent_permissions.clone(),
        args.permissions.clone(),
      );
      let create_web_worker_cb =
        shared.create_web_worker_callback(stdio.clone());

      let maybe_storage_key = shared
        .storage_key_resolver
        .resolve_storage_key(&args.main_module);
      let cache_storage_dir = maybe_storage_key.map(|key| {
        // TODO(@satyarohith): storage quota management
        get_cache_storage_dir().join(checksum::gen(&[key.as_bytes()]))
      });

      // TODO(bartlomieju): this is cruft, update FeatureChecker to spit out
      // list of enabled features.
      let feature_checker = shared.feature_checker.clone();
      let unstable_features =
        shared.resolve_unstable_features(feature_checker.as_ref());

      let services = WebWorkerServiceOptions {
        root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
        module_loader,
        fs: shared.fs.clone(),
        node_services: Some(
          shared.create_node_init_services(node_require_loader),
        ),
        blob_store: shared.blob_store.clone(),
        broadcast_channel: shared.broadcast_channel.clone(),
        shared_array_buffer_store: Some(
          shared.shared_array_buffer_store.clone(),
        ),
        compiled_wasm_module_store: Some(
          shared.compiled_wasm_module_store.clone(),
        ),
        maybe_inspector_server,
        feature_checker,
        npm_process_state_provider: Some(
          shared.npm_process_state_provider.clone(),
        ),
        permissions: args.permissions,
      };
      let options = WebWorkerOptions {
        name: args.name,
        main_module: args.main_module.clone(),
        worker_id: args.worker_id,
        bootstrap: BootstrapOptions {
          deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
          args: shared.options.argv.clone(),
          cpu_count: std::thread::available_parallelism()
            .map(|p| p.get())
            .unwrap_or(1),
          log_level: shared.options.log_level,
          enable_op_summary_metrics: shared.options.enable_op_summary_metrics,
          enable_testing_features: shared.options.enable_testing_features,
          locale: deno_core::v8::icu::get_language_tag(),
          location: Some(args.main_module),
          no_color: !colors::use_color(),
          color_level: colors::get_color_level(),
          is_stdout_tty: deno_terminal::is_stdout_tty(),
          is_stderr_tty: deno_terminal::is_stderr_tty(),
          unstable_features,
          user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(),
          inspect: shared.options.is_inspecting,
          has_node_modules_dir: shared.options.has_node_modules_dir,
          argv0: shared.options.argv0.clone(),
          node_debug: shared.options.node_debug.clone(),
          node_ipc_fd: None,
          mode: WorkerExecutionMode::Worker,
          serve_port: shared.options.serve_port,
          serve_host: shared.options.serve_host.clone(),
          otel_config: shared.options.otel_config.clone(),
          close_on_idle: args.close_on_idle,
        },
        extensions: vec![],
        startup_snapshot: shared.options.startup_snapshot,
        create_params: create_isolate_create_params(),
        unsafely_ignore_certificate_errors: shared
          .options
          .unsafely_ignore_certificate_errors
          .clone(),
        seed: shared.options.seed,
        create_web_worker_cb,
        format_js_error_fn: Some(Arc::new(format_js_error)),
        worker_type: args.worker_type,
        stdio: stdio.clone(),
        cache_storage_dir,
        strace_ops: shared.options.strace_ops.clone(),
        close_on_idle: args.close_on_idle,
        maybe_worker_metadata: args.maybe_worker_metadata,
        enable_stack_trace_arg_in_ops: has_trace_permissions_enabled(),
      };

      WebWorker::bootstrap_from_options(services, options)
    })
  }
}

pub struct LibMainWorkerFactory<TSys: DenoLibSys> {
  shared: Arc<LibWorkerFactorySharedState<TSys>>,
}

impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    blob_store: Arc<BlobStore>,
    code_cache: Option<Arc<dyn deno_runtime::code_cache::CodeCache>>,
    feature_checker: Arc<FeatureChecker>,
    fs: Arc<dyn deno_fs::FileSystem>,
    maybe_inspector_server: Option<Arc<InspectorServer>>,
    module_loader_factory: Box<dyn ModuleLoaderFactory>,
    node_resolver: Arc<
      NodeResolver<DenoInNpmPackageChecker, NpmResolver<TSys>, TSys>,
    >,
    npm_process_state_provider: NpmProcessStateProviderRc,
    pkg_json_resolver: Arc<node_resolver::PackageJsonResolver<TSys>>,
    root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
    storage_key_resolver: StorageKeyResolver,
    sys: TSys,
    options: LibMainWorkerOptions,
  ) -> Self {
    Self {
      shared: Arc::new(LibWorkerFactorySharedState {
        blob_store,
        broadcast_channel: Default::default(),
        code_cache,
        compiled_wasm_module_store: Default::default(),
        feature_checker,
        fs,
        maybe_inspector_server,
        module_loader_factory,
        node_resolver,
        npm_process_state_provider,
        pkg_json_resolver,
        root_cert_store_provider,
        shared_array_buffer_store: Default::default(),
        storage_key_resolver,
        sys,
        options,
      }),
    }
  }

  pub fn create_main_worker(
    &self,
    mode: WorkerExecutionMode,
    permissions: PermissionsContainer,
    main_module: Url,
  ) -> Result<LibMainWorker, CoreError> {
    self.create_custom_worker(
      mode,
      main_module,
      permissions,
      vec![],
      Default::default(),
    )
  }

  pub fn create_custom_worker(
    &self,
    mode: WorkerExecutionMode,
    main_module: Url,
    permissions: PermissionsContainer,
    custom_extensions: Vec<Extension>,
    stdio: deno_runtime::deno_io::Stdio,
  ) -> Result<LibMainWorker, CoreError> {
    let shared = &self.shared;
    let CreateModuleLoaderResult {
      module_loader,
      node_require_loader,
    } = shared
      .module_loader_factory
      .create_for_main(permissions.clone());

    // TODO(bartlomieju): this is cruft, update FeatureChecker to spit out
    // list of enabled features.
    let feature_checker = shared.feature_checker.clone();
    let unstable_features =
      shared.resolve_unstable_features(feature_checker.as_ref());
    let maybe_storage_key = shared
      .storage_key_resolver
      .resolve_storage_key(&main_module);
    let origin_storage_dir = maybe_storage_key.as_ref().map(|key| {
      shared
        .options
        .origin_data_folder_path
        .as_ref()
        .unwrap() // must be set if storage key resolver returns a value
        .join(checksum::gen(&[key.as_bytes()]))
    });
    let cache_storage_dir = maybe_storage_key.map(|key| {
      // TODO(@satyarohith): storage quota management
      get_cache_storage_dir().join(checksum::gen(&[key.as_bytes()]))
    });

    let services = WorkerServiceOptions {
      root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
      module_loader,
      fs: shared.fs.clone(),
      node_services: Some(
        shared.create_node_init_services(node_require_loader),
      ),
      npm_process_state_provider: Some(
        shared.npm_process_state_provider.clone(),
      ),
      blob_store: shared.blob_store.clone(),
      broadcast_channel: shared.broadcast_channel.clone(),
      fetch_dns_resolver: Default::default(),
      shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
      compiled_wasm_module_store: Some(
        shared.compiled_wasm_module_store.clone(),
      ),
      feature_checker,
      permissions,
      v8_code_cache: shared.code_cache.clone(),
    };

    let options = WorkerOptions {
      bootstrap: BootstrapOptions {
        deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
        args: shared.options.argv.clone(),
        cpu_count: std::thread::available_parallelism()
          .map(|p| p.get())
          .unwrap_or(1),
        log_level: shared.options.log_level,
        enable_op_summary_metrics: shared.options.enable_op_summary_metrics,
        enable_testing_features: shared.options.enable_testing_features,
        locale: deno_core::v8::icu::get_language_tag(),
        location: shared.options.location.clone(),
        no_color: !colors::use_color(),
        is_stdout_tty: deno_terminal::is_stdout_tty(),
        is_stderr_tty: deno_terminal::is_stderr_tty(),
        color_level: colors::get_color_level(),
        unstable_features,
        user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(),
        inspect: shared.options.is_inspecting,
        has_node_modules_dir: shared.options.has_node_modules_dir,
        argv0: shared.options.argv0.clone(),
        node_debug: shared.options.node_debug.clone(),
        node_ipc_fd: shared.options.node_ipc,
        mode,
        serve_port: shared.options.serve_port,
        serve_host: shared.options.serve_host.clone(),
        otel_config: shared.options.otel_config.clone(),
        close_on_idle: true,
      },
      extensions: custom_extensions,
      startup_snapshot: shared.options.startup_snapshot,
      create_params: create_isolate_create_params(),
      unsafely_ignore_certificate_errors: shared
        .options
        .unsafely_ignore_certificate_errors
        .clone(),
      seed: shared.options.seed,
      format_js_error_fn: Some(Arc::new(format_js_error)),
      create_web_worker_cb: shared.create_web_worker_callback(stdio.clone()),
      maybe_inspector_server: shared.maybe_inspector_server.clone(),
      should_break_on_first_statement: shared.options.inspect_brk,
      should_wait_for_inspector_session: shared.options.inspect_wait,
      strace_ops: shared.options.strace_ops.clone(),
      cache_storage_dir,
      origin_storage_dir,
      stdio,
      skip_op_registration: shared.options.skip_op_registration,
      enable_stack_trace_arg_in_ops: has_trace_permissions_enabled(),
    };

    let worker =
      MainWorker::bootstrap_from_options(&main_module, services, options);

    Ok(LibMainWorker {
      main_module,
      worker,
    })
  }

  pub fn resolve_npm_binary_entrypoint(
    &self,
    package_folder: &Path,
    sub_path: Option<&str>,
  ) -> Result<Url, ResolveNpmBinaryEntrypointError> {
    match self
      .shared
      .node_resolver
      .resolve_binary_export(package_folder, sub_path)
    {
      Ok(specifier) => Ok(specifier),
      Err(original_err) => {
        // if the binary entrypoint was not found, fallback to regular node resolution
        let result =
          self.resolve_binary_entrypoint_fallback(package_folder, sub_path);
        match result {
          Ok(Some(specifier)) => Ok(specifier),
          Ok(None) => {
            Err(ResolveNpmBinaryEntrypointError::ResolvePkgJsonBinExport(
              original_err,
            ))
          }
          Err(fallback_err) => Err(ResolveNpmBinaryEntrypointError::Fallback {
            original: original_err,
            fallback: fallback_err,
          }),
        }
      }
    }
  }

  /// resolve the binary entrypoint using regular node resolution
  fn resolve_binary_entrypoint_fallback(
    &self,
    package_folder: &Path,
    sub_path: Option<&str>,
  ) -> Result<Option<Url>, ResolveNpmBinaryEntrypointFallbackError> {
    // only fallback if the user specified a sub path
    if sub_path.is_none() {
      // it's confusing to users if the package doesn't have any binary
      // entrypoint and we just execute the main script which will likely
      // have blank output, so do not resolve the entrypoint in this case
      return Ok(None);
    }

    let specifier = self
      .shared
      .node_resolver
      .resolve_package_subpath_from_deno_module(
        package_folder,
        sub_path,
        /* referrer */ None,
        node_resolver::ResolutionMode::Import,
        node_resolver::NodeResolutionKind::Execution,
      )
      .map_err(
        ResolveNpmBinaryEntrypointFallbackError::PackageSubpathResolve,
      )?;
    if deno_path_util::url_to_file_path(&specifier)
      .map(|p| self.shared.sys.fs_exists_no_err(p))
      .unwrap_or(false)
    {
      Ok(Some(specifier))
    } else {
      Err(ResolveNpmBinaryEntrypointFallbackError::ModuleNotFound(
        specifier,
      ))
    }
  }
}
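A usage sketch (not part of the diff; the helper name and its arguments are illustrative): a caller that has already built a factory via `LibMainWorkerFactory::new(...)` with real services would typically ask it for a main worker and drive it to completion.

// Sketch only, assuming a fully constructed factory and a local main module.
async fn run_main_script<TSys: DenoLibSys>(
  factory: &LibMainWorkerFactory<TSys>,
  main_module: Url,
  permissions: PermissionsContainer,
) -> Result<i32, CoreError> {
  // `WorkerExecutionMode::Run` corresponds to plain script execution.
  let mut worker = factory.create_main_worker(
    WorkerExecutionMode::Run,
    permissions,
    main_module,
  )?;
  // Evaluates the module, pumps the event loop, dispatches the
  // load/unload/exit events, and returns the exit code.
  worker.run().await
}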
pub struct LibMainWorker {
  main_module: Url,
  worker: MainWorker,
}

impl LibMainWorker {
  pub fn into_main_worker(self) -> MainWorker {
    self.worker
  }

  pub fn main_module(&self) -> &Url {
    &self.main_module
  }

  pub fn js_runtime(&mut self) -> &mut JsRuntime {
    &mut self.worker.js_runtime
  }

  #[inline]
  pub fn create_inspector_session(&mut self) -> LocalInspectorSession {
    self.worker.create_inspector_session()
  }

  #[inline]
  pub fn dispatch_load_event(&mut self) -> Result<(), JsError> {
    self.worker.dispatch_load_event()
  }

  #[inline]
  pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, JsError> {
    self.worker.dispatch_beforeunload_event()
  }

  #[inline]
  pub fn dispatch_process_beforeexit_event(&mut self) -> Result<bool, JsError> {
    self.worker.dispatch_process_beforeexit_event()
  }

  #[inline]
  pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> {
    self.worker.dispatch_unload_event()
  }

  #[inline]
  pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> {
    self.worker.dispatch_process_exit_event()
  }

  pub async fn execute_main_module(&mut self) -> Result<(), CoreError> {
    let id = self.worker.preload_main_module(&self.main_module).await?;
    self.worker.evaluate_module(id).await
  }

  pub async fn execute_side_module(&mut self) -> Result<(), CoreError> {
    let id = self.worker.preload_side_module(&self.main_module).await?;
    self.worker.evaluate_module(id).await
  }

  pub async fn run(&mut self) -> Result<i32, CoreError> {
    log::debug!("main_module {}", self.main_module);

    self.execute_main_module().await?;
    self.worker.dispatch_load_event()?;

    loop {
      self
        .worker
        .run_event_loop(/* wait for inspector */ false)
        .await?;

      let web_continue = self.worker.dispatch_beforeunload_event()?;
      if !web_continue {
        let node_continue = self.worker.dispatch_process_beforeexit_event()?;
        if !node_continue {
          break;
        }
      }
    }

    self.worker.dispatch_unload_event()?;
    self.worker.dispatch_process_exit_event()?;

    Ok(self.worker.exit_code())
  }

  #[inline]
  pub async fn run_event_loop(
    &mut self,
    wait_for_inspector: bool,
  ) -> Result<(), CoreError> {
    self.worker.run_event_loop(wait_for_inspector).await
  }

  #[inline]
  pub fn exit_code(&self) -> i32 {
    self.worker.exit_code()
  }
}
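For embedders that cannot use `run()` as-is, the same lifecycle can be driven by hand. This sketch is not in the diff; it simply restates the steps `run()` performs (omitting the beforeunload/beforeexit re-entry loop) so custom work can be interleaved between them:

// Simplified sketch of manual lifecycle driving, under the assumptions above.
async fn drive_manually(worker: &mut LibMainWorker) -> Result<i32, CoreError> {
  worker.execute_main_module().await?;
  worker.dispatch_load_event()?;
  // ...embedder-specific work could happen here...
  worker.run_event_loop(false).await?;
  worker.dispatch_unload_event()?;
  worker.dispatch_process_exit_event()?;
  Ok(worker.exit_code())
}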
#[cfg(test)]
mod test {
  use super::*;

  #[test]
  fn storage_key_resolver_test() {
    let resolver =
      StorageKeyResolver(StorageKeyResolverStrategy::UseMainModule);
    let specifier = Url::parse("file:///a.ts").unwrap();
    assert_eq!(
      resolver.resolve_storage_key(&specifier),
      Some(specifier.to_string())
    );
    let resolver =
      StorageKeyResolver(StorageKeyResolverStrategy::Specified(None));
    assert_eq!(resolver.resolve_storage_key(&specifier), None);
    let resolver = StorageKeyResolver(StorageKeyResolverStrategy::Specified(
      Some("value".to_string()),
    ));
    assert_eq!(
      resolver.resolve_storage_key(&specifier),
      Some("value".to_string())
    );

    // test empty
    let resolver = StorageKeyResolver::empty();
    assert_eq!(resolver.resolve_storage_key(&specifier), None);
  }
}

@@ -41,6 +41,8 @@ use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_lib::args::has_flag_env_var;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache;

@@ -55,13 +57,11 @@ use super::logging::lsp_log;
use super::lsp_custom;
use super::urls::url_to_uri;
use crate::args::discover_npmrc_from_workspace;
use crate::args::has_flag_env_var;
use crate::args::CliLockfile;
use crate::args::CliLockfileReadFromPathOptions;
use crate::args::ConfigFile;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::cache::FastInsecureHasher;
use crate::file_fetcher::CliFileFetcher;
use crate::lsp::logging::lsp_warn;
use crate::resolver::CliSloppyImportsResolver;

@@ -265,7 +265,7 @@ impl TsDiagnosticsStore {
}

pub fn should_send_diagnostic_batch_index_notifications() -> bool {
  crate::args::has_flag_env_var(
  deno_lib::args::has_flag_env_var(
    "DENO_DONT_USE_INTERNAL_LSP_DIAGNOSTIC_SYNC_FLAG",
  )
}

@@ -27,6 +27,10 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
use deno_lib::args::get_root_cert_store;
use deno_lib::args::has_flag_env_var;
use deno_lib::args::CaData;
use deno_lib::version::DENO_VERSION_INFO;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;

@@ -94,9 +98,6 @@ use super::urls;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::has_flag_env_var;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::InternalFlags;

@@ -703,7 +704,7 @@ impl Inner {
    let version = format!(
      "{} ({}, {})",
      crate::version::DENO_VERSION_INFO.deno,
      DENO_VERSION_INFO.deno,
      env!("PROFILE"),
      env!("TARGET")
    );

@@ -5,6 +5,7 @@ use std::collections::HashSet;
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier;
use deno_lib::util::checksum;
use lsp::Range;
use tower_lsp::lsp_types as lsp;

@@ -15,7 +16,6 @@ use crate::lsp::logging::lsp_warn;
use crate::lsp::urls::url_to_uri;
use crate::tools::test::TestDescription;
use crate::tools::test::TestStepDescription;
use crate::util::checksum;

#[derive(Debug, Clone, PartialEq)]
pub struct TestDefinition {

@@ -39,6 +39,8 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;
use deno_lib::util::result::InfallibleResultExt;
use deno_lib::worker::create_isolate_create_params;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_runtime::inspector_server::InspectorServer;

@@ -72,6 +74,7 @@ use super::documents::Document;
use super::documents::DocumentsFilter;
use super::language_server;
use super::language_server::StateSnapshot;
use super::logging::lsp_log;
use super::performance::Performance;
use super::performance::PerformanceMark;
use super::refactor::RefactorCodeActionData;

@@ -94,9 +97,7 @@ use crate::tsc::ResolveArgs;
use crate::tsc::MISSING_DEPENDENCY_SPECIFIER;
use crate::util::path::relative_specifier;
use crate::util::path::to_percent_decoded_str;
use crate::util::result::InfallibleResultExt;
use crate::util::v8::convert;
use crate::worker::create_isolate_create_params;

static BRACKET_ACCESSOR_RE: Lazy<Regex> =
  lazy_regex!(r#"^\[['"](.+)[\['"]\]$"#);

@@ -4340,7 +4341,9 @@ impl TscSpecifierMap {
    if let Some(specifier) = self.normalized_specifiers.get(original) {
      return Ok(specifier.clone());
    }
    let specifier_str = original.replace(".d.ts.d.ts", ".d.ts");
    let specifier_str = original
      .replace(".d.ts.d.ts", ".d.ts")
      .replace("$node_modules", "node_modules");
    let specifier = match ModuleSpecifier::parse(&specifier_str) {
      Ok(s) => s,
      Err(err) => return Err(err),

@@ -4695,7 +4698,24 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
      .graph_imports_by_referrer(scope)
    {
      for specifier in specifiers {
        script_names.insert(specifier.to_string());
        if let Ok(req_ref) =
          deno_semver::npm::NpmPackageReqReference::from_specifier(specifier)
        {
          let Some((resolved, _)) =
            state.state_snapshot.resolver.npm_to_file_url(
              &req_ref,
              scope,
              ResolutionMode::Import,
              Some(scope),
            )
          else {
            lsp_log!("failed to resolve {req_ref} to file URL");
            continue;
          };
          script_names.insert(resolved.to_string());
        } else {
          script_names.insert(specifier.to_string());
        }
      }
    }
  }

@@ -6245,7 +6265,40 @@ mod tests {
            "kind": "keyword"
          }
        ],
        "documentation": []
        "documentation": [
          {
            "text": "Outputs a message to the console",
            "kind": "text",
          },
        ],
        "tags": [
          {
            "name": "param",
            "text": [
              {
                "text": "data",
                "kind": "parameterName",
              },
              {
                "text": " ",
                "kind": "space",
              },
              {
                "text": "Values to be printed to the console",
                "kind": "text",
              },
            ],
          },
          {
            "name": "example",
            "text": [
              {
                "text": "```ts\nconsole.log('Hello', 'World', 123);\n```",
                "kind": "text",
              },
            ],
          },
        ]
      })
    );
  }

@@ -81,7 +81,7 @@ fn hash_data_specifier(specifier: &ModuleSpecifier) -> String {
    file_name_str.push('?');
    file_name_str.push_str(query);
  }
  crate::util::checksum::gen(&[file_name_str.as_bytes()])
  deno_lib::util::checksum::gen(&[file_name_str.as_bytes()])
}

fn to_deno_uri(specifier: &Url) -> String {

50  cli/main.rs

@@ -17,16 +17,18 @@ mod node;
mod npm;
mod ops;
mod resolver;
mod shared;
mod standalone;
mod sys;
mod task_runner;
mod tools;
mod tsc;
mod util;
mod version;
mod worker;

pub mod sys {
  #[allow(clippy::disallowed_types)] // ok, definition
  pub type CliSys = sys_traits::impls::RealSys;
}

use std::env;
use std::future::Future;
use std::io::IsTerminal;

@@ -40,18 +42,22 @@ use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::FutureExt;
use deno_core::unsync::JoinHandle;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use deno_telemetry::OtelConfig;
use deno_terminal::colors;
use factory::CliFactory;
use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;

const MODULE_NOT_FOUND: &str = "Module not found";
const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";

use self::npm::ResolveSnapshotError;
use self::util::draw_thread::DrawThread;
use crate::args::flags_from_vec;
use crate::args::DenoSubcommand;
use crate::args::Flags;

@@ -201,7 +207,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
  match result {
    Ok(v) => Ok(v),
    Err(script_err) => {
      if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = util::result::any_and_jserrorbox_downcast_ref::<ResolvePkgFolderFromDenoReqError>(&script_err) {
      if let Some(worker::CreateCustomWorkerError::ResolvePkgFolderFromDenoReq(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_)))) = any_and_jserrorbox_downcast_ref::<worker::CreateCustomWorkerError>(&script_err) {
        if flags.node_modules_dir.is_none() {
          let mut flags = flags.deref().clone();
          let watch = match &flags.subcommand {

@@ -351,7 +357,7 @@ fn setup_panic_hook() {
  eprintln!("var set and include the backtrace in your report.");
  eprintln!();
  eprintln!("Platform: {} {}", env::consts::OS, env::consts::ARCH);
  eprintln!("Version: {}", version::DENO_VERSION_INFO.deno);
  eprintln!("Version: {}", deno_lib::version::DENO_VERSION_INFO.deno);
  eprintln!("Args: {:?}", env::args().collect::<Vec<_>>());
  eprintln!();
  orig_hook(panic_info);

@@ -373,13 +379,11 @@ fn exit_for_error(error: AnyError) -> ! {
  let mut error_code = 1;

  if let Some(CoreError::Js(e)) =
    util::result::any_and_jserrorbox_downcast_ref::<CoreError>(&error)
    any_and_jserrorbox_downcast_ref::<CoreError>(&error)
  {
    error_string = format_js_error(e);
  } else if let Some(e @ ResolveSnapshotError { .. }) =
    util::result::any_and_jserrorbox_downcast_ref::<ResolveSnapshotError>(
      &error,
    )
    any_and_jserrorbox_downcast_ref::<ResolveSnapshotError>(&error)
  {
    if let Some(e) = e.maybe_integrity_check_error() {
      error_string = e.to_string();

@@ -442,19 +446,19 @@ fn resolve_flags_and_init(
      if err.kind() == clap::error::ErrorKind::DisplayVersion =>
    {
      // Ignore results to avoid BrokenPipe errors.
      util::logger::init(None, None);
      init_logging(None, None);
      let _ = err.print();
      deno_runtime::exit(0);
    }
    Err(err) => {
      util::logger::init(None, None);
      init_logging(None, None);
      exit_for_error(AnyError::from(err))
    }
  };

  let otel_config = flags.otel_config();
  deno_telemetry::init(crate::args::otel_runtime_config(), &otel_config)?;
  util::logger::init(flags.log_level, Some(otel_config));
  deno_telemetry::init(deno_lib::version::otel_runtime_config(), &otel_config)?;
  init_logging(flags.log_level, Some(otel_config));

  // TODO(bartlomieju): remove in Deno v2.5 and hard error then.
  if flags.unstable_config.legacy_flag_enabled {

@@ -487,3 +491,19 @@ fn resolve_flags_and_init(

  Ok(flags)
}

fn init_logging(
  maybe_level: Option<log::Level>,
  otel_config: Option<OtelConfig>,
) {
  deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions {
    maybe_level,
    otel_config,
    // it was considered to hold the draw thread's internal lock
    // across logging, but if outputting to stderr blocks then that
    // could potentially block other threads that access the draw
    // thread's state
    on_log_start: DrawThread::hide,
    on_log_end: DrawThread::show,
  })
}
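Note (editorial, not part of the diff): the `on_log_start`/`on_log_end` hooks hide and re-show the CLI's progress draw thread around each log write, so log lines do not interleave with in-place progress output; passing plain function pointers instead of holding the draw thread's lock across logging avoids blocking other threads if writing to stderr stalls, as the inline comment explains.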
|
|
|
@ -13,8 +13,6 @@ use std::sync::Arc;
|
|||
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleKind;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::error::ModuleLoaderError;
|
||||
use deno_core::futures::future::FutureExt;
|
||||
|
@ -39,10 +37,19 @@ use deno_graph::ModuleGraph;
|
|||
use deno_graph::ModuleGraphError;
|
||||
use deno_graph::Resolution;
|
||||
use deno_graph::WasmModule;
|
||||
use deno_lib::loader::ModuleCodeStringSource;
|
||||
use deno_lib::loader::NotSupportedKindInNpmError;
|
||||
use deno_lib::loader::NpmModuleLoadError;
|
||||
use deno_lib::npm::NpmRegistryReadPermissionChecker;
|
||||
use deno_lib::util::hash::FastInsecureHasher;
|
||||
use deno_lib::worker::CreateModuleLoaderResult;
|
||||
use deno_lib::worker::ModuleLoaderFactory;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_runtime::code_cache;
|
||||
use deno_runtime::deno_node::create_host_defined_options;
|
||||
use deno_runtime::deno_node::ops::require::UnableToGetCwdError;
|
||||
use deno_runtime::deno_node::NodeRequireLoader;
|
||||
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use node_resolver::errors::ClosestPkgJsonError;
|
||||
|
@ -57,7 +64,6 @@ use crate::args::CliOptions;
|
|||
use crate::args::DenoSubcommand;
|
||||
use crate::args::TsTypeLib;
|
||||
use crate::cache::CodeCache;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::emit::Emitter;
|
||||
use crate::graph_container::MainModuleGraphContainer;
|
||||
|
@ -67,16 +73,13 @@ use crate::graph_util::enhance_graph_error;
|
|||
use crate::graph_util::CreateGraphOptions;
|
||||
use crate::graph_util::EnhanceGraphErrorMode;
|
||||
use crate::graph_util::ModuleGraphBuilder;
|
||||
use crate::node::CliCjsCodeAnalyzer;
|
||||
use crate::node::CliNodeCodeTranslator;
|
||||
use crate::node::CliNodeResolver;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::NpmRegistryReadPermissionChecker;
|
||||
use crate::resolver::CliCjsTracker;
|
||||
use crate::resolver::CliNpmReqResolver;
|
||||
use crate::resolver::CliResolver;
|
||||
use crate::resolver::ModuleCodeStringSource;
|
||||
use crate::resolver::NotSupportedKindInNpmError;
|
||||
use crate::resolver::NpmModuleLoader;
|
||||
use crate::sys::CliSys;
|
||||
use crate::tools::check;
|
||||
use crate::tools::check::CheckError;
|
||||
|
@ -84,8 +87,14 @@ use crate::tools::check::TypeChecker;
|
|||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::text_encoding::code_without_source_map;
|
||||
use crate::util::text_encoding::source_map_from_code;
|
||||
use crate::worker::CreateModuleLoaderResult;
|
||||
use crate::worker::ModuleLoaderFactory;
|
||||
|
||||
pub type CliNpmModuleLoader = deno_lib::loader::NpmModuleLoader<
|
||||
CliCjsCodeAnalyzer,
|
||||
DenoInNpmPackageChecker,
|
||||
RealIsBuiltInNodeModuleChecker,
|
||||
CliNpmResolver,
|
||||
CliSys,
|
||||
>;
|
||||
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
|
||||
pub enum PrepareModuleLoadError {
|
||||
|
@ -99,6 +108,11 @@ pub enum PrepareModuleLoadError {
|
|||
Check(#[from] CheckError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
AtomicWriteFileWithRetries(
|
||||
#[from] crate::args::AtomicWriteFileWithRetriesError,
|
||||
),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
Other(#[from] JsErrorBox),
|
||||
}
|
||||
|
||||
|
@ -242,8 +256,9 @@ struct SharedCliModuleLoaderState {
|
|||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
node_code_translator: Arc<CliNodeCodeTranslator>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
npm_module_loader: NpmModuleLoader,
|
||||
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
|
||||
npm_module_loader: CliNpmModuleLoader,
|
||||
npm_registry_permission_checker:
|
||||
Arc<NpmRegistryReadPermissionChecker<CliSys>>,
|
||||
npm_req_resolver: Arc<CliNpmReqResolver>,
|
||||
npm_resolver: CliNpmResolver,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
|
@ -303,8 +318,10 @@ impl CliModuleLoaderFactory {
|
|||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
node_code_translator: Arc<CliNodeCodeTranslator>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
npm_module_loader: NpmModuleLoader,
|
||||
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
|
||||
npm_module_loader: CliNpmModuleLoader,
|
||||
npm_registry_permission_checker: Arc<
|
||||
NpmRegistryReadPermissionChecker<CliSys>,
|
||||
>,
|
||||
npm_req_resolver: Arc<CliNpmReqResolver>,
|
||||
npm_resolver: CliNpmResolver,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
|
@ -416,6 +433,55 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
|
||||
pub enum LoadCodeSourceError {
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
NpmModuleLoad(NpmModuleLoadError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
LoadPreparedModule(#[from] LoadPreparedModuleError),
|
||||
#[class(generic)]
|
||||
#[error("Loading unprepared module: {}{}", .specifier, .maybe_referrer.as_ref().map(|r| format!(", imported from: {}", r)).unwrap_or_default())]
|
||||
LoadUnpreparedModule {
|
||||
specifier: ModuleSpecifier,
|
||||
maybe_referrer: Option<ModuleSpecifier>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
|
||||
pub enum LoadPreparedModuleError {
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
NpmModuleLoad(#[from] crate::emit::EmitParsedSourceHelperError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
LoadMaybeCjs(#[from] LoadMaybeCjsError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
Other(#[from] JsErrorBox),
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
|
||||
pub enum LoadMaybeCjsError {
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
NpmModuleLoad(#[from] crate::emit::EmitParsedSourceHelperError),
|
||||
#[class(inherit)]
|
||||
#[error(transparent)]
|
||||
TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error, deno_error::JsError)]
|
||||
#[class(inherit)]
|
||||
#[error("Could not resolve '{reference}'")]
|
||||
pub struct CouldNotResolveError {
|
||||
reference: deno_semver::npm::NpmPackageNvReference,
|
||||
#[source]
|
||||
#[inherit]
|
||||
source: node_resolver::errors::PackageSubpathResolveError,
|
||||
}
|
||||
|
||||
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
|
||||
lib: TsTypeLib,
|
||||
is_worker: bool,
|
||||
|
@ -440,7 +506,10 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
maybe_referrer: Option<&ModuleSpecifier>,
|
||||
requested_module_type: RequestedModuleType,
|
||||
) -> Result<ModuleSource, ModuleLoaderError> {
|
||||
let code_source = self.load_code_source(specifier, maybe_referrer).await?;
|
||||
let code_source = self
|
||||
.load_code_source(specifier, maybe_referrer)
|
||||
.await
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
let code = if self.shared.is_inspecting
|
||||
|| code_source.media_type == MediaType::Wasm
|
||||
{
|
||||
|
@ -501,7 +570,7 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
maybe_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Result<ModuleCodeStringSource, AnyError> {
|
||||
) -> Result<ModuleCodeStringSource, LoadCodeSourceError> {
|
||||
if let Some(code_source) = self.load_prepared_module(specifier).await? {
|
||||
return Ok(code_source);
|
||||
}
|
||||
|
@ -510,14 +579,14 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
.shared
|
||||
.npm_module_loader
|
||||
.load(specifier, maybe_referrer)
|
||||
.await;
|
||||
.await
|
||||
.map_err(LoadCodeSourceError::NpmModuleLoad);
|
||||
}
|
||||
|
||||
let mut msg = format!("Loading unprepared module: {specifier}");
|
||||
if let Some(referrer) = maybe_referrer {
|
||||
msg = format!("{}, imported from: {}", msg, referrer.as_str());
|
||||
}
|
||||
Err(anyhow!(msg))
|
||||
Err(LoadCodeSourceError::LoadUnpreparedModule {
|
||||
specifier: specifier.clone(),
|
||||
maybe_referrer: maybe_referrer.cloned(),
|
||||
})
|
||||
}
|
||||
|
||||
fn resolve_referrer(
|
||||
|
@ -540,7 +609,8 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
.map_err(|e| e.into())
|
||||
} else {
|
||||
// this cwd check is slow, so try to avoid it
|
||||
let cwd = std::env::current_dir().context("Unable to get CWD")?;
|
||||
let cwd = std::env::current_dir()
|
||||
.map_err(|e| JsErrorBox::from_err(UnableToGetCwdError(e)))?;
|
||||
deno_core::resolve_path(referrer, &cwd).map_err(|e| e.into())
|
||||
}
|
||||
}
|
||||
|
@ -619,8 +689,11 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
ResolutionMode::Import,
|
||||
NodeResolutionKind::Execution,
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("Could not resolve '{}'.", module.nv_reference)
|
||||
.map_err(|source| {
|
||||
JsErrorBox::from_err(CouldNotResolveError {
|
||||
reference: module.nv_reference.clone(),
|
||||
source,
|
||||
})
|
||||
})?
|
||||
}
|
||||
Some(Module::Node(module)) => module.specifier.clone(),
|
||||
|
@ -641,7 +714,7 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
async fn load_prepared_module(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Result<Option<ModuleCodeStringSource>, AnyError> {
|
||||
) -> Result<Option<ModuleCodeStringSource>, LoadPreparedModuleError> {
|
||||
// Note: keep this in sync with the sync version below
|
||||
let graph = self.graph_container.graph();
|
||||
match self.load_prepared_module_or_defer_emit(&graph, specifier)? {
|
||||
|
@ -673,7 +746,8 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
}) => self
|
||||
.load_maybe_cjs(specifier, media_type, source)
|
||||
.await
|
||||
.map(Some),
|
||||
.map(Some)
|
||||
.map_err(LoadPreparedModuleError::LoadMaybeCjs),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
@ -834,7 +908,7 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
original_source: &Arc<str>,
|
||||
) -> Result<ModuleCodeStringSource, AnyError> {
|
||||
) -> Result<ModuleCodeStringSource, LoadMaybeCjsError> {
|
||||
let js_source = if media_type.is_emittable() {
|
||||
Cow::Owned(
|
||||
self
|
||||
|
@ -1145,7 +1219,8 @@ struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
|
|||
sys: CliSys,
|
||||
graph_container: TGraphContainer,
|
||||
in_npm_pkg_checker: DenoInNpmPackageChecker,
|
||||
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
|
||||
npm_registry_permission_checker:
|
||||
Arc<NpmRegistryReadPermissionChecker<CliSys>>,
|
||||
}
|
||||
|
||||
impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
|
||||
|
|
32
cli/node.rs
32
cli/node.rs
|
@ -5,7 +5,7 @@ use std::sync::Arc;
|
|||
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_graph::ParsedSourceStore;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_runtime::deno_fs;
|
||||
|
@ -75,7 +75,7 @@ impl CliCjsCodeAnalyzer {
|
|||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
source: &str,
|
||||
) -> Result<CliCjsAnalysis, AnyError> {
|
||||
) -> Result<CliCjsAnalysis, JsErrorBox> {
|
||||
let source_hash = CacheDBHash::from_hashable(source);
|
||||
if let Some(analysis) =
|
||||
self.cache.get_cjs_analysis(specifier.as_str(), source_hash)
|
||||
|
@ -92,7 +92,9 @@ impl CliCjsCodeAnalyzer {
|
|||
}
|
||||
|
||||
let cjs_tracker = self.cjs_tracker.clone();
|
||||
let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?;
|
||||
let is_maybe_cjs = cjs_tracker
|
||||
.is_maybe_cjs(specifier, media_type)
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
let analysis = if is_maybe_cjs {
|
||||
let maybe_parsed_source = self
|
||||
.parsed_source_cache
|
||||
|
@ -102,9 +104,10 @@ impl CliCjsCodeAnalyzer {
|
|||
deno_core::unsync::spawn_blocking({
|
||||
let specifier = specifier.clone();
|
||||
let source: Arc<str> = source.into();
|
||||
move || -> Result<_, AnyError> {
|
||||
let parsed_source =
|
||||
maybe_parsed_source.map(Ok).unwrap_or_else(|| {
|
||||
move || -> Result<_, JsErrorBox> {
|
||||
let parsed_source = maybe_parsed_source
|
||||
.map(Ok)
|
||||
.unwrap_or_else(|| {
|
||||
deno_ast::parse_program(deno_ast::ParseParams {
|
||||
specifier,
|
||||
text: source,
|
||||
|
@ -113,13 +116,16 @@ impl CliCjsCodeAnalyzer {
|
|||
scope_analysis: false,
|
||||
maybe_syntax: None,
|
||||
})
|
||||
})?;
|
||||
})
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
let is_script = parsed_source.compute_is_script();
|
||||
let is_cjs = cjs_tracker.is_cjs_with_known_is_script(
|
||||
parsed_source.specifier(),
|
||||
media_type,
|
||||
is_script,
|
||||
)?;
|
||||
let is_cjs = cjs_tracker
|
||||
.is_cjs_with_known_is_script(
|
||||
parsed_source.specifier(),
|
||||
media_type,
|
||||
is_script,
|
||||
)
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
if is_cjs {
|
||||
let analysis = parsed_source.analyze_cjs();
|
||||
Ok(CliCjsAnalysis::Cjs {
|
||||
|
@ -151,7 +157,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
|
|||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
source: Option<Cow<'a, str>>,
|
||||
) -> Result<ExtNodeCjsAnalysis<'a>, AnyError> {
|
||||
) -> Result<ExtNodeCjsAnalysis<'a>, JsErrorBox> {
|
||||
let source = match source {
|
||||
Some(source) => source,
|
||||
None => {
|
||||
|
|
|
@ -1,32 +0,0 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::serde_json;
|
||||
use deno_resolver::npm::ByonmNpmResolver;
|
||||
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
|
||||
use deno_runtime::ops::process::NpmProcessStateProvider;
|
||||
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::sys::CliSys;
|
||||
|
||||
pub type CliByonmNpmResolverCreateOptions =
|
||||
ByonmNpmResolverCreateOptions<CliSys>;
|
||||
pub type CliByonmNpmResolver = ByonmNpmResolver<CliSys>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CliByonmNpmProcessStateProvider(pub Arc<CliByonmNpmResolver>);
|
||||
|
||||
impl NpmProcessStateProvider for CliByonmNpmProcessStateProvider {
|
||||
fn get_npm_process_state(&self) -> String {
|
||||
serde_json::to_string(&NpmProcessState {
|
||||
kind: NpmProcessStateKind::Byonm,
|
||||
local_node_modules_path: self
|
||||
.0
|
||||
.root_node_modules_path()
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
}
|
|
@ -220,7 +220,7 @@ impl<'a> LifecycleScripts<'a> {
|
|||
get_package_path,
|
||||
);
|
||||
let init_cwd = &self.config.initial_cwd;
|
||||
let process_state = crate::npm::managed::npm_process_state(
|
||||
let process_state = deno_lib::npm::npm_process_state(
|
||||
snapshot.as_valid_serialized(),
|
||||
Some(root_node_modules_dir_path),
|
||||
);
|
||||
|
@ -240,7 +240,7 @@ impl<'a> LifecycleScripts<'a> {
|
|||
// However, if we concurrently run scripts in the future we will
|
||||
// have to have multiple temp files.
|
||||
let temp_file_fd =
|
||||
deno_runtime::ops::process::npm_process_state_tempfile(
|
||||
deno_runtime::deno_process::npm_process_state_tempfile(
|
||||
process_state.as_bytes(),
|
||||
)
|
||||
.map_err(LifecycleScriptsError::CreateNpmProcessState)?;
|
||||
|
@ -248,7 +248,7 @@ impl<'a> LifecycleScripts<'a> {
|
|||
let _temp_file =
|
||||
unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
|
||||
env_vars.insert(
|
||||
deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME
|
||||
deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME
|
||||
.to_string(),
|
||||
(temp_file_fd as usize).to_string(),
|
||||
);
|
||||
|
|
|
@ -9,6 +9,7 @@ use async_trait::async_trait;
|
|||
use deno_core::futures::stream::FuturesUnordered;
|
||||
use deno_core::futures::StreamExt;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_lib::util::hash::FastInsecureHasher;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_resolver::npm::managed::NpmResolutionCell;
|
||||
|
@ -17,7 +18,6 @@ use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
|
|||
use super::common::NpmPackageFsInstaller;
|
||||
use super::PackageCaching;
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::colors;
|
||||
use crate::npm::CliNpmCache;
|
||||
use crate::npm::CliNpmTarballCache;
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::serde_json;
|
||||
use deno_error::JsError;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_npm::registry::NpmRegistryApi;
|
||||
|
@ -13,14 +11,10 @@ use deno_npm::resolution::NpmResolutionSnapshot;
|
|||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
|
||||
use deno_resolver::npm::managed::NpmResolutionCell;
|
||||
use deno_resolver::npm::ManagedNpmResolverRc;
|
||||
use deno_runtime::ops::process::NpmProcessStateProvider;
|
||||
use thiserror::Error;
|
||||
|
||||
use super::CliNpmRegistryInfoProvider;
|
||||
use crate::args::CliLockfile;
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::sys::CliSys;
|
||||
|
||||
pub type CliManagedNpmResolverCreateOptions =
|
||||
|
@ -207,27 +201,3 @@ async fn snapshot_from_lockfile(
|
|||
.await?;
|
||||
Ok(snapshot)
|
||||
}
|
||||
|
||||
pub fn npm_process_state(
|
||||
snapshot: ValidSerializedNpmResolutionSnapshot,
|
||||
node_modules_path: Option<&Path>,
|
||||
) -> String {
|
||||
serde_json::to_string(&NpmProcessState {
|
||||
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
|
||||
local_node_modules_path: node_modules_path
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CliManagedNpmProcessStateProvider(pub ManagedNpmResolverRc<CliSys>);
|
||||
|
||||
impl NpmProcessStateProvider for CliManagedNpmProcessStateProvider {
|
||||
fn get_npm_process_state(&self) -> String {
|
||||
npm_process_state(
|
||||
self.0.resolution().serialized_valid_snapshot(),
|
||||
self.0.root_node_modules_path(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
mod byonm;
|
||||
pub mod installer;
|
||||
mod managed;
|
||||
mod permission_checker;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
|
@ -11,21 +9,19 @@ use dashmap::DashMap;
|
|||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_lib::version::DENO_VERSION_INFO;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_npm::registry::NpmPackageInfo;
|
||||
use deno_runtime::ops::process::NpmProcessStateProviderRc;
|
||||
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use http::HeaderName;
|
||||
use http::HeaderValue;
|
||||
|
||||
pub use self::byonm::CliByonmNpmResolverCreateOptions;
|
||||
pub use self::managed::CliManagedNpmResolverCreateOptions;
|
||||
pub use self::managed::CliNpmResolverManagedSnapshotOption;
|
||||
pub use self::managed::NpmResolutionInitializer;
|
||||
pub use self::managed::ResolveSnapshotError;
|
||||
pub use self::permission_checker::NpmRegistryReadPermissionChecker;
|
||||
pub use self::permission_checker::NpmRegistryReadPermissionCheckerMode;
|
||||
use crate::file_fetcher::CliFileFetcher;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::sys::CliSys;
|
||||
|
@ -40,6 +36,8 @@ pub type CliNpmResolver = deno_resolver::npm::NpmResolver<CliSys>;
|
|||
pub type CliManagedNpmResolver = deno_resolver::npm::ManagedNpmResolver<CliSys>;
|
||||
pub type CliNpmResolverCreateOptions =
|
||||
deno_resolver::npm::NpmResolverCreateOptions<CliSys>;
|
||||
pub type CliByonmNpmResolverCreateOptions =
|
||||
ByonmNpmResolverCreateOptions<CliSys>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CliNpmCacheHttpClient {
|
||||
|
@ -59,19 +57,6 @@ impl CliNpmCacheHttpClient {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn create_npm_process_state_provider(
|
||||
npm_resolver: &CliNpmResolver,
|
||||
) -> NpmProcessStateProviderRc {
|
||||
match npm_resolver {
|
||||
CliNpmResolver::Byonm(byonm_npm_resolver) => Arc::new(
|
||||
byonm::CliByonmNpmProcessStateProvider(byonm_npm_resolver.clone()),
|
||||
),
|
||||
CliNpmResolver::Managed(managed_npm_resolver) => Arc::new(
|
||||
managed::CliManagedNpmProcessStateProvider(managed_npm_resolver.clone()),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait(?Send)]
|
||||
impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient {
|
||||
async fn download_with_retries_on_any_tokio_runtime(
|
||||
|
@ -198,8 +183,8 @@ pub const NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent";
|
|||
pub fn get_npm_config_user_agent() -> String {
|
||||
format!(
|
||||
"deno/{} npm/? deno/{} {} {}",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
DENO_VERSION_INFO.deno,
|
||||
DENO_VERSION_INFO.deno,
|
||||
std::env::consts::OS,
|
||||
std::env::consts::ARCH
|
||||
)
|
||||
|
|
122
cli/resolver.rs
122
cli/resolver.rs
|
@ -1,17 +1,11 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use dashmap::DashSet;
|
||||
use deno_ast::MediaType;
|
||||
use deno_config::workspace::MappedResolutionDiagnostic;
|
||||
use deno_config::workspace::MappedResolutionError;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::ModuleSourceCode;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_graph::source::ResolveError;
|
||||
|
@ -23,23 +17,19 @@ use deno_resolver::npm::DenoInNpmPackageChecker;
|
|||
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolver;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::is_builtin_node_module;
|
||||
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
|
||||
use deno_semver::package::PackageReq;
|
||||
use node_resolver::NodeResolutionKind;
|
||||
use node_resolver::ResolutionMode;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::args::NpmCachingStrategy;
|
||||
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
|
||||
use crate::node::CliNodeCodeTranslator;
|
||||
use crate::npm::installer::NpmInstaller;
|
||||
use crate::npm::installer::PackageCaching;
|
||||
use crate::npm::CliNpmResolver;
use crate::sys::CliSys;
use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_cow;

pub type CliCjsTracker =
  deno_resolver::cjs::CjsTracker<DenoInNpmPackageChecker, CliSys>;

@@ -62,118 +52,6 @@ pub type CliNpmReqResolver = deno_resolver::npm::NpmReqResolver<
  CliSys,
>;

pub struct ModuleCodeStringSource {
  pub code: ModuleSourceCode,
  pub found_url: ModuleSpecifier,
  pub media_type: MediaType,
}

#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("{media_type} files are not supported in npm packages: {specifier}")]
pub struct NotSupportedKindInNpmError {
  pub media_type: MediaType,
  pub specifier: Url,
}

// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone)
#[derive(Clone)]
pub struct NpmModuleLoader {
  cjs_tracker: Arc<CliCjsTracker>,
  fs: Arc<dyn deno_fs::FileSystem>,
  node_code_translator: Arc<CliNodeCodeTranslator>,
}

impl NpmModuleLoader {
  pub fn new(
    cjs_tracker: Arc<CliCjsTracker>,
    fs: Arc<dyn deno_fs::FileSystem>,
    node_code_translator: Arc<CliNodeCodeTranslator>,
  ) -> Self {
    Self {
      cjs_tracker,
      node_code_translator,
      fs,
    }
  }

  pub async fn load(
    &self,
    specifier: &ModuleSpecifier,
    maybe_referrer: Option<&ModuleSpecifier>,
  ) -> Result<ModuleCodeStringSource, AnyError> {
    let file_path = specifier.to_file_path().unwrap();
    let code = self
      .fs
      .read_file_async(file_path.clone(), None)
      .await
      .map_err(AnyError::from)
      .with_context(|| {
        if file_path.is_dir() {
          // directory imports are not allowed when importing from an
          // ES module, so provide the user with a helpful error message
          let dir_path = file_path;
          let mut msg = "Directory import ".to_string();
          msg.push_str(&dir_path.to_string_lossy());
          if let Some(referrer) = &maybe_referrer {
            msg.push_str(" is not supported resolving import from ");
            msg.push_str(referrer.as_str());
            let entrypoint_name = ["index.mjs", "index.js", "index.cjs"]
              .iter()
              .find(|e| dir_path.join(e).is_file());
            if let Some(entrypoint_name) = entrypoint_name {
              msg.push_str("\nDid you mean to import ");
              msg.push_str(entrypoint_name);
              msg.push_str(" within the directory?");
            }
          }
          msg
        } else {
          let mut msg = "Unable to load ".to_string();
          msg.push_str(&file_path.to_string_lossy());
          if let Some(referrer) = &maybe_referrer {
            msg.push_str(" imported from ");
            msg.push_str(referrer.as_str());
          }
          msg
        }
      })?;

    let media_type = MediaType::from_specifier(specifier);
    if media_type.is_emittable() {
      return Err(AnyError::from(NotSupportedKindInNpmError {
        media_type,
        specifier: specifier.clone(),
      }));
    }

    let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
      // translate cjs to esm if it's cjs and inject node globals
      let code = from_utf8_lossy_cow(code);
      ModuleSourceCode::String(
        self
          .node_code_translator
          .translate_cjs_to_esm(specifier, Some(code))
          .await?
          .into_owned()
          .into(),
      )
    } else {
      // esm and json code is untouched
      ModuleSourceCode::Bytes(match code {
        Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
        Cow::Borrowed(bytes) => bytes.into(),
      })
    };

    Ok(ModuleCodeStringSource {
      code,
      found_url: specifier.clone(),
      media_type: MediaType::from_specifier(specifier),
    })
  }
}

#[derive(Debug, Default)]
pub struct FoundPackageJsonDepFlag(AtomicFlag);

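NpmModuleLoader::load above always follows the same pipeline: read the file, reject emittable media types inside npm packages, then translate the source to ESM only when the CJS tracker reports it may be CommonJS. A minimal, self-contained sketch of that final branch, using only the standard library; the function and the fake translation are illustrative stand-ins, not the Deno API:

use std::borrow::Cow;

// Illustrative stand-in for the branch in `NpmModuleLoader::load`:
// CommonJS sources get translated, ESM/JSON sources pass through untouched.
fn select_code(is_maybe_cjs: bool, raw: &[u8]) -> Cow<'_, str> {
  if is_maybe_cjs {
    // stand-in for `translate_cjs_to_esm`, which wraps the CommonJS
    // source so it can be loaded as an ES module
    Cow::Owned(format!(
      "/* translated from CJS */\n{}",
      String::from_utf8_lossy(raw)
    ))
  } else {
    // esm and json code is untouched
    String::from_utf8_lossy(raw)
  }
}

fn main() {
  println!("{}", select_code(true, b"module.exports = 1;"));
  println!("{}", select_code(false, b"export default 1;"));
}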
63 cli/rt/Cargo.toml Normal file
@@ -0,0 +1,63 @@
# Copyright 2018-2025 the Deno authors. MIT license.

[package]
name = "denort"
version = "2.1.5"
authors.workspace = true
default-run = "denort"
edition.workspace = true
license.workspace = true
publish = false
repository.workspace = true
description = "Provides the denort executable"

[[bin]]
name = "denort"
path = "main.rs"
doc = false

[[test]]
name = "integration"
path = "integration_tests_runner.rs"
harness = false

[build-dependencies]
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }

[dependencies]
deno_cache_dir.workspace = true
deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_error.workspace = true
deno_lib.workspace = true
deno_media_type = { workspace = true, features = ["data_url", "decoding"] }
deno_npm.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_snapshots.workspace = true
deno_terminal.workspace = true
libsui = "0.5.0"
node_resolver.workspace = true

async-trait.workspace = true
bincode = "=1.3.3"
import_map = { version = "=0.21.0", features = ["ext"] }
indexmap.workspace = true
log = { workspace = true, features = ["serde"] }
serde.workspace = true
serde_json.workspace = true
sys_traits = { workspace = true, features = ["getrandom", "filetime", "libc", "real", "strip_unc", "winapi"] }
thiserror.workspace = true
tokio.workspace = true
tokio-util.workspace = true
twox-hash.workspace = true
url.workspace = true

[dev-dependencies]
pretty_assertions.workspace = true
sys_traits = { workspace = true, features = ["memory"] }
test_util.workspace = true

682 cli/rt/binary.rs Normal file
@@ -0,0 +1,682 @@
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsString;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::FastString;
|
||||
use deno_core::ModuleSourceCode;
|
||||
use deno_core::ModuleType;
|
||||
use deno_error::JsError;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_lib::standalone::binary::DenoRtDeserializable;
|
||||
use deno_lib::standalone::binary::Metadata;
|
||||
use deno_lib::standalone::binary::RemoteModuleEntry;
|
||||
use deno_lib::standalone::binary::SpecifierDataStore;
|
||||
use deno_lib::standalone::binary::SpecifierId;
|
||||
use deno_lib::standalone::binary::MAGIC_BYTES;
|
||||
use deno_lib::standalone::virtual_fs::VirtualDirectory;
|
||||
use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries;
|
||||
use deno_media_type::MediaType;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_fs::RealFs;
|
||||
use deno_runtime::deno_io::fs::FsError;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::StackString;
|
||||
use indexmap::IndexMap;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::file_system::FileBackedVfs;
|
||||
use crate::file_system::VfsRoot;
|
||||
|
||||
pub struct StandaloneData {
|
||||
pub metadata: Metadata,
|
||||
pub modules: Arc<StandaloneModules>,
|
||||
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||
pub root_path: PathBuf,
|
||||
pub vfs: Arc<FileBackedVfs>,
|
||||
}
|
||||
|
||||
/// This function will try to run this binary as a standalone binary
|
||||
/// produced by `deno compile`. It determines if this is a standalone
|
||||
/// binary by skipping over the trailer width at the end of the file,
|
||||
/// then checking for the magic trailer string `d3n0l4nd`. If found,
|
||||
/// the bundle is executed. If not, this function exits with `Ok(None)`.
|
||||
pub fn extract_standalone(
|
||||
cli_args: Cow<Vec<OsString>>,
|
||||
) -> Result<Option<StandaloneData>, AnyError> {
|
||||
let Some(data) = libsui::find_section("d3n0l4nd") else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let root_path = {
|
||||
let maybe_current_exe = std::env::current_exe().ok();
|
||||
let current_exe_name = maybe_current_exe
|
||||
.as_ref()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|p| p.to_string_lossy())
|
||||
// should never happen
|
||||
.unwrap_or_else(|| Cow::Borrowed("binary"));
|
||||
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
|
||||
};
|
||||
let root_url = deno_path_util::url_from_directory_path(&root_path)?;
|
||||
|
||||
let DeserializedDataSection {
|
||||
mut metadata,
|
||||
npm_snapshot,
|
||||
modules_store: remote_modules,
|
||||
vfs_root_entries,
|
||||
vfs_files_data,
|
||||
} = match deserialize_binary_data_section(&root_url, data)? {
|
||||
Some(data_section) => data_section,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let cli_args = cli_args.into_owned();
|
||||
metadata.argv.reserve(cli_args.len() - 1);
|
||||
for arg in cli_args.into_iter().skip(1) {
|
||||
metadata.argv.push(arg.into_string().unwrap());
|
||||
}
|
||||
let vfs = {
|
||||
let fs_root = VfsRoot {
|
||||
dir: VirtualDirectory {
|
||||
// align the name of the directory with the root dir
|
||||
name: root_path.file_name().unwrap().to_string_lossy().to_string(),
|
||||
entries: vfs_root_entries,
|
||||
},
|
||||
root_path: root_path.clone(),
|
||||
start_file_offset: 0,
|
||||
};
|
||||
Arc::new(FileBackedVfs::new(
|
||||
Cow::Borrowed(vfs_files_data),
|
||||
fs_root,
|
||||
metadata.vfs_case_sensitivity,
|
||||
))
|
||||
};
|
||||
Ok(Some(StandaloneData {
|
||||
metadata,
|
||||
modules: Arc::new(StandaloneModules {
|
||||
modules: remote_modules,
|
||||
vfs: vfs.clone(),
|
||||
}),
|
||||
npm_snapshot,
|
||||
root_path,
|
||||
vfs,
|
||||
}))
|
||||
}
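extract_standalone treats the presence of the embedded `d3n0l4nd` section as the only signal that this executable is a `deno compile` product; when the section is missing it returns Ok(None) and the caller simply carries on without standalone data. A runnable sketch of that detection idea over an in-memory buffer; the marker name comes from the code above, but the scanning helper is purely illustrative (the real lookup goes through libsui::find_section):

// Illustrative only: scan a byte buffer for an embedded marker the way a
// standalone binary embeds its `d3n0l4nd` data section.
fn find_marker<'a>(haystack: &'a [u8], marker: &[u8]) -> Option<&'a [u8]> {
  haystack
    .windows(marker.len())
    .position(|w| w == marker)
    // everything after the marker would be the serialized payload
    .map(|idx| &haystack[idx + marker.len()..])
}

fn main() {
  let plain = b"just a regular executable".to_vec();
  let mut bundled = b"regular prefix ".to_vec();
  bundled.extend_from_slice(b"d3n0l4nd");
  bundled.extend_from_slice(br#"{"entrypoint":"main.ts"}"#);

  assert!(find_marker(&plain, b"d3n0l4nd").is_none()); // no standalone data
  assert!(find_marker(&bundled, b"d3n0l4nd").is_some()); // run the embedded bundle
}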
|
||||
|
||||
pub struct DeserializedDataSection {
|
||||
pub metadata: Metadata,
|
||||
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||
pub modules_store: RemoteModulesStore,
|
||||
pub vfs_root_entries: VirtualDirectoryEntries,
|
||||
pub vfs_files_data: &'static [u8],
|
||||
}
|
||||
|
||||
pub fn deserialize_binary_data_section(
|
||||
root_dir_url: &Url,
|
||||
data: &'static [u8],
|
||||
) -> Result<Option<DeserializedDataSection>, AnyError> {
|
||||
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
|
||||
if input.len() < MAGIC_BYTES.len() {
|
||||
bail!("Unexpected end of data. Could not find magic bytes.");
|
||||
}
|
||||
let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
|
||||
if magic_bytes != MAGIC_BYTES {
|
||||
return Ok((input, false));
|
||||
}
|
||||
Ok((input, true))
|
||||
}
|
||||
|
||||
let (input, found) = read_magic_bytes(data)?;
|
||||
if !found {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// 1. Metadata
|
||||
let (input, data) =
|
||||
read_bytes_with_u64_len(input).context("reading metadata")?;
|
||||
let metadata: Metadata =
|
||||
serde_json::from_slice(data).context("deserializing metadata")?;
|
||||
// 2. Npm snapshot
|
||||
let (input, data) =
|
||||
read_bytes_with_u64_len(input).context("reading npm snapshot")?;
|
||||
let npm_snapshot = if data.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
|
||||
};
|
||||
// 3. Specifiers
|
||||
let (input, specifiers_store) =
|
||||
SpecifierStore::deserialize(root_dir_url, input)
|
||||
.context("deserializing specifiers")?;
|
||||
// 4. Redirects
|
||||
let (input, redirects_store) =
|
||||
SpecifierDataStore::<SpecifierId>::deserialize(input)
|
||||
.context("deserializing redirects")?;
|
||||
// 5. Remote modules
|
||||
let (input, remote_modules_store) =
|
||||
SpecifierDataStore::<RemoteModuleEntry<'static>>::deserialize(input)
|
||||
.context("deserializing remote modules")?;
|
||||
// 6. VFS
|
||||
let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
|
||||
let vfs_root_entries: VirtualDirectoryEntries =
|
||||
serde_json::from_slice(data).context("deserializing vfs data")?;
|
||||
let (input, vfs_files_data) =
|
||||
read_bytes_with_u64_len(input).context("reading vfs files data")?;
|
||||
|
||||
// finally ensure we read the magic bytes at the end
|
||||
let (_input, found) = read_magic_bytes(input)?;
|
||||
if !found {
|
||||
bail!("Could not find magic bytes at the end of the data.");
|
||||
}
|
||||
|
||||
let modules_store = RemoteModulesStore::new(
|
||||
specifiers_store,
|
||||
redirects_store,
|
||||
remote_modules_store,
|
||||
);
|
||||
|
||||
Ok(Some(DeserializedDataSection {
|
||||
metadata,
|
||||
npm_snapshot,
|
||||
modules_store,
|
||||
vfs_root_entries,
|
||||
vfs_files_data,
|
||||
}))
|
||||
}
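deserialize_binary_data_section walks a fixed sequence of length-prefixed sections between two copies of the magic bytes: metadata, npm snapshot, specifiers, redirects, remote modules, and the VFS. The sketch below shows the write-side framing such a reader expects; the real writer lives on the `deno compile` side and is not part of this file, and the section payloads here are placeholders:

// Append one length-prefixed section: [u64 little-endian length][bytes].
fn write_section(out: &mut Vec<u8>, data: &[u8]) {
  out.extend_from_slice(&(data.len() as u64).to_le_bytes());
  out.extend_from_slice(data);
}

fn main() {
  // illustrative sentinel standing in for MAGIC_BYTES
  const MAGIC: &[u8] = b"d3n0l4nd";
  let mut blob = Vec::new();
  blob.extend_from_slice(MAGIC);
  write_section(&mut blob, br#"{"argv":[]}"#); // 1. metadata (JSON)
  write_section(&mut blob, b""); // 2. npm snapshot (empty means None)
  write_section(&mut blob, b"...specifiers, redirects, remote modules, vfs..."); // placeholder
  blob.extend_from_slice(MAGIC); // trailing magic bytes checked at the end
  assert!(blob.starts_with(MAGIC) && blob.ends_with(MAGIC));
}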
|
||||
|
||||
struct SpecifierStore {
|
||||
data: IndexMap<Arc<Url>, SpecifierId>,
|
||||
reverse: IndexMap<SpecifierId, Arc<Url>>,
|
||||
}
|
||||
|
||||
impl SpecifierStore {
|
||||
pub fn deserialize<'a>(
|
||||
root_dir_url: &Url,
|
||||
input: &'a [u8],
|
||||
) -> std::io::Result<(&'a [u8], Self)> {
|
||||
let (input, len) = read_u32_as_usize(input)?;
|
||||
let mut data = IndexMap::with_capacity(len);
|
||||
let mut reverse = IndexMap::with_capacity(len);
|
||||
let mut input = input;
|
||||
for _ in 0..len {
|
||||
let (new_input, specifier_str) = read_string_lossy(input)?;
|
||||
let specifier = match Url::parse(&specifier_str) {
|
||||
Ok(url) => url,
|
||||
Err(err) => match root_dir_url.join(&specifier_str) {
|
||||
Ok(url) => url,
|
||||
Err(_) => {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
err,
|
||||
));
|
||||
}
|
||||
},
|
||||
};
|
||||
let (new_input, id) = SpecifierId::deserialize(new_input)?;
|
||||
let specifier = Arc::new(specifier);
|
||||
data.insert(specifier.clone(), id);
|
||||
reverse.insert(id, specifier);
|
||||
input = new_input;
|
||||
}
|
||||
Ok((input, Self { data, reverse }))
|
||||
}
|
||||
|
||||
pub fn get_id(&self, specifier: &Url) -> Option<SpecifierId> {
|
||||
self.data.get(specifier).cloned()
|
||||
}
|
||||
|
||||
pub fn get_specifier(&self, specifier_id: SpecifierId) -> Option<&Url> {
|
||||
self.reverse.get(&specifier_id).map(|url| url.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct StandaloneModules {
|
||||
modules: RemoteModulesStore,
|
||||
vfs: Arc<FileBackedVfs>,
|
||||
}
|
||||
|
||||
impl StandaloneModules {
|
||||
pub fn resolve_specifier<'a>(
|
||||
&'a self,
|
||||
specifier: &'a Url,
|
||||
) -> Result<Option<&'a Url>, TooManyRedirectsError> {
|
||||
if specifier.scheme() == "file" {
|
||||
Ok(Some(specifier))
|
||||
} else {
|
||||
self.modules.resolve_specifier(specifier)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_file(&self, path: &Path) -> bool {
|
||||
self.vfs.file_entry(path).is_ok()
|
||||
}
|
||||
|
||||
pub fn read<'a>(
|
||||
&'a self,
|
||||
specifier: &'a Url,
|
||||
) -> Result<Option<DenoCompileModuleData<'a>>, JsErrorBox> {
|
||||
if specifier.scheme() == "file" {
|
||||
let path = deno_path_util::url_to_file_path(specifier)
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
let mut transpiled = None;
|
||||
let mut source_map = None;
|
||||
let mut cjs_export_analysis = None;
|
||||
let bytes = match self.vfs.file_entry(&path) {
|
||||
Ok(entry) => {
|
||||
let bytes = self
|
||||
.vfs
|
||||
.read_file_all(entry)
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
transpiled = entry
|
||||
.transpiled_offset
|
||||
.and_then(|t| self.vfs.read_file_offset_with_len(t).ok());
|
||||
source_map = entry
|
||||
.source_map_offset
|
||||
.and_then(|t| self.vfs.read_file_offset_with_len(t).ok());
|
||||
cjs_export_analysis = entry
|
||||
.cjs_export_analysis_offset
|
||||
.and_then(|t| self.vfs.read_file_offset_with_len(t).ok());
|
||||
bytes
|
||||
}
|
||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
||||
match RealFs.read_file_sync(&path, None) {
|
||||
Ok(bytes) => bytes,
|
||||
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
|
||||
return Ok(None)
|
||||
}
|
||||
Err(err) => return Err(JsErrorBox::from_err(err)),
|
||||
}
|
||||
}
|
||||
Err(err) => return Err(JsErrorBox::from_err(err)),
|
||||
};
|
||||
Ok(Some(DenoCompileModuleData {
|
||||
media_type: MediaType::from_specifier(specifier),
|
||||
specifier,
|
||||
data: bytes,
|
||||
transpiled,
|
||||
source_map,
|
||||
cjs_export_analysis,
|
||||
}))
|
||||
} else {
|
||||
self.modules.read(specifier).map_err(JsErrorBox::from_err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DenoCompileModuleData<'a> {
|
||||
pub specifier: &'a Url,
|
||||
pub media_type: MediaType,
|
||||
pub data: Cow<'static, [u8]>,
|
||||
pub transpiled: Option<Cow<'static, [u8]>>,
|
||||
pub source_map: Option<Cow<'static, [u8]>>,
|
||||
pub cjs_export_analysis: Option<Cow<'static, [u8]>>,
|
||||
}
|
||||
|
||||
impl<'a> DenoCompileModuleData<'a> {
|
||||
pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
|
||||
fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
|
||||
match data {
|
||||
Cow::Borrowed(d) => DenoCompileModuleSource::String(
|
||||
// SAFETY: we know this is a valid utf8 string
|
||||
unsafe { std::str::from_utf8_unchecked(d) },
|
||||
),
|
||||
Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
|
||||
}
|
||||
}
|
||||
|
||||
let data = self.transpiled.unwrap_or(self.data);
|
||||
let (media_type, source) = match self.media_type {
|
||||
MediaType::JavaScript
|
||||
| MediaType::Jsx
|
||||
| MediaType::Mjs
|
||||
| MediaType::Cjs
|
||||
| MediaType::TypeScript
|
||||
| MediaType::Mts
|
||||
| MediaType::Cts
|
||||
| MediaType::Dts
|
||||
| MediaType::Dmts
|
||||
| MediaType::Dcts
|
||||
| MediaType::Tsx => (ModuleType::JavaScript, into_string_unsafe(data)),
|
||||
MediaType::Json => (ModuleType::Json, into_string_unsafe(data)),
|
||||
MediaType::Wasm => {
|
||||
(ModuleType::Wasm, DenoCompileModuleSource::Bytes(data))
|
||||
}
|
||||
// just assume javascript if we made it here
|
||||
MediaType::Css | MediaType::SourceMap | MediaType::Unknown => {
|
||||
(ModuleType::JavaScript, DenoCompileModuleSource::Bytes(data))
|
||||
}
|
||||
};
|
||||
(self.specifier, media_type, source)
|
||||
}
|
||||
}
|
||||
|
||||
pub enum DenoCompileModuleSource {
|
||||
String(&'static str),
|
||||
Bytes(Cow<'static, [u8]>),
|
||||
}
|
||||
|
||||
impl DenoCompileModuleSource {
|
||||
pub fn into_for_v8(self) -> ModuleSourceCode {
|
||||
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
|
||||
ModuleSourceCode::Bytes(match data {
|
||||
Cow::Borrowed(d) => d.into(),
|
||||
Cow::Owned(d) => d.into_boxed_slice().into(),
|
||||
})
|
||||
}
|
||||
|
||||
match self {
|
||||
// todo(https://github.com/denoland/deno_core/pull/943): store whether
|
||||
// the string is ascii or not ahead of time so we can avoid the is_ascii()
|
||||
// check in FastString::from_static
|
||||
Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
|
||||
Self::Bytes(b) => into_bytes(b),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, JsError)]
|
||||
#[class(generic)]
|
||||
#[error("Too many redirects resolving: {0}")]
|
||||
pub struct TooManyRedirectsError(Url);
|
||||
|
||||
pub struct RemoteModulesStore {
|
||||
specifiers: SpecifierStore,
|
||||
redirects: SpecifierDataStore<SpecifierId>,
|
||||
remote_modules: SpecifierDataStore<RemoteModuleEntry<'static>>,
|
||||
}
|
||||
|
||||
impl RemoteModulesStore {
|
||||
fn new(
|
||||
specifiers: SpecifierStore,
|
||||
redirects: SpecifierDataStore<SpecifierId>,
|
||||
remote_modules: SpecifierDataStore<RemoteModuleEntry<'static>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
specifiers,
|
||||
redirects,
|
||||
remote_modules,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_specifier<'a>(
|
||||
&'a self,
|
||||
specifier: &'a Url,
|
||||
) -> Result<Option<&'a Url>, TooManyRedirectsError> {
|
||||
let Some(mut current) = self.specifiers.get_id(specifier) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let mut count = 0;
|
||||
loop {
|
||||
if count > 10 {
|
||||
return Err(TooManyRedirectsError(specifier.clone()));
|
||||
}
|
||||
match self.redirects.get(current) {
|
||||
Some(to) => {
|
||||
current = *to;
|
||||
count += 1;
|
||||
}
|
||||
None => {
|
||||
if count == 0 {
|
||||
return Ok(Some(specifier));
|
||||
} else {
|
||||
return Ok(self.specifiers.get_specifier(current));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
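resolve_specifier follows the redirect table for at most ten hops before failing with TooManyRedirectsError, so a cyclic or corrupted table cannot loop forever. The same bounded-walk pattern in miniature over a plain HashMap, purely for illustration:

use std::collections::HashMap;

// Follow `redirects` from `start`, refusing to chase more than `max_hops` links.
fn follow(redirects: &HashMap<u32, u32>, start: u32, max_hops: u32) -> Result<u32, String> {
  let mut current = start;
  for _ in 0..max_hops {
    match redirects.get(&current) {
      Some(next) => current = *next,
      None => return Ok(current), // settled on a final id
    }
  }
  Err(format!("too many redirects resolving {start}"))
}

fn main() {
  let redirects = HashMap::from([(1u32, 2u32), (2, 3)]);
  assert_eq!(follow(&redirects, 1, 10), Ok(3));
  let cycle = HashMap::from([(1u32, 2u32), (2, 1)]);
  assert!(follow(&cycle, 1, 10).is_err());
}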
|
||||
|
||||
pub fn read<'a>(
|
||||
&'a self,
|
||||
original_specifier: &'a Url,
|
||||
) -> Result<Option<DenoCompileModuleData<'a>>, TooManyRedirectsError> {
|
||||
#[allow(clippy::ptr_arg)]
|
||||
fn handle_cow_ref(data: &Cow<'static, [u8]>) -> Cow<'static, [u8]> {
|
||||
match data {
|
||||
Cow::Borrowed(data) => Cow::Borrowed(data),
|
||||
Cow::Owned(data) => {
|
||||
// this variant should never happen because the data
|
||||
// should always be borrowed static in denort
|
||||
debug_assert!(false);
|
||||
Cow::Owned(data.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut count = 0;
|
||||
let Some(mut specifier) = self.specifiers.get_id(original_specifier) else {
|
||||
return Ok(None);
|
||||
};
|
||||
loop {
|
||||
if count > 10 {
|
||||
return Err(TooManyRedirectsError(original_specifier.clone()));
|
||||
}
|
||||
match self.redirects.get(specifier) {
|
||||
Some(to) => {
|
||||
specifier = *to;
|
||||
count += 1;
|
||||
}
|
||||
None => {
|
||||
let Some(entry) = self.remote_modules.get(specifier) else {
|
||||
return Ok(None);
|
||||
};
|
||||
return Ok(Some(DenoCompileModuleData {
|
||||
specifier: if count == 0 {
|
||||
original_specifier
|
||||
} else {
|
||||
self.specifiers.get_specifier(specifier).unwrap()
|
||||
},
|
||||
media_type: entry.media_type,
|
||||
data: handle_cow_ref(&entry.data),
|
||||
transpiled: entry.maybe_transpiled.as_ref().map(handle_cow_ref),
|
||||
source_map: entry.maybe_source_map.as_ref().map(handle_cow_ref),
|
||||
cjs_export_analysis: entry
|
||||
.maybe_cjs_export_analysis
|
||||
.as_ref()
|
||||
.map(handle_cow_ref),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_npm_snapshot(
|
||||
input: &[u8],
|
||||
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
|
||||
fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
|
||||
let (input, id) = read_string_lossy(input)?;
|
||||
let id = NpmPackageId::from_serialized(&id)?;
|
||||
Ok((input, id))
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_lifetimes)] // clippy bug
|
||||
fn parse_root_package<'a>(
|
||||
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
||||
) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
|
||||
{
|
||||
|input| {
|
||||
let (input, req) = read_string_lossy(input)?;
|
||||
let req = PackageReq::from_str(&req)?;
|
||||
let (input, id) = read_u32_as_usize(input)?;
|
||||
Ok((input, (req, id_to_npm_id(id)?)))
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_lifetimes)] // clippy bug
|
||||
fn parse_package_dep<'a>(
|
||||
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
||||
) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
|
||||
{
|
||||
|input| {
|
||||
let (input, req) = read_string_lossy(input)?;
|
||||
let (input, id) = read_u32_as_usize(input)?;
|
||||
let req = StackString::from_cow(req);
|
||||
Ok((input, (req, id_to_npm_id(id)?)))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_package<'a>(
|
||||
input: &'a [u8],
|
||||
id: NpmPackageId,
|
||||
id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
||||
) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
|
||||
let (input, deps_len) = read_u32_as_usize(input)?;
|
||||
let (input, dependencies) =
|
||||
parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
|
||||
Ok((
|
||||
input,
|
||||
SerializedNpmResolutionSnapshotPackage {
|
||||
id,
|
||||
system: Default::default(),
|
||||
dist: Default::default(),
|
||||
dependencies,
|
||||
optional_dependencies: Default::default(),
|
||||
bin: None,
|
||||
scripts: Default::default(),
|
||||
deprecated: Default::default(),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
let (input, packages_len) = read_u32_as_usize(input)?;
|
||||
|
||||
// get a hashmap of all the npm package ids to their serialized ids
|
||||
let (input, data_ids_to_npm_ids) =
|
||||
parse_vec_n_times(input, packages_len, parse_id)
|
||||
.context("deserializing id")?;
|
||||
let data_id_to_npm_id = |id: usize| {
|
||||
data_ids_to_npm_ids
|
||||
.get(id)
|
||||
.cloned()
|
||||
.ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
|
||||
};
|
||||
|
||||
let (input, root_packages_len) = read_u32_as_usize(input)?;
|
||||
let (input, root_packages) = parse_hashmap_n_times(
|
||||
input,
|
||||
root_packages_len,
|
||||
parse_root_package(&data_id_to_npm_id),
|
||||
)
|
||||
.context("deserializing root package")?;
|
||||
let (input, packages) =
|
||||
parse_vec_n_times_with_index(input, packages_len, |input, index| {
|
||||
parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
|
||||
})
|
||||
.context("deserializing package")?;
|
||||
|
||||
if !input.is_empty() {
|
||||
bail!("Unexpected data left over");
|
||||
}
|
||||
|
||||
Ok(
|
||||
SerializedNpmResolutionSnapshot {
|
||||
packages,
|
||||
root_packages,
|
||||
}
|
||||
// this is ok because we have already verified that all the
|
||||
// identifiers found in the snapshot are valid via the
|
||||
// npm package id -> npm package id mapping
|
||||
.into_valid_unsafe(),
|
||||
)
|
||||
}
|
||||
|
||||
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
|
||||
mut input: &[u8],
|
||||
times: usize,
|
||||
parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
|
||||
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
|
||||
let mut results = HashMap::with_capacity(times);
|
||||
for _ in 0..times {
|
||||
let result = parse(input);
|
||||
let (new_input, (key, value)) = result?;
|
||||
results.insert(key, value);
|
||||
input = new_input;
|
||||
}
|
||||
Ok((input, results))
|
||||
}
|
||||
|
||||
fn parse_vec_n_times<TResult>(
|
||||
input: &[u8],
|
||||
times: usize,
|
||||
parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
|
||||
) -> Result<(&[u8], Vec<TResult>), AnyError> {
|
||||
parse_vec_n_times_with_index(input, times, |input, _index| parse(input))
|
||||
}
|
||||
|
||||
fn parse_vec_n_times_with_index<TResult>(
|
||||
mut input: &[u8],
|
||||
times: usize,
|
||||
parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
|
||||
) -> Result<(&[u8], Vec<TResult>), AnyError> {
|
||||
let mut results = Vec::with_capacity(times);
|
||||
for i in 0..times {
|
||||
let result = parse(input, i);
|
||||
let (new_input, result) = result?;
|
||||
results.push(result);
|
||||
input = new_input;
|
||||
}
|
||||
Ok((input, results))
|
||||
}
|
||||
|
||||
fn read_bytes_with_u64_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
|
||||
let (input, len) = read_u64(input)?;
|
||||
let (input, data) = read_bytes(input, len as usize)?;
|
||||
Ok((input, data))
|
||||
}
|
||||
|
||||
fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> {
|
||||
let (input, len) = read_u32_as_usize(input)?;
|
||||
let (input, data) = read_bytes(input, len)?;
|
||||
Ok((input, data))
|
||||
}
|
||||
|
||||
fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> {
|
||||
check_has_len(input, len)?;
|
||||
let (len_bytes, input) = input.split_at(len);
|
||||
Ok((input, len_bytes))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> {
|
||||
if input.len() < len {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"Unexpected end of data",
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn read_string_lossy(input: &[u8]) -> std::io::Result<(&[u8], Cow<str>)> {
|
||||
let (input, data_bytes) = read_bytes_with_u32_len(input)?;
|
||||
Ok((input, String::from_utf8_lossy(data_bytes)))
|
||||
}
|
||||
|
||||
fn read_u32_as_usize(input: &[u8]) -> std::io::Result<(&[u8], usize)> {
|
||||
let (input, len_bytes) = read_bytes(input, 4)?;
|
||||
let len = u32::from_le_bytes(len_bytes.try_into().unwrap());
|
||||
Ok((input, len as usize))
|
||||
}
|
||||
|
||||
fn read_u64(input: &[u8]) -> std::io::Result<(&[u8], u64)> {
|
||||
let (input, len_bytes) = read_bytes(input, 8)?;
|
||||
let len = u64::from_le_bytes(len_bytes.try_into().unwrap());
|
||||
Ok((input, len))
|
||||
}
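All of the read_* helpers above share one shape: split a fixed-width little-endian length off the front of the input, take that many bytes, and hand back the untouched remainder so the next field can be parsed from it. A small round-trip sketch of the u32-prefixed string case; the writer half is assumed for illustration, while the reader mirrors read_bytes, read_u32_as_usize, and read_string_lossy:

use std::borrow::Cow;

fn write_string(out: &mut Vec<u8>, s: &str) {
  out.extend_from_slice(&(s.len() as u32).to_le_bytes());
  out.extend_from_slice(s.as_bytes());
}

fn read_string(input: &[u8]) -> std::io::Result<(&[u8], Cow<'_, str>)> {
  // take `len` bytes off the front, returning (remainder, taken bytes)
  fn take(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> {
    if input.len() < len {
      return Err(std::io::Error::new(
        std::io::ErrorKind::InvalidData,
        "Unexpected end of data",
      ));
    }
    let (data, rest) = input.split_at(len);
    Ok((rest, data))
  }
  let (rest, len_bytes) = take(input, 4)?;
  let len = u32::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
  let (rest, data) = take(rest, len)?;
  Ok((rest, String::from_utf8_lossy(data)))
}

fn main() -> std::io::Result<()> {
  let mut buf = Vec::new();
  write_string(&mut buf, "jsr:@std/yaml");
  write_string(&mut buf, "https://deno.land/x/example/mod.ts");
  let (rest, first) = read_string(&buf)?;
  let (rest, second) = read_string(rest)?;
  assert_eq!(first.as_ref(), "jsr:@std/yaml");
  assert_eq!(second.as_ref(), "https://deno.land/x/example/mod.ts");
  assert!(rest.is_empty());
  Ok(())
}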
11 cli/rt/build.rs Normal file
@@ -0,0 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.

fn main() {
  // Skip building from docs.rs.
  if std::env::var_os("DOCS_RS").is_some() {
    return;
  }

  deno_runtime::deno_napi::print_linker_flags("denort");
  deno_runtime::deno_webgpu::print_linker_flags("denort");
}
@@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.

use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::BufReader;
use std::io::BufWriter;
@@ -10,17 +9,15 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::sync::AtomicFlag;
use deno_lib::util::hash::FastInsecureHasher;
use deno_path_util::get_atomic_path;
use deno_runtime::code_cache::CodeCache;
use deno_runtime::code_cache::CodeCacheType;

use crate::cache::FastInsecureHasher;
use crate::worker::CliCodeCache;
use url::Url;

enum CodeCacheStrategy {
  FirstRun(FirstRunCodeCacheStrategy),
@@ -76,12 +73,27 @@ impl DenoCompileCodeCache {
      }
    }
  }

  pub fn for_deno_core(self: Arc<Self>) -> Arc<dyn CodeCache> {
    self.clone()
  }

  pub fn enabled(&self) -> bool {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
      CodeCacheStrategy::SubsequentRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
    }
  }
}

impl CodeCache for DenoCompileCodeCache {
  fn get_sync(
    &self,
    specifier: &ModuleSpecifier,
    specifier: &Url,
    code_cache_type: CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
@@ -106,7 +118,7 @@ impl CodeCache for DenoCompileCodeCache {

  fn set_sync(
    &self,
    specifier: ModuleSpecifier,
    specifier: Url,
    code_cache_type: CodeCacheType,
    source_hash: u64,
    bytes: &[u8],
@@ -152,23 +164,6 @@ impl CodeCache for DenoCompileCodeCache {
  }
}

impl CliCodeCache for DenoCompileCodeCache {
  fn enabled(&self) -> bool {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
      CodeCacheStrategy::SubsequentRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
    }
  }

  fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache> {
    self
  }
}

type CodeCacheKey = (String, CodeCacheType);

struct FirstRunCodeCacheData {
@@ -216,7 +211,7 @@ struct SubsequentRunCodeCacheStrategy {
impl SubsequentRunCodeCacheStrategy {
  fn take_from_cache(
    &self,
    specifier: &ModuleSpecifier,
    specifier: &Url,
    code_cache_type: CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
@@ -395,8 +390,6 @@ fn deserialize_with_reader<T: Read>(

#[cfg(test)]
mod test {
  use std::fs::File;

  use test_util::TempDir;

  use super::*;
@@ -463,8 +456,8 @@ mod test {
  fn code_cache() {
    let temp_dir = TempDir::new();
    let file_path = temp_dir.path().join("cache.bin").to_path_buf();
    let url1 = ModuleSpecifier::parse("https://deno.land/example1.js").unwrap();
    let url2 = ModuleSpecifier::parse("https://deno.land/example2.js").unwrap();
    let url1 = Url::parse("https://deno.land/example1.js").unwrap();
    let url2 = Url::parse("https://deno.land/example2.js").unwrap();
    // first run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234);
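The code cache keys entries by specifier and code-cache type (see CodeCacheKey above) and the lookup methods take a source hash, so a stored entry can be ignored once the module text that produced it changes. The lookup rule in miniature, with illustrative types rather than the DenoCompileCodeCache internals:

use std::collections::HashMap;

// (specifier, kind) -> (source_hash, cached v8 code-cache bytes)
type Cache = HashMap<(String, &'static str), (u64, Vec<u8>)>;

fn get_sync(cache: &Cache, specifier: &str, kind: &'static str, source_hash: u64) -> Option<Vec<u8>> {
  match cache.get(&(specifier.to_string(), kind)) {
    // only reuse the entry when the source that produced it is unchanged
    Some((stored_hash, bytes)) if *stored_hash == source_hash => Some(bytes.clone()),
    _ => None,
  }
}

fn main() {
  let mut cache = Cache::new();
  cache.insert(("https://deno.land/example1.js".into(), "es-module"), (1234, vec![1, 2, 3]));
  assert!(get_sync(&cache, "https://deno.land/example1.js", "es-module", 1234).is_some());
  assert!(get_sync(&cache, "https://deno.land/example1.js", "es-module", 9999).is_none()); // source changed
}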
1713 cli/rt/file_system.rs Normal file
File diff suppressed because it is too large

5 cli/rt/integration_tests_runner.rs Normal file
@@ -0,0 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.

pub fn main() {
  // this file exists to cause the executable to be built when running cargo test
}
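Because the [[test]] target in cli/rt/Cargo.toml sets harness = false, Cargo links this file as a plain binary instead of generating a libtest harness, so an empty main is enough to force the denort executable to build during cargo test. For contrast, a hypothetical custom-harness entrypoint that actually runs its own checks could look like the following (the file layout matches the [[test]] entry above; the check itself is made up):

// A `[[test]]` target with `harness = false`: we own `main` and report results.
fn main() {
  let mut failures = 0;

  // a hypothetical check; with no harness there are no #[test] attributes
  if 2 + 2 != 4 {
    eprintln!("FAILED: arithmetic sanity check");
    failures += 1;
  } else {
    println!("ok: arithmetic sanity check");
  }

  // a non-zero exit code is how a custom harness signals failure to cargo
  std::process::exit(if failures == 0 { 0 } else { 1 });
}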
@@ -1,46 +1,27 @@
// Copyright 2018-2025 the Deno authors. MIT license.

// Allow unused code warnings because we share
// code between the two bin targets.
#![allow(dead_code)]
#![allow(unused_imports)]

mod standalone;

mod args;
mod cache;
mod emit;
mod file_fetcher;
mod http_util;
mod js;
mod node;
mod npm;
mod resolver;
mod shared;
mod sys;
mod task_runner;
mod util;
mod version;
mod worker;

use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::env::current_exe;
use std::sync::Arc;

use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::error::JsError;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_lib::version::otel_runtime_config;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use deno_terminal::colors;
use indexmap::IndexMap;
use standalone::DenoCompileFileSystem;

use crate::args::Flags;
use crate::util::result::any_and_jserrorbox_downcast_ref;
use self::binary::extract_standalone;
use self::file_system::DenoRtSys;

mod binary;
mod code_cache;
mod file_system;
mod node;
mod run;

pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
  log::error!(
@@ -87,27 +68,26 @@ fn load_env_vars(env_vars: &IndexMap<String, String>) {
fn main() {
  deno_runtime::deno_permissions::mark_standalone();
  let args: Vec<_> = env::args_os().collect();
  let standalone = standalone::extract_standalone(Cow::Owned(args));
  let standalone = extract_standalone(Cow::Owned(args));
  let future = async move {
    match standalone {
      Ok(Some(data)) => {
        deno_telemetry::init(
          crate::args::otel_runtime_config(),
        deno_runtime::deno_telemetry::init(
          otel_runtime_config(),
          &data.metadata.otel_config,
        )?;
        util::logger::init(
        init_logging(
          data.metadata.log_level,
          Some(data.metadata.otel_config.clone()),
        );
        load_env_vars(&data.metadata.env_vars_from_env_file);
        let fs = DenoCompileFileSystem::new(data.vfs.clone());
        let sys = crate::sys::CliSys::DenoCompile(fs.clone());
        let exit_code = standalone::run(Arc::new(fs), sys, data).await?;
        let sys = DenoRtSys::new(data.vfs.clone());
        let exit_code = run::run(Arc::new(sys.clone()), sys, data).await?;
        deno_runtime::exit(exit_code);
      }
      Ok(None) => Ok(()),
      Err(err) => {
        util::logger::init(None, None);
        init_logging(None, None);
        Err(err)
      }
    }
@@ -115,3 +95,15 @@ fn main() {

  unwrap_or_exit(create_and_run_current_thread_with_maybe_metrics(future));
}

fn init_logging(
  maybe_level: Option<log::Level>,
  otel_config: Option<OtelConfig>,
) {
  deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions {
    maybe_level,
    otel_config,
    on_log_start: || {},
    on_log_end: || {},
  })
}
165 cli/rt/node.rs Normal file
@@ -0,0 +1,165 @@
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::url::Url;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_lib::loader::NpmModuleLoader;
|
||||
use deno_lib::standalone::binary::CjsExportAnalysisEntry;
|
||||
use deno_media_type::MediaType;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_resolver::npm::NpmReqResolver;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
|
||||
use node_resolver::analyze::CjsAnalysis;
|
||||
use node_resolver::analyze::CjsAnalysisExports;
|
||||
use node_resolver::analyze::NodeCodeTranslator;
|
||||
|
||||
use crate::binary::StandaloneModules;
|
||||
use crate::file_system::DenoRtSys;
|
||||
|
||||
pub type DenoRtCjsTracker =
|
||||
deno_resolver::cjs::CjsTracker<DenoInNpmPackageChecker, DenoRtSys>;
|
||||
pub type DenoRtNpmResolver = deno_resolver::npm::NpmResolver<DenoRtSys>;
|
||||
pub type DenoRtNpmModuleLoader = NpmModuleLoader<
|
||||
CjsCodeAnalyzer,
|
||||
DenoInNpmPackageChecker,
|
||||
RealIsBuiltInNodeModuleChecker,
|
||||
DenoRtNpmResolver,
|
||||
DenoRtSys,
|
||||
>;
|
||||
pub type DenoRtNodeCodeTranslator = NodeCodeTranslator<
|
||||
CjsCodeAnalyzer,
|
||||
DenoInNpmPackageChecker,
|
||||
RealIsBuiltInNodeModuleChecker,
|
||||
DenoRtNpmResolver,
|
||||
DenoRtSys,
|
||||
>;
|
||||
pub type DenoRtNodeResolver = deno_runtime::deno_node::NodeResolver<
|
||||
DenoInNpmPackageChecker,
|
||||
DenoRtNpmResolver,
|
||||
DenoRtSys,
|
||||
>;
|
||||
pub type DenoRtNpmReqResolver = NpmReqResolver<
|
||||
DenoInNpmPackageChecker,
|
||||
RealIsBuiltInNodeModuleChecker,
|
||||
DenoRtNpmResolver,
|
||||
DenoRtSys,
|
||||
>;
|
||||
|
||||
pub struct CjsCodeAnalyzer {
|
||||
cjs_tracker: Arc<DenoRtCjsTracker>,
|
||||
modules: Arc<StandaloneModules>,
|
||||
sys: DenoRtSys,
|
||||
}
|
||||
|
||||
impl CjsCodeAnalyzer {
|
||||
pub fn new(
|
||||
cjs_tracker: Arc<DenoRtCjsTracker>,
|
||||
modules: Arc<StandaloneModules>,
|
||||
sys: DenoRtSys,
|
||||
) -> Self {
|
||||
Self {
|
||||
cjs_tracker,
|
||||
modules,
|
||||
sys,
|
||||
}
|
||||
}
|
||||
|
||||
fn inner_cjs_analysis<'a>(
|
||||
&self,
|
||||
specifier: &Url,
|
||||
source: Cow<'a, str>,
|
||||
) -> Result<CjsAnalysis<'a>, JsErrorBox> {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
if media_type == MediaType::Json {
|
||||
return Ok(CjsAnalysis::Cjs(CjsAnalysisExports {
|
||||
exports: vec![],
|
||||
reexports: vec![],
|
||||
}));
|
||||
}
|
||||
|
||||
let cjs_tracker = self.cjs_tracker.clone();
|
||||
let is_maybe_cjs = cjs_tracker
|
||||
.is_maybe_cjs(specifier, media_type)
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
let analysis = if is_maybe_cjs {
|
||||
let data = self
|
||||
.modules
|
||||
.read(specifier)?
|
||||
.and_then(|d| d.cjs_export_analysis);
|
||||
match data {
|
||||
Some(data) => {
|
||||
let data: CjsExportAnalysisEntry = bincode::deserialize(&data)
|
||||
.map_err(|err| JsErrorBox::generic(err.to_string()))?;
|
||||
match data {
|
||||
CjsExportAnalysisEntry::Esm => {
|
||||
cjs_tracker.set_is_known_script(specifier, false);
|
||||
CjsAnalysis::Esm(source)
|
||||
}
|
||||
CjsExportAnalysisEntry::Cjs(analysis) => {
|
||||
cjs_tracker.set_is_known_script(specifier, true);
|
||||
CjsAnalysis::Cjs(analysis)
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
if log::log_enabled!(log::Level::Debug) {
|
||||
if self.sys.is_specifier_in_vfs(specifier) {
|
||||
log::debug!(
|
||||
"No CJS export analysis was stored for '{}'. Assuming ESM. This might indicate a bug in Deno.",
|
||||
specifier
|
||||
);
|
||||
} else {
|
||||
log::debug!(
|
||||
"Analyzing potentially CommonJS files is not supported at runtime in a compiled executable ({}). Assuming ESM.",
|
||||
specifier
|
||||
);
|
||||
}
|
||||
}
|
||||
// assume ESM as we don't have access to swc here
|
||||
CjsAnalysis::Esm(source)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
CjsAnalysis::Esm(source)
|
||||
};
|
||||
|
||||
Ok(analysis)
|
||||
}
|
||||
}
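CjsCodeAnalyzer never parses JavaScript at runtime: it only decodes the CjsExportAnalysisEntry that was serialized into the binary at compile time, and assumes ESM when nothing was stored. A bincode round-trip sketch with a simplified stand-in enum; the real entry type lives in deno_lib and its exact shape is not shown in this diff:

use serde::{Deserialize, Serialize};

// Simplified stand-in for deno_lib's CjsExportAnalysisEntry.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum ExportAnalysisEntry {
  Esm,
  Cjs { exports: Vec<String>, reexports: Vec<String> },
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
  let entry = ExportAnalysisEntry::Cjs {
    exports: vec!["readFile".to_string()],
    reexports: vec![],
  };
  // the compile step would serialize this next to the file...
  let bytes = bincode::serialize(&entry)?;
  // ...and denort decodes it at runtime instead of re-analyzing the source
  let decoded: ExportAnalysisEntry = bincode::deserialize(&bytes)?;
  assert_eq!(decoded, entry);
  Ok(())
}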
|
||||
|
||||
#[async_trait::async_trait(?Send)]
|
||||
impl node_resolver::analyze::CjsCodeAnalyzer for CjsCodeAnalyzer {
|
||||
async fn analyze_cjs<'a>(
|
||||
&self,
|
||||
specifier: &Url,
|
||||
source: Option<Cow<'a, str>>,
|
||||
) -> Result<CjsAnalysis<'a>, JsErrorBox> {
|
||||
let source = match source {
|
||||
Some(source) => source,
|
||||
None => {
|
||||
if let Ok(path) = deno_path_util::url_to_file_path(specifier) {
|
||||
// todo(dsherret): should this use the sync method instead?
|
||||
if let Ok(source_from_file) =
|
||||
self.sys.read_text_file_lossy_async(path, None).await
|
||||
{
|
||||
source_from_file
|
||||
} else {
|
||||
return Ok(CjsAnalysis::Cjs(CjsAnalysisExports {
|
||||
exports: vec![],
|
||||
reexports: vec![],
|
||||
}));
|
||||
}
|
||||
} else {
|
||||
return Ok(CjsAnalysis::Cjs(CjsAnalysisExports {
|
||||
exports: vec![],
|
||||
reexports: vec![],
|
||||
}));
|
||||
}
|
||||
}
|
||||
};
|
||||
self.inner_cjs_analysis(specifier, source)
|
||||
}
|
||||
}
|
990 cli/rt/run.rs Normal file
@@ -0,0 +1,990 @@
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use deno_cache_dir::npm::NpmCacheDir;
|
||||
use deno_config::workspace::MappedResolution;
|
||||
use deno_config::workspace::ResolverWorkspaceJsrPackage;
|
||||
use deno_config::workspace::WorkspaceResolver;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::error::ModuleLoaderError;
|
||||
use deno_core::futures::future::LocalBoxFuture;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::v8_set_flags;
|
||||
use deno_core::FastString;
|
||||
use deno_core::FeatureChecker;
|
||||
use deno_core::ModuleLoader;
|
||||
use deno_core::ModuleSourceCode;
|
||||
use deno_core::ModuleType;
|
||||
use deno_core::RequestedModuleType;
|
||||
use deno_core::ResolutionKind;
|
||||
use deno_core::SourceCodeCacheInfo;
|
||||
use deno_error::JsErrorBox;
|
||||
use deno_lib::args::get_root_cert_store;
|
||||
use deno_lib::args::npm_pkg_req_ref_to_binary_command;
|
||||
use deno_lib::args::CaData;
|
||||
use deno_lib::args::RootCertStoreLoadError;
|
||||
use deno_lib::loader::NpmModuleLoader;
|
||||
use deno_lib::npm::create_npm_process_state_provider;
|
||||
use deno_lib::npm::NpmRegistryReadPermissionChecker;
|
||||
use deno_lib::npm::NpmRegistryReadPermissionCheckerMode;
|
||||
use deno_lib::standalone::binary::NodeModules;
|
||||
use deno_lib::util::hash::FastInsecureHasher;
|
||||
use deno_lib::util::text_encoding::from_utf8_lossy_cow;
|
||||
use deno_lib::util::text_encoding::from_utf8_lossy_owned;
|
||||
use deno_lib::util::v8::construct_v8_flags;
|
||||
use deno_lib::worker::CreateModuleLoaderResult;
|
||||
use deno_lib::worker::LibMainWorkerFactory;
|
||||
use deno_lib::worker::LibMainWorkerOptions;
|
||||
use deno_lib::worker::ModuleLoaderFactory;
|
||||
use deno_lib::worker::StorageKeyResolver;
|
||||
use deno_media_type::MediaType;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_npm::resolution::NpmResolutionSnapshot;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_resolver::cjs::CjsTracker;
|
||||
use deno_resolver::cjs::IsCjsResolutionMode;
|
||||
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
|
||||
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
|
||||
use deno_resolver::npm::managed::NpmResolutionCell;
|
||||
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
|
||||
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
|
||||
use deno_resolver::npm::DenoInNpmPackageChecker;
|
||||
use deno_resolver::npm::NpmReqResolver;
|
||||
use deno_resolver::npm::NpmReqResolverOptions;
|
||||
use deno_resolver::npm::NpmResolver;
|
||||
use deno_resolver::npm::NpmResolverCreateOptions;
|
||||
use deno_runtime::code_cache::CodeCache;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::create_host_defined_options;
|
||||
use deno_runtime::deno_node::NodeRequireLoader;
|
||||
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
|
||||
use deno_runtime::deno_permissions::Permissions;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
use deno_runtime::deno_web::BlobStore;
|
||||
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
|
||||
use deno_runtime::WorkerExecutionMode;
|
||||
use deno_runtime::WorkerLogLevel;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use node_resolver::analyze::NodeCodeTranslator;
|
||||
use node_resolver::errors::ClosestPkgJsonError;
|
||||
use node_resolver::NodeResolutionKind;
|
||||
use node_resolver::NodeResolver;
|
||||
use node_resolver::PackageJsonResolver;
|
||||
use node_resolver::ResolutionMode;
|
||||
|
||||
use crate::binary::DenoCompileModuleSource;
|
||||
use crate::binary::StandaloneData;
|
||||
use crate::binary::StandaloneModules;
|
||||
use crate::code_cache::DenoCompileCodeCache;
|
||||
use crate::file_system::DenoRtSys;
|
||||
use crate::file_system::FileBackedVfs;
|
||||
use crate::node::CjsCodeAnalyzer;
|
||||
use crate::node::DenoRtCjsTracker;
|
||||
use crate::node::DenoRtNodeCodeTranslator;
|
||||
use crate::node::DenoRtNodeResolver;
|
||||
use crate::node::DenoRtNpmModuleLoader;
|
||||
use crate::node::DenoRtNpmReqResolver;
|
||||
|
||||
struct SharedModuleLoaderState {
|
||||
cjs_tracker: Arc<DenoRtCjsTracker>,
|
||||
code_cache: Option<Arc<DenoCompileCodeCache>>,
|
||||
modules: Arc<StandaloneModules>,
|
||||
node_code_translator: Arc<DenoRtNodeCodeTranslator>,
|
||||
node_resolver: Arc<DenoRtNodeResolver>,
|
||||
npm_module_loader: Arc<DenoRtNpmModuleLoader>,
|
||||
npm_registry_permission_checker: NpmRegistryReadPermissionChecker<DenoRtSys>,
|
||||
npm_req_resolver: Arc<DenoRtNpmReqResolver>,
|
||||
vfs: Arc<FileBackedVfs>,
|
||||
workspace_resolver: WorkspaceResolver,
|
||||
}
|
||||
|
||||
impl SharedModuleLoaderState {
|
||||
fn get_code_cache(
|
||||
&self,
|
||||
specifier: &Url,
|
||||
source: &[u8],
|
||||
) -> Option<SourceCodeCacheInfo> {
|
||||
let Some(code_cache) = &self.code_cache else {
|
||||
return None;
|
||||
};
|
||||
if !code_cache.enabled() {
|
||||
return None;
|
||||
}
|
||||
// deno version is already included in the root cache key
|
||||
let hash = FastInsecureHasher::new_without_deno_version()
|
||||
.write_hashable(source)
|
||||
.finish();
|
||||
let data = code_cache.get_sync(
|
||||
specifier,
|
||||
deno_runtime::code_cache::CodeCacheType::EsModule,
|
||||
hash,
|
||||
);
|
||||
Some(SourceCodeCacheInfo {
|
||||
hash,
|
||||
data: data.map(Cow::Owned),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct EmbeddedModuleLoader {
|
||||
shared: Arc<SharedModuleLoaderState>,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for EmbeddedModuleLoader {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("EmbeddedModuleLoader").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleLoader for EmbeddedModuleLoader {
|
||||
fn resolve(
|
||||
&self,
|
||||
raw_specifier: &str,
|
||||
referrer: &str,
|
||||
kind: ResolutionKind,
|
||||
) -> Result<Url, ModuleLoaderError> {
|
||||
let referrer = if referrer == "." {
|
||||
if kind != ResolutionKind::MainModule {
|
||||
return Err(
|
||||
JsErrorBox::generic(format!(
|
||||
"Expected to resolve main module, got {:?} instead.",
|
||||
kind
|
||||
))
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
let current_dir = std::env::current_dir().unwrap();
|
||||
deno_core::resolve_path(".", ¤t_dir)?
|
||||
} else {
|
||||
Url::parse(referrer).map_err(|err| {
|
||||
JsErrorBox::type_error(format!(
|
||||
"Referrer uses invalid specifier: {}",
|
||||
err
|
||||
))
|
||||
})?
|
||||
};
|
||||
let referrer_kind = if self
|
||||
.shared
|
||||
.cjs_tracker
|
||||
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))
|
||||
.map_err(JsErrorBox::from_err)?
|
||||
{
|
||||
ResolutionMode::Require
|
||||
} else {
|
||||
ResolutionMode::Import
|
||||
};
|
||||
|
||||
if self.shared.node_resolver.in_npm_package(&referrer) {
|
||||
return Ok(
|
||||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve(
|
||||
raw_specifier,
|
||||
&referrer,
|
||||
referrer_kind,
|
||||
NodeResolutionKind::Execution,
|
||||
)
|
||||
.map_err(JsErrorBox::from_err)?
|
||||
.into_url(),
|
||||
);
|
||||
}
|
||||
|
||||
let mapped_resolution = self
|
||||
.shared
|
||||
.workspace_resolver
|
||||
.resolve(raw_specifier, &referrer);
|
||||
|
||||
match mapped_resolution {
|
||||
Ok(MappedResolution::WorkspaceJsrPackage { specifier, .. }) => {
|
||||
Ok(specifier)
|
||||
}
|
||||
Ok(MappedResolution::WorkspaceNpmPackage {
|
||||
target_pkg_json: pkg_json,
|
||||
sub_path,
|
||||
..
|
||||
}) => Ok(
|
||||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve_package_subpath_from_deno_module(
|
||||
pkg_json.dir_path(),
|
||||
sub_path.as_deref(),
|
||||
Some(&referrer),
|
||||
referrer_kind,
|
||||
NodeResolutionKind::Execution,
|
||||
)
|
||||
.map_err(JsErrorBox::from_err)?,
|
||||
),
|
||||
Ok(MappedResolution::PackageJson {
|
||||
dep_result,
|
||||
sub_path,
|
||||
alias,
|
||||
..
|
||||
}) => match dep_result
|
||||
.as_ref()
|
||||
.map_err(|e| JsErrorBox::from_err(e.clone()))?
|
||||
{
|
||||
PackageJsonDepValue::Req(req) => self
|
||||
.shared
|
||||
.npm_req_resolver
|
||||
.resolve_req_with_sub_path(
|
||||
req,
|
||||
sub_path.as_deref(),
|
||||
&referrer,
|
||||
referrer_kind,
|
||||
NodeResolutionKind::Execution,
|
||||
)
|
||||
.map_err(|e| JsErrorBox::from_err(e).into()),
|
||||
PackageJsonDepValue::Workspace(version_req) => {
|
||||
let pkg_folder = self
|
||||
.shared
|
||||
.workspace_resolver
|
||||
.resolve_workspace_pkg_json_folder_for_pkg_json_dep(
|
||||
alias,
|
||||
version_req,
|
||||
)
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
Ok(
|
||||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve_package_subpath_from_deno_module(
|
||||
pkg_folder,
|
||||
sub_path.as_deref(),
|
||||
Some(&referrer),
|
||||
referrer_kind,
|
||||
NodeResolutionKind::Execution,
|
||||
)
|
||||
.map_err(JsErrorBox::from_err)?,
|
||||
)
|
||||
}
|
||||
},
|
||||
Ok(MappedResolution::Normal { specifier, .. })
|
||||
| Ok(MappedResolution::ImportMap { specifier, .. }) => {
|
||||
if let Ok(reference) =
|
||||
NpmPackageReqReference::from_specifier(&specifier)
|
||||
{
|
||||
return Ok(
|
||||
self
|
||||
.shared
|
||||
.npm_req_resolver
|
||||
.resolve_req_reference(
|
||||
&reference,
|
||||
&referrer,
|
||||
referrer_kind,
|
||||
NodeResolutionKind::Execution,
|
||||
)
|
||||
.map_err(JsErrorBox::from_err)?,
|
||||
);
|
||||
}
|
||||
|
||||
if specifier.scheme() == "jsr" {
|
||||
if let Some(specifier) = self
|
||||
.shared
|
||||
.modules
|
||||
.resolve_specifier(&specifier)
|
||||
.map_err(JsErrorBox::from_err)?
|
||||
{
|
||||
return Ok(specifier.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(
|
||||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.handle_if_in_node_modules(&specifier)
|
||||
.unwrap_or(specifier),
|
||||
)
|
||||
}
|
||||
Err(err)
|
||||
if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
|
||||
{
|
||||
let maybe_res = self
|
||||
.shared
|
||||
.npm_req_resolver
|
||||
.resolve_if_for_npm_pkg(
|
||||
raw_specifier,
|
||||
&referrer,
|
||||
referrer_kind,
|
||||
NodeResolutionKind::Execution,
|
||||
)
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
if let Some(res) = maybe_res {
|
||||
return Ok(res.into_url());
|
||||
}
|
||||
Err(JsErrorBox::from_err(err).into())
|
||||
}
|
||||
Err(err) => Err(JsErrorBox::from_err(err).into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_host_defined_options<'s>(
|
||||
&self,
|
||||
scope: &mut deno_core::v8::HandleScope<'s>,
|
||||
name: &str,
|
||||
) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> {
|
||||
let name = Url::parse(name).ok()?;
|
||||
if self.shared.node_resolver.in_npm_package(&name) {
|
||||
Some(create_host_defined_options(scope))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn load(
|
||||
&self,
|
||||
original_specifier: &Url,
|
||||
maybe_referrer: Option<&Url>,
|
||||
_is_dynamic: bool,
|
||||
_requested_module_type: RequestedModuleType,
|
||||
) -> deno_core::ModuleLoadResponse {
|
||||
if original_specifier.scheme() == "data" {
|
||||
let data_url_text =
|
||||
match deno_media_type::data_url::RawDataUrl::parse(original_specifier)
|
||||
.and_then(|url| url.decode())
|
||||
{
|
||||
Ok(response) => response,
|
||||
Err(err) => {
|
||||
return deno_core::ModuleLoadResponse::Sync(Err(
|
||||
JsErrorBox::type_error(format!("{:#}", err)).into(),
|
||||
));
|
||||
}
|
||||
};
|
||||
return deno_core::ModuleLoadResponse::Sync(Ok(
|
||||
deno_core::ModuleSource::new(
|
||||
deno_core::ModuleType::JavaScript,
|
||||
ModuleSourceCode::String(data_url_text.into()),
|
||||
original_specifier,
|
||||
None,
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
if self.shared.node_resolver.in_npm_package(original_specifier) {
|
||||
let shared = self.shared.clone();
|
||||
let original_specifier = original_specifier.clone();
|
||||
let maybe_referrer = maybe_referrer.cloned();
|
||||
return deno_core::ModuleLoadResponse::Async(
|
||||
async move {
|
||||
let code_source = shared
|
||||
.npm_module_loader
|
||||
.load(&original_specifier, maybe_referrer.as_ref())
|
||||
.await
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
let code_cache_entry = shared.get_code_cache(
|
||||
&code_source.found_url,
|
||||
code_source.code.as_bytes(),
|
||||
);
|
||||
Ok(deno_core::ModuleSource::new_with_redirect(
|
||||
match code_source.media_type {
|
||||
MediaType::Json => ModuleType::Json,
|
||||
_ => ModuleType::JavaScript,
|
||||
},
|
||||
code_source.code,
|
||||
&original_specifier,
|
||||
&code_source.found_url,
|
||||
code_cache_entry,
|
||||
))
|
||||
}
|
||||
.boxed_local(),
|
||||
);
|
||||
}
|
||||
|
||||
match self.shared.modules.read(original_specifier) {
|
||||
Ok(Some(module)) => {
|
||||
let media_type = module.media_type;
|
||||
let (module_specifier, module_type, module_source) =
|
||||
module.into_parts();
|
||||
let is_maybe_cjs = match self
|
||||
.shared
|
||||
.cjs_tracker
|
||||
.is_maybe_cjs(original_specifier, media_type)
|
||||
{
|
||||
Ok(is_maybe_cjs) => is_maybe_cjs,
|
||||
Err(err) => {
|
||||
return deno_core::ModuleLoadResponse::Sync(Err(
|
||||
JsErrorBox::type_error(format!("{:?}", err)).into(),
|
||||
));
|
||||
}
|
||||
};
|
||||
if is_maybe_cjs {
|
||||
let original_specifier = original_specifier.clone();
|
||||
let module_specifier = module_specifier.clone();
|
||||
let shared = self.shared.clone();
|
||||
deno_core::ModuleLoadResponse::Async(
|
||||
async move {
|
||||
let source = match module_source {
|
||||
DenoCompileModuleSource::String(string) => {
|
||||
Cow::Borrowed(string)
|
||||
}
|
||||
DenoCompileModuleSource::Bytes(module_code_bytes) => {
|
||||
match module_code_bytes {
|
||||
Cow::Owned(bytes) => {
|
||||
Cow::Owned(from_utf8_lossy_owned(bytes))
|
||||
}
|
||||
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
|
||||
}
|
||||
}
|
||||
};
|
||||
let source = shared
|
||||
.node_code_translator
|
||||
.translate_cjs_to_esm(&module_specifier, Some(source))
|
||||
.await
|
||||
.map_err(JsErrorBox::from_err)?;
|
||||
let module_source = match source {
|
||||
Cow::Owned(source) => ModuleSourceCode::String(source.into()),
|
||||
Cow::Borrowed(source) => {
|
||||
ModuleSourceCode::String(FastString::from_static(source))
|
||||
}
|
||||
};
|
||||
let code_cache_entry = shared
|
||||
.get_code_cache(&module_specifier, module_source.as_bytes());
|
||||
Ok(deno_core::ModuleSource::new_with_redirect(
|
||||
module_type,
|
||||
module_source,
|
||||
&original_specifier,
|
||||
&module_specifier,
|
||||
code_cache_entry,
|
||||
))
|
||||
}
|
||||
.boxed_local(),
|
||||
)
|
||||
} else {
|
||||
let module_source = module_source.into_for_v8();
|
||||
let code_cache_entry = self
|
||||
.shared
|
||||
.get_code_cache(module_specifier, module_source.as_bytes());
|
||||
deno_core::ModuleLoadResponse::Sync(Ok(
|
||||
deno_core::ModuleSource::new_with_redirect(
|
||||
module_type,
|
||||
module_source,
|
||||
original_specifier,
|
||||
module_specifier,
|
||||
code_cache_entry,
|
||||
),
|
||||
))
|
||||
}
|
||||
}
|
||||
Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(
|
||||
JsErrorBox::type_error(format!(
|
||||
"Module not found: {}",
|
||||
original_specifier
|
||||
))
|
||||
.into(),
|
||||
)),
|
||||
Err(err) => deno_core::ModuleLoadResponse::Sync(Err(
|
||||
JsErrorBox::type_error(format!("{:?}", err)).into(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
fn code_cache_ready(
|
||||
&self,
|
||||
specifier: Url,
|
||||
source_hash: u64,
|
||||
code_cache_data: &[u8],
|
||||
) -> LocalBoxFuture<'static, ()> {
|
||||
if let Some(code_cache) = &self.shared.code_cache {
|
||||
code_cache.set_sync(
|
||||
specifier,
|
||||
deno_runtime::code_cache::CodeCacheType::EsModule,
|
||||
source_hash,
|
||||
code_cache_data,
|
||||
);
|
||||
}
|
||||
std::future::ready(()).boxed_local()
|
||||
}
|
||||
|
||||
fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> {
|
||||
let url = Url::parse(file_name).ok()?;
|
||||
let data = self.shared.modules.read(&url).ok()??;
|
||||
data.source_map
|
||||
}
|
||||
|
||||
fn get_source_mapped_source_line(
|
||||
&self,
|
||||
file_name: &str,
|
||||
line_number: usize,
|
||||
) -> Option<String> {
|
||||
let specifier = Url::parse(file_name).ok()?;
|
||||
let data = self.shared.modules.read(&specifier).ok()??;
|
||||
|
||||
let source = String::from_utf8_lossy(&data.data);
|
||||
// Do NOT use .lines(): it skips the terminating empty line.
|
||||
// (due to internally using .split_terminator() instead of .split())
|
||||
let lines: Vec<&str> = source.split('\n').collect();
|
||||
if line_number >= lines.len() {
|
||||
Some(format!(
|
||||
"{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
|
||||
crate::colors::yellow("Warning"), line_number + 1,
|
||||
))
|
||||
} else {
|
||||
Some(lines[line_number].to_string())
|
||||
}
|
||||
}
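The comment above the split is worth demonstrating: str::lines drops the empty final segment after a trailing newline, while split('\n') keeps it, and the out-of-bounds warning above relies on the latter count. A two-assert illustration:

fn main() {
  let source = "line 1\nline 2\n";
  // .lines() treats the trailing newline as a terminator: 2 entries
  assert_eq!(source.lines().count(), 2);
  // .split('\n') keeps the empty final segment: 3 entries
  assert_eq!(source.split('\n').count(), 3);
}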
|
||||
}

impl NodeRequireLoader for EmbeddedModuleLoader {
  fn ensure_read_permission<'a>(
    &self,
    permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
    path: &'a std::path::Path,
  ) -> Result<Cow<'a, std::path::Path>, JsErrorBox> {
    if self.shared.modules.has_file(path) {
      // allow reading if the file is in the snapshot
      return Ok(Cow::Borrowed(path));
    }

    self
      .shared
      .npm_registry_permission_checker
      .ensure_read_permission(permissions, path)
      .map_err(JsErrorBox::from_err)
  }

  fn load_text_file_lossy(
    &self,
    path: &std::path::Path,
  ) -> Result<Cow<'static, str>, JsErrorBox> {
    let file_entry = self
      .shared
      .vfs
      .file_entry(path)
      .map_err(JsErrorBox::from_err)?;
    let file_bytes = self
      .shared
      .vfs
      .read_file_offset_with_len(
        file_entry.transpiled_offset.unwrap_or(file_entry.offset),
      )
      .map_err(JsErrorBox::from_err)?;
    Ok(from_utf8_lossy_cow(file_bytes))
  }

  fn is_maybe_cjs(&self, specifier: &Url) -> Result<bool, ClosestPkgJsonError> {
    let media_type = MediaType::from_specifier(specifier);
    self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type)
  }
}

struct StandaloneModuleLoaderFactory {
  shared: Arc<SharedModuleLoaderState>,
}

impl StandaloneModuleLoaderFactory {
  pub fn create_result(&self) -> CreateModuleLoaderResult {
    let loader = Rc::new(EmbeddedModuleLoader {
      shared: self.shared.clone(),
    });
    CreateModuleLoaderResult {
      module_loader: loader.clone(),
      node_require_loader: loader,
    }
  }
}

impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
  fn create_for_main(
    &self,
    _root_permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult {
    self.create_result()
  }

  fn create_for_worker(
    &self,
    _parent_permissions: PermissionsContainer,
    _permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult {
    self.create_result()
  }
}
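create_result hands the same Rc<EmbeddedModuleLoader> out twice, once as the module loader and once as the Node require loader, so both views share one piece of state. A stripped-down sketch of that pattern (trait and struct names here are illustrative, not the real deno_runtime traits):

use std::rc::Rc;

trait ModuleLoader {}
trait NodeRequireLoader {}

struct EmbeddedLoader;
impl ModuleLoader for EmbeddedLoader {}
impl NodeRequireLoader for EmbeddedLoader {}

struct LoaderResult {
  module_loader: Rc<dyn ModuleLoader>,
  node_require_loader: Rc<dyn NodeRequireLoader>,
}

fn create_result() -> LoaderResult {
  let loader = Rc::new(EmbeddedLoader);
  LoaderResult {
    module_loader: loader.clone(),
    node_require_loader: loader, // same allocation, different trait view
  }
}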

struct StandaloneRootCertStoreProvider {
  ca_stores: Option<Vec<String>>,
  ca_data: Option<CaData>,
  cell: OnceLock<Result<RootCertStore, RootCertStoreLoadError>>,
}

impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
  fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> {
    self
      .cell
      // get_or_try_init was not stable yet when this was written
      .get_or_init(|| {
        get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone())
      })
      .as_ref()
      .map_err(|err| JsErrorBox::from_err(err.clone()))
  }
}
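The provider caches the whole Result because OnceLock::get_or_try_init was not stable at the time, so even a failed load is computed once and the stored error is cloned on later calls. A self-contained sketch of the same trick (types are placeholders, not the real cert-store types):

use std::sync::OnceLock;

struct CachedCerts {
  cell: OnceLock<Result<Vec<u8>, String>>,
}

impl CachedCerts {
  fn get_or_try_init(&self) -> Result<&Vec<u8>, String> {
    self
      .cell
      .get_or_init(load_certs) // runs at most once, even on failure
      .as_ref()
      .map_err(|err| err.clone())
  }
}

fn load_certs() -> Result<Vec<u8>, String> {
  // hypothetical bundle path, purely for illustration
  std::fs::read("ca-bundle.pem").map_err(|err| err.to_string())
}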
|
||||
|
||||
pub async fn run(
|
||||
fs: Arc<dyn FileSystem>,
|
||||
sys: DenoRtSys,
|
||||
data: StandaloneData,
|
||||
) -> Result<i32, AnyError> {
|
||||
let StandaloneData {
|
||||
metadata,
|
||||
modules,
|
||||
npm_snapshot,
|
||||
root_path,
|
||||
vfs,
|
||||
} = data;
|
||||
let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
|
||||
ca_stores: metadata.ca_stores,
|
||||
ca_data: metadata.ca_data.map(CaData::Bytes),
|
||||
cell: Default::default(),
|
||||
});
|
||||
// use a dummy npm registry url
|
||||
let npm_registry_url = Url::parse("https://localhost/").unwrap();
|
||||
let root_dir_url = Arc::new(Url::from_directory_path(&root_path).unwrap());
|
||||
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
|
||||
let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
|
||||
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(sys.clone()));
|
||||
let npm_registry_permission_checker = {
|
||||
let mode = match &metadata.node_modules {
|
||||
Some(NodeModules::Managed {
|
||||
node_modules_dir: Some(path),
|
||||
}) => NpmRegistryReadPermissionCheckerMode::Local(PathBuf::from(path)),
|
||||
Some(NodeModules::Byonm { .. }) => {
|
||||
NpmRegistryReadPermissionCheckerMode::Byonm
|
||||
}
|
||||
Some(NodeModules::Managed {
|
||||
node_modules_dir: None,
|
||||
})
|
||||
| None => NpmRegistryReadPermissionCheckerMode::Global(
|
||||
npm_global_cache_dir.clone(),
|
||||
),
|
||||
};
|
||||
NpmRegistryReadPermissionChecker::new(sys.clone(), mode)
|
||||
};
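Which permission-checker mode gets used falls directly out of the embedded node_modules metadata: a managed install with a materialized directory is checked locally, BYONM delegates to the byonm rules, and everything else is served from the global compile cache. A simplified, self-contained rendering of that decision (enum shapes mirror the serialized metadata, trimmed down for illustration):

use std::path::{Path, PathBuf};

enum NodeModules {
  Managed { node_modules_dir: Option<String> },
  Byonm { root_node_modules_dir: Option<String> },
}

enum ReadPermissionMode {
  Local(PathBuf),
  Byonm,
  Global(PathBuf),
}

fn pick_mode(
  meta: Option<&NodeModules>,
  global_cache_dir: &Path,
) -> ReadPermissionMode {
  match meta {
    Some(NodeModules::Managed { node_modules_dir: Some(path) }) => {
      ReadPermissionMode::Local(PathBuf::from(path))
    }
    Some(NodeModules::Byonm { .. }) => ReadPermissionMode::Byonm,
    Some(NodeModules::Managed { node_modules_dir: None }) | None => {
      ReadPermissionMode::Global(global_cache_dir.to_path_buf())
    }
  }
}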
|
||||
let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules {
|
||||
Some(NodeModules::Managed { node_modules_dir }) => {
|
||||
// create an npmrc that uses the fake npm_registry_url to resolve packages
|
||||
let npmrc = Arc::new(ResolvedNpmRc {
|
||||
default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
|
||||
registry_url: npm_registry_url.clone(),
|
||||
config: Default::default(),
|
||||
},
|
||||
scopes: Default::default(),
|
||||
registry_configs: Default::default(),
|
||||
});
|
||||
let npm_cache_dir = Arc::new(NpmCacheDir::new(
|
||||
&sys,
|
||||
npm_global_cache_dir,
|
||||
npmrc.get_all_known_registries_urls(),
|
||||
));
|
||||
let snapshot = npm_snapshot.unwrap();
|
||||
let maybe_node_modules_path = node_modules_dir
|
||||
.map(|node_modules_dir| root_path.join(node_modules_dir));
|
||||
let in_npm_pkg_checker =
|
||||
DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed(
|
||||
ManagedInNpmPkgCheckerCreateOptions {
|
||||
root_cache_dir_url: npm_cache_dir.root_dir_url(),
|
||||
maybe_node_modules_path: maybe_node_modules_path.as_deref(),
|
||||
},
|
||||
));
|
||||
let npm_resolution =
|
||||
Arc::new(NpmResolutionCell::new(NpmResolutionSnapshot::new(snapshot)));
|
||||
let npm_resolver = NpmResolver::<DenoRtSys>::new::<DenoRtSys>(
|
||||
NpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions {
|
||||
npm_resolution,
|
||||
npm_cache_dir,
|
||||
sys: sys.clone(),
|
||||
maybe_node_modules_path,
|
||||
npm_system_info: Default::default(),
|
||||
npmrc,
|
||||
}),
|
||||
);
|
||||
(in_npm_pkg_checker, npm_resolver)
|
||||
}
|
||||
Some(NodeModules::Byonm {
|
||||
root_node_modules_dir,
|
||||
}) => {
|
||||
let root_node_modules_dir =
|
||||
root_node_modules_dir.map(|p| vfs.root().join(p));
|
||||
let in_npm_pkg_checker =
|
||||
DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Byonm);
|
||||
let npm_resolver = NpmResolver::<DenoRtSys>::new::<DenoRtSys>(
|
||||
NpmResolverCreateOptions::Byonm(ByonmNpmResolverCreateOptions {
|
||||
sys: sys.clone(),
|
||||
pkg_json_resolver: pkg_json_resolver.clone(),
|
||||
root_node_modules_dir,
|
||||
}),
|
||||
);
|
||||
(in_npm_pkg_checker, npm_resolver)
|
||||
}
|
||||
None => {
|
||||
// Packages from different registries are already inlined in the binary,
|
||||
// so no need to create actual `.npmrc` configuration.
|
||||
let npmrc = create_default_npmrc();
|
||||
let npm_cache_dir = Arc::new(NpmCacheDir::new(
|
||||
&sys,
|
||||
npm_global_cache_dir,
|
||||
npmrc.get_all_known_registries_urls(),
|
||||
));
|
||||
let in_npm_pkg_checker =
|
||||
DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed(
|
||||
ManagedInNpmPkgCheckerCreateOptions {
|
||||
root_cache_dir_url: npm_cache_dir.root_dir_url(),
|
||||
maybe_node_modules_path: None,
|
||||
},
|
||||
));
|
||||
let npm_resolution = Arc::new(NpmResolutionCell::default());
|
||||
let npm_resolver = NpmResolver::<DenoRtSys>::new::<DenoRtSys>(
|
||||
NpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions {
|
||||
npm_resolution,
|
||||
sys: sys.clone(),
|
||||
npm_cache_dir,
|
||||
maybe_node_modules_path: None,
|
||||
npm_system_info: Default::default(),
|
||||
npmrc: create_default_npmrc(),
|
||||
}),
|
||||
);
|
||||
(in_npm_pkg_checker, npm_resolver)
|
||||
}
|
||||
};
|
||||
|
||||
let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
|
||||
let node_resolver = Arc::new(NodeResolver::new(
|
||||
in_npm_pkg_checker.clone(),
|
||||
RealIsBuiltInNodeModuleChecker,
|
||||
npm_resolver.clone(),
|
||||
pkg_json_resolver.clone(),
|
||||
sys.clone(),
|
||||
node_resolver::ConditionsFromResolutionMode::default(),
|
||||
));
|
||||
let cjs_tracker = Arc::new(CjsTracker::new(
|
||||
in_npm_pkg_checker.clone(),
|
||||
pkg_json_resolver.clone(),
|
||||
if metadata.unstable_config.detect_cjs {
|
||||
IsCjsResolutionMode::ImplicitTypeCommonJs
|
||||
} else if metadata.workspace_resolver.package_jsons.is_empty() {
|
||||
IsCjsResolutionMode::Disabled
|
||||
} else {
|
||||
IsCjsResolutionMode::ExplicitTypeCommonJs
|
||||
},
|
||||
));
|
||||
let npm_req_resolver = Arc::new(NpmReqResolver::new(NpmReqResolverOptions {
|
||||
sys: sys.clone(),
|
||||
in_npm_pkg_checker: in_npm_pkg_checker.clone(),
|
||||
node_resolver: node_resolver.clone(),
|
||||
npm_resolver: npm_resolver.clone(),
|
||||
}));
|
||||
let cjs_esm_code_analyzer =
|
||||
CjsCodeAnalyzer::new(cjs_tracker.clone(), modules.clone(), sys.clone());
|
||||
let node_code_translator = Arc::new(NodeCodeTranslator::new(
|
||||
cjs_esm_code_analyzer,
|
||||
in_npm_pkg_checker,
|
||||
node_resolver.clone(),
|
||||
npm_resolver.clone(),
|
||||
pkg_json_resolver.clone(),
|
||||
sys.clone(),
|
||||
));
|
||||
let workspace_resolver = {
|
||||
let import_map = match metadata.workspace_resolver.import_map {
|
||||
Some(import_map) => Some(
|
||||
import_map::parse_from_json_with_options(
|
||||
root_dir_url.join(&import_map.specifier).unwrap(),
|
||||
&import_map.json,
|
||||
import_map::ImportMapOptions {
|
||||
address_hook: None,
|
||||
expand_imports: true,
|
||||
},
|
||||
)?
|
||||
.import_map,
|
||||
),
|
||||
None => None,
|
||||
};
|
||||
let pkg_jsons = metadata
|
||||
.workspace_resolver
|
||||
.package_jsons
|
||||
.into_iter()
|
||||
.map(|(relative_path, json)| {
|
||||
let path = root_dir_url
|
||||
.join(&relative_path)
|
||||
.unwrap()
|
||||
.to_file_path()
|
||||
.unwrap();
|
||||
let pkg_json =
|
||||
deno_package_json::PackageJson::load_from_value(path, json);
|
||||
Arc::new(pkg_json)
|
||||
})
|
||||
.collect();
|
||||
WorkspaceResolver::new_raw(
|
||||
root_dir_url.clone(),
|
||||
import_map,
|
||||
metadata
|
||||
.workspace_resolver
|
||||
.jsr_pkgs
|
||||
.iter()
|
||||
.map(|pkg| ResolverWorkspaceJsrPackage {
|
||||
is_patch: false, // only used for enhancing the diagnostic, which isn't shown in deno compile
|
||||
base: root_dir_url.join(&pkg.relative_base).unwrap(),
|
||||
name: pkg.name.clone(),
|
||||
version: pkg.version.clone(),
|
||||
exports: pkg.exports.clone(),
|
||||
})
|
||||
.collect(),
|
||||
pkg_jsons,
|
||||
metadata.workspace_resolver.pkg_json_resolution,
|
||||
)
|
||||
};
|
||||
let code_cache = match metadata.code_cache_key {
|
||||
Some(code_cache_key) => Some(Arc::new(DenoCompileCodeCache::new(
|
||||
root_path.with_file_name(format!(
|
||||
"{}.cache",
|
||||
root_path.file_name().unwrap().to_string_lossy()
|
||||
)),
|
||||
code_cache_key,
|
||||
))),
|
||||
None => {
|
||||
log::debug!("Code cache disabled.");
|
||||
None
|
||||
}
|
||||
};
|
||||
let module_loader_factory = StandaloneModuleLoaderFactory {
|
||||
shared: Arc::new(SharedModuleLoaderState {
|
||||
cjs_tracker: cjs_tracker.clone(),
|
||||
code_cache: code_cache.clone(),
|
||||
modules,
|
||||
node_code_translator: node_code_translator.clone(),
|
||||
node_resolver: node_resolver.clone(),
|
||||
npm_module_loader: Arc::new(NpmModuleLoader::new(
|
||||
cjs_tracker.clone(),
|
||||
node_code_translator,
|
||||
sys.clone(),
|
||||
)),
|
||||
npm_registry_permission_checker,
|
||||
npm_req_resolver,
|
||||
vfs: vfs.clone(),
|
||||
workspace_resolver,
|
||||
}),
|
||||
};

  let permissions = {
    let mut permissions = metadata.permissions;
    // grant read access to the vfs
    match &mut permissions.allow_read {
      Some(vec) if vec.is_empty() => {
        // do nothing, already granted
      }
      Some(vec) => {
        vec.push(root_path.to_string_lossy().to_string());
      }
      None => {
        permissions.allow_read =
          Some(vec![root_path.to_string_lossy().to_string()]);
      }
    }

    let desc_parser =
      Arc::new(RuntimePermissionDescriptorParser::new(sys.clone()));
    let permissions =
      Permissions::from_options(desc_parser.as_ref(), &permissions)?;
    PermissionsContainer::new(desc_parser, permissions)
  };
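In the embedded permission metadata, allow_read: Some(vec![]) already means "allow everything", Some(paths) is an allow-list, and None means no read access was granted, which is why the VFS root only needs to be added in the last two cases. The push-or-init logic in isolation (function name is illustrative):

fn grant_read(allow_read: &mut Option<Vec<String>>, vfs_root: String) {
  match allow_read {
    // empty allow-list == allow all reads, nothing to add
    Some(paths) if paths.is_empty() => {}
    Some(paths) => paths.push(vfs_root),
    None => *allow_read = Some(vec![vfs_root]),
  }
}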
  let feature_checker = Arc::new({
    let mut checker = FeatureChecker::default();
    checker.set_exit_cb(Box::new(crate::unstable_exit_cb));
    for feature in metadata.unstable_config.features {
      // `metadata` is valid for the whole lifetime of the program, so we
      // can leak the string here.
      checker.enable_feature(feature.leak());
    }
    checker
  });
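The leak above is deliberate: the feature checker wants &'static str feature names, and the unstable-feature list lives for the whole process, so leaking each String is the simple way to get a 'static borrow. Minimal illustration of the same idea (String::leak is stable since Rust 1.72):

fn enable_all(features: Vec<String>, mut enable: impl FnMut(&'static str)) {
  for feature in features {
    // Intentional: the name must outlive the checker, so give up the allocation.
    enable(feature.leak());
  }
}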
|
||||
let lib_main_worker_options = LibMainWorkerOptions {
|
||||
argv: metadata.argv,
|
||||
log_level: WorkerLogLevel::Info,
|
||||
enable_op_summary_metrics: false,
|
||||
enable_testing_features: false,
|
||||
has_node_modules_dir,
|
||||
inspect_brk: false,
|
||||
inspect_wait: false,
|
||||
strace_ops: None,
|
||||
is_inspecting: false,
|
||||
skip_op_registration: true,
|
||||
location: metadata.location,
|
||||
argv0: NpmPackageReqReference::from_specifier(&main_module)
|
||||
.ok()
|
||||
.map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref))
|
||||
.or(std::env::args().next()),
|
||||
node_debug: std::env::var("NODE_DEBUG").ok(),
|
||||
origin_data_folder_path: None,
|
||||
seed: metadata.seed,
|
||||
unsafely_ignore_certificate_errors: metadata
|
||||
.unsafely_ignore_certificate_errors,
|
||||
node_ipc: None,
|
||||
serve_port: None,
|
||||
serve_host: None,
|
||||
otel_config: metadata.otel_config,
|
||||
startup_snapshot: deno_snapshots::CLI_SNAPSHOT,
|
||||
};
|
||||
let worker_factory = LibMainWorkerFactory::new(
|
||||
Arc::new(BlobStore::default()),
|
||||
code_cache.map(|c| c.for_deno_core()),
|
||||
feature_checker,
|
||||
fs,
|
||||
None,
|
||||
Box::new(module_loader_factory),
|
||||
node_resolver.clone(),
|
||||
create_npm_process_state_provider(&npm_resolver),
|
||||
pkg_json_resolver,
|
||||
root_cert_store_provider,
|
||||
StorageKeyResolver::empty(),
|
||||
sys.clone(),
|
||||
lib_main_worker_options,
|
||||
);

  // Initialize v8 once from the main thread.
  v8_set_flags(construct_v8_flags(&[], &metadata.v8_flags, vec![]));
  // TODO(bartlomieju): remove last argument once Deploy no longer needs it
  deno_core::JsRuntime::init_platform(None, true);

  let main_module = match NpmPackageReqReference::from_specifier(&main_module) {
    Ok(package_ref) => {
      let pkg_folder = npm_resolver.resolve_pkg_folder_from_deno_module_req(
        package_ref.req(),
        &deno_path_util::url_from_file_path(&vfs.root().join("package.json"))?,
      )?;
      worker_factory
        .resolve_npm_binary_entrypoint(&pkg_folder, package_ref.sub_path())?
    }
    Err(_) => main_module,
  };

  let mut worker = worker_factory.create_main_worker(
    WorkerExecutionMode::Run,
    permissions,
    main_module,
  )?;

  let exit_code = worker.run().await?;
  Ok(exit_code)
}

fn create_default_npmrc() -> Arc<ResolvedNpmRc> {
  // this is fine because multiple registries are combined into
  // one when compiling the binary
  Arc::new(ResolvedNpmRc {
    default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
      registry_url: Url::parse("https://registry.npmjs.org").unwrap(),
      config: Default::default(),
    },
    scopes: Default::default(),
    registry_configs: Default::default(),
  })
}
20 cli/snapshot/Cargo.toml Normal file
@@ -0,0 +1,20 @@
# Copyright 2018-2025 the Deno authors. MIT license.

[package]
name = "deno_snapshots"
version = "0.1.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "v8 snapshot used by the Deno CLI"

[lib]
path = "lib.rs"

[features]
disable = []

[build-dependencies]
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources", "snapshotting"] }

3 cli/snapshot/README.md Normal file
@@ -0,0 +1,3 @@
# deno_snapshots

v8 snapshot used in the Deno CLI.

30 cli/snapshot/build.rs Normal file
@@ -0,0 +1,30 @@
// Copyright 2018-2025 the Deno authors. MIT license.

#[cfg(not(feature = "disable"))]
mod shared;

fn main() {
  #[cfg(not(feature = "disable"))]
  {
    let o = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
    let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
    create_cli_snapshot(cli_snapshot_path);
  }
}

#[cfg(not(feature = "disable"))]
fn create_cli_snapshot(snapshot_path: std::path::PathBuf) {
  use deno_runtime::ops::bootstrap::SnapshotOptions;

  let snapshot_options = SnapshotOptions {
    ts_version: shared::TS_VERSION.to_string(),
    v8_version: deno_runtime::deno_core::v8::VERSION_STRING,
    target: std::env::var("TARGET").unwrap(),
  };

  deno_runtime::snapshot::create_runtime_snapshot(
    snapshot_path,
    snapshot_options,
    vec![],
  );
}

13 cli/snapshot/lib.rs Normal file
@@ -0,0 +1,13 @@
// Copyright 2018-2025 the Deno authors. MIT license.

#[cfg(not(feature = "disable"))]
pub static CLI_SNAPSHOT: Option<&[u8]> = Some(include_bytes!(concat!(
  env!("OUT_DIR"),
  "/CLI_SNAPSHOT.bin"
)));
#[cfg(feature = "disable")]
pub static CLI_SNAPSHOT: Option<&[u8]> = None;

mod shared;

pub use shared::TS_VERSION;

3 cli/snapshot/shared.rs Normal file
@@ -0,0 +1,3 @@
// Copyright 2018-2025 the Deno authors. MIT license.

pub static TS_VERSION: &str = "5.6.2";
|
|
@ -1,109 +1,70 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::VecDeque;
|
||||
use std::env;
|
||||
use std::env::current_exe;
|
||||
use std::ffi::OsString;
|
||||
use std::fs;
|
||||
use std::fs::File;
|
||||
use std::future::Future;
|
||||
use std::io::ErrorKind;
|
||||
use std::io::Read;
|
||||
use std::io::Seek;
|
||||
use std::io::SeekFrom;
|
||||
use std::io::Write;
|
||||
use std::ops::Range;
|
||||
use std::path::Component;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::sync::Arc;
|
||||
|
||||
use capacity_builder::BytesAppendable;
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleKind;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_config::workspace::PackageJsonDepResolution;
|
||||
use deno_config::workspace::ResolverWorkspaceJsrPackage;
|
||||
use deno_config::workspace::Workspace;
|
||||
use deno_config::workspace::WorkspaceResolver;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::io::AllowStdIo;
|
||||
use deno_core::futures::AsyncReadExt;
|
||||
use deno_core::futures::AsyncSeekExt;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_lib::args::CaData;
|
||||
use deno_lib::args::UnstableConfig;
|
||||
use deno_lib::shared::ReleaseChannel;
|
||||
use deno_lib::standalone::binary::CjsExportAnalysisEntry;
|
||||
use deno_lib::standalone::binary::Metadata;
|
||||
use deno_lib::standalone::binary::NodeModules;
|
||||
use deno_lib::standalone::binary::RemoteModuleEntry;
|
||||
use deno_lib::standalone::binary::SerializedResolverWorkspaceJsrPackage;
|
||||
use deno_lib::standalone::binary::SerializedWorkspaceResolver;
|
||||
use deno_lib::standalone::binary::SerializedWorkspaceResolverImportMap;
|
||||
use deno_lib::standalone::binary::SpecifierDataStore;
|
||||
use deno_lib::standalone::binary::SpecifierId;
|
||||
use deno_lib::standalone::binary::MAGIC_BYTES;
|
||||
use deno_lib::standalone::virtual_fs::BuiltVfs;
|
||||
use deno_lib::standalone::virtual_fs::VfsBuilder;
|
||||
use deno_lib::standalone::virtual_fs::VfsEntry;
|
||||
use deno_lib::standalone::virtual_fs::VirtualDirectory;
|
||||
use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries;
|
||||
use deno_lib::standalone::virtual_fs::WindowsSystemRootablePath;
|
||||
use deno_lib::standalone::virtual_fs::DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME;
|
||||
use deno_lib::util::hash::FastInsecureHasher;
|
||||
use deno_lib::version::DENO_VERSION_INFO;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_path_util::url_from_directory_path;
|
||||
use deno_path_util::url_from_file_path;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_fs::RealFs;
|
||||
use deno_runtime::deno_io::fs::FsError;
|
||||
use deno_runtime::deno_node::PackageJson;
|
||||
use deno_runtime::deno_permissions::PermissionsOptions;
|
||||
use deno_semver::npm::NpmVersionReqParseError;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::Version;
|
||||
use deno_semver::VersionReqSpecifierParseError;
|
||||
use deno_telemetry::OtelConfig;
|
||||
use indexmap::IndexMap;
|
||||
use log::Level;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use node_resolver::analyze::CjsAnalysis;
|
||||
use node_resolver::analyze::CjsCodeAnalyzer;
|
||||
|
||||
use super::file_system::DenoCompileFileSystem;
|
||||
use super::serialization::deserialize_binary_data_section;
|
||||
use super::serialization::serialize_binary_data_section;
|
||||
use super::serialization::DenoCompileModuleData;
|
||||
use super::serialization::DeserializedDataSection;
|
||||
use super::serialization::RemoteModulesStore;
|
||||
use super::serialization::RemoteModulesStoreBuilder;
|
||||
use super::serialization::SourceMapStore;
|
||||
use super::virtual_fs::output_vfs;
|
||||
use super::virtual_fs::BuiltVfs;
|
||||
use super::virtual_fs::FileBackedVfs;
|
||||
use super::virtual_fs::FileSystemCaseSensitivity;
|
||||
use super::virtual_fs::VfsBuilder;
|
||||
use super::virtual_fs::VfsFileSubDataKind;
|
||||
use super::virtual_fs::VfsRoot;
|
||||
use super::virtual_fs::VirtualDirectory;
|
||||
use super::virtual_fs::VirtualDirectoryEntries;
|
||||
use super::virtual_fs::WindowsSystemRootablePath;
|
||||
use crate::args::CaData;
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::CompileFlags;
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::args::PermissionFlags;
|
||||
use crate::args::UnstableConfig;
|
||||
use crate::cache::DenoDir;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::emit::Emitter;
|
||||
use crate::file_fetcher::CliFileFetcher;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::node::CliCjsCodeAnalyzer;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::resolver::CliCjsTracker;
|
||||
use crate::shared::ReleaseChannel;
|
||||
use crate::standalone::virtual_fs::VfsEntry;
|
||||
use crate::util::archive;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
|
||||
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
|
||||
".deno_compile_node_modules";
|
||||
|
||||
/// A URL that can be designated as the base for relative URLs.
|
||||
///
|
||||
/// After creation, this URL may be used to get the key for a
|
||||
|
@ -149,104 +110,60 @@ impl<'a> StandaloneRelativeFileBaseUrl<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub enum NodeModules {
|
||||
Managed {
|
||||
/// Relative path for the node_modules directory in the vfs.
|
||||
node_modules_dir: Option<String>,
|
||||
},
|
||||
Byonm {
|
||||
root_node_modules_dir: Option<String>,
|
||||
},
|
||||
struct SpecifierStore<'a> {
|
||||
data: IndexMap<&'a Url, SpecifierId>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct SerializedWorkspaceResolverImportMap {
|
||||
pub specifier: String,
|
||||
pub json: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct SerializedResolverWorkspaceJsrPackage {
|
||||
pub relative_base: String,
|
||||
pub name: String,
|
||||
pub version: Option<Version>,
|
||||
pub exports: IndexMap<String, String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct SerializedWorkspaceResolver {
|
||||
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
|
||||
pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
|
||||
pub package_jsons: BTreeMap<String, serde_json::Value>,
|
||||
pub pkg_json_resolution: PackageJsonDepResolution,
|
||||
}
|
||||
|
||||
// Note: Don't use hashmaps/hashsets. Ensure the serialization
|
||||
// is deterministic.
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct Metadata {
|
||||
pub argv: Vec<String>,
|
||||
pub seed: Option<u64>,
|
||||
pub code_cache_key: Option<u64>,
|
||||
pub permissions: PermissionsOptions,
|
||||
pub location: Option<Url>,
|
||||
pub v8_flags: Vec<String>,
|
||||
pub log_level: Option<Level>,
|
||||
pub ca_stores: Option<Vec<String>>,
|
||||
pub ca_data: Option<Vec<u8>>,
|
||||
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
|
||||
pub env_vars_from_env_file: IndexMap<String, String>,
|
||||
pub workspace_resolver: SerializedWorkspaceResolver,
|
||||
pub entrypoint_key: String,
|
||||
pub node_modules: Option<NodeModules>,
|
||||
pub unstable_config: UnstableConfig,
|
||||
pub otel_config: OtelConfig,
|
||||
pub vfs_case_sensitivity: FileSystemCaseSensitivity,
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn write_binary_bytes(
|
||||
mut file_writer: File,
|
||||
original_bin: Vec<u8>,
|
||||
metadata: &Metadata,
|
||||
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
|
||||
remote_modules: &RemoteModulesStoreBuilder,
|
||||
source_map_store: &SourceMapStore,
|
||||
vfs: &BuiltVfs,
|
||||
compile_flags: &CompileFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let data_section_bytes = serialize_binary_data_section(
|
||||
metadata,
|
||||
npm_snapshot,
|
||||
remote_modules,
|
||||
source_map_store,
|
||||
vfs,
|
||||
)
|
||||
.context("Serializing binary data section.")?;
|
||||
|
||||
let target = compile_flags.resolve_target();
|
||||
if target.contains("linux") {
|
||||
libsui::Elf::new(&original_bin).append(
|
||||
"d3n0l4nd",
|
||||
&data_section_bytes,
|
||||
&mut file_writer,
|
||||
)?;
|
||||
} else if target.contains("windows") {
|
||||
let mut pe = libsui::PortableExecutable::from(&original_bin)?;
|
||||
if let Some(icon) = compile_flags.icon.as_ref() {
|
||||
let icon = std::fs::read(icon)?;
|
||||
pe = pe.set_icon(&icon)?;
|
||||
impl<'a> SpecifierStore<'a> {
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
Self {
|
||||
data: IndexMap::with_capacity(capacity),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_or_add(&mut self, specifier: &'a Url) -> SpecifierId {
|
||||
let len = self.data.len();
|
||||
let entry = self.data.entry(specifier);
|
||||
match entry {
|
||||
indexmap::map::Entry::Occupied(occupied_entry) => *occupied_entry.get(),
|
||||
indexmap::map::Entry::Vacant(vacant_entry) => {
|
||||
let new_id = SpecifierId::new(len as u32);
|
||||
vacant_entry.insert(new_id);
|
||||
new_id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn for_serialization(
|
||||
self,
|
||||
base_url: &StandaloneRelativeFileBaseUrl<'a>,
|
||||
) -> SpecifierStoreForSerialization<'a> {
|
||||
SpecifierStoreForSerialization {
|
||||
data: self
|
||||
.data
|
||||
.into_iter()
|
||||
.map(|(specifier, id)| (base_url.specifier_key(specifier), id))
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
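SpecifierStore is a tiny interner: every distinct specifier gets a dense u32 id (its insertion index in the IndexMap), and later sections refer to specifiers by id instead of repeating the URL string. The core of that idea in a few lines (simplified to &str keys):

use indexmap::IndexMap;

fn get_or_add<'a>(ids: &mut IndexMap<&'a str, u32>, specifier: &'a str) -> u32 {
  let next_id = ids.len() as u32;
  // entry() keeps the first id ever assigned; new specifiers get the next index
  *ids.entry(specifier).or_insert(next_id)
}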
|
||||
|
||||
struct SpecifierStoreForSerialization<'a> {
|
||||
data: Vec<(Cow<'a, str>, SpecifierId)>,
|
||||
}
|
||||
|
||||
impl<'a> BytesAppendable<'a> for &'a SpecifierStoreForSerialization<'a> {
|
||||
fn append_to_builder<TBytes: capacity_builder::BytesType>(
|
||||
self,
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
|
||||
) {
|
||||
builder.append_le(self.data.len() as u32);
|
||||
for (specifier_str, id) in &self.data {
|
||||
builder.append_le(specifier_str.len() as u32);
|
||||
builder.append(specifier_str.as_ref());
|
||||
builder.append(*id);
|
||||
}
|
||||
|
||||
pe.write_resource("d3n0l4nd", data_section_bytes)?
|
||||
.build(&mut file_writer)?;
|
||||
} else if target.contains("darwin") {
|
||||
libsui::Macho::from(original_bin)?
|
||||
.write_section("d3n0l4nd", data_section_bytes)?
|
||||
.build_and_sign(&mut file_writer)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn is_standalone_binary(exe_path: &Path) -> bool {
|
||||
|
@ -259,146 +176,6 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
|
|||
|| libsui::utils::is_macho(&data)
|
||||
}
|
||||
|
||||
pub struct StandaloneData {
|
||||
pub metadata: Metadata,
|
||||
pub modules: StandaloneModules,
|
||||
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||
pub root_path: PathBuf,
|
||||
pub source_maps: SourceMapStore,
|
||||
pub vfs: Arc<FileBackedVfs>,
|
||||
}
|
||||
|
||||
pub struct StandaloneModules {
|
||||
remote_modules: RemoteModulesStore,
|
||||
vfs: Arc<FileBackedVfs>,
|
||||
}
|
||||
|
||||
impl StandaloneModules {
|
||||
pub fn resolve_specifier<'a>(
|
||||
&'a self,
|
||||
specifier: &'a ModuleSpecifier,
|
||||
) -> Result<Option<&'a ModuleSpecifier>, AnyError> {
|
||||
if specifier.scheme() == "file" {
|
||||
Ok(Some(specifier))
|
||||
} else {
|
||||
self.remote_modules.resolve_specifier(specifier)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_file(&self, path: &Path) -> bool {
|
||||
self.vfs.file_entry(path).is_ok()
|
||||
}
|
||||
|
||||
pub fn read<'a>(
|
||||
&'a self,
|
||||
specifier: &'a ModuleSpecifier,
|
||||
kind: VfsFileSubDataKind,
|
||||
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
|
||||
if specifier.scheme() == "file" {
|
||||
let path = deno_path_util::url_to_file_path(specifier)?;
|
||||
let bytes = match self.vfs.file_entry(&path) {
|
||||
Ok(entry) => self.vfs.read_file_all(entry, kind)?,
|
||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
||||
match RealFs.read_file_sync(&path, None) {
|
||||
Ok(bytes) => bytes,
|
||||
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
|
||||
return Ok(None)
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
};
|
||||
Ok(Some(DenoCompileModuleData {
|
||||
media_type: MediaType::from_specifier(specifier),
|
||||
specifier,
|
||||
data: bytes,
|
||||
}))
|
||||
} else {
|
||||
self.remote_modules.read(specifier).map(|maybe_entry| {
|
||||
maybe_entry.map(|entry| DenoCompileModuleData {
|
||||
media_type: entry.media_type,
|
||||
specifier: entry.specifier,
|
||||
data: match kind {
|
||||
VfsFileSubDataKind::Raw => entry.data,
|
||||
VfsFileSubDataKind::ModuleGraph => {
|
||||
entry.transpiled_data.unwrap_or(entry.data)
|
||||
}
|
||||
},
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
}
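For file: specifiers the read path is: try the embedded VFS first, fall back to the real filesystem when the VFS reports NotFound, and only a NotFound from both means the module does not exist. The same control flow with std types only (the vfs_read closure stands in for the embedded VFS):

use std::io::ErrorKind;
use std::path::Path;

fn read_with_fallback(
  vfs_read: impl Fn(&Path) -> std::io::Result<Vec<u8>>,
  path: &Path,
) -> std::io::Result<Option<Vec<u8>>> {
  match vfs_read(path) {
    Ok(bytes) => Ok(Some(bytes)),
    Err(err) if err.kind() == ErrorKind::NotFound => match std::fs::read(path) {
      Ok(bytes) => Ok(Some(bytes)),
      Err(err) if err.kind() == ErrorKind::NotFound => Ok(None),
      Err(err) => Err(err),
    },
    Err(err) => Err(err),
  }
}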
|
||||
|
||||
/// This function will try to run this binary as a standalone binary
|
||||
/// produced by `deno compile`. It determines if this is a standalone
|
||||
/// binary by skipping over the trailer width at the end of the file,
|
||||
/// then checking for the magic trailer string `d3n0l4nd`. If found,
|
||||
/// the bundle is executed. If not, this function exits with `Ok(None)`.
|
||||
pub fn extract_standalone(
|
||||
cli_args: Cow<Vec<OsString>>,
|
||||
) -> Result<Option<StandaloneData>, AnyError> {
|
||||
let Some(data) = libsui::find_section("d3n0l4nd") else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let DeserializedDataSection {
|
||||
mut metadata,
|
||||
npm_snapshot,
|
||||
remote_modules,
|
||||
source_maps,
|
||||
vfs_root_entries,
|
||||
vfs_files_data,
|
||||
} = match deserialize_binary_data_section(data)? {
|
||||
Some(data_section) => data_section,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let root_path = {
|
||||
let maybe_current_exe = std::env::current_exe().ok();
|
||||
let current_exe_name = maybe_current_exe
|
||||
.as_ref()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|p| p.to_string_lossy())
|
||||
// should never happen
|
||||
.unwrap_or_else(|| Cow::Borrowed("binary"));
|
||||
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
|
||||
};
|
||||
let cli_args = cli_args.into_owned();
|
||||
metadata.argv.reserve(cli_args.len() - 1);
|
||||
for arg in cli_args.into_iter().skip(1) {
|
||||
metadata.argv.push(arg.into_string().unwrap());
|
||||
}
|
||||
let vfs = {
|
||||
let fs_root = VfsRoot {
|
||||
dir: VirtualDirectory {
|
||||
// align the name of the directory with the root dir
|
||||
name: root_path.file_name().unwrap().to_string_lossy().to_string(),
|
||||
entries: vfs_root_entries,
|
||||
},
|
||||
root_path: root_path.clone(),
|
||||
start_file_offset: 0,
|
||||
};
|
||||
Arc::new(FileBackedVfs::new(
|
||||
Cow::Borrowed(vfs_files_data),
|
||||
fs_root,
|
||||
metadata.vfs_case_sensitivity,
|
||||
))
|
||||
};
|
||||
Ok(Some(StandaloneData {
|
||||
metadata,
|
||||
modules: StandaloneModules {
|
||||
remote_modules,
|
||||
vfs: vfs.clone(),
|
||||
},
|
||||
npm_snapshot,
|
||||
root_path,
|
||||
source_maps,
|
||||
vfs,
|
||||
}))
|
||||
}
|
||||
|
||||
pub struct WriteBinOptions<'a> {
|
||||
pub writer: File,
|
||||
pub display_output_filename: &'a str,
|
||||
|
@ -409,11 +186,11 @@ pub struct WriteBinOptions<'a> {
|
|||
}
|
||||
|
||||
pub struct DenoCompileBinaryWriter<'a> {
|
||||
cjs_code_analyzer: CliCjsCodeAnalyzer,
|
||||
cjs_tracker: &'a CliCjsTracker,
|
||||
cli_options: &'a CliOptions,
|
||||
deno_dir: &'a DenoDir,
|
||||
emitter: &'a Emitter,
|
||||
file_fetcher: &'a CliFileFetcher,
|
||||
http_client_provider: &'a HttpClientProvider,
|
||||
npm_resolver: &'a CliNpmResolver,
|
||||
workspace_resolver: &'a WorkspaceResolver,
|
||||
|
@ -423,22 +200,22 @@ pub struct DenoCompileBinaryWriter<'a> {
|
|||
impl<'a> DenoCompileBinaryWriter<'a> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
cjs_code_analyzer: CliCjsCodeAnalyzer,
|
||||
cjs_tracker: &'a CliCjsTracker,
|
||||
cli_options: &'a CliOptions,
|
||||
deno_dir: &'a DenoDir,
|
||||
emitter: &'a Emitter,
|
||||
file_fetcher: &'a CliFileFetcher,
|
||||
http_client_provider: &'a HttpClientProvider,
|
||||
npm_resolver: &'a CliNpmResolver,
|
||||
workspace_resolver: &'a WorkspaceResolver,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
) -> Self {
|
||||
Self {
|
||||
cjs_code_analyzer,
|
||||
cjs_tracker,
|
||||
cli_options,
|
||||
deno_dir,
|
||||
emitter,
|
||||
file_fetcher,
|
||||
http_client_provider,
|
||||
npm_resolver,
|
||||
workspace_resolver,
|
||||
|
@ -474,7 +251,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
)
|
||||
}
|
||||
}
|
||||
self.write_standalone_binary(options, original_binary)
|
||||
self.write_standalone_binary(options, original_binary).await
|
||||
}
|
||||
|
||||
async fn get_base_binary(
|
||||
|
@ -494,19 +271,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
let target = compile_flags.resolve_target();
|
||||
let binary_name = format!("denort-{target}.zip");
|
||||
|
||||
let binary_path_suffix =
|
||||
match crate::version::DENO_VERSION_INFO.release_channel {
|
||||
ReleaseChannel::Canary => {
|
||||
format!(
|
||||
"canary/{}/{}",
|
||||
crate::version::DENO_VERSION_INFO.git_hash,
|
||||
binary_name
|
||||
)
|
||||
}
|
||||
_ => {
|
||||
format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name)
|
||||
}
|
||||
};
|
||||
let binary_path_suffix = match DENO_VERSION_INFO.release_channel {
|
||||
ReleaseChannel::Canary => {
|
||||
format!("canary/{}/{}", DENO_VERSION_INFO.git_hash, binary_name)
|
||||
}
|
||||
_ => {
|
||||
format!("release/v{}/{}", DENO_VERSION_INFO.deno, binary_name)
|
||||
}
|
||||
};
|
||||
|
||||
let download_directory = self.deno_dir.dl_folder_path();
|
||||
let binary_path = download_directory.join(&binary_path_suffix);
|
||||
|
@ -577,7 +349,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
/// This functions creates a standalone deno binary by appending a bundle
|
||||
/// and magic trailer to the currently executing binary.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn write_standalone_binary(
|
||||
async fn write_standalone_binary(
|
||||
&self,
|
||||
options: WriteBinOptions<'_>,
|
||||
original_bin: Vec<u8>,
|
||||
|
@ -621,23 +393,50 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
.add_file_at_path(&path)
|
||||
.with_context(|| format!("Including {}", path.display()))?;
|
||||
}
|
||||
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
|
||||
let mut source_maps = Vec::with_capacity(graph.specifiers_count());
|
||||
// todo(dsherret): transpile in parallel
|
||||
let specifiers_count = graph.specifiers_count();
|
||||
let mut specifier_store = SpecifierStore::with_capacity(specifiers_count);
|
||||
let mut remote_modules_store =
|
||||
SpecifierDataStore::with_capacity(specifiers_count);
|
||||
// todo(dsherret): transpile and analyze CJS in parallel
|
||||
for module in graph.modules() {
|
||||
if module.specifier().scheme() == "data" {
|
||||
continue; // don't store data urls as an entry as they're in the code
|
||||
}
|
||||
let (maybe_original_source, maybe_transpiled, media_type) = match module {
|
||||
let mut maybe_source_map = None;
|
||||
let mut maybe_transpiled = None;
|
||||
let mut maybe_cjs_analysis = None;
|
||||
let (maybe_original_source, media_type) = match module {
|
||||
deno_graph::Module::Js(m) => {
|
||||
let original_bytes = m.source.as_bytes().to_vec();
|
||||
let maybe_transpiled = if m.media_type.is_emittable() {
|
||||
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
|
||||
&m.specifier,
|
||||
let specifier = &m.specifier;
|
||||
let original_bytes = m.source.as_bytes();
|
||||
if self.cjs_tracker.is_maybe_cjs(specifier, m.media_type)? {
|
||||
if self.cjs_tracker.is_cjs_with_known_is_script(
|
||||
specifier,
|
||||
m.media_type,
|
||||
m.is_script,
|
||||
)?;
|
||||
let module_kind = ModuleKind::from_is_cjs(is_cjs);
|
||||
)? {
|
||||
let cjs_analysis = self
|
||||
.cjs_code_analyzer
|
||||
.analyze_cjs(
|
||||
module.specifier(),
|
||||
Some(Cow::Borrowed(m.source.as_ref())),
|
||||
)
|
||||
.await?;
|
||||
maybe_cjs_analysis = Some(match cjs_analysis {
|
||||
CjsAnalysis::Esm(_) => CjsExportAnalysisEntry::Esm,
|
||||
CjsAnalysis::Cjs(exports) => {
|
||||
CjsExportAnalysisEntry::Cjs(exports)
|
||||
}
|
||||
});
|
||||
} else {
|
||||
maybe_cjs_analysis = Some(CjsExportAnalysisEntry::Esm);
|
||||
}
|
||||
}
|
||||
if m.media_type.is_emittable() {
|
||||
let module_kind = match maybe_cjs_analysis.as_ref() {
|
||||
Some(CjsExportAnalysisEntry::Cjs(_)) => ModuleKind::Cjs,
|
||||
_ => ModuleKind::Esm,
|
||||
};
|
||||
let (source, source_map) =
|
||||
self.emitter.emit_parsed_source_for_deno_compile(
|
||||
&m.specifier,
|
||||
|
@ -646,60 +445,67 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
&m.source,
|
||||
)?;
|
||||
if source != m.source.as_ref() {
|
||||
source_maps.push((&m.specifier, source_map));
|
||||
Some(source.into_bytes())
|
||||
} else {
|
||||
None
|
||||
maybe_source_map = Some(source_map.into_bytes());
|
||||
maybe_transpiled = Some(source.into_bytes());
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
(Some(original_bytes), maybe_transpiled, m.media_type)
|
||||
}
|
||||
(Some(original_bytes), m.media_type)
|
||||
}
|
||||
deno_graph::Module::Json(m) => {
|
||||
(Some(m.source.as_bytes().to_vec()), None, m.media_type)
|
||||
(Some(m.source.as_bytes()), m.media_type)
|
||||
}
|
||||
deno_graph::Module::Wasm(m) => {
|
||||
(Some(m.source.to_vec()), None, MediaType::Wasm)
|
||||
(Some(m.source.as_ref()), MediaType::Wasm)
|
||||
}
|
||||
deno_graph::Module::Npm(_)
|
||||
| deno_graph::Module::Node(_)
|
||||
| deno_graph::Module::External(_) => (None, None, MediaType::Unknown),
|
||||
| deno_graph::Module::External(_) => (None, MediaType::Unknown),
|
||||
};
|
||||
if let Some(original_source) = maybe_original_source {
|
||||
let maybe_cjs_export_analysis = maybe_cjs_analysis
|
||||
.as_ref()
|
||||
.map(bincode::serialize)
|
||||
.transpose()?;
|
||||
if module.specifier().scheme() == "file" {
|
||||
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
|
||||
vfs
|
||||
.add_file_with_data(
|
||||
&file_path,
|
||||
original_source,
|
||||
VfsFileSubDataKind::Raw,
|
||||
deno_lib::standalone::virtual_fs::AddFileDataOptions {
|
||||
data: original_source.to_vec(),
|
||||
maybe_transpiled,
|
||||
maybe_source_map,
|
||||
maybe_cjs_export_analysis,
|
||||
},
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("Failed adding '{}'", file_path.display())
|
||||
})?;
|
||||
if let Some(transpiled_source) = maybe_transpiled {
|
||||
vfs
|
||||
.add_file_with_data(
|
||||
&file_path,
|
||||
transpiled_source,
|
||||
VfsFileSubDataKind::ModuleGraph,
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("Failed adding '{}'", file_path.display())
|
||||
})?;
|
||||
}
|
||||
} else {
|
||||
let specifier_id = specifier_store.get_or_add(module.specifier());
|
||||
remote_modules_store.add(
|
||||
module.specifier(),
|
||||
media_type,
|
||||
original_source,
|
||||
maybe_transpiled,
|
||||
specifier_id,
|
||||
RemoteModuleEntry {
|
||||
media_type,
|
||||
data: Cow::Borrowed(original_source),
|
||||
maybe_transpiled: maybe_transpiled.map(Cow::Owned),
|
||||
maybe_source_map: maybe_source_map.map(Cow::Owned),
|
||||
maybe_cjs_export_analysis: maybe_cjs_export_analysis
|
||||
.map(Cow::Owned),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
remote_modules_store.add_redirects(&graph.redirects);
|
||||
|
||||
let mut redirects_store =
|
||||
SpecifierDataStore::with_capacity(graph.redirects.len());
|
||||
for (from, to) in &graph.redirects {
|
||||
redirects_store.add(
|
||||
specifier_store.get_or_add(from),
|
||||
specifier_store.get_or_add(to),
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(import_map) = self.workspace_resolver.maybe_import_map() {
|
||||
if let Ok(file_path) = url_to_file_path(import_map.base_url()) {
|
||||
|
@ -717,7 +523,48 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
// do CJS export analysis on all the files in the VFS
|
||||
// todo(dsherret): analyze cjs in parallel
|
||||
let mut to_add = Vec::new();
|
||||
for (file_path, file) in vfs.iter_files() {
|
||||
if file.cjs_export_analysis_offset.is_some() {
|
||||
continue; // already analyzed
|
||||
}
|
||||
let specifier = deno_path_util::url_from_file_path(&file_path)?;
|
||||
let media_type = MediaType::from_specifier(&specifier);
|
||||
if self.cjs_tracker.is_maybe_cjs(&specifier, media_type)? {
|
||||
let maybe_source = vfs
|
||||
.file_bytes(file.offset)
|
||||
.map(|text| String::from_utf8_lossy(text));
|
||||
let cjs_analysis_result = self
|
||||
.cjs_code_analyzer
|
||||
.analyze_cjs(&specifier, maybe_source)
|
||||
.await;
|
||||
let maybe_analysis = match cjs_analysis_result {
|
||||
Ok(CjsAnalysis::Esm(_)) => Some(CjsExportAnalysisEntry::Esm),
|
||||
Ok(CjsAnalysis::Cjs(exports)) => {
|
||||
Some(CjsExportAnalysisEntry::Cjs(exports))
|
||||
}
|
||||
Err(err) => {
|
||||
log::debug!(
|
||||
"Ignoring cjs export analysis for '{}': {}",
|
||||
specifier,
|
||||
err
|
||||
);
|
||||
None
|
||||
}
|
||||
};
|
||||
if let Some(analysis) = &maybe_analysis {
|
||||
to_add.push((file_path, bincode::serialize(analysis)?));
|
||||
}
|
||||
}
|
||||
}
|
||||
for (file_path, analysis) in to_add {
|
||||
vfs.add_cjs_export_analysis(&file_path, analysis);
|
||||
}
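Each analysis entry is stored in the VFS as bincode bytes next to the file it describes, so it round-trips through bincode like any serde type. A reduced sketch (the enum here mirrors CjsExportAnalysisEntry in shape only):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum ExportAnalysis {
  Esm,
  Cjs(Vec<String>),
}

fn round_trip(entry: &ExportAnalysis) -> bincode::Result<ExportAnalysis> {
  let bytes = bincode::serialize(entry)?;
  bincode::deserialize(&bytes)
}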
|
||||
|
||||
let vfs = self.build_vfs_consolidating_global_npm_cache(vfs);
|
||||
|
||||
let root_dir_url = match &vfs.root_path {
|
||||
WindowsSystemRootablePath::Path(dir) => {
|
||||
Some(url_from_directory_path(dir)?)
|
||||
|
@ -743,14 +590,6 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
None
|
||||
};
|
||||
|
||||
let mut source_map_store = SourceMapStore::with_capacity(source_maps.len());
|
||||
for (specifier, source_map) in source_maps {
|
||||
source_map_store.add(
|
||||
Cow::Owned(root_dir_url.specifier_key(specifier).into_owned()),
|
||||
Cow::Owned(source_map.into_bytes()),
|
||||
);
|
||||
}
|
||||
|
||||
let node_modules = match &self.npm_resolver {
|
||||
CliNpmResolver::Managed(_) => {
|
||||
npm_snapshot.as_ref().map(|_| NodeModules::Managed {
|
||||
|
@ -860,17 +699,18 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
vfs_case_sensitivity: vfs.case_sensitivity,
|
||||
};
|
||||
|
||||
write_binary_bytes(
|
||||
writer,
|
||||
original_bin,
|
||||
let data_section_bytes = serialize_binary_data_section(
|
||||
&metadata,
|
||||
npm_snapshot.map(|s| s.into_serialized()),
|
||||
&specifier_store.for_serialization(&root_dir_url),
|
||||
&redirects_store,
|
||||
&remote_modules_store,
|
||||
&source_map_store,
|
||||
&vfs,
|
||||
compile_flags,
|
||||
)
|
||||
.context("Writing binary bytes")
|
||||
.context("Serializing binary data section.")?;
|
||||
|
||||
write_binary_bytes(writer, original_bin, data_section_bytes, compile_flags)
|
||||
.context("Writing binary bytes")
|
||||
}
|
||||
|
||||
fn fill_npm_vfs(&self, builder: &mut VfsBuilder) -> Result<(), AnyError> {
|
||||
|
@ -1041,6 +881,146 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn write_binary_bytes(
|
||||
mut file_writer: File,
|
||||
original_bin: Vec<u8>,
|
||||
data_section_bytes: Vec<u8>,
|
||||
compile_flags: &CompileFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let target = compile_flags.resolve_target();
|
||||
if target.contains("linux") {
|
||||
libsui::Elf::new(&original_bin).append(
|
||||
"d3n0l4nd",
|
||||
&data_section_bytes,
|
||||
&mut file_writer,
|
||||
)?;
|
||||
} else if target.contains("windows") {
|
||||
let mut pe = libsui::PortableExecutable::from(&original_bin)?;
|
||||
if let Some(icon) = compile_flags.icon.as_ref() {
|
||||
let icon = std::fs::read(icon)?;
|
||||
pe = pe.set_icon(&icon)?;
|
||||
}
|
||||
|
||||
pe.write_resource("d3n0l4nd", data_section_bytes)?
|
||||
.build(&mut file_writer)?;
|
||||
} else if target.contains("darwin") {
|
||||
libsui::Macho::from(original_bin)?
|
||||
.write_section("d3n0l4nd", data_section_bytes)?
|
||||
.build_and_sign(&mut file_writer)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Binary format:
|
||||
/// * d3n0l4nd
|
||||
/// * <metadata_len><metadata>
|
||||
/// * <npm_snapshot_len><npm_snapshot>
|
||||
/// * <specifiers>
|
||||
/// * <redirects>
|
||||
/// * <remote_modules>
|
||||
/// * <vfs_headers_len><vfs_headers>
|
||||
/// * <vfs_file_data_len><vfs_file_data>
|
||||
/// * d3n0l4nd
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn serialize_binary_data_section(
|
||||
metadata: &Metadata,
|
||||
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
|
||||
specifiers: &SpecifierStoreForSerialization,
|
||||
redirects: &SpecifierDataStore<SpecifierId>,
|
||||
remote_modules: &SpecifierDataStore<RemoteModuleEntry<'_>>,
|
||||
vfs: &BuiltVfs,
|
||||
) -> Result<Vec<u8>, AnyError> {
|
||||
let metadata = serde_json::to_string(metadata)?;
|
||||
let npm_snapshot =
|
||||
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
|
||||
let serialized_vfs = serde_json::to_string(&vfs.entries)?;
|
||||
|
||||
let bytes = capacity_builder::BytesBuilder::build(|builder| {
|
||||
builder.append(MAGIC_BYTES);
|
||||
// 1. Metadata
|
||||
{
|
||||
builder.append_le(metadata.len() as u64);
|
||||
builder.append(&metadata);
|
||||
}
|
||||
// 2. Npm snapshot
|
||||
{
|
||||
builder.append_le(npm_snapshot.len() as u64);
|
||||
builder.append(&npm_snapshot);
|
||||
}
|
||||
// 3. Specifiers
|
||||
builder.append(specifiers);
|
||||
// 4. Redirects
|
||||
redirects.serialize(builder);
|
||||
// 5. Remote modules
|
||||
remote_modules.serialize(builder);
|
||||
// 6. VFS
|
||||
{
|
||||
builder.append_le(serialized_vfs.len() as u64);
|
||||
builder.append(&serialized_vfs);
|
||||
let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
|
||||
builder.append_le(vfs_bytes_len);
|
||||
for file in &vfs.files {
|
||||
builder.append(file);
|
||||
}
|
||||
}
|
||||
|
||||
// write the magic bytes at the end so we can use it
|
||||
// to make sure we've deserialized correctly
|
||||
builder.append(MAGIC_BYTES);
|
||||
})?;
|
||||
|
||||
Ok(bytes)
|
||||
}
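Every variable-length piece in the layout above is written as a little-endian length followed by the raw bytes, with the magic string book-ending the whole section. The read side is the mirror image; a small sketch of consuming one u64-length-prefixed chunk (helper name is illustrative, the real deserializer lives in the serialization module):

fn read_u64_prefixed(input: &[u8]) -> Option<(&[u8], &[u8])> {
  if input.len() < 8 {
    return None;
  }
  let (len_bytes, rest) = input.split_at(8);
  let len = u64::from_le_bytes(len_bytes.try_into().ok()?) as usize;
  if rest.len() < len {
    return None;
  }
  // (chunk, remaining input)
  Some(rest.split_at(len))
}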
|
||||
|
||||
fn serialize_npm_snapshot(
|
||||
mut snapshot: SerializedNpmResolutionSnapshot,
|
||||
) -> Vec<u8> {
|
||||
fn append_string(bytes: &mut Vec<u8>, string: &str) {
|
||||
let len = string.len() as u32;
|
||||
bytes.extend_from_slice(&len.to_le_bytes());
|
||||
bytes.extend_from_slice(string.as_bytes());
|
||||
}
|
||||
|
||||
snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
|
||||
let ids_to_stored_ids = snapshot
|
||||
.packages
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, pkg)| (&pkg.id, i as u32))
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
|
||||
root_packages.sort();
|
||||
let mut bytes = Vec::new();
|
||||
|
||||
bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
|
||||
for pkg in &snapshot.packages {
|
||||
append_string(&mut bytes, &pkg.id.as_serialized());
|
||||
}
|
||||
|
||||
bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
|
||||
for (req, id) in root_packages {
|
||||
append_string(&mut bytes, &req.to_string());
|
||||
let id = ids_to_stored_ids.get(&id).unwrap();
|
||||
bytes.extend_from_slice(&id.to_le_bytes());
|
||||
}
|
||||
|
||||
for pkg in &snapshot.packages {
|
||||
let deps_len = pkg.dependencies.len() as u32;
|
||||
bytes.extend_from_slice(&deps_len.to_le_bytes());
|
||||
let mut deps: Vec<_> = pkg.dependencies.iter().collect();
|
||||
deps.sort();
|
||||
for (req, id) in deps {
|
||||
append_string(&mut bytes, req);
|
||||
let id = ids_to_stored_ids.get(&id).unwrap();
|
||||
bytes.extend_from_slice(&id.to_le_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
bytes
|
||||
}
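The snapshot serializer sorts packages, root packages, and each dependency map before writing so that the same input always produces byte-identical output, which keeps compiled binaries and their cache keys reproducible. The general trick, in isolation:

use std::collections::HashMap;

fn deterministic_entries(map: &HashMap<String, u32>) -> Vec<(&String, &u32)> {
  let mut entries: Vec<_> = map.iter().collect();
  entries.sort(); // stable order regardless of the map's iteration order
  entries
}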
|
||||
|
||||
fn get_denort_path(deno_exe: PathBuf) -> Option<OsString> {
|
||||
let mut denort = deno_exe;
|
||||
denort.set_file_name(if cfg!(windows) {
|
||||
|
|
|
@ -1,884 +0,0 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use deno_runtime::deno_fs::AccessCheckCb;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_fs::FsDirEntry;
|
||||
use deno_runtime::deno_fs::FsFileType;
|
||||
use deno_runtime::deno_fs::OpenOptions;
|
||||
use deno_runtime::deno_fs::RealFs;
|
||||
use deno_runtime::deno_io::fs::File;
|
||||
use deno_runtime::deno_io::fs::FsError;
|
||||
use deno_runtime::deno_io::fs::FsResult;
|
||||
use deno_runtime::deno_io::fs::FsStat;
|
||||
use sys_traits::boxed::BoxedFsDirEntry;
|
||||
use sys_traits::boxed::BoxedFsMetadataValue;
|
||||
use sys_traits::boxed::FsMetadataBoxed;
|
||||
use sys_traits::boxed::FsReadDirBoxed;
|
||||
use sys_traits::FsCopy;
|
||||
use sys_traits::FsMetadata;
|
||||
|
||||
use super::virtual_fs::FileBackedVfs;
|
||||
use super::virtual_fs::FileBackedVfsDirEntry;
|
||||
use super::virtual_fs::FileBackedVfsFile;
|
||||
use super::virtual_fs::FileBackedVfsMetadata;
|
||||
use super::virtual_fs::VfsFileSubDataKind;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
|
||||
|
||||
impl DenoCompileFileSystem {
|
||||
pub fn new(vfs: Arc<FileBackedVfs>) -> Self {
|
||||
Self(vfs)
|
||||
}
|
||||
|
||||
fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {
|
||||
if self.0.is_path_within(path) {
|
||||
Err(FsError::NotSupported)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn copy_to_real_path(
|
||||
&self,
|
||||
oldpath: &Path,
|
||||
newpath: &Path,
|
||||
) -> std::io::Result<u64> {
|
||||
let old_file = self.0.file_entry(oldpath)?;
|
||||
let old_file_bytes =
|
||||
self.0.read_file_all(old_file, VfsFileSubDataKind::Raw)?;
|
||||
let len = old_file_bytes.len() as u64;
|
||||
RealFs
|
||||
.write_file_sync(
|
||||
newpath,
|
||||
OpenOptions {
|
||||
read: false,
|
||||
write: true,
|
||||
create: true,
|
||||
truncate: true,
|
||||
append: false,
|
||||
create_new: false,
|
||||
mode: None,
|
||||
},
|
||||
None,
|
||||
&old_file_bytes,
|
||||
)
|
||||
.map_err(|err| err.into_io_error())?;
|
||||
Ok(len)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait(?Send)]
|
||||
impl FileSystem for DenoCompileFileSystem {
|
||||
fn cwd(&self) -> FsResult<PathBuf> {
|
||||
RealFs.cwd()
|
||||
}
|
||||
|
||||
fn tmp_dir(&self) -> FsResult<PathBuf> {
|
||||
RealFs.tmp_dir()
|
||||
}
|
||||
|
||||
fn chdir(&self, path: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.chdir(path)
|
||||
}
|
||||
|
||||
fn umask(&self, mask: Option<u32>) -> FsResult<u32> {
|
||||
RealFs.umask(mask)
|
||||
}
|
||||
|
||||
fn open_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
options: OpenOptions,
|
||||
access_check: Option<AccessCheckCb>,
|
||||
) -> FsResult<Rc<dyn File>> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(Rc::new(self.0.open_file(path)?))
|
||||
} else {
|
||||
RealFs.open_sync(path, options, access_check)
|
||||
}
|
||||
}
|
||||
async fn open_async<'a>(
|
||||
&'a self,
|
||||
path: PathBuf,
|
||||
options: OpenOptions,
|
||||
access_check: Option<AccessCheckCb<'a>>,
|
||||
) -> FsResult<Rc<dyn File>> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(Rc::new(self.0.open_file(&path)?))
|
||||
} else {
|
||||
RealFs.open_async(path, options, access_check).await
|
||||
}
|
||||
}
|
||||
|
||||
fn mkdir_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
recursive: bool,
|
||||
mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.mkdir_sync(path, recursive, mode)
|
||||
}
|
||||
async fn mkdir_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
recursive: bool,
|
||||
mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.mkdir_async(path, recursive, mode).await
|
||||
}
|
||||
|
||||
fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.chmod_sync(path, mode)
|
||||
}
|
||||
async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.chmod_async(path, mode).await
|
||||
}
|
||||
|
||||
fn chown_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
uid: Option<u32>,
|
||||
gid: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.chown_sync(path, uid, gid)
|
||||
}
|
||||
async fn chown_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
uid: Option<u32>,
|
||||
gid: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.chown_async(path, uid, gid).await
|
||||
}
|
||||
|
||||
fn lchown_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
uid: Option<u32>,
|
||||
gid: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.lchown_sync(path, uid, gid)
|
||||
}
|
||||
|
||||
async fn lchown_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
uid: Option<u32>,
|
||||
gid: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.lchown_async(path, uid, gid).await
|
||||
}
|
||||
|
||||
fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.remove_sync(path, recursive)
|
||||
}
|
||||
async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.remove_async(path, recursive).await
|
||||
}
|
||||
|
||||
fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
if self.0.is_path_within(oldpath) {
|
||||
self
|
||||
.copy_to_real_path(oldpath, newpath)
|
||||
.map(|_| ())
|
||||
.map_err(FsError::Io)
|
||||
} else {
|
||||
RealFs.copy_file_sync(oldpath, newpath)
|
||||
}
|
||||
}
|
||||
async fn copy_file_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
if self.0.is_path_within(&oldpath) {
|
||||
let fs = self.clone();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
fs.copy_to_real_path(&oldpath, &newpath)
|
||||
.map(|_| ())
|
||||
.map_err(FsError::Io)
|
||||
})
|
||||
.await?
|
||||
} else {
|
||||
RealFs.copy_file_async(oldpath, newpath).await
|
||||
}
|
||||
}
|
||||
|
||||
fn cp_sync(&self, from: &Path, to: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(to)?;
|
||||
|
||||
RealFs.cp_sync(from, to)
|
||||
}
|
||||
async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&to)?;
|
||||
|
||||
RealFs.cp_async(from, to).await
|
||||
}
|
||||
|
||||
fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.stat(path)?.as_fs_stat())
|
||||
} else {
|
||||
RealFs.stat_sync(path)
|
||||
}
|
||||
}
|
||||
async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.stat(&path)?.as_fs_stat())
|
||||
} else {
|
||||
RealFs.stat_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn lstat_sync(&self, path: &Path) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.lstat(path)?.as_fs_stat())
|
||||
} else {
|
||||
RealFs.lstat_sync(path)
|
||||
}
|
||||
}
|
||||
async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.lstat(&path)?.as_fs_stat())
|
||||
} else {
|
||||
RealFs.lstat_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn realpath_sync(&self, path: &Path) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.canonicalize(path)?)
|
||||
} else {
|
||||
RealFs.realpath_sync(path)
|
||||
}
|
||||
}
|
||||
async fn realpath_async(&self, path: PathBuf) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.canonicalize(&path)?)
|
||||
} else {
|
||||
RealFs.realpath_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn read_dir_sync(&self, path: &Path) -> FsResult<Vec<FsDirEntry>> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.read_dir(path)?)
|
||||
} else {
|
||||
RealFs.read_dir_sync(path)
|
||||
}
|
||||
}
|
||||
async fn read_dir_async(&self, path: PathBuf) -> FsResult<Vec<FsDirEntry>> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.read_dir(&path)?)
|
||||
} else {
|
||||
RealFs.read_dir_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(oldpath)?;
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
RealFs.rename_sync(oldpath, newpath)
|
||||
}
|
||||
async fn rename_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&oldpath)?;
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
RealFs.rename_async(oldpath, newpath).await
|
||||
}
|
||||
|
||||
fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(oldpath)?;
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
RealFs.link_sync(oldpath, newpath)
|
||||
}
|
||||
async fn link_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&oldpath)?;
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
RealFs.link_async(oldpath, newpath).await
|
||||
}
|
||||
|
||||
fn symlink_sync(
|
||||
&self,
|
||||
oldpath: &Path,
|
||||
newpath: &Path,
|
||||
file_type: Option<FsFileType>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(oldpath)?;
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
RealFs.symlink_sync(oldpath, newpath, file_type)
|
||||
}
|
||||
async fn symlink_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
file_type: Option<FsFileType>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&oldpath)?;
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
RealFs.symlink_async(oldpath, newpath, file_type).await
|
||||
}
|
||||
|
||||
fn read_link_sync(&self, path: &Path) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.read_link(path)?)
|
||||
} else {
|
||||
RealFs.read_link_sync(path)
|
||||
}
|
||||
}
|
||||
async fn read_link_async(&self, path: PathBuf) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.read_link(&path)?)
|
||||
} else {
|
||||
RealFs.read_link_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.truncate_sync(path, len)
|
||||
}
|
||||
async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.truncate_async(path, len).await
|
||||
}
|
||||
|
||||
fn utime_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
atime_secs: i64,
|
||||
atime_nanos: u32,
|
||||
mtime_secs: i64,
|
||||
mtime_nanos: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.utime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
|
||||
}
|
||||
async fn utime_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
atime_secs: i64,
|
||||
atime_nanos: u32,
|
||||
mtime_secs: i64,
|
||||
mtime_nanos: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs
|
||||
.utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
|
||||
.await
|
||||
}
|
||||
|
||||
fn lutime_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
atime_secs: i64,
|
||||
atime_nanos: u32,
|
||||
mtime_secs: i64,
|
||||
mtime_nanos: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.lutime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
|
||||
}
|
||||
async fn lutime_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
atime_secs: i64,
|
||||
atime_nanos: u32,
|
||||
mtime_secs: i64,
|
||||
mtime_nanos: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs
|
||||
.lutime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsHardLink for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
|
||||
self.link_sync(src, dst).map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsRead for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn base_fs_read(&self, path: &Path) -> std::io::Result<Cow<'static, [u8]>> {
|
||||
self
|
||||
.read_file_sync(path, None)
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsMetadataValue for FileBackedVfsMetadata {
|
||||
fn file_type(&self) -> sys_traits::FileType {
|
||||
self.file_type
|
||||
}
|
||||
|
||||
fn len(&self) -> u64 {
|
||||
self.len
|
||||
}
|
||||
|
||||
fn accessed(&self) -> std::io::Result<SystemTime> {
|
||||
Err(not_supported("accessed time"))
|
||||
}
|
||||
|
||||
fn created(&self) -> std::io::Result<SystemTime> {
|
||||
Err(not_supported("created time"))
|
||||
}
|
||||
|
||||
fn changed(&self) -> std::io::Result<SystemTime> {
|
||||
Err(not_supported("changed time"))
|
||||
}
|
||||
|
||||
fn modified(&self) -> std::io::Result<SystemTime> {
|
||||
Err(not_supported("modified time"))
|
||||
}
|
||||
|
||||
fn dev(&self) -> std::io::Result<u64> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn ino(&self) -> std::io::Result<u64> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn mode(&self) -> std::io::Result<u32> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn nlink(&self) -> std::io::Result<u64> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn uid(&self) -> std::io::Result<u32> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn gid(&self) -> std::io::Result<u32> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn rdev(&self) -> std::io::Result<u64> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn blksize(&self) -> std::io::Result<u64> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn blocks(&self) -> std::io::Result<u64> {
|
||||
Ok(0)
|
||||
}
|
||||
|
||||
fn is_block_device(&self) -> std::io::Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn is_char_device(&self) -> std::io::Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn is_fifo(&self) -> std::io::Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn is_socket(&self) -> std::io::Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn file_attributes(&self) -> std::io::Result<u32> {
|
||||
Ok(0)
|
||||
}
|
||||
}
|
||||
|
||||
fn not_supported(name: &str) -> std::io::Error {
|
||||
std::io::Error::new(
|
||||
ErrorKind::Unsupported,
|
||||
format!(
|
||||
"{} is not supported for an embedded deno compile file",
|
||||
name
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
impl sys_traits::FsDirEntry for FileBackedVfsDirEntry {
|
||||
type Metadata = BoxedFsMetadataValue;
|
||||
|
||||
fn file_name(&self) -> Cow<std::ffi::OsStr> {
|
||||
Cow::Borrowed(self.metadata.name.as_ref())
|
||||
}
|
||||
|
||||
fn file_type(&self) -> std::io::Result<sys_traits::FileType> {
|
||||
Ok(self.metadata.file_type)
|
||||
}
|
||||
|
||||
fn metadata(&self) -> std::io::Result<Self::Metadata> {
|
||||
Ok(BoxedFsMetadataValue(Box::new(self.metadata.clone())))
|
||||
}
|
||||
|
||||
fn path(&self) -> Cow<Path> {
|
||||
Cow::Owned(self.parent_path.join(&self.metadata.name))
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsReadDir for DenoCompileFileSystem {
|
||||
type ReadDirEntry = BoxedFsDirEntry;
|
||||
|
||||
fn base_fs_read_dir(
|
||||
&self,
|
||||
path: &Path,
|
||||
) -> std::io::Result<
|
||||
Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
|
||||
> {
|
||||
if self.0.is_path_within(path) {
|
||||
let entries = self.0.read_dir_with_metadata(path)?;
|
||||
Ok(Box::new(
|
||||
entries.map(|entry| Ok(BoxedFsDirEntry::new(entry))),
|
||||
))
|
||||
} else {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.fs_read_dir_boxed(path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsCanonicalize for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn base_fs_canonicalize(&self, path: &Path) -> std::io::Result<PathBuf> {
|
||||
self.realpath_sync(path).map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsMetadata for DenoCompileFileSystem {
|
||||
type Metadata = BoxedFsMetadataValue;
|
||||
|
||||
#[inline]
|
||||
fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(BoxedFsMetadataValue::new(self.0.stat(path)?))
|
||||
} else {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.fs_metadata_boxed(path)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn base_fs_symlink_metadata(
|
||||
&self,
|
||||
path: &Path,
|
||||
) -> std::io::Result<Self::Metadata> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(BoxedFsMetadataValue::new(self.0.lstat(path)?))
|
||||
} else {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.fs_symlink_metadata_boxed(path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsCopy for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn base_fs_copy(&self, from: &Path, to: &Path) -> std::io::Result<u64> {
|
||||
self
|
||||
.error_if_in_vfs(to)
|
||||
.map_err(|err| err.into_io_error())?;
|
||||
if self.0.is_path_within(from) {
|
||||
self.copy_to_real_path(from, to)
|
||||
} else {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.fs_copy(from, to)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsCloneFile for DenoCompileFileSystem {
|
||||
fn base_fs_clone_file(
|
||||
&self,
|
||||
_from: &Path,
|
||||
_to: &Path,
|
||||
) -> std::io::Result<()> {
|
||||
// will cause a fallback in the code that uses this
|
||||
Err(not_supported("cloning files"))
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsCreateDir for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn base_fs_create_dir(
|
||||
&self,
|
||||
path: &Path,
|
||||
options: &sys_traits::CreateDirOptions,
|
||||
) -> std::io::Result<()> {
|
||||
self
|
||||
.mkdir_sync(path, options.recursive, options.mode)
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsRemoveFile for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn base_fs_remove_file(&self, path: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.remove_sync(path, false)
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsRename for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn base_fs_rename(&self, from: &Path, to: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.rename_sync(from, to)
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
pub enum FsFileAdapter {
|
||||
Real(sys_traits::impls::RealFsFile),
|
||||
Vfs(FileBackedVfsFile),
|
||||
}
|
||||
|
||||
impl sys_traits::FsFile for FsFileAdapter {}
|
||||
|
||||
impl sys_traits::FsFileAsRaw for FsFileAdapter {
|
||||
#[cfg(windows)]
|
||||
fn fs_file_as_raw_handle(&self) -> Option<std::os::windows::io::RawHandle> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_as_raw_handle(),
|
||||
Self::Vfs(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn fs_file_as_raw_fd(&self) -> Option<std::os::fd::RawFd> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_as_raw_fd(),
|
||||
Self::Vfs(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsFileSyncData for FsFileAdapter {
|
||||
fn fs_file_sync_data(&mut self) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_sync_data(),
|
||||
Self::Vfs(_) => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsFileSyncAll for FsFileAdapter {
|
||||
fn fs_file_sync_all(&mut self) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_sync_all(),
|
||||
Self::Vfs(_) => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsFileSetPermissions for FsFileAdapter {
|
||||
#[inline]
|
||||
fn fs_file_set_permissions(&mut self, mode: u32) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_set_permissions(mode),
|
||||
Self::Vfs(_) => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::io::Read for FsFileAdapter {
|
||||
#[inline]
|
||||
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
|
||||
match self {
|
||||
Self::Real(file) => file.read(buf),
|
||||
Self::Vfs(file) => file.read_to_buf(buf),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::io::Seek for FsFileAdapter {
|
||||
fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
|
||||
match self {
|
||||
Self::Real(file) => file.seek(pos),
|
||||
Self::Vfs(file) => file.seek(pos),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::io::Write for FsFileAdapter {
|
||||
#[inline]
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
match self {
|
||||
Self::Real(file) => file.write(buf),
|
||||
Self::Vfs(_) => Err(not_supported("writing files")),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.flush(),
|
||||
Self::Vfs(_) => Err(not_supported("writing files")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsFileSetLen for FsFileAdapter {
|
||||
#[inline]
|
||||
fn fs_file_set_len(&mut self, len: u64) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_set_len(len),
|
||||
Self::Vfs(_) => Err(not_supported("setting file length")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsFileSetTimes for FsFileAdapter {
|
||||
fn fs_file_set_times(
|
||||
&mut self,
|
||||
times: sys_traits::FsFileTimes,
|
||||
) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_set_times(times),
|
||||
Self::Vfs(_) => Err(not_supported("setting file times")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsFileLock for FsFileAdapter {
|
||||
fn fs_file_lock(
|
||||
&mut self,
|
||||
mode: sys_traits::FsFileLockMode,
|
||||
) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_lock(mode),
|
||||
Self::Vfs(_) => Err(not_supported("locking files")),
|
||||
}
|
||||
}
|
||||
|
||||
fn fs_file_try_lock(
|
||||
&mut self,
|
||||
mode: sys_traits::FsFileLockMode,
|
||||
) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_try_lock(mode),
|
||||
Self::Vfs(_) => Err(not_supported("locking files")),
|
||||
}
|
||||
}
|
||||
|
||||
fn fs_file_unlock(&mut self) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_unlock(),
|
||||
Self::Vfs(_) => Err(not_supported("unlocking files")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::FsFileIsTerminal for FsFileAdapter {
|
||||
#[inline]
|
||||
fn fs_file_is_terminal(&self) -> bool {
|
||||
match self {
|
||||
Self::Real(file) => file.fs_file_is_terminal(),
|
||||
Self::Vfs(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsOpen for DenoCompileFileSystem {
|
||||
type File = FsFileAdapter;
|
||||
|
||||
fn base_fs_open(
|
||||
&self,
|
||||
path: &Path,
|
||||
options: &sys_traits::OpenOptions,
|
||||
) -> std::io::Result<Self::File> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(FsFileAdapter::Vfs(self.0.open_file(path)?))
|
||||
} else {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
Ok(FsFileAdapter::Real(
|
||||
sys_traits::impls::RealSys.base_fs_open(path, options)?,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsSymlinkDir for DenoCompileFileSystem {
|
||||
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.symlink_sync(src, dst, Some(FsFileType::Directory))
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::SystemRandom for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.sys_random(buf)
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::SystemTimeNow for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn sys_time_now(&self) -> SystemTime {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.sys_time_now()
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::ThreadSleep for DenoCompileFileSystem {
|
||||
#[inline]
|
||||
fn thread_sleep(&self, dur: Duration) {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.thread_sleep(dur)
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::EnvCurrentDir for DenoCompileFileSystem {
|
||||
fn env_current_dir(&self) -> std::io::Result<PathBuf> {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.env_current_dir()
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseEnvVar for DenoCompileFileSystem {
|
||||
fn base_env_var_os(
|
||||
&self,
|
||||
key: &std::ffi::OsStr,
|
||||
) -> Option<std::ffi::OsString> {
|
||||
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
|
||||
sys_traits::impls::RealSys.base_env_var_os(key)
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load diff
|
@ -1,786 +0,0 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::cell::Cell;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Write;
|
||||
|
||||
use capacity_builder::BytesAppendable;
|
||||
use deno_ast::swc::common::source_map;
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::FastString;
|
||||
use deno_core::ModuleSourceCode;
|
||||
use deno_core::ModuleType;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::StackString;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
use super::binary::Metadata;
|
||||
use super::virtual_fs::BuiltVfs;
|
||||
use super::virtual_fs::FileSystemCaseSensitivity;
|
||||
use super::virtual_fs::VfsBuilder;
|
||||
use super::virtual_fs::VirtualDirectoryEntries;
|
||||
use crate::standalone::virtual_fs::VirtualDirectory;
|
||||
|
||||
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
|
||||
|
||||
/// Binary format:
|
||||
/// * d3n0l4nd
|
||||
/// * <metadata_len><metadata>
|
||||
/// * <npm_snapshot_len><npm_snapshot>
|
||||
/// * <remote_modules>
|
||||
/// * <vfs_headers_len><vfs_headers>
|
||||
/// * <vfs_file_data_len><vfs_file_data>
|
||||
/// * <source_map_data>
|
||||
/// * d3n0l4nd
|
||||
pub fn serialize_binary_data_section(
|
||||
metadata: &Metadata,
|
||||
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
|
||||
remote_modules: &RemoteModulesStoreBuilder,
|
||||
source_map_store: &SourceMapStore,
|
||||
vfs: &BuiltVfs,
|
||||
) -> Result<Vec<u8>, AnyError> {
|
||||
let metadata = serde_json::to_string(metadata)?;
|
||||
let npm_snapshot =
|
||||
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
|
||||
let serialized_vfs = serde_json::to_string(&vfs.entries)?;
|
||||
|
||||
let bytes = capacity_builder::BytesBuilder::build(|builder| {
|
||||
builder.append(MAGIC_BYTES);
|
||||
// 1. Metadata
|
||||
{
|
||||
builder.append_le(metadata.len() as u64);
|
||||
builder.append(&metadata);
|
||||
}
|
||||
// 2. Npm snapshot
|
||||
{
|
||||
builder.append_le(npm_snapshot.len() as u64);
|
||||
builder.append(&npm_snapshot);
|
||||
}
|
||||
// 3. Remote modules
|
||||
{
|
||||
remote_modules.write(builder);
|
||||
}
|
||||
// 4. VFS
|
||||
{
|
||||
builder.append_le(serialized_vfs.len() as u64);
|
||||
builder.append(&serialized_vfs);
|
||||
let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
|
||||
builder.append_le(vfs_bytes_len);
|
||||
for file in &vfs.files {
|
||||
builder.append(file);
|
||||
}
|
||||
}
|
||||
// 5. Source maps
|
||||
{
|
||||
builder.append_le(source_map_store.data.len() as u32);
|
||||
for (specifier, source_map) in &source_map_store.data {
|
||||
builder.append_le(specifier.len() as u32);
|
||||
builder.append(specifier);
|
||||
builder.append_le(source_map.len() as u32);
|
||||
builder.append(source_map.as_ref());
|
||||
}
|
||||
}
|
||||
|
||||
// write the magic bytes at the end so we can use it
|
||||
// to make sure we've deserialized correctly
|
||||
builder.append(MAGIC_BYTES);
|
||||
})?;
|
||||
|
||||
Ok(bytes)
|
||||
}
|
||||
|
||||
pub struct DeserializedDataSection {
|
||||
pub metadata: Metadata,
|
||||
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||
pub remote_modules: RemoteModulesStore,
|
||||
pub source_maps: SourceMapStore,
|
||||
pub vfs_root_entries: VirtualDirectoryEntries,
|
||||
pub vfs_files_data: &'static [u8],
|
||||
}
|
||||
|
||||
pub fn deserialize_binary_data_section(
|
||||
data: &'static [u8],
|
||||
) -> Result<Option<DeserializedDataSection>, AnyError> {
|
||||
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
|
||||
if input.len() < MAGIC_BYTES.len() {
|
||||
bail!("Unexpected end of data. Could not find magic bytes.");
|
||||
}
|
||||
let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
|
||||
if magic_bytes != MAGIC_BYTES {
|
||||
return Ok((input, false));
|
||||
}
|
||||
Ok((input, true))
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn read_source_map_entry(
|
||||
input: &[u8],
|
||||
) -> Result<(&[u8], (Cow<str>, &[u8])), AnyError> {
|
||||
let (input, specifier) = read_string_lossy(input)?;
|
||||
let (input, source_map) = read_bytes_with_u32_len(input)?;
|
||||
Ok((input, (specifier, source_map)))
|
||||
}
|
||||
|
||||
let (input, found) = read_magic_bytes(data)?;
|
||||
if !found {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// 1. Metadata
|
||||
let (input, data) =
|
||||
read_bytes_with_u64_len(input).context("reading metadata")?;
|
||||
let metadata: Metadata =
|
||||
serde_json::from_slice(data).context("deserializing metadata")?;
|
||||
// 2. Npm snapshot
|
||||
let (input, data) =
|
||||
read_bytes_with_u64_len(input).context("reading npm snapshot")?;
|
||||
let npm_snapshot = if data.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
|
||||
};
|
||||
// 3. Remote modules
|
||||
let (input, remote_modules) =
|
||||
RemoteModulesStore::build(input).context("deserializing remote modules")?;
|
||||
// 4. VFS
|
||||
let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
|
||||
let vfs_root_entries: VirtualDirectoryEntries =
|
||||
serde_json::from_slice(data).context("deserializing vfs data")?;
|
||||
let (input, vfs_files_data) =
|
||||
read_bytes_with_u64_len(input).context("reading vfs files data")?;
|
||||
// 5. Source maps
|
||||
let (mut input, source_map_data_len) = read_u32_as_usize(input)?;
|
||||
let mut source_maps = SourceMapStore::with_capacity(source_map_data_len);
|
||||
for _ in 0..source_map_data_len {
|
||||
let (current_input, (specifier, source_map)) =
|
||||
read_source_map_entry(input)?;
|
||||
input = current_input;
|
||||
source_maps.add(specifier, Cow::Borrowed(source_map));
|
||||
}
|
||||
|
||||
// finally ensure we read the magic bytes at the end
|
||||
let (_input, found) = read_magic_bytes(input)?;
|
||||
if !found {
|
||||
bail!("Could not find magic bytes at the end of the data.");
|
||||
}
|
||||
|
||||
Ok(Some(DeserializedDataSection {
|
||||
metadata,
|
||||
npm_snapshot,
|
||||
remote_modules,
|
||||
source_maps,
|
||||
vfs_root_entries,
|
||||
vfs_files_data,
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct RemoteModulesStoreBuilder {
|
||||
specifiers: Vec<(String, u64)>,
|
||||
data: Vec<(MediaType, Vec<u8>, Option<Vec<u8>>)>,
|
||||
data_byte_len: u64,
|
||||
redirects: Vec<(String, String)>,
|
||||
redirects_len: u64,
|
||||
}
|
||||
|
||||
impl RemoteModulesStoreBuilder {
|
||||
pub fn add(
|
||||
&mut self,
|
||||
specifier: &Url,
|
||||
media_type: MediaType,
|
||||
data: Vec<u8>,
|
||||
maybe_transpiled: Option<Vec<u8>>,
|
||||
) {
|
||||
log::debug!("Adding '{}' ({})", specifier, media_type);
|
||||
let specifier = specifier.to_string();
|
||||
self.specifiers.push((specifier, self.data_byte_len));
|
||||
let maybe_transpiled_len = match &maybe_transpiled {
|
||||
// data length (4 bytes), data
|
||||
Some(data) => 4 + data.len() as u64,
|
||||
None => 0,
|
||||
};
|
||||
// media type (1 byte), data length (4 bytes), data, has transpiled (1 byte), transpiled length
|
||||
self.data_byte_len += 1 + 4 + data.len() as u64 + 1 + maybe_transpiled_len;
|
||||
self.data.push((media_type, data, maybe_transpiled));
|
||||
}
|
||||
|
||||
pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
|
||||
self.redirects.reserve(redirects.len());
|
||||
for (from, to) in redirects {
|
||||
log::debug!("Adding redirect '{}' -> '{}'", from, to);
|
||||
let from = from.to_string();
|
||||
let to = to.to_string();
|
||||
self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
|
||||
self.redirects.push((from, to));
|
||||
}
|
||||
}
|
||||
|
||||
fn write<'a, TBytes: capacity_builder::BytesType>(
|
||||
&'a self,
|
||||
builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
|
||||
) {
|
||||
builder.append_le(self.specifiers.len() as u32);
|
||||
builder.append_le(self.redirects.len() as u32);
|
||||
for (specifier, offset) in &self.specifiers {
|
||||
builder.append_le(specifier.len() as u32);
|
||||
builder.append(specifier);
|
||||
builder.append_le(*offset);
|
||||
}
|
||||
for (from, to) in &self.redirects {
|
||||
builder.append_le(from.len() as u32);
|
||||
builder.append(from);
|
||||
builder.append_le(to.len() as u32);
|
||||
builder.append(to);
|
||||
}
|
||||
builder.append_le(
|
||||
self
|
||||
.data
|
||||
.iter()
|
||||
.map(|(_, data, maybe_transpiled)| {
|
||||
1 + 4
|
||||
+ (data.len() as u64)
|
||||
+ 1
|
||||
+ match maybe_transpiled {
|
||||
Some(transpiled) => 4 + (transpiled.len() as u64),
|
||||
None => 0,
|
||||
}
|
||||
})
|
||||
.sum::<u64>(),
|
||||
);
|
||||
for (media_type, data, maybe_transpiled) in &self.data {
|
||||
builder.append(serialize_media_type(*media_type));
|
||||
builder.append_le(data.len() as u32);
|
||||
builder.append(data);
|
||||
if let Some(transpiled) = maybe_transpiled {
|
||||
builder.append(1);
|
||||
builder.append_le(transpiled.len() as u32);
|
||||
builder.append(transpiled);
|
||||
} else {
|
||||
builder.append(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum DenoCompileModuleSource {
|
||||
String(&'static str),
|
||||
Bytes(Cow<'static, [u8]>),
|
||||
}
|
||||
|
||||
impl DenoCompileModuleSource {
|
||||
pub fn into_for_v8(self) -> ModuleSourceCode {
|
||||
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
|
||||
ModuleSourceCode::Bytes(match data {
|
||||
Cow::Borrowed(d) => d.into(),
|
||||
Cow::Owned(d) => d.into_boxed_slice().into(),
|
||||
})
|
||||
}
|
||||
|
||||
match self {
|
||||
// todo(https://github.com/denoland/deno_core/pull/943): store whether
|
||||
// the string is ascii or not ahead of time so we can avoid the is_ascii()
|
||||
// check in FastString::from_static
|
||||
Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
|
||||
Self::Bytes(b) => into_bytes(b),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SourceMapStore {
|
||||
data: IndexMap<Cow<'static, str>, Cow<'static, [u8]>>,
|
||||
}
|
||||
|
||||
impl SourceMapStore {
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
Self {
|
||||
data: IndexMap::with_capacity(capacity),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add(
|
||||
&mut self,
|
||||
specifier: Cow<'static, str>,
|
||||
source_map: Cow<'static, [u8]>,
|
||||
) {
|
||||
self.data.insert(specifier, source_map);
|
||||
}
|
||||
|
||||
pub fn get(&self, specifier: &str) -> Option<&[u8]> {
|
||||
self.data.get(specifier).map(|v| v.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DenoCompileModuleData<'a> {
|
||||
pub specifier: &'a Url,
|
||||
pub media_type: MediaType,
|
||||
pub data: Cow<'static, [u8]>,
|
||||
}
|
||||
|
||||
impl<'a> DenoCompileModuleData<'a> {
|
||||
pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
|
||||
fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
|
||||
match data {
|
||||
Cow::Borrowed(d) => DenoCompileModuleSource::String(
|
||||
// SAFETY: we know this is a valid utf8 string
|
||||
unsafe { std::str::from_utf8_unchecked(d) },
|
||||
),
|
||||
Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
|
||||
}
|
||||
}
|
||||
|
||||
let (media_type, source) = match self.media_type {
|
||||
MediaType::JavaScript
|
||||
| MediaType::Jsx
|
||||
| MediaType::Mjs
|
||||
| MediaType::Cjs
|
||||
| MediaType::TypeScript
|
||||
| MediaType::Mts
|
||||
| MediaType::Cts
|
||||
| MediaType::Dts
|
||||
| MediaType::Dmts
|
||||
| MediaType::Dcts
|
||||
| MediaType::Tsx => {
|
||||
(ModuleType::JavaScript, into_string_unsafe(self.data))
|
||||
}
|
||||
MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
|
||||
MediaType::Wasm => {
|
||||
(ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data))
|
||||
}
|
||||
// just assume javascript if we made it here
|
||||
MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
|
||||
ModuleType::JavaScript,
|
||||
DenoCompileModuleSource::Bytes(self.data),
|
||||
),
|
||||
};
|
||||
(self.specifier, media_type, source)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RemoteModuleEntry<'a> {
|
||||
pub specifier: &'a Url,
|
||||
pub media_type: MediaType,
|
||||
pub data: Cow<'static, [u8]>,
|
||||
pub transpiled_data: Option<Cow<'static, [u8]>>,
|
||||
}
|
||||
|
||||
enum RemoteModulesStoreSpecifierValue {
|
||||
Data(usize),
|
||||
Redirect(Url),
|
||||
}
|
||||
|
||||
pub struct RemoteModulesStore {
|
||||
specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
|
||||
files_data: &'static [u8],
|
||||
}
|
||||
|
||||
impl RemoteModulesStore {
|
||||
fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> {
|
||||
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
|
||||
let (input, specifier) = read_string_lossy(input)?;
|
||||
let specifier = Url::parse(&specifier)?;
|
||||
let (input, offset) = read_u64(input)?;
|
||||
Ok((input, (specifier, offset)))
|
||||
}
|
||||
|
||||
fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
|
||||
let (input, from) = read_string_lossy(input)?;
|
||||
let from = Url::parse(&from)?;
|
||||
let (input, to) = read_string_lossy(input)?;
|
||||
let to = Url::parse(&to)?;
|
||||
Ok((input, (from, to)))
|
||||
}
|
||||
|
||||
fn read_headers(
|
||||
input: &[u8],
|
||||
) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
|
||||
{
|
||||
let (input, specifiers_len) = read_u32_as_usize(input)?;
|
||||
let (mut input, redirects_len) = read_u32_as_usize(input)?;
|
||||
let mut specifiers =
|
||||
HashMap::with_capacity(specifiers_len + redirects_len);
|
||||
for _ in 0..specifiers_len {
|
||||
let (current_input, (specifier, offset)) =
|
||||
read_specifier(input).context("reading specifier")?;
|
||||
input = current_input;
|
||||
specifiers.insert(
|
||||
specifier,
|
||||
RemoteModulesStoreSpecifierValue::Data(offset as usize),
|
||||
);
|
||||
}
|
||||
|
||||
for _ in 0..redirects_len {
|
||||
let (current_input, (from, to)) = read_redirect(input)?;
|
||||
input = current_input;
|
||||
specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
|
||||
}
|
||||
|
||||
Ok((input, specifiers))
|
||||
}
|
||||
|
||||
let (input, specifiers) = read_headers(input)?;
|
||||
let (input, files_data) = read_bytes_with_u64_len(input)?;
|
||||
|
||||
Ok((
|
||||
input,
|
||||
Self {
|
||||
specifiers,
|
||||
files_data,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
pub fn resolve_specifier<'a>(
|
||||
&'a self,
|
||||
specifier: &'a Url,
|
||||
) -> Result<Option<&'a Url>, AnyError> {
|
||||
let mut count = 0;
|
||||
let mut current = specifier;
|
||||
loop {
|
||||
if count > 10 {
|
||||
bail!("Too many redirects resolving '{}'", specifier);
|
||||
}
|
||||
match self.specifiers.get(current) {
|
||||
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
|
||||
current = to;
|
||||
count += 1;
|
||||
}
|
||||
Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
|
||||
return Ok(Some(current));
|
||||
}
|
||||
None => {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read<'a>(
|
||||
&'a self,
|
||||
original_specifier: &'a Url,
|
||||
) -> Result<Option<RemoteModuleEntry<'a>>, AnyError> {
|
||||
let mut count = 0;
|
||||
let mut specifier = original_specifier;
|
||||
loop {
|
||||
if count > 10 {
|
||||
bail!("Too many redirects resolving '{}'", original_specifier);
|
||||
}
|
||||
match self.specifiers.get(specifier) {
|
||||
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
|
||||
specifier = to;
|
||||
count += 1;
|
||||
}
|
||||
Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
|
||||
let input = &self.files_data[*offset..];
|
||||
let (input, media_type_byte) = read_bytes(input, 1)?;
|
||||
let media_type = deserialize_media_type(media_type_byte[0])?;
|
||||
let (input, data) = read_bytes_with_u32_len(input)?;
|
||||
check_has_len(input, 1)?;
|
||||
let (input, has_transpiled) = (&input[1..], input[0]);
|
||||
let (_, transpiled_data) = match has_transpiled {
|
||||
0 => (input, None),
|
||||
1 => {
|
||||
let (input, data) = read_bytes_with_u32_len(input)?;
|
||||
(input, Some(data))
|
||||
}
|
||||
value => bail!(
|
||||
"Invalid transpiled data flag: {}. Compiled data is corrupt.",
|
||||
value
|
||||
),
|
||||
};
|
||||
return Ok(Some(RemoteModuleEntry {
|
||||
specifier,
|
||||
media_type,
|
||||
data: Cow::Borrowed(data),
|
||||
transpiled_data: transpiled_data.map(Cow::Borrowed),
|
||||
}));
|
||||
}
|
||||
None => {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_npm_snapshot(
|
||||
mut snapshot: SerializedNpmResolutionSnapshot,
|
||||
) -> Vec<u8> {
|
||||
fn append_string(bytes: &mut Vec<u8>, string: &str) {
|
||||
let len = string.len() as u32;
|
||||
bytes.extend_from_slice(&len.to_le_bytes());
|
||||
bytes.extend_from_slice(string.as_bytes());
|
||||
}
|
||||
|
||||
snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
|
||||
let ids_to_stored_ids = snapshot
|
||||
.packages
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, pkg)| (&pkg.id, i as u32))
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
|
||||
root_packages.sort();
|
||||
let mut bytes = Vec::new();
|
||||
|
||||
bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
|
||||
for pkg in &snapshot.packages {
|
||||
append_string(&mut bytes, &pkg.id.as_serialized());
|
||||
}
|
||||
|
||||
bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
|
||||
for (req, id) in root_packages {
|
||||
append_string(&mut bytes, &req.to_string());
|
||||
let id = ids_to_stored_ids.get(&id).unwrap();
|
||||
bytes.extend_from_slice(&id.to_le_bytes());
|
||||
}
|
||||
|
||||
for pkg in &snapshot.packages {
|
||||
let deps_len = pkg.dependencies.len() as u32;
|
||||
bytes.extend_from_slice(&deps_len.to_le_bytes());
|
||||
let mut deps: Vec<_> = pkg.dependencies.iter().collect();
|
||||
deps.sort();
|
||||
for (req, id) in deps {
|
||||
append_string(&mut bytes, req);
|
||||
let id = ids_to_stored_ids.get(&id).unwrap();
|
||||
bytes.extend_from_slice(&id.to_le_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
bytes
|
||||
}
|
||||
|
||||
fn deserialize_npm_snapshot(
|
||||
input: &[u8],
|
||||
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
|
||||
fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
|
||||
let (input, id) = read_string_lossy(input)?;
|
||||
let id = NpmPackageId::from_serialized(&id)?;
|
||||
Ok((input, id))
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_lifetimes)] // clippy bug
|
||||
fn parse_root_package<'a>(
|
||||
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
||||
) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
|
||||
{
|
||||
|input| {
|
||||
let (input, req) = read_string_lossy(input)?;
|
||||
let req = PackageReq::from_str(&req)?;
|
||||
let (input, id) = read_u32_as_usize(input)?;
|
||||
Ok((input, (req, id_to_npm_id(id)?)))
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_lifetimes)] // clippy bug
|
||||
fn parse_package_dep<'a>(
|
||||
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
||||
) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
|
||||
{
|
||||
|input| {
|
||||
let (input, req) = read_string_lossy(input)?;
|
||||
let (input, id) = read_u32_as_usize(input)?;
|
||||
let req = StackString::from_cow(req);
|
||||
Ok((input, (req, id_to_npm_id(id)?)))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_package<'a>(
|
||||
input: &'a [u8],
|
||||
id: NpmPackageId,
|
||||
id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
|
||||
) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
|
||||
let (input, deps_len) = read_u32_as_usize(input)?;
|
||||
let (input, dependencies) =
|
||||
parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
|
||||
Ok((
|
||||
input,
|
||||
SerializedNpmResolutionSnapshotPackage {
|
||||
id,
|
||||
system: Default::default(),
|
||||
dist: Default::default(),
|
||||
dependencies,
|
||||
optional_dependencies: Default::default(),
|
||||
bin: None,
|
||||
scripts: Default::default(),
|
||||
deprecated: Default::default(),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
let (input, packages_len) = read_u32_as_usize(input)?;
|
||||
|
||||
// get a hashmap of all the npm package ids to their serialized ids
|
||||
let (input, data_ids_to_npm_ids) =
|
||||
parse_vec_n_times(input, packages_len, parse_id)
|
||||
.context("deserializing id")?;
|
||||
let data_id_to_npm_id = |id: usize| {
|
||||
data_ids_to_npm_ids
|
||||
.get(id)
|
||||
.cloned()
|
||||
.ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
|
||||
};
|
||||
|
||||
let (input, root_packages_len) = read_u32_as_usize(input)?;
|
||||
let (input, root_packages) = parse_hashmap_n_times(
|
||||
input,
|
||||
root_packages_len,
|
||||
parse_root_package(&data_id_to_npm_id),
|
||||
)
|
||||
.context("deserializing root package")?;
|
||||
let (input, packages) =
|
||||
parse_vec_n_times_with_index(input, packages_len, |input, index| {
|
||||
parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
|
||||
})
|
||||
.context("deserializing package")?;
|
||||
|
||||
if !input.is_empty() {
|
||||
bail!("Unexpected data left over");
|
||||
}
|
||||
|
||||
Ok(
|
||||
SerializedNpmResolutionSnapshot {
|
||||
packages,
|
||||
root_packages,
|
||||
}
|
||||
// this is ok because we have already verified that all the
|
||||
// identifiers found in the snapshot are valid via the
|
||||
// npm package id -> npm package id mapping
|
||||
.into_valid_unsafe(),
|
||||
)
|
||||
}
|
||||
|
||||
fn serialize_media_type(media_type: MediaType) -> u8 {
|
||||
match media_type {
|
||||
MediaType::JavaScript => 0,
|
||||
MediaType::Jsx => 1,
|
||||
MediaType::Mjs => 2,
|
||||
MediaType::Cjs => 3,
|
||||
MediaType::TypeScript => 4,
|
||||
MediaType::Mts => 5,
|
||||
MediaType::Cts => 6,
|
||||
MediaType::Dts => 7,
|
||||
MediaType::Dmts => 8,
|
||||
MediaType::Dcts => 9,
|
||||
MediaType::Tsx => 10,
|
||||
MediaType::Json => 11,
|
||||
MediaType::Wasm => 12,
|
||||
MediaType::Css => 13,
|
||||
MediaType::SourceMap => 14,
|
||||
MediaType::Unknown => 15,
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
|
||||
match value {
|
||||
0 => Ok(MediaType::JavaScript),
|
||||
1 => Ok(MediaType::Jsx),
|
||||
2 => Ok(MediaType::Mjs),
|
||||
3 => Ok(MediaType::Cjs),
|
||||
4 => Ok(MediaType::TypeScript),
|
||||
5 => Ok(MediaType::Mts),
|
||||
6 => Ok(MediaType::Cts),
|
||||
7 => Ok(MediaType::Dts),
|
||||
8 => Ok(MediaType::Dmts),
|
||||
9 => Ok(MediaType::Dcts),
|
||||
10 => Ok(MediaType::Tsx),
|
||||
11 => Ok(MediaType::Json),
|
||||
12 => Ok(MediaType::Wasm),
|
||||
13 => Ok(MediaType::Css),
|
||||
14 => Ok(MediaType::SourceMap),
|
||||
15 => Ok(MediaType::Unknown),
|
||||
_ => bail!("Unknown media type value: {}", value),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
|
||||
mut input: &[u8],
|
||||
times: usize,
|
||||
parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
|
||||
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
|
||||
let mut results = HashMap::with_capacity(times);
|
||||
for _ in 0..times {
|
||||
let result = parse(input);
|
||||
let (new_input, (key, value)) = result?;
|
||||
results.insert(key, value);
|
||||
input = new_input;
|
||||
}
|
||||
Ok((input, results))
|
||||
}
|
||||
|
||||
fn parse_vec_n_times<TResult>(
|
||||
input: &[u8],
|
||||
times: usize,
|
||||
parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
|
||||
) -> Result<(&[u8], Vec<TResult>), AnyError> {
|
||||
parse_vec_n_times_with_index(input, times, |input, _index| parse(input))
|
||||
}
|
||||
|
||||
fn parse_vec_n_times_with_index<TResult>(
|
||||
mut input: &[u8],
|
||||
times: usize,
|
||||
parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
|
||||
) -> Result<(&[u8], Vec<TResult>), AnyError> {
|
||||
let mut results = Vec::with_capacity(times);
|
||||
for i in 0..times {
|
||||
let result = parse(input, i);
|
||||
let (new_input, result) = result?;
|
||||
results.push(result);
|
||||
input = new_input;
|
||||
}
|
||||
Ok((input, results))
|
||||
}
|
||||
|
||||
fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
|
||||
let (input, len) = read_u64(input)?;
|
||||
let (input, data) = read_bytes(input, len as usize)?;
|
||||
Ok((input, data))
|
||||
}
|
||||
|
||||
fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
|
||||
let (input, len) = read_u32_as_usize(input)?;
|
||||
let (input, data) = read_bytes(input, len)?;
|
||||
Ok((input, data))
|
||||
}
|
||||
|
||||
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
|
||||
check_has_len(input, len)?;
|
||||
let (len_bytes, input) = input.split_at(len);
|
||||
Ok((input, len_bytes))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> {
|
||||
if input.len() < len {
|
||||
bail!("Unexpected end of data.");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
|
||||
let (input, data_bytes) = read_bytes_with_u32_len(input)?;
|
||||
Ok((input, String::from_utf8_lossy(data_bytes)))
|
||||
}
|
||||
|
||||
fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
|
||||
let (input, len_bytes) = read_bytes(input, 4)?;
|
||||
let len = u32::from_le_bytes(len_bytes.try_into()?);
|
||||
Ok((input, len as usize))
|
||||
}
|
||||
|
||||
fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
|
||||
let (input, len_bytes) = read_bytes(input, 8)?;
|
||||
let len = u64::from_le_bytes(len_bytes.try_into()?);
|
||||
Ok((input, len))
|
||||
}
|
File diff suppressed because it is too large
Load diff
230
cli/sys.rs
230
cli/sys.rs
|
@ -1,230 +0,0 @@
|
|||
// Copyright 2018-2025 the Deno authors. MIT license.
|
||||
|
||||
// todo(dsherret): this should instead use conditional compilation and directly
|
||||
// surface the underlying implementation.
|
||||
//
|
||||
// The problem atm is that there's no way to have conditional compilation for
|
||||
// denort or the deno binary. We should extract out denort to a separate binary.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use sys_traits::boxed::BoxedFsDirEntry;
|
||||
use sys_traits::boxed::BoxedFsFile;
|
||||
use sys_traits::boxed::BoxedFsMetadataValue;
|
||||
use sys_traits::boxed::FsMetadataBoxed;
|
||||
use sys_traits::boxed::FsOpenBoxed;
|
||||
use sys_traits::boxed::FsReadDirBoxed;
|
||||
use sys_traits::CreateDirOptions;
|
||||
|
||||
use crate::standalone::DenoCompileFileSystem;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CliSys {
|
||||
#[allow(dead_code)] // will be dead code for denort
|
||||
#[allow(clippy::disallowed_types)] // ok because sys impl
|
||||
Real(sys_traits::impls::RealSys),
|
||||
#[allow(dead_code)] // will be dead code for deno
|
||||
DenoCompile(DenoCompileFileSystem),
|
||||
}
|
||||
|
||||
impl Default for CliSys {
|
||||
fn default() -> Self {
|
||||
Self::Real(sys_traits::impls::RealSys)
|
||||
}
|
||||
}
|
||||
|
||||
impl deno_runtime::deno_node::ExtNodeSys for CliSys {}
|
||||
|
||||
impl sys_traits::BaseFsCloneFile for CliSys {
|
||||
fn base_fs_clone_file(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_clone_file(src, dst),
|
||||
Self::DenoCompile(sys) => sys.base_fs_clone_file(src, dst),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsSymlinkDir for CliSys {
|
||||
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_symlink_dir(src, dst),
|
||||
Self::DenoCompile(sys) => sys.base_fs_symlink_dir(src, dst),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsCopy for CliSys {
|
||||
fn base_fs_copy(&self, src: &Path, dst: &Path) -> std::io::Result<u64> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_copy(src, dst),
|
||||
Self::DenoCompile(sys) => sys.base_fs_copy(src, dst),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsHardLink for CliSys {
|
||||
fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_hard_link(src, dst),
|
||||
Self::DenoCompile(sys) => sys.base_fs_hard_link(src, dst),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsRead for CliSys {
|
||||
fn base_fs_read(&self, p: &Path) -> std::io::Result<Cow<'static, [u8]>> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_read(p),
|
||||
Self::DenoCompile(sys) => sys.base_fs_read(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsReadDir for CliSys {
|
||||
type ReadDirEntry = BoxedFsDirEntry;
|
||||
|
||||
fn base_fs_read_dir(
|
||||
&self,
|
||||
p: &Path,
|
||||
) -> std::io::Result<
|
||||
Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
|
||||
> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.fs_read_dir_boxed(p),
|
||||
Self::DenoCompile(sys) => sys.fs_read_dir_boxed(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsCanonicalize for CliSys {
|
||||
fn base_fs_canonicalize(&self, p: &Path) -> std::io::Result<PathBuf> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_canonicalize(p),
|
||||
Self::DenoCompile(sys) => sys.base_fs_canonicalize(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsMetadata for CliSys {
|
||||
type Metadata = BoxedFsMetadataValue;
|
||||
|
||||
fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.fs_metadata_boxed(path),
|
||||
Self::DenoCompile(sys) => sys.fs_metadata_boxed(path),
|
||||
}
|
||||
}
|
||||
|
||||
fn base_fs_symlink_metadata(
|
||||
&self,
|
||||
path: &Path,
|
||||
) -> std::io::Result<Self::Metadata> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.fs_symlink_metadata_boxed(path),
|
||||
Self::DenoCompile(sys) => sys.fs_symlink_metadata_boxed(path),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsCreateDir for CliSys {
|
||||
fn base_fs_create_dir(
|
||||
&self,
|
||||
p: &Path,
|
||||
options: &CreateDirOptions,
|
||||
) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_create_dir(p, options),
|
||||
Self::DenoCompile(sys) => sys.base_fs_create_dir(p, options),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsOpen for CliSys {
|
||||
type File = BoxedFsFile;
|
||||
|
||||
fn base_fs_open(
|
||||
&self,
|
||||
path: &Path,
|
||||
options: &sys_traits::OpenOptions,
|
||||
) -> std::io::Result<Self::File> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.fs_open_boxed(path, options),
|
||||
Self::DenoCompile(sys) => sys.fs_open_boxed(path, options),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsRemoveFile for CliSys {
|
||||
fn base_fs_remove_file(&self, p: &Path) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_remove_file(p),
|
||||
Self::DenoCompile(sys) => sys.base_fs_remove_file(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseFsRename for CliSys {
|
||||
fn base_fs_rename(&self, old: &Path, new: &Path) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_fs_rename(old, new),
|
||||
Self::DenoCompile(sys) => sys.base_fs_rename(old, new),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::SystemRandom for CliSys {
|
||||
fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.sys_random(buf),
|
||||
Self::DenoCompile(sys) => sys.sys_random(buf),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::SystemTimeNow for CliSys {
|
||||
fn sys_time_now(&self) -> std::time::SystemTime {
|
||||
match self {
|
||||
Self::Real(sys) => sys.sys_time_now(),
|
||||
Self::DenoCompile(sys) => sys.sys_time_now(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::ThreadSleep for CliSys {
|
||||
fn thread_sleep(&self, dur: std::time::Duration) {
|
||||
match self {
|
||||
Self::Real(sys) => sys.thread_sleep(dur),
|
||||
Self::DenoCompile(sys) => sys.thread_sleep(dur),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::EnvCurrentDir for CliSys {
|
||||
fn env_current_dir(&self) -> std::io::Result<PathBuf> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.env_current_dir(),
|
||||
Self::DenoCompile(sys) => sys.env_current_dir(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::BaseEnvVar for CliSys {
|
||||
fn base_env_var_os(
|
||||
&self,
|
||||
key: &std::ffi::OsStr,
|
||||
) -> Option<std::ffi::OsString> {
|
||||
match self {
|
||||
Self::Real(sys) => sys.base_env_var_os(key),
|
||||
Self::DenoCompile(sys) => sys.base_env_var_os(key),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sys_traits::EnvHomeDir for CliSys {
|
||||
fn env_home_dir(&self) -> Option<PathBuf> {
|
||||
#[allow(clippy::disallowed_types)] // ok because sys impl
|
||||
sys_traits::impls::RealSys.env_home_dir()
|
||||
}
|
||||
}
|
|
@@ -48,6 +48,7 @@ use crate::util::fs::collect_specifiers;
use crate::util::path::is_script_ext;
use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CreateCustomWorkerError;

mod mitata;
mod reporters;

@@ -164,7 +165,7 @@ async fn bench_specifier(
  .await
  {
    Ok(()) => Ok(()),
    Err(CoreError::Js(error)) => {
    Err(CreateCustomWorkerError::Core(CoreError::Js(error))) => {
      sender.send(BenchEvent::UncaughtError(
        specifier.to_string(),
        Box::new(error),

@@ -182,7 +183,7 @@ async fn bench_specifier_inner(
  specifier: ModuleSpecifier,
  sender: &UnboundedSender<BenchEvent>,
  filter: TestFilter,
) -> Result<(), CoreError> {
) -> Result<(), CreateCustomWorkerError> {
  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Bench,

@@ -201,7 +202,7 @@ async fn bench_specifier_inner(
  // Ensure that there are no pending exceptions before we start running tests
  worker.run_up_to_duration(Duration::from_millis(0)).await?;

  worker.dispatch_load_event()?;
  worker.dispatch_load_event().map_err(CoreError::Js)?;

  let benchmarks = {
    let state_rc = worker.js_runtime.op_state();

@@ -236,11 +237,13 @@ async fn bench_specifier_inner(
      used_only,
      names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(),
    }))
    .map_err(JsErrorBox::from_err)?;
    .map_err(JsErrorBox::from_err)
    .map_err(CoreError::JsBox)?;
  for (desc, function) in benchmarks {
    sender
      .send(BenchEvent::Wait(desc.id))
      .map_err(JsErrorBox::from_err)?;
      .map_err(JsErrorBox::from_err)
      .map_err(CoreError::JsBox)?;
    let call = worker.js_runtime.call(&function);
    let result = worker
      .js_runtime

@@ -249,18 +252,26 @@ async fn bench_specifier_inner(
      let scope = &mut worker.js_runtime.handle_scope();
      let result = v8::Local::new(scope, result);
      let result = serde_v8::from_v8::<BenchResult>(scope, result)
        .map_err(JsErrorBox::from_err)?;
        .map_err(JsErrorBox::from_err)
        .map_err(CoreError::JsBox)?;
      sender
        .send(BenchEvent::Result(desc.id, result))
        .map_err(JsErrorBox::from_err)?;
        .map_err(JsErrorBox::from_err)
        .map_err(CoreError::JsBox)?;
    }

  // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
  // event loop to continue beyond what's needed to await results.
  worker.dispatch_beforeunload_event()?;
  worker.dispatch_process_beforeexit_event()?;
  worker.dispatch_unload_event()?;
  worker.dispatch_process_exit_event()?;
  worker
    .dispatch_beforeunload_event()
    .map_err(CoreError::Js)?;
  worker
    .dispatch_process_beforeexit_event()
    .map_err(CoreError::Js)?;
  worker.dispatch_unload_event().map_err(CoreError::Js)?;
  worker
    .dispatch_process_exit_event()
    .map_err(CoreError::Js)?;

  // Ensure the worker has settled so we can catch any remaining unhandled rejections. We don't
  // want to wait forever here.
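The bench hunks above replace blanket `?` conversions with explicit `.map_err(JsErrorBox::from_err).map_err(CoreError::JsBox)` chains so each function can return a narrower error type. A minimal sketch of that double-wrapping pattern, using hypothetical BoxedJsError and WorkerError types instead of the real JsErrorBox/CoreError/CreateCustomWorkerError:

use std::fmt;

#[derive(Debug)]
struct BoxedJsError(String);

impl BoxedJsError {
    fn from_err<E: fmt::Display>(err: E) -> Self {
        BoxedJsError(err.to_string())
    }
}

#[derive(Debug)]
enum WorkerError {
    JsBox(BoxedJsError),
}

fn send_event(fail: bool) -> Result<(), String> {
    if fail { Err("channel closed".to_string()) } else { Ok(()) }
}

fn dispatch(fail: bool) -> Result<(), WorkerError> {
    // First map the concrete error into the boxed form, then lift the boxed
    // form into the worker-level enum, mirroring the chained map_err calls
    // in the hunks above.
    send_event(fail)
        .map_err(BoxedJsError::from_err)
        .map_err(WorkerError::JsBox)?;
    Ok(())
}

fn main() {
    assert!(dispatch(false).is_ok());
    println!("{:?}", dispatch(true));
}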
@@ -1,10 +1,10 @@
// Copyright 2018-2025 the Deno authors. MIT license.

use deno_lib::version::DENO_VERSION_INFO;
use serde::Serialize;

use super::*;
use crate::tools::test::TestFailureFormatOptions;
use crate::version;

pub trait BenchReporter {
  fn report_group_summary(&mut self);

@@ -31,11 +31,7 @@ impl Default for JsonReporterOutput {
  fn default() -> Self {
    Self {
      version: JSON_SCHEMA_VERSION,
      runtime: format!(
        "{} {}",
        version::DENO_VERSION_INFO.user_agent,
        env!("TARGET")
      ),
      runtime: format!("{} {}", DENO_VERSION_INFO.user_agent, env!("TARGET")),
      cpu: mitata::cpu::name(),
      benches: vec![],
    }

@@ -163,7 +159,7 @@ impl BenchReporter for ConsoleReporter {
      "{}\n",
      colors::gray(format!(
        "Runtime | Deno {} ({})",
        crate::version::DENO_VERSION_INFO.deno,
        DENO_VERSION_INFO.deno,
        env!("TARGET")
      ))
    );
@@ -13,6 +13,8 @@ use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleLoadError;
use deno_lib::util::hash::FastInsecureHasher;
use deno_semver::npm::NpmPackageNvReference;
use deno_terminal::colors;
use once_cell::sync::Lazy;
use regex::Regex;

@@ -27,7 +29,6 @@ use crate::args::TsTypeLib;
use crate::args::TypeCheckMode;
use crate::cache::CacheDBHash;
use crate::cache::Caches;
use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache;
use crate::factory::CliFactory;
use crate::graph_util::maybe_additional_sloppy_imports_message;

@@ -261,6 +262,8 @@ impl TypeChecker {
      maybe_check_hash,
    } = get_tsc_roots(
      &self.sys,
      &self.npm_resolver,
      &self.node_resolver,
      &graph,
      check_js,
      check_state_hash(&self.npm_resolver),

@@ -373,8 +376,11 @@ struct TscRoots {
/// redirects resolved. We need to include all the emittable files in
/// the roots, so they get type checked and optionally emitted,
/// otherwise they would be ignored if only imported into JavaScript.
#[allow(clippy::too_many_arguments)]
fn get_tsc_roots(
  sys: &CliSys,
  npm_resolver: &CliNpmResolver,
  node_resolver: &CliNodeResolver,
  graph: &ModuleGraph,
  check_js: bool,
  npm_cache_state_hash: Option<u64>,

@@ -457,6 +463,7 @@ fn get_tsc_roots(
      if let Some(hasher) = hasher {
        hasher.write_str(module.specifier.as_str());
      }

      None
    }
  }

@@ -493,17 +500,33 @@ fn get_tsc_roots(
  let mut pending = VecDeque::new();

  // put in the global types first so that they're resolved before anything else
  let get_import_specifiers = || {
    graph
      .imports
  for (referrer, import) in graph.imports.iter() {
    for specifier in import
      .dependencies
      .values()
      .flat_map(|i| i.dependencies.values())
      .filter_map(|dep| dep.get_type().or_else(|| dep.get_code()))
  };
  for specifier in get_import_specifiers() {
    let specifier = graph.resolve(specifier);
    if seen.insert(specifier) {
      pending.push_back((specifier, false));
    {
      let specifier = graph.resolve(specifier);
      if seen.insert(specifier) {
        if let Ok(nv_ref) = NpmPackageNvReference::from_specifier(specifier) {
          let Some(resolved) =
            resolve_npm_nv_ref(npm_resolver, node_resolver, &nv_ref, referrer)
          else {
            result.missing_diagnostics.push(
              tsc::Diagnostic::from_missing_error(
                specifier,
                None,
                maybe_additional_sloppy_imports_message(sys, specifier),
              ),
            );
            continue;
          };
          let mt = MediaType::from_specifier(&resolved);
          result.roots.push((resolved, mt));
        } else {
          pending.push_back((specifier, false));
        }
      }
    }
  }

@@ -624,6 +647,29 @@ fn get_tsc_roots(
  result
}

fn resolve_npm_nv_ref(
  npm_resolver: &CliNpmResolver,
  node_resolver: &CliNodeResolver,
  nv_ref: &NpmPackageNvReference,
  referrer: &ModuleSpecifier,
) -> Option<ModuleSpecifier> {
  let pkg_dir = npm_resolver
    .as_managed()
    .unwrap()
    .resolve_pkg_folder_from_deno_module(nv_ref.nv())
    .ok()?;
  let resolved = node_resolver
    .resolve_package_subpath_from_deno_module(
      &pkg_dir,
      nv_ref.sub_path(),
      Some(referrer),
      node_resolver::ResolutionMode::Import,
      node_resolver::NodeResolutionKind::Types,
    )
    .ok()?;
  Some(resolved)
}

/// Matches the `@ts-check` pragma.
static TS_CHECK_RE: Lazy<Regex> =
  lazy_regex::lazy_regex!(r#"(?i)^\s*@ts-check(?:\s+|$)"#);
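get_tsc_roots above dedupes each resolved import with a `seen` set and either records an npm reference as a root right away or queues the specifier for the later graph walk. A minimal sketch of that traversal shape, with plain strings and a starts_with("npm:") check standing in for the real graph and resolver APIs:

use std::collections::{HashSet, VecDeque};

fn collect_roots(imports: &[(&str, &str)]) -> (Vec<String>, VecDeque<(String, bool)>) {
    let mut seen: HashSet<String> = HashSet::new();
    let mut roots = Vec::new();
    let mut pending = VecDeque::new();

    for (_referrer, specifier) in imports {
        let specifier = specifier.to_string(); // stand-in for graph.resolve(...)
        if seen.insert(specifier.clone()) {
            if specifier.starts_with("npm:") {
                // npm-style references become roots directly in this sketch.
                roots.push(specifier);
            } else {
                // Everything else is walked later, together with its dependencies.
                pending.push_back((specifier, false));
            }
        }
    }
    (roots, pending)
}

fn main() {
    let imports = [("a.ts", "npm:@types/node"), ("a.ts", "./b.ts"), ("c.ts", "./b.ts")];
    let (roots, pending) = collect_roots(&imports);
    println!("roots: {roots:?}, pending: {pending:?}");
}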
@@ -18,10 +18,12 @@ use deno_config::glob::PathOrPatternSet;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::serde_json;
use deno_core::sourcemap::SourceMap;
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_error::JsErrorBox;
use deno_resolver::npm::DenoInNpmPackageChecker;
use node_resolver::InNpmPackageChecker;
use regex::Regex;

@@ -53,7 +55,7 @@ pub struct CoverageCollector {

#[async_trait::async_trait(?Send)]
impl crate::worker::CoverageCollector for CoverageCollector {
  async fn start_collecting(&mut self) -> Result<(), AnyError> {
  async fn start_collecting(&mut self) -> Result<(), CoreError> {
    self.enable_debugger().await?;
    self.enable_profiler().await?;
    self

@@ -67,7 +69,7 @@ impl crate::worker::CoverageCollector for CoverageCollector {
    Ok(())
  }

  async fn stop_collecting(&mut self) -> Result<(), AnyError> {
  async fn stop_collecting(&mut self) -> Result<(), CoreError> {
    fs::create_dir_all(&self.dir)?;

    let script_coverages = self.take_precise_coverage().await?.result;

@@ -88,7 +90,8 @@ impl crate::worker::CoverageCollector for CoverageCollector {
      let filepath = self.dir.join(filename);

      let mut out = BufWriter::new(File::create(&filepath)?);
      let coverage = serde_json::to_string(&script_coverage)?;
      let coverage = serde_json::to_string(&script_coverage)
        .map_err(JsErrorBox::from_err)?;
      let formatted_coverage =
        format_json(&filepath, &coverage, &Default::default())
          .ok()

@@ -111,7 +114,7 @@ impl CoverageCollector {
    Self { dir, session }
  }

  async fn enable_debugger(&mut self) -> Result<(), AnyError> {
  async fn enable_debugger(&mut self) -> Result<(), CoreError> {
    self
      .session
      .post_message::<()>("Debugger.enable", None)

@@ -119,7 +122,7 @@
    Ok(())
  }

  async fn enable_profiler(&mut self) -> Result<(), AnyError> {
  async fn enable_profiler(&mut self) -> Result<(), CoreError> {
    self
      .session
      .post_message::<()>("Profiler.enable", None)

@@ -127,7 +130,7 @@
    Ok(())
  }

  async fn disable_debugger(&mut self) -> Result<(), AnyError> {
  async fn disable_debugger(&mut self) -> Result<(), CoreError> {
    self
      .session
      .post_message::<()>("Debugger.disable", None)

@@ -135,7 +138,7 @@
    Ok(())
  }

  async fn disable_profiler(&mut self) -> Result<(), AnyError> {
  async fn disable_profiler(&mut self) -> Result<(), CoreError> {
    self
      .session
      .post_message::<()>("Profiler.disable", None)

@@ -146,26 +149,28 @@ impl CoverageCollector {
  async fn start_precise_coverage(
    &mut self,
    parameters: cdp::StartPreciseCoverageArgs,
  ) -> Result<cdp::StartPreciseCoverageResponse, AnyError> {
  ) -> Result<cdp::StartPreciseCoverageResponse, CoreError> {
    let return_value = self
      .session
      .post_message("Profiler.startPreciseCoverage", Some(parameters))
      .await?;

    let return_object = serde_json::from_value(return_value)?;
    let return_object =
      serde_json::from_value(return_value).map_err(JsErrorBox::from_err)?;

    Ok(return_object)
  }

  async fn take_precise_coverage(
    &mut self,
  ) -> Result<cdp::TakePreciseCoverageResponse, AnyError> {
  ) -> Result<cdp::TakePreciseCoverageResponse, CoreError> {
    let return_value = self
      .session
      .post_message::<()>("Profiler.takePreciseCoverage", None)
      .await?;

    let return_object = serde_json::from_value(return_value)?;
    let return_object =
      serde_json::from_value(return_value).map_err(JsErrorBox::from_err)?;

    Ok(return_object)
  }
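The collector above drives the V8 inspector over the Chrome DevTools Protocol: enable the Debugger and Profiler domains, start precise coverage, then take and persist the results. A minimal sketch of that sequence, where Session and its post_message method are hypothetical stand-ins for the real LocalInspectorSession:

use std::collections::HashMap;

struct Session;

impl Session {
    fn post_message(&mut self, method: &str, _params: Option<&str>) -> Result<String, String> {
        // A real session would send the message to V8 and await a JSON reply.
        Ok(format!("{{\"method\":\"{method}\"}}"))
    }
}

fn collect_coverage(session: &mut Session) -> Result<HashMap<String, String>, String> {
    session.post_message("Debugger.enable", None)?;
    session.post_message("Profiler.enable", None)?;
    session.post_message(
        "Profiler.startPreciseCoverage",
        Some("{\"callCount\":true,\"detailed\":true}"),
    )?;

    // ... run the program under test here ...

    let raw = session.post_message("Profiler.takePreciseCoverage", None)?;
    // One JSON document per script would then be written to the coverage dir.
    let mut out = HashMap::new();
    out.insert("main.ts".to_string(), raw);
    Ok(out)
}

fn main() {
    let mut session = Session;
    let coverage = collect_coverage(&mut session).unwrap();
    println!("collected {} coverage entries", coverage.len());
}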
@@ -11,6 +11,7 @@ use std::path::PathBuf;

use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_lib::version::DENO_VERSION_INFO;

use super::util;
use super::CoverageReport;

@@ -559,7 +560,7 @@ impl HtmlCoverageReporter {

  /// Creates footer part of the contents for html report.
  pub fn create_html_footer(&self, now: &str) -> String {
    let version = env!("CARGO_PKG_VERSION");
    let version = DENO_VERSION_INFO.deno;
    format!(
      "
      <div class='footer quiet pad2 space-top1 center small'>
@@ -20,6 +20,7 @@ use deno_graph::EsParser;
use deno_graph::GraphKind;
use deno_graph::ModuleAnalyzer;
use deno_graph::ModuleSpecifier;
use deno_lib::version::DENO_VERSION_INFO;
use doc::html::ShortPath;
use doc::DocDiagnostic;
use indexmap::IndexMap;

@@ -280,7 +281,7 @@ impl deno_doc::html::HrefResolver for DocResolver {
    if self.deno_ns.contains_key(symbol) {
      Some(format!(
        "https://deno.land/api@v{}?s={}",
        env!("CARGO_PKG_VERSION"),
        DENO_VERSION_INFO.deno,
        symbol.join(".")
      ))
    } else {
@@ -18,6 +18,7 @@ use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::Resolution;
use deno_lib::util::checksum;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;

@@ -33,7 +34,6 @@ use crate::display;
use crate::factory::CliFactory;
use crate::graph_util::graph_exit_integrity_errors;
use crate::npm::CliManagedNpmResolver;
use crate::util::checksum;
use crate::util::display::DisplayTreeNode;

const JSON_SCHEMA_VERSION: u8 = 1;

@@ -191,7 +191,7 @@ fn print_cache_info(
  let registry_cache = dir.registries_folder_path();
  let mut origin_dir = dir.origin_data_folder_path();
  let deno_dir = dir.root_path_for_display().to_string();
  let web_cache_dir = crate::worker::get_cache_storage_dir();
  let web_cache_dir = deno_lib::worker::get_cache_storage_dir();

  if let Some(location) = &location {
    origin_dir =
@@ -18,6 +18,7 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_lib::args::CaData;
use deno_semver::npm::NpmPackageReqReference;
use log::Level;
use once_cell::sync::Lazy;

@@ -26,7 +27,6 @@ use regex::RegexBuilder;

use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;

@@ -657,6 +657,7 @@ fn is_in_path(dir: &Path) -> bool {
mod tests {
  use std::process::Command;

  use deno_lib::args::UnstableConfig;
  use test_util::testdata_path;
  use test_util::TempDir;

@@ -664,7 +665,6 @@ mod tests {
  use crate::args::ConfigFlag;
  use crate::args::PermissionFlags;
  use crate::args::UninstallFlagsGlobal;
  use crate::args::UnstableConfig;
  use crate::util::fs::canonicalize_path;

  #[tokio::test]
@@ -18,6 +18,7 @@ use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::CancelFuture;
use deno_core::CancelHandle;
use deno_lib::version::DENO_VERSION_INFO;
use jupyter_runtime::messaging;
use jupyter_runtime::ConnectionInfo;
use jupyter_runtime::ExecutionCount;

@@ -679,10 +680,10 @@ fn kernel_info() -> messaging::KernelInfoReply {
    status: ReplyStatus::Ok,
    protocol_version: "5.3".to_string(),
    implementation: "Deno kernel".to_string(),
    implementation_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
    implementation_version: DENO_VERSION_INFO.deno.to_string(),
    language_info: messaging::LanguageInfo {
      name: "typescript".to_string(),
      version: crate::version::DENO_VERSION_INFO.typescript.to_string(),
      version: DENO_VERSION_INFO.typescript.to_string(),
      mimetype: "text/x.typescript".to_string(),
      file_extension: ".ts".to_string(),
      pygments_lexer: "typescript".to_string(),
@@ -683,10 +683,21 @@ impl DepManager {
      .and_then(|info| {
        let latest_tag = info.dist_tags.get("latest")?;
        let lower_bound = &semver_compatible.as_ref()?.version;
        if latest_tag > lower_bound {
        if latest_tag >= lower_bound {
          Some(latest_tag.clone())
        } else {
          latest_version(Some(latest_tag), info.versions.keys())
          latest_version(
            Some(latest_tag),
            info.versions.iter().filter_map(
              |(version, version_info)| {
                if version_info.deprecated.is_none() {
                  Some(version)
                } else {
                  None
                }
              },
            ),
          )
        }
      })
      .map(|version| PackageNv {
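The hunk above relaxes the comparison to `>=` and skips deprecated versions when falling back from the `latest` dist-tag. A minimal sketch of that selection logic under simplified assumptions (tuple versions and a plain map instead of the real npm registry types, and a plain max() as a simplified analogue of latest_version):

use std::collections::BTreeMap;

type Version = (u64, u64, u64);

struct VersionInfo {
    deprecated: Option<String>,
}

fn pick_version(
    latest_tag: Version,
    lower_bound: Version,
    versions: &BTreeMap<Version, VersionInfo>,
) -> Option<Version> {
    if latest_tag >= lower_bound {
        // The latest dist-tag satisfies the lower bound, so it wins outright.
        Some(latest_tag)
    } else {
        // Otherwise pick the newest published version that is not deprecated.
        versions
            .iter()
            .filter(|(_, info)| info.deprecated.is_none())
            .map(|(v, _)| *v)
            .max()
    }
}

fn main() {
    let mut versions = BTreeMap::new();
    versions.insert((1, 2, 3), VersionInfo { deprecated: None });
    versions.insert((1, 3, 0), VersionInfo { deprecated: Some("use 1.2.3".into()) });
    // The latest tag lags behind the lower bound, so fall back to the newest
    // non-deprecated version.
    assert_eq!(pick_version((1, 2, 0), (1, 2, 5), &versions), Some((1, 2, 3)));
}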
@@ -280,9 +280,15 @@ fn choose_new_version_req(
  if preferred.version <= resolved?.version {
    return None;
  }
  let exact = if let Some(range) = dep.req.version_req.range() {
    range.0[0].start == range.0[0].end
  } else {
    false
  };
  Some(
    VersionReq::parse_from_specifier(
      format!("^{}", preferred.version).as_str(),
      format!("{}{}", if exact { "" } else { "^" }, preferred.version)
        .as_str(),
    )
    .unwrap(),
  )
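The hunk above keeps an exact requirement exact (the range start equals its end) and otherwise writes a caret requirement for the preferred version. A minimal sketch of that decision, with a simplified range representation standing in for the real VersionReq:

fn new_version_req(preferred: &str, existing_range: Option<(&str, &str)>) -> String {
    // An existing requirement counts as exact when its range pins a single version.
    let exact = match existing_range {
        Some((start, end)) => start == end,
        None => false,
    };
    format!("{}{}", if exact { "" } else { "^" }, preferred)
}

fn main() {
    // A previously pinned dependency stays pinned after the update...
    assert_eq!(new_version_req("1.4.2", Some(("1.4.0", "1.4.0"))), "1.4.2");
    // ...while a ranged dependency keeps a caret requirement.
    assert_eq!(new_version_req("1.4.2", Some(("1.0.0", "2.0.0"))), "^1.4.2");
}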
@@ -8,6 +8,7 @@ use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::unsync::spawn_blocking;
use deno_lib::version::DENO_VERSION_INFO;
use deno_runtime::WorkerExecutionMode;
use rustyline::error::ReadlineError;

@@ -244,7 +245,7 @@ pub async fn run(
  if !cli_options.is_quiet() {
    let mut handle = io::stdout().lock();

    writeln!(handle, "Deno {}", crate::version::DENO_VERSION_INFO.deno)?;
    writeln!(handle, "Deno {}", DENO_VERSION_INFO.deno)?;
    writeln!(handle, "exit using ctrl+d, ctrl+c, or close()")?;

    if repl_flags.is_default_command {
@@ -32,6 +32,7 @@ use deno_error::JsErrorBox;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeResolutionKind;

@@ -402,18 +403,16 @@ impl ReplSession {
      }
      Err(err) => {
        // handle a parsing diagnostic
        match crate::util::result::any_and_jserrorbox_downcast_ref::<
          deno_ast::ParseDiagnostic,
        >(&err)
        {
        match any_and_jserrorbox_downcast_ref::<deno_ast::ParseDiagnostic>(
          &err,
        ) {
          Some(diagnostic) => {
            Ok(EvaluationOutput::Error(format_diagnostic(diagnostic)))
          }
          None => {
            match crate::util::result::any_and_jserrorbox_downcast_ref::<
              ParseDiagnosticsError,
            >(&err)
            {
            match any_and_jserrorbox_downcast_ref::<ParseDiagnosticsError>(
              &err,
            ) {
              Some(diagnostics) => Ok(EvaluationOutput::Error(
                diagnostics
                  .0
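The REPL hunk above probes a boxed error for a concrete diagnostic type and formats it specially when the downcast succeeds. A minimal sketch of that downcast-and-format flow; the helper and ParseDiagnostic type here are hypothetical stand-ins built on std's Error::downcast_ref rather than the real any_and_jserrorbox_downcast_ref:

use std::error::Error;
use std::fmt;

#[derive(Debug)]
struct ParseDiagnostic {
    message: String,
}

impl fmt::Display for ParseDiagnostic {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "parse error: {}", self.message)
    }
}

impl Error for ParseDiagnostic {}

fn downcast_ref<T: Error + 'static>(err: &(dyn Error + 'static)) -> Option<&T> {
    err.downcast_ref::<T>()
}

fn describe(err: &(dyn Error + 'static)) -> String {
    match downcast_ref::<ParseDiagnostic>(err) {
        // A parse diagnostic gets its own formatting...
        Some(diagnostic) => format!("formatted diagnostic: {diagnostic}"),
        // ...anything else falls through to the generic error path.
        None => format!("uncaught error: {err}"),
    }
}

fn main() {
    let err = ParseDiagnostic { message: "unexpected token".to_string() };
    println!("{}", describe(&err));
}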
@@ -43,7 +43,8 @@ pub async fn serve(

  maybe_npm_install(&factory).await?;

  let worker_factory = factory.create_cli_main_worker_factory().await?;
  let worker_factory =
    Arc::new(factory.create_cli_main_worker_factory().await?);
  let hmr = serve_flags
    .watch
    .map(|watch_flags| watch_flags.hmr)

@@ -58,7 +59,7 @@ pub async fn serve(
}

async fn do_serve(
  worker_factory: CliMainWorkerFactory,
  worker_factory: Arc<CliMainWorkerFactory>,
  main_module: ModuleSpecifier,
  worker_count: Option<usize>,
  hmr: bool,

@@ -116,7 +117,7 @@ async fn do_serve(

async fn run_worker(
  worker_count: usize,
  worker_factory: CliMainWorkerFactory,
  worker_factory: Arc<CliMainWorkerFactory>,
  main_module: ModuleSpecifier,
  hmr: bool,
) -> Result<i32, AnyError> {

@@ -164,7 +165,8 @@ async fn serve_with_watch(
  maybe_npm_install(&factory).await?;

  let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
  let worker_factory = factory.create_cli_main_worker_factory().await?;
  let worker_factory =
    Arc::new(factory.create_cli_main_worker_factory().await?);

  do_serve(worker_factory, main_module.clone(), worker_count, hmr)
    .await?;
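The serve hunks above wrap the worker factory in an Arc so every spawned worker can hold the same instance. A minimal sketch of that sharing pattern, with a hypothetical Factory type and plain threads standing in for CliMainWorkerFactory and the real async tasks:

use std::sync::Arc;
use std::thread;

struct Factory {
    name: String,
}

impl Factory {
    fn create_worker(&self, id: usize) -> String {
        format!("{} worker #{id}", self.name)
    }
}

fn main() {
    let factory = Arc::new(Factory { name: "serve".to_string() });
    let handles: Vec<_> = (0..4)
        .map(|id| {
            // Cloning the Arc is cheap; each task shares the same factory.
            let factory = Arc::clone(&factory);
            thread::spawn(move || factory.create_worker(id))
        })
        .collect();
    for handle in handles {
        println!("{}", handle.join().unwrap());
    }
}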
@@ -87,6 +87,7 @@ use crate::util::path::is_script_ext;
use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CoverageCollector;
use crate::worker::CreateCustomWorkerError;

mod channel;
pub mod fmt;

@@ -614,7 +615,10 @@ async fn configure_main_worker(
  permissions_container: PermissionsContainer,
  worker_sender: TestEventWorkerSender,
  options: &TestSpecifierOptions,
) -> Result<(Option<Box<dyn CoverageCollector>>, MainWorker), CoreError> {
) -> Result<
  (Option<Box<dyn CoverageCollector>>, MainWorker),
  CreateCustomWorkerError,
> {
  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Test,

@@ -647,7 +651,7 @@ async fn configure_main_worker(
        &worker.js_runtime.op_state(),
        TestEvent::UncaughtError(specifier.to_string(), Box::new(err)),
      )
      .map_err(JsErrorBox::from_err)?;
      .map_err(|e| CoreError::JsBox(JsErrorBox::from_err(e)))?;
      Ok(())
    }
    Err(err) => Err(err),

@@ -687,7 +691,7 @@ pub async fn test_specifier(
    .await
  {
    Ok(()) => Ok(()),
    Err(CoreError::Js(err)) => {
    Err(TestSpecifierError::Core(CoreError::Js(err))) => {
      send_test_event(
        &worker.js_runtime.op_state(),
        TestEvent::UncaughtError(specifier.to_string(), Box::new(err)),

@@ -698,6 +702,16 @@
  }
}

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum TestSpecifierError {
  #[class(inherit)]
  #[error(transparent)]
  Core(#[from] CoreError),
  #[class(inherit)]
  #[error(transparent)]
  RunTestsForWorker(#[from] RunTestsForWorkerErr),
}

/// Test a single specifier as documentation containing test programs, an executable test module or
/// both.
#[allow(clippy::too_many_arguments)]

@@ -707,19 +721,21 @@ async fn test_specifier_inner(
  specifier: ModuleSpecifier,
  fail_fast_tracker: FailFastTracker,
  options: TestSpecifierOptions,
) -> Result<(), CoreError> {
) -> Result<(), TestSpecifierError> {
  // Ensure that there are no pending exceptions before we start running tests
  worker.run_up_to_duration(Duration::from_millis(0)).await?;

  worker.dispatch_load_event()?;
  worker.dispatch_load_event().map_err(CoreError::Js)?;

  run_tests_for_worker(worker, &specifier, &options, &fail_fast_tracker)
    .await?;

  // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
  // event loop to continue beyond what's needed to await results.
  worker.dispatch_beforeunload_event()?;
  worker.dispatch_unload_event()?;
  worker
    .dispatch_beforeunload_event()
    .map_err(CoreError::Js)?;
  worker.dispatch_unload_event().map_err(CoreError::Js)?;

  // Ensure all output has been flushed
  _ = worker

@@ -780,12 +796,25 @@ pub fn send_test_event(
    .send(event)
}

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum RunTestsForWorkerErr {
  #[class(inherit)]
  #[error(transparent)]
  ChannelClosed(#[from] ChannelClosedError),
  #[class(inherit)]
  #[error(transparent)]
  Core(#[from] CoreError),
  #[class(inherit)]
  #[error(transparent)]
  SerdeV8(#[from] serde_v8::Error),
}

pub async fn run_tests_for_worker(
  worker: &mut MainWorker,
  specifier: &ModuleSpecifier,
  options: &TestSpecifierOptions,
  fail_fast_tracker: &FailFastTracker,
) -> Result<(), AnyError> {
) -> Result<(), RunTestsForWorkerErr> {
  let state_rc = worker.js_runtime.op_state();
  // Take whatever tests have been registered
  let TestContainer(tests, test_functions) =

@@ -814,7 +843,7 @@ async fn run_tests_for_worker_inner(
  test_functions: Vec<v8::Global<v8::Function>>,
  options: &TestSpecifierOptions,
  fail_fast_tracker: &FailFastTracker,
) -> Result<(), AnyError> {
) -> Result<(), RunTestsForWorkerErr> {
  let unfiltered = tests.len();
  let state_rc = worker.js_runtime.op_state();

@@ -1109,7 +1138,7 @@ async fn wait_for_activity_to_stabilize(
  before: RuntimeActivityStats,
  sanitize_ops: bool,
  sanitize_resources: bool,
) -> Result<Option<RuntimeActivityDiff>, AnyError> {
) -> Result<Option<RuntimeActivityDiff>, CoreError> {
  // First, check to see if there's any diff at all. If not, just continue.
  let after = stats.clone().capture(filter);
  let mut diff = RuntimeActivityStats::diff(&before, &after);
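The test hunks above introduce dedicated error enums instead of returning AnyError or CoreError everywhere. A minimal sketch of the thiserror pattern they rely on (transparent variants plus #[from] conversions); the deno_error::JsError derive and #[class(inherit)] attributes are omitted, and the wrapped error types are simplified stand-ins:

use thiserror::Error;

#[derive(Debug, Error)]
#[error("channel closed")]
pub struct ChannelClosedError;

#[derive(Debug, Error)]
#[error("core failure: {0}")]
pub struct CoreError(String);

#[derive(Debug, Error)]
pub enum RunTestsForWorkerErr {
    #[error(transparent)]
    ChannelClosed(#[from] ChannelClosedError),
    #[error(transparent)]
    Core(#[from] CoreError),
}

fn send_event() -> Result<(), ChannelClosedError> {
    Err(ChannelClosedError)
}

fn run_tests() -> Result<(), RunTestsForWorkerErr> {
    // `?` uses the generated From impls, so call sites stay unchanged while
    // the signature narrows from AnyError to a concrete enum.
    send_event()?;
    Ok(())
}

fn main() {
    match run_tests() {
        Ok(()) => println!("ok"),
        Err(err) => println!("error: {err}"),
    }
}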
@@ -19,6 +19,8 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::unsync::spawn;
use deno_core::url::Url;
use deno_lib::shared::ReleaseChannel;
use deno_lib::version;
use deno_semver::SmallStackString;
use deno_semver::Version;
use once_cell::sync::Lazy;

@@ -30,11 +32,9 @@ use crate::colors;
use crate::factory::CliFactory;
use crate::http_util::HttpClient;
use crate::http_util::HttpClientProvider;
use crate::shared::ReleaseChannel;
use crate::util::archive;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::version;

const RELEASE_URL: &str = "https://github.com/denoland/deno/releases";
const CANARY_URL: &str = "https://dl.deno.land/canary";
@@ -500,6 +500,8 @@ delete Object.prototype.__proto__;
    // Microsoft/TypeScript#26825 but that doesn't seem to be working here,
    // so we will ignore complaints about this compiler setting.
    5070,
    // TS6053: File '{0}' not found.
    6053,
    // TS7016: Could not find a declaration file for module '...'. '...'
    // implicitly has an 'any' type. This is due to `allowJs` being off by
    // default but importing of a JavaScript module.

@@ -705,15 +707,14 @@ delete Object.prototype.__proto__;
    resolveTypeReferenceDirectiveReferences(
      typeDirectiveReferences,
      containingFilePath,
      redirectedReference,
      _redirectedReference,
      options,
      containingSourceFile,
      _reusedNames,
    ) {
      const isCjs =
        containingSourceFile?.impliedNodeFormat === ts.ModuleKind.CommonJS;
      /** @type {Array<ts.ResolvedTypeReferenceDirectiveWithFailedLookupLocations>} */
      const result = typeDirectiveReferences.map((arg) => {
      const toResolve = typeDirectiveReferences.map((arg) => {
        /** @type {ts.FileReference} */
        const fileReference = typeof arg === "string"
          ? {

@@ -722,46 +723,50 @@ delete Object.prototype.__proto__;
            fileName: arg,
          }
          : arg;
        if (fileReference.fileName.startsWith("npm:")) {
          /** @type {[string, ts.Extension | null] | undefined} */
          const resolved = ops.op_resolve(
            containingFilePath,
            [
              [
                fileReference.resolutionMode == null
                  ? isCjs
                  : fileReference.resolutionMode === ts.ModuleKind.CommonJS,
                fileReference.fileName,
              ],
            ],
          )?.[0];
          if (resolved && resolved[1]) {
            return {
              resolvedTypeReferenceDirective: {
                primary: true,
                resolvedFileName: resolved[0],
                // todo(dsherret): we should probably be setting this
                isExternalLibraryImport: undefined,
              },
            };
          } else {
            return {
              resolvedTypeReferenceDirective: undefined,
            };
          }
        return [
          fileReference.resolutionMode == null
            ? isCjs
            : fileReference.resolutionMode === ts.ModuleKind.CommonJS,
          fileReference.fileName,
        ];
      });

      /** @type {Array<[string, ts.Extension | null] | undefined>} */
      const resolved = ops.op_resolve(
        containingFilePath,
        toResolve,
      );

      /** @type {Array<ts.ResolvedTypeReferenceDirectiveWithFailedLookupLocations>} */
      const result = resolved.map((item) => {
        if (item && item[1]) {
          const [resolvedFileName, extension] = item;
          return {
            resolvedTypeReferenceDirective: {
              primary: true,
              resolvedFileName,
              extension,
              isExternalLibraryImport: false,
            },
          };
        } else {
          return ts.resolveTypeReferenceDirective(
            fileReference.fileName,
            containingFilePath,
            options,
            host,
            redirectedReference,
            undefined,
            containingSourceFile?.impliedNodeFormat ??
              fileReference.resolutionMode,
          );
          return {
            resolvedTypeReferenceDirective: undefined,
          };
        }
      });

      if (logDebug) {
        debug(
          "resolveTypeReferenceDirectiveReferences ",
          typeDirectiveReferences,
          containingFilePath,
          options,
          containingSourceFile?.fileName,
          " => ",
          result,
        );
      }
      return result;
    },
    resolveModuleNameLiterals(
|
|||
if (IGNORED_DIAGNOSTICS.includes(diagnostic.code)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// ignore diagnostics resulting from the `ImportMeta` declaration in deno merging with
|
||||
// the one in @types/node. the types of the filename and dirname properties are different,
|
||||
// which causes tsc to error.
|
||||
const importMetaFilenameDirnameModifiersRe =
|
||||
/^All declarations of '(filename|dirname)'/;
|
||||
const importMetaFilenameDirnameTypesRe =
|
||||
/^Subsequent property declarations must have the same type.\s+Property '(filename|dirname)'/;
|
||||
// Declarations of X must have identical modifiers.
|
||||
if (diagnostic.code === 2687) {
|
||||
if (
|
||||
typeof diagnostic.messageText === "string" &&
|
||||
(importMetaFilenameDirnameModifiersRe.test(diagnostic.messageText)) &&
|
||||
(diagnostic.file?.fileName.startsWith("asset:///") ||
|
||||
diagnostic.file?.fileName?.includes("@types/node"))
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// Subsequent property declarations must have the same type.
|
||||
if (diagnostic.code === 2717) {
|
||||
if (
|
||||
typeof diagnostic.messageText === "string" &&
|
||||
(importMetaFilenameDirnameTypesRe.test(diagnostic.messageText)) &&
|
||||
(diagnostic.file?.fileName.startsWith("asset:///") ||
|
||||
diagnostic.file?.fileName?.includes("@types/node"))
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// make the diagnostic for using an `export =` in an es module a warning
|
||||
if (diagnostic.code === 1203) {
|
||||
diagnostic.category = ts.DiagnosticCategory.Warning;
|
||||
|
@ -1410,7 +1445,6 @@ delete Object.prototype.__proto__;
|
|||
"ErrorConstructor",
|
||||
"gc",
|
||||
"Global",
|
||||
"ImportMeta",
|
||||
"localStorage",
|
||||
"queueMicrotask",
|
||||
"RequestInit",
|
||||
|
@ -1447,9 +1481,15 @@ delete Object.prototype.__proto__;
|
|||
options: SNAPSHOT_COMPILE_OPTIONS,
|
||||
host,
|
||||
});
|
||||
const errors = ts.getPreEmitDiagnostics(TS_SNAPSHOT_PROGRAM);
|
||||
assert(
|
||||
ts.getPreEmitDiagnostics(TS_SNAPSHOT_PROGRAM).length === 0,
|
||||
"lib.d.ts files have errors",
|
||||
errors.length === 0,
|
||||
`lib.d.ts files have errors:\n${
|
||||
ts.formatDiagnosticsWithColorAndContext(
|
||||
errors,
|
||||
host,
|
||||
)
|
||||
}`,
|
||||
);
|
||||
|
||||
// remove this now that we don't need it anymore for warming up tsc
|
||||
|
|
Some files were not shown because too many files have changed in this diff.