Mirror of https://github.com/denoland/deno.git (synced 2025-02-01 12:16:11 -05:00)

commit 985aebf1ae — Merge branch 'main' into macos_test4

1249 changed files with 222114 additions and 200116 deletions
.dprint.json (63 changed lines)

@@ -8,6 +8,9 @@
   "json": {
     "deno": true
   },
+  "yaml": {
+    "quotes": "preferSingle"
+  },
   "exec": {
     "commands": [{
       "command": "rustfmt --config imports_granularity=item",
@@ -18,49 +21,55 @@
     ".cargo_home",
    ".git",
    "cli/bench/testdata/express-router.js",
-   "cli/bench/testdata/npm/",
    "cli/bench/testdata/lsp_benchdata/",
+   "cli/bench/testdata/npm/",
+   "cli/tsc/*typescript.js",
    "cli/tsc/dts/lib.d.ts",
-   "cli/tsc/dts/lib.scripthost.d.ts",
    "cli/tsc/dts/lib.decorators*.d.ts",
-   "cli/tsc/dts/lib.webworker*.d.ts",
    "cli/tsc/dts/lib.dom*.d.ts",
    "cli/tsc/dts/lib.es*.d.ts",
+   "cli/tsc/dts/lib.scripthost.d.ts",
+   "cli/tsc/dts/lib.webworker*.d.ts",
    "cli/tsc/dts/typescript.d.ts",
-   "tests/node_compat/test",
-   "tests/registry/",
-   "tests/testdata/file_extensions/ts_with_js_extension.js",
-   "tests/testdata/fmt/badly_formatted.json",
-   "tests/testdata/fmt/badly_formatted.md",
-   "tests/testdata/fmt/badly_formatted.ipynb",
-   "tests/testdata/byte_order_mark.ts",
-   "tests/testdata/encoding",
-   "tests/testdata/fmt/",
-   "tests/testdata/lint/glob/",
-   "tests/testdata/test/glob/",
-   "tests/testdata/import_attributes/json_with_shebang.json",
-   "tests/testdata/run/error_syntax_empty_trailing_line.mjs",
-   "tests/testdata/run/inline_js_source_map*",
-   "tests/testdata/malformed_config/",
-   "tests/testdata/test/markdown_windows.md",
-   "cli/tsc/*typescript.js",
+   "ext/websocket/autobahn/reports",
    "gh-pages",
    "target",
    "tests/ffi/tests/test.js",
-   "tests/util/std",
-   "tests/wpt/suite",
-   "third_party",
+   "tests/node_compat/runner/TODO.md",
    "tests/node_compat/runner/suite",
-   "tests/node_compat/runner/TODO.md",
+   "tests/node_compat/test",
+   "tests/registry/",
+   "tests/specs/fmt",
+   "tests/specs/lint/bom",
+   "tests/specs/lint/syntax_error_reporting",
+   "tests/specs/publish/no_check_surfaces_syntax_error",
+   "tests/testdata/byte_order_mark.ts",
+   "tests/testdata/encoding",
+   "tests/testdata/file_extensions/ts_with_js_extension.js",
+   "tests/testdata/fmt/",
+   "tests/testdata/fmt/badly_formatted.ipynb",
+   "tests/testdata/fmt/badly_formatted.json",
+   "tests/testdata/fmt/badly_formatted.md",
+   "tests/testdata/import_attributes/json_with_shebang.json",
+   "tests/testdata/lint/glob/",
+   "tests/testdata/malformed_config/",
+   "tests/testdata/run/byte_order_mark.ts",
+   "tests/testdata/run/error_syntax_empty_trailing_line.mjs",
+   "tests/testdata/run/inline_js_source_map*",
+   "tests/testdata/test/glob/",
+   "tests/testdata/test/markdown_windows.md",
+   "tests/util/std",
+   "tests/wpt/runner/expectation.json",
+   "tests/wpt/runner/manifest.json",
-   "ext/websocket/autobahn/reports"
+   "tests/wpt/suite",
+   "third_party"
  ],
  "plugins": [
-   "https://plugins.dprint.dev/typescript-0.91.1.wasm",
+   "https://plugins.dprint.dev/typescript-0.91.4.wasm",
    "https://plugins.dprint.dev/json-0.19.3.wasm",
    "https://plugins.dprint.dev/markdown-0.17.1.wasm",
    "https://plugins.dprint.dev/toml-0.6.2.wasm",
-   "https://plugins.dprint.dev/exec-0.4.4.json@c207bf9b9a4ee1f0ecb75c594f774924baf62e8e53a2ce9d873816a408cecbf7"
+   "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
+   "https://plugins.dprint.dev/g-plane/pretty_yaml-v0.3.0.wasm"
  ]
}
.github/workflows/ci.generate.ts (vendored, 13 changed lines)

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 97;
+const cacheVersion = 9;

const ubuntuX86Runner = "ubuntu-22.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl";
@@ -629,6 +629,7 @@ const ci = {
      path: [
        "./target",
        "!./target/*/gn_out",
+       "!./target/*/gn_root",
        "!./target/*/*.zip",
        "!./target/*/*.tar.gz",
      ].join("\n"),
@@ -835,7 +836,7 @@ const ci = {
        "!startsWith(github.ref, 'refs/tags/')",
      ].join("\n"),
      run:
-       "target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js",
+       "target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js",
    },
    {
      name: "Test (full, debug)",
@@ -888,9 +889,9 @@ const ci = {
        DENO_BIN: "./target/debug/deno",
      },
      run: [
-       "deno run -A --unstable --lock=tools/deno.lock.json \\",
+       "deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
        " ./tests/wpt/wpt.ts setup",
-       "deno run -A --unstable --lock=tools/deno.lock.json \\",
+       "deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
        ' ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"',
      ].join("\n"),
    },
@@ -901,9 +902,9 @@ const ci = {
        DENO_BIN: "./target/release/deno",
      },
      run: [
-       "deno run -A --unstable --lock=tools/deno.lock.json \\",
+       "deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
        " ./tests/wpt/wpt.ts setup",
-       "deno run -A --unstable --lock=tools/deno.lock.json \\",
+       "deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
        " ./tests/wpt/wpt.ts run --quiet --release \\",
        ' --binary="$DENO_BIN" \\',
        " --json=wpt.json \\",
.github/workflows/ci.yml (vendored, 19 changed lines)

@@ -367,8 +367,8 @@ jobs:
          path: |-
            ~/.cargo/registry/index
            ~/.cargo/registry/cache
-         key: '97-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-         restore-keys: '97-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+         key: '9-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+         restore-keys: '9-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
        if: '!(matrix.skip)'
      - name: Restore cache build output (PR)
        uses: actions/cache/restore@v4
@@ -377,10 +377,11 @@ jobs:
          path: |-
            ./target
            !./target/*/gn_out
+           !./target/*/gn_root
            !./target/*/*.zip
            !./target/*/*.tar.gz
          key: never_saved
-         restore-keys: '97-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+         restore-keys: '9-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
      - name: Apply and update mtime cache
        if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
        uses: ./.github/mtime_cache
@@ -493,7 +494,7 @@ jobs:
          matrix.job == 'test' &&
          matrix.profile == 'release' &&
          !startsWith(github.ref, 'refs/tags/'))
-       run: target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js
+       run: target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js
      - name: 'Test (full, debug)'
        if: |-
          !(matrix.skip) && (matrix.job == 'test' &&
@@ -530,18 +531,18 @@ jobs:
        env:
          DENO_BIN: ./target/debug/deno
        run: |-
-         deno run -A --unstable --lock=tools/deno.lock.json \
+         deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
            ./tests/wpt/wpt.ts setup
-         deno run -A --unstable --lock=tools/deno.lock.json \
+         deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
            ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"
      - name: Run web platform tests (release)
        if: '!(matrix.skip) && (matrix.wpt && matrix.profile == ''release'')'
        env:
          DENO_BIN: ./target/release/deno
        run: |-
-         deno run -A --unstable --lock=tools/deno.lock.json \
+         deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
            ./tests/wpt/wpt.ts setup
-         deno run -A --unstable --lock=tools/deno.lock.json \
+         deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
            ./tests/wpt/wpt.ts run --quiet --release \
            --binary="$DENO_BIN" \
            --json=wpt.json \
@@ -669,7 +670,7 @@ jobs:
            !./target/*/gn_out
            !./target/*/*.zip
            !./target/*/*.tar.gz
-         key: '97-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+         key: '9-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
  publish-canary:
    name: publish canary
    runs-on: ubuntu-22.04
.github/workflows/start_release.yml (vendored, 6 changed lines)

@@ -8,9 +8,9 @@ on:
        default: 'patch'
        type: choice
        options:
-         - patch
-         - minor
-         - major
+         - patch
+         - minor
+         - major
      required: true

jobs:
.github/workflows/version_bump.yml (vendored, 6 changed lines)

@@ -8,9 +8,9 @@ on:
        default: 'patch'
        type: choice
        options:
-         - patch
-         - minor
-         - major
+         - patch
+         - minor
+         - major
      required: true

jobs:
.github/workflows/wpt_epoch.yml (vendored, 6 changed lines)

@@ -37,7 +37,7 @@ jobs:
      - name: Install Python
        uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: '3.11'

      - name: Log versions
        run: |
@@ -66,9 +66,9 @@ jobs:
      - name: Run web platform tests
        shell: bash
        run: |
-         deno run --unstable -A --lock=tools/deno.lock.json \
+         deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
            ./tests/wpt/wpt.ts setup
-         deno run --unstable -A --lock=tools/deno.lock.json \
+         deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
            ./tests/wpt/wpt.ts run \
            --binary=$(which deno) --quiet --release --no-ignore --json=wpt.json --wptreport=wptreport.json --exit-zero
Cargo.lock (generated, 682 changed lines)

File diff suppressed because it is too large.
Cargo.toml (221 changed lines)

@@ -21,6 +21,7 @@ members = [
  "ext/napi",
  "ext/net",
  "ext/node",
+ "ext/node_resolver",
  "ext/url",
  "ext/web",
  "ext/webgpu",
@@ -43,16 +44,16 @@ license = "MIT"
repository = "https://github.com/denoland/deno"

[workspace.dependencies]
-deno_ast = { version = "=0.39.1", features = ["transpiling"] }
-deno_core = { version = "0.288.0" }
+deno_ast = { version = "=0.40.0", features = ["transpiling"] }
+deno_core = { version = "0.299.0" }

-deno_bench_util = { version = "0.150.0", path = "./bench_util" }
+deno_bench_util = { version = "0.157.0", path = "./bench_util" }
deno_lockfile = "0.20.0"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
-deno_permissions = { version = "0.16.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.164.0", path = "./runtime" }
-deno_terminal = "0.1.1"
-napi_sym = { version = "0.86.0", path = "./cli/napi/sym" }
+deno_permissions = { version = "0.23.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.172.0", path = "./runtime" }
+deno_terminal = "0.2.0"
+napi_sym = { version = "0.93.0", path = "./cli/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }

denokv_proto = "0.8.1"
@@ -61,28 +62,29 @@ denokv_remote = "0.8.1"
denokv_sqlite = { default-features = false, version = "0.8.1" }

# exts
-deno_broadcast_channel = { version = "0.150.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.88.0", path = "./ext/cache" }
-deno_canvas = { version = "0.25.0", path = "./ext/canvas" }
-deno_console = { version = "0.156.0", path = "./ext/console" }
-deno_cron = { version = "0.36.0", path = "./ext/cron" }
-deno_crypto = { version = "0.170.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.180.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.143.0", path = "./ext/ffi" }
-deno_fs = { version = "0.66.0", path = "./ext/fs" }
-deno_http = { version = "0.154.0", path = "./ext/http" }
-deno_io = { version = "0.66.0", path = "./ext/io" }
-deno_kv = { version = "0.64.0", path = "./ext/kv" }
-deno_napi = { version = "0.86.0", path = "./ext/napi" }
-deno_net = { version = "0.148.0", path = "./ext/net" }
-deno_node = { version = "0.93.0", path = "./ext/node" }
-deno_tls = { version = "0.143.0", path = "./ext/tls" }
-deno_url = { version = "0.156.0", path = "./ext/url" }
-deno_web = { version = "0.187.0", path = "./ext/web" }
-deno_webgpu = { version = "0.123.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.156.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.161.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.151.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.157.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.95.0", path = "./ext/cache" }
+deno_canvas = { version = "0.32.0", path = "./ext/canvas" }
+deno_console = { version = "0.163.0", path = "./ext/console" }
+deno_cron = { version = "0.43.0", path = "./ext/cron" }
+deno_crypto = { version = "0.177.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.187.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.150.0", path = "./ext/ffi" }
+deno_fs = { version = "0.73.0", path = "./ext/fs" }
+deno_http = { version = "0.161.0", path = "./ext/http" }
+deno_io = { version = "0.73.0", path = "./ext/io" }
+deno_kv = { version = "0.71.0", path = "./ext/kv" }
+deno_napi = { version = "0.94.0", path = "./ext/napi" }
+deno_net = { version = "0.155.0", path = "./ext/net" }
+deno_node = { version = "0.100.0", path = "./ext/node" }
+deno_tls = { version = "0.150.0", path = "./ext/tls" }
+deno_url = { version = "0.163.0", path = "./ext/url" }
+deno_web = { version = "0.194.0", path = "./ext/web" }
+deno_webgpu = { version = "0.130.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.163.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.168.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.158.0", path = "./ext/webstorage" }
+node_resolver = { version = "0.2.0", path = "./ext/node_resolver" }

aes = "=0.8.3"
anyhow = "1.0.57"
@@ -90,7 +92,7 @@ async-trait = "0.1.73"
base32 = "=0.4.0"
base64 = "0.21.4"
bencher = "0.1"
-brotli = "3.3.4"
+brotli = "6.0.0"
bytes = "1.4.0"
cache_control = "=0.2.0"
cbc = { version = "=0.1.2", features = ["alloc"] }
@@ -100,7 +102,8 @@ chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
console_static_text = "=0.8.1"
data-encoding = "2.3.3"
data-url = "=0.3.0"
-deno_cache_dir = "=0.10.0"
+deno_cache_dir = "=0.10.2"
+deno_package_json = { version = "=0.1.1", default-features = false }
dlopen2 = "0.6.1"
ecb = "=0.1.2"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
@@ -114,13 +117,16 @@ futures = "0.3.21"
glob = "0.3.1"
h2 = "0.4.4"
http = "1.0"
-http-body-util = "0.1"
+http-body = "1.0"
+http-body-util = "0.1.2"
http_v02 = { package = "http", version = "0.2.9" }
httparse = "1.8.0"
-hyper = { version = "=1.3.1", features = ["full"] }
-hyper-util = { version = "=0.1.5", features = ["tokio", "server", "server-auto"] }
+hyper = { version = "1.4.1", features = ["full"] }
+hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
+hyper-util = { version = "=0.1.6", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3"
jsonc-parser = { version = "=0.23.0", features = ["serde"] }
+lazy-regex = "3"
libc = "0.2.126"
@@ -145,12 +151,12 @@ prost = "0.11"
prost-build = "0.11"
rand = "=0.8.5"
regex = "^1.7.0"
-reqwest = { version = "=0.12.4", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
+reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
ring = "^0.17.0"
rusqlite = { version = "=0.29.0", features = ["unlock_notify", "bundled"] }
-rustls = "0.22.4"
+rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] }
rustls-pemfile = "2"
-rustls-tokio-stream = "=0.2.23"
+rustls-tokio-stream = "=0.3.0"
rustls-webpki = "0.102"
rustyline = "=13.0.0"
saffron = "=0.1.0"
@@ -159,6 +165,7 @@ serde = { version = "1.0.149", features = ["derive"] }
serde_bytes = "0.11"
serde_json = "1.0.85"
serde_repr = "=0.1.16"
+sha1 = { version = "0.10.6", features = ["oid"] }
sha2 = { version = "0.10.8", features = ["oid"] }
signature = "2.1"
slab = "0.4"
@@ -168,17 +175,23 @@ spki = "0.7.2"
tar = "=0.4.40"
tempfile = "3.4.0"
termcolor = "1.1.3"
-thiserror = "1.0.40"
+thiserror = "1.0.61"
tokio = { version = "1.36.0", features = ["full"] }
tokio-metrics = { version = "0.3.0", features = ["rt"] }
+tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
tokio-socks = "0.5.1"
tokio-util = "0.7.4"
+tower = { version = "0.4.13", default-features = false, features = ["util"] }
+tower-http = { version = "0.5.2", features = ["decompression-br", "decompression-gzip"] }
tower-lsp = { version = "=0.20.0", features = ["proposed"] }
+tower-service = "0.3.2"
twox-hash = "=1.6.3"
# Upgrading past 2.4.1 may cause WPT failures
url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] }
webpki-roots = "0.26"
-zeromq = { version = "=0.3.4", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
+which = "4.2.5"
+zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4"

# crypto
@@ -187,7 +200,7 @@ rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] }

# webgpu
raw-window-handle = "0.6.0"
-wgpu-core = "0.20"
+wgpu-core = "0.21.1"
wgpu-types = "0.20"

# macros
@@ -200,7 +213,7 @@ nix = "=0.26.2"
# windows deps
junction = "=0.2.0"
winapi = "=0.3.9"
-windows-sys = { version = "0.48.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem"] }
+windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry"] }
winres = "=0.1.12"

# NB: the `bench` and `release` profiles must remain EXACTLY the same.
@@ -232,139 +245,145 @@ opt-level = 1
(This hunk alphabetizes the per-package `opt-level = 3` sections; the scrape lost the add/remove markers, so old and new section headers appear interleaved below.)

# Optimize these packages for performance.
# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.bench.package.rand]
[profile.bench.package.async-compression]
opt-level = 3
[profile.bench.package.flate2]
[profile.bench.package.base64-simd]
opt-level = 3
[profile.bench.package.brotli]
opt-level = 3
[profile.bench.package.miniz_oxide]
opt-level = 3
[profile.bench.package.async-compression]
opt-level = 3
[profile.bench.package.brotli-decompressor]
opt-level = 3
[profile.bench.package.bytes]
opt-level = 3
[profile.bench.package.deno_bench_util]
opt-level = 3
[profile.bench.package.deno_broadcast_channel]
opt-level = 3
[profile.bench.package.deno_core]
opt-level = 3
[profile.bench.package.deno_runtime]
opt-level = 3
[profile.bench.package.deno_http]
opt-level = 3
[profile.bench.package.deno_web]
opt-level = 3
[profile.bench.package.deno_broadcast_channel]
[profile.bench.package.deno_crypto]
opt-level = 3
[profile.bench.package.deno_fetch]
opt-level = 3
[profile.bench.package.deno_ffi]
opt-level = 3
[profile.bench.package.deno_tls]
[profile.bench.package.deno_http]
opt-level = 3
[profile.bench.package.deno_websocket]
[profile.bench.package.deno_napi]
opt-level = 3
[profile.bench.package.deno_net]
opt-level = 3
[profile.bench.package.deno_crypto]
opt-level = 3
[profile.bench.package.deno_node]
opt-level = 3
[profile.bench.package.num-bigint-dig]
[profile.bench.package.deno_runtime]
opt-level = 3
[profile.bench.package.v8]
opt-level = 3
[profile.bench.package.serde_v8]
opt-level = 3
[profile.bench.package.serde]
[profile.bench.package.deno_tls]
opt-level = 3
[profile.bench.package.deno_url]
opt-level = 3
[profile.bench.package.url]
[profile.bench.package.deno_web]
opt-level = 3
[profile.bench.package.bytes]
opt-level = 3
[profile.bench.package.futures-util]
[profile.bench.package.deno_websocket]
opt-level = 3
[profile.bench.package.fastwebsockets]
opt-level = 3
[profile.bench.package.flate2]
opt-level = 3
[profile.bench.package.futures-util]
opt-level = 3
[profile.bench.package.hyper]
opt-level = 3
[profile.bench.package.miniz_oxide]
opt-level = 3
[profile.bench.package.num-bigint-dig]
opt-level = 3
[profile.bench.package.rand]
opt-level = 3
[profile.bench.package.serde]
opt-level = 3
[profile.bench.package.serde_v8]
opt-level = 3
[profile.bench.package.test_napi]
opt-level = 3
[profile.bench.package.tokio]
opt-level = 3
[profile.bench.package.url]
opt-level = 3
[profile.bench.package.v8]
opt-level = 3
[profile.bench.package.zstd]
opt-level = 3
[profile.bench.package.zstd-sys]
opt-level = 3
[profile.bench.package.base64-simd]
opt-level = 3

# NB: the `bench` and `release` profiles must remain EXACTLY the same.
[profile.release.package.rand]
[profile.release.package.async-compression]
opt-level = 3
[profile.release.package.flate2]
[profile.release.package.base64-simd]
opt-level = 3
[profile.release.package.brotli]
opt-level = 3
[profile.release.package.miniz_oxide]
opt-level = 3
[profile.release.package.async-compression]
opt-level = 3
[profile.release.package.brotli-decompressor]
opt-level = 3
[profile.release.package.bytes]
opt-level = 3
[profile.release.package.deno_bench_util]
opt-level = 3
[profile.release.package.deno_broadcast_channel]
opt-level = 3
[profile.release.package.deno_core]
opt-level = 3
[profile.release.package.deno_runtime]
opt-level = 3
[profile.release.package.deno_http]
opt-level = 3
[profile.release.package.deno_net]
opt-level = 3
[profile.release.package.deno_web]
opt-level = 3
[profile.release.package.deno_crypto]
opt-level = 3
[profile.release.package.deno_node]
opt-level = 3
[profile.release.package.deno_broadcast_channel]
opt-level = 3
[profile.release.package.deno_fetch]
opt-level = 3
[profile.release.package.deno_ffi]
opt-level = 3
[profile.release.package.deno_tls]
opt-level = 3
[profile.release.package.deno_websocket]
[profile.release.package.deno_http]
opt-level = 3
[profile.release.package.deno_napi]
opt-level = 3
[profile.release.package.test_napi]
[profile.release.package.deno_net]
opt-level = 3
[profile.release.package.num-bigint-dig]
[profile.release.package.deno_node]
opt-level = 3
[profile.release.package.v8]
[profile.release.package.deno_runtime]
opt-level = 3
[profile.release.package.serde_v8]
opt-level = 3
[profile.release.package.serde]
[profile.release.package.deno_tls]
opt-level = 3
[profile.release.package.deno_url]
opt-level = 3
[profile.release.package.url]
[profile.release.package.deno_web]
opt-level = 3
[profile.release.package.bytes]
[profile.release.package.deno_websocket]
opt-level = 3
[profile.release.package.fastwebsockets]
opt-level = 3
[profile.release.package.flate2]
opt-level = 3
[profile.release.package.futures-util]
opt-level = 3
[profile.release.package.hyper]
opt-level = 3
[profile.release.package.miniz_oxide]
opt-level = 3
[profile.release.package.num-bigint-dig]
opt-level = 3
[profile.release.package.rand]
opt-level = 3
[profile.release.package.serde]
opt-level = 3
[profile.release.package.serde_v8]
opt-level = 3
[profile.release.package.test_napi]
opt-level = 3
[profile.release.package.tokio]
opt-level = 3
[profile.release.package.url]
opt-level = 3
[profile.release.package.v8]
opt-level = 3
[profile.release.package.zstd]
opt-level = 3
[profile.release.package.zstd-sys]
opt-level = 3
[profile.release.package.base64-simd]
opt-level = 3
Releases.md (201 changed lines)

@@ -6,6 +6,207 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install

+### 1.45.4 / 2024.07.26
+
+- Reland "fix: CFunctionInfo and CTypeInfo leaks (#24634)" (#24692)
+- fix(ext/fetch): respect authority from URL (#24705)
+- fix(ext/fetch): use correct ALPN to proxies (#24696)
+- fix(ext/node): read correct CPU usage stats on Linux (#24732)
+- fix(ext/node/net): emit `error` before `close` when connection is refused
+  (#24656)
+- fix(future): Emit `deno install` warning less often, suggest `deno install` in
+  error message (#24706)
+- fix(lsp): rewrite import for 'infer return type' action (#24685)
+- fix(node): better detection for when to surface node resolution errors
+  (#24653)
+- fix(node): cjs pkg dynamically importing esm-only pkg fails (#24730)
+- fix(node/worker_threads): support `port.once()` (#24725)
+- fix(publish): workspace included license file had incorrect path (#24747)
+- fix(unstable): move sloppy-import warnings to lint rule (#24710)
+- fix(upgrade): do not error if config in cwd invalid (#24689)
+- fix(workspaces/publish): include the license file from the workspace root if
+  not in pkg (#24714)
+- fix: enable the reporting of parsing related problems when running deno lint
+  (#24332)
+- fix: support `npm:bindings` and `npm:callsites` packages (#24727)
+- fix: update lsp error message of 'relative import path' to 'use deno add' for
+  npm/jsr packages (#24524)
+- fix: decode percent-encoding source string in `Error.stack` (#24709)
+- perf: update deno_doc (#24700)
+
+### 1.45.3 / 2024.07.22
+
+- Reland "refactor(fetch): reimplement fetch with hyper instead of reqwest"
+  (#24593)
+- fix(cli): Create child node_modules for conflicting dependency versions,
+  respect aliases in package.json (#24609)
+- fix(cli): Respect implied BYONM from DENO_FUTURE in `deno task` (#24652)
+- fix(cli): add NAPI support in standalone mode (#24642)
+- fix(cron): improve error message for invalid cron names (#24644)
+- fix(docs): fix some deno.land/manual broken urls (#24557)
+- fix(ext/console): Error Cause Not Inspect-Formatted when printed (#24526)
+- fix(ext/node): do not expose `self` global in node (#24637)
+- fix(ext/node): http request uploads of subarray of buffer should work (#24603)
+- fix(ext/node): stat.mode on windows (#24434)
+- fix(fmt): was sometimes putting comments in front of commas in parameter lists
+  (#24650)
+- fix(init): use bare specifier for `jsr:@std/assert` (#24581)
+- fix(lsp): hang when caching failed (#24651)
+- fix(lsp): scope attribution for asset documents (#24663)
+- fix(lsp): support npm workspaces and fix some resolution issues (#24627)
+- fix(node): Fix `--allow-scripts` with no `deno.json` (#24533)
+- fix(node): support `tty.hasColors()` and `tty.getColorDepth()` (#24619)
+- fix(npm): handle packages with only pre-released 0.0.0 versions (#24563)
+- fix(publish): surface syntax errors when using --no-check (#24620)
+- fix(publish): warn about missing license file (#24677)
+- fix(std/http2): release window capacity back to remote stream (#24576)
+- fix(types): Conform lib.deno_web.d.ts to lib.dom.d.ts and lib.webworker.d.ts
+  (#24599)
+- fix(workspace): do not resolve to self for npm pkg depending on matching req
+  (#24591)
+- fix(workspace): support resolving bare specifiers to npm pkgs within a
+  workspace (#24611)
+- fix: make vendor cache manifest more deterministic (#24658)
+- fix: missing `emitWarning` import (#24587)
+- perf(ext/node): optimize fs.exists[Sync] (#24613)
+
+### 1.45.2 / 2024.07.12
+
+- fix(cli/init): broken link in deno init sample template (#24545)
+- fix(config): regression - should not discover npm workspace for nested
+  deno.json not in workspace (#24559)
+- fix(ext/node): handle prefix mapping for IPv4-mapped IPv6 addresses (#24546)
+- fix(ext/webgpu): GPUDevice.createRenderPipelineAsync should return a Promise
+  (#24349)
+- fix(node): revert invalid package target change (#24539)
+- fix(publish): show dirty files on dirty check failure (#24541)
+- fix: include already seen deps in lockfile dep tracking (#24556)
+- fix: unblock fsevents native module (#24542)
+- perf(ext/crypto): make randomUUID() 5x faster (#24510)
+
+### 1.45.1 / 2024.07.11
+
+- fix(node): Ignore broken default install scripts (#24534)
+- fix(npm): only warn about lifecycle scripts not being run when setting up
+  directory (#24530)
+- fix(workspace): allow using --import-map flag with workspace (#24527)
+
+### 1.45.0 / 2024.07.10
+
+- BREAKING(unstable/ffi): remove callback reentrant flag (#24367)
+- feat(cli): Add `--frozen` flag to error out if lockfile is out of date
+  (#24355)
+- feat(cli): `deno init --lib` (#22499)
+- feat(compile): support `--env` (#24166)
+- feat(ext/crypto): make `deriveBits` `length` parameter optional and nullable
+  (#24426)
+- feat(ext/web): add `Blob.prototype.bytes()` (#24148)
+- feat(jsr): support publishing jsr packages in npm workspaces (#24507)
+- feat(jupyter): support `confirm` and `prompt` in notebooks (#23592)
+- feat(lsp): ts language service scopes (#24345)
+- feat(node): Support executing npm package lifecycle scripts
+  (preinstall/install/postinstall) (#24487)
+- feat(workspace): support object config (#24483)
+- feat: Deprecate `--lock-write` flag (#24436)
+- feat: Upgrade to TypeScript 5.5.2 (#24326)
+- feat: add `__tests__` to test file detection defaults (#24443)
+- feat: deprecate `deno vendor` (#22183)
+- feat: npm workspace and better Deno workspace support (#24334)
+- feat: support wildcards in npm workspaces (#24471)
+- feat: upgrade deno_core (#24364)
+- feat: upgrade deno_core to 0.293.0 (#24482)
+- fix(check): CJS types importing dual ESM/CJS package should prefer CJS types
+  (#24492)
+- fix(compile): atomically write compile output (#24378)
+- fix(compile): prevent setting unstable feature twice (#24381)
+- fix(ext/node): Add `fs.lutimes` / `fs.lutimesSync` (#23172)
+- fix(ext/node): add `Module.parent` (#24333)
+- fix(ext/node): add ServerResponse#appendHeader (#24216)
+- fix(ext/node): add Symbol.toStringTag to KeyObject instances (#24377)
+- fix(ext/node): discover .npmrc in user's homedir (#24021)
+- fix(ext/node): don't panic on invalid utf-8 in pem (#24303)
+- fix(ext/node): don't wait for end() call to send http client request (#24390)
+- fix(ext/node): http chunked writes hangs (#24428)
+- fix(ext/node): ignore stream error during enqueue (#24243)
+- fix(ext/node): make next tick queue resilient to `Array.prototype` tampering
+  (#24361)
+- fix(ext/node): rewrite `crypto.Hash` (#24302)
+- fix(ext/node): rewrite digest handling (#24392)
+- fix(ext/node): use cppgc for node:zlib (#24267)
+- fix(ext/webgpu): fix `GPUUncapturedErrorEvent` parent type (#24369)
+- fix(ext/websocket): drop connection when close frame not ack (#24301)
+- fix(lsp): correct scope attribution for injected @types/node (#24404)
+- fix(lsp): do sloppy resolution for node-to-node imports in byonm (#24481)
+- fix(lsp): don't use global cache paths for scope allocation (#24353)
+- fix(lsp): inherit workspace-root-only fields in members (#24440)
+- fix(lsp): respect lockfile redirects entries for resolution (#24365)
+- fix(lsp): use CliLockfile (#24387)
+- fix(net): handle panic on Windows for Unix socket usage in Deno.serve()
+  (#24423)
+- fix(net): set correct max size for Datagram (#21611)
+- fix(node): Implement `fs.lchown` (and `process.getegid`) (#24418)
+- fix(node): add missing readline/promises module (#24336)
+- fix(node/assert): throws not checking error instance (#24466)
+- fix(node/http): don't error if request destroyed before send (#24497)
+- fix(node/http): don't send destroyed requests (#24498)
+- fix(node/http): don't throw on .address() before .listen() (#24432)
+- fix(node/http): support all `.writeHead()` signatures (#24469)
+- fix(node/perf_hooks): stub eventLoopUtilization (#24501)
+- fix(node/v8): stub serializer methods (#24502)
+- fix(permissions): handle ipv6 addresses correctly (#24397)
+- fix(publish): unfurling should always be done with the package json (#24435)
+- fix(task): do not propagate env changes outside subshells (#24279)
+- fix(windows): check USERPROFILE env var for finding home directory (#24384)
+- fix(workspace): better cli file argument handling (#24447)
+- fix: Add sys permission kinds for node compat (#24242)
+- fix: add warning for invalid unstable feature use in deno.json/jsonc (#24120)
+- fix: do not download compilerOptions -> types when not type checking (#24473)
+- fix: do not return undefined for missing global properties (#24474)
+- fix: make .setup-cache.bin in node_modules more reproducible (#24480)
+- fix: memory leak when transpiling (#24490)
+- fix: node-api get_value_string_utf8 should use utf8_length (#24193)
+- fix: panic when piping "deno help" or "deno --version" (#22917)
+- fix: test in presence of `.npmrc` (#24486)
+- fix: upgrade deno_core to 0.291.0 (#24297)
+- perf(ext/node): improve `Buffer.from(buffer)` (#24352)
+- perf(ext/websocket): avoid global interceptor overhead (#24284)
+- perf(ws): optimize fastwebsockets in release profile (#24277)
+- perf: optimize Buffer.from("base64") for forgiving-base64 conforming input
+  (#24346)
+
+### 1.44.4 / 2024.06.19
+
+- Revert "chore: upgrade to reqwest 0.12.4 and rustls 0.22 (#24056)" (#24262)
+- fix(ext/node): Add Dirent.path and Dirent.parentPath (#24257)
+- fix(ext/node): Add SIGPOLL and SIGUNUSED signals (#24259)
+- fix(ext/node): use primordials in `ext/node/polyfills/_utils.ts` (#24253)
+
+### 1.44.3 / 2024.06.18
+
+- feat(lsp): multi deno.json resolver scopes (#24206)
+- fix(cli): missing flag for `--unstable-process` (#24199)
+- fix(docs): correctly resolve href for built-ins (#24228)
+- fix(ext/console): bump default max str length to 10_00 (#24245)
+- fix(ext/http): actually await `goAhead` promise (#24226)
+- fix(ext/node): add missing BlockList & SocketAddress classes (#24229)
+- fix(ext/node): `server.close()` does graceful shutdown (#24184)
+- fix(ext/node): better support for `node:diagnostics_channel` module (#24088)
+- fix(ext/node): make process.versions own property (#24240)
+- fix(ext/node): use `Deno.FsFile.statSync()` (#24234)
+- fix(ext/permissions): add correct feature flags to winapi (#24218)
+- fix(ext/web): fix `AbortSignal.timeout()` leak (#23842)
+- fix(ext/webgpu): fix surface creation panic when adapter not initialized
+  (#24201)
+- fix(inspector): crash on "Debugger.setBlackboxPatterns" (#24204)
+- fix(lsp): use import map from workspace root (#24246)
+- fix(napi): Read reference ownership before calling finalizer to avoid crash
+  (#24203)
+- fix(no-slow-types): handle named type in mapped type (#24205)
+- fix(npm): use more relaxed package.json version constraint parsing (#24202)
+- fix(repl): prevent panic when deleting globalThis.closed property (#24014)
+- perf(lsp): store settings in Arc (#24191)
+- perf(node): ensure cjs wrapper module has deterministic output (#24248)
+
### 1.44.2 / 2024.06.13

- FUTURE: support `deno install <alias>@npm:<package>` (#24156)
@@ -2,7 +2,7 @@

[package]
name = "deno_bench_util"
-version = "0.150.0"
+version = "0.157.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -2,7 +2,7 @@

[package]
name = "deno"
-version = "1.44.2"
+version = "1.45.4"
authors.workspace = true
default-run = "deno"
edition.workspace = true
@@ -44,7 +44,7 @@ default = ["upgrade", "__vendored_zlib_ng"]
upgrade = []
# A dev feature to disable creations and loading of snapshots in favor of
# loading JS sources at runtime.
-__runtime_js_sources = ["deno_runtime/__runtime_js_sources"]
+hmr = ["deno_runtime/hmr"]
# Vendor zlib as zlib-ng
__vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"]

@@ -65,20 +65,22 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
-deno_config = "=0.16.4"
+deno_config = { version = "=0.26.1", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "=0.139.0", features = ["html", "syntect"] }
-deno_emit = "=0.42.0"
-deno_graph = { version = "=0.78.1", features = ["tokio_executor"] }
-deno_lint = { version = "=0.60.0", features = ["docs"] }
+deno_doc = { version = "0.144.0", features = ["html", "syntect"] }
+deno_emit = "=0.43.1"
+deno_graph = { version = "=0.80.1", features = ["tokio_executor"] }
+deno_lint = { version = "=0.61.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm = "=0.21.4"
+deno_package_json.workspace = true
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_semver = "=0.5.6"
-deno_task_shell = "=0.16.1"
+deno_semver = "=0.5.7"
+deno_task_shell = "=0.17.0"
deno_terminal.workspace = true
-eszip = "=0.71.0"
+eszip = "=0.72.2"
napi_sym.workspace = true
+node_resolver.workspace = true

async-trait.workspace = true
base32.workspace = true
@@ -99,7 +101,7 @@ dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.3"
dprint-plugin-jupyter = "=0.1.3"
dprint-plugin-markdown = "=0.17.1"
-dprint-plugin-typescript = "=0.91.1"
+dprint-plugin-typescript = "=0.91.4"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0"
faster-hex.workspace = true
@@ -107,11 +109,14 @@ faster-hex.workspace = true
flate2.workspace = true
fs3.workspace = true
glob = "0.3.1"
ignore = "0.4"
-import_map = { version = "=0.19.0", features = ["ext"] }
+http.workspace = true
+http-body.workspace = true
+http-body-util.workspace = true
+hyper-util.workspace = true
+import_map = { version = "=0.20.0", features = ["ext"] }
indexmap.workspace = true
jsonc-parser.workspace = true
-jupyter_runtime = { package = "runtimelib", version = "=0.11.0" }
+jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
lazy-regex.workspace = true
libc.workspace = true
libz-sys.workspace = true
@@ -129,7 +134,6 @@ phf.workspace = true
quick-junit = "^0.3.5"
rand = { workspace = true, features = ["small_rng"] }
regex.workspace = true
-reqwest.workspace = true
ring.workspace = true
rustyline.workspace = true
rustyline-derive = "=0.7.0"
@@ -149,7 +153,7 @@ tower-lsp.workspace = true
twox-hash.workspace = true
typed-arena = "=2.0.1"
uuid = { workspace = true, features = ["serde"] }
-walkdir = "=2.3.2"
+which.workspace = true
zeromq.workspace = true
zstd.workspace = true

@@ -164,6 +168,7 @@ nix.workspace = true
deno_bench_util.workspace = true
pretty_assertions.workspace = true
test_util.workspace = true
+walkdir = "=2.3.2"

[package.metadata.winres]
# This section defines the metadata that appears in the deno.exe PE header.
@@ -7,8 +7,64 @@ use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;

+#[cfg(test)] // happens to only be used by the tests at the moment
+pub struct DenoConfigFsAdapter<'a>(
+  pub &'a dyn deno_runtime::deno_fs::FileSystem,
+);
+
+#[cfg(test)]
+impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
+  fn read_to_string_lossy(
+    &self,
+    path: &std::path::Path,
+  ) -> Result<String, std::io::Error> {
+    self
+      .0
+      .read_text_file_lossy_sync(path, None)
+      .map_err(|err| err.into_io_error())
+  }
+
+  fn stat_sync(
+    &self,
+    path: &std::path::Path,
+  ) -> Result<deno_config::fs::FsMetadata, std::io::Error> {
+    self
+      .0
+      .stat_sync(path)
+      .map(|stat| deno_config::fs::FsMetadata {
+        is_file: stat.is_file,
+        is_directory: stat.is_directory,
+        is_symlink: stat.is_symlink,
+      })
+      .map_err(|err| err.into_io_error())
+  }
+
+  fn read_dir(
+    &self,
+    path: &std::path::Path,
+  ) -> Result<Vec<deno_config::fs::FsDirEntry>, std::io::Error> {
+    self
+      .0
+      .read_dir_sync(path)
+      .map_err(|err| err.into_io_error())
+      .map(|entries| {
+        entries
+          .into_iter()
+          .map(|e| deno_config::fs::FsDirEntry {
+            path: path.join(e.name),
+            metadata: deno_config::fs::FsMetadata {
+              is_file: e.is_file,
+              is_directory: e.is_directory,
+              is_symlink: e.is_symlink,
+            },
+          })
+          .collect()
+      })
+  }
+}
+
pub fn deno_json_deps(
-  config: &deno_config::ConfigFile,
+  config: &deno_config::deno_json::ConfigFile,
) -> HashSet<JsrDepPackageReq> {
  let values = imports_values(config.json.imports.as_ref())
    .into_iter()
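For orientation, a hedged sketch of how the test-only adapter above is meant to be used: wrap a concrete `deno_fs::FileSystem` so that `deno_config` can read configuration files through it. The `RealFs` value and the exact call shape below are an assumption for illustration, not code from this commit:

```rust
// Hypothetical usage sketch of DenoConfigFsAdapter (cfg(test)-only above).
// Assumes deno_runtime::deno_fs::RealFs as the concrete FileSystem; the
// trait-qualified call mirrors the impl shown in the diff.
fn read_deno_json_text() -> Result<String, std::io::Error> {
  let fs = deno_runtime::deno_fs::RealFs;
  let adapter = DenoConfigFsAdapter(&fs);
  // Delegates to FileSystem::read_text_file_lossy_sync under the hood.
  deno_config::fs::DenoConfigFs::read_to_string_lossy(
    &adapter,
    std::path::Path::new("deno.json"),
  )
}
```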
@@ -9,11 +9,12 @@ use clap::ArgMatches;
use clap::ColorChoice;
use clap::Command;
use clap::ValueHint;
use deno_config::glob::FilePatterns;
+use deno_config::glob::PathOrPatternSet;
-use deno_config::ConfigFlag;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
+use deno_core::normalize_path;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_graph::GraphKind;
@@ -23,6 +24,7 @@ use log::debug;
use log::Level;
use serde::Deserialize;
use serde::Serialize;
+use std::collections::HashSet;
use std::env;
use std::ffi::OsString;
use std::net::SocketAddr;
@@ -39,12 +41,43 @@ use crate::util::fs::canonicalize_path;
use super::flags_net;
use super::DENO_FUTURE;

+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+pub enum ConfigFlag {
+  #[default]
+  Discover,
+  Path(String),
+  Disabled,
+}
+
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct FileFlags {
  pub ignore: Vec<String>,
  pub include: Vec<String>,
}

+impl FileFlags {
+  pub fn as_file_patterns(
+    &self,
+    base: &Path,
+  ) -> Result<FilePatterns, AnyError> {
+    Ok(FilePatterns {
+      include: if self.include.is_empty() {
+        None
+      } else {
+        Some(PathOrPatternSet::from_include_relative_path_or_patterns(
+          base,
+          &self.include,
+        )?)
+      },
+      exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
+        base,
+        &self.ignore,
+      )?,
+      base: base.to_path_buf(),
+    })
+  }
+}
+
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct AddFlags {
  pub packages: Vec<String>,
@@ -133,6 +166,10 @@ impl Default for DocSourceFileFlag {
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DocHtmlFlag {
  pub name: Option<String>,
+ pub category_docs_path: Option<String>,
+ pub symbol_redirect_map_path: Option<String>,
+ pub default_symbol_map_path: Option<String>,
+ pub strip_trailing_html: bool,
  pub output: String,
}

@@ -152,7 +189,7 @@ pub struct EvalFlags {
  pub code: String,
}

-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Default, Debug, Eq, PartialEq)]
pub struct FmtFlags {
  pub check: bool,
  pub files: FileFlags,
@@ -175,6 +212,7 @@ impl FmtFlags {
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct InitFlags {
  pub dir: Option<String>,
+ pub lib: bool,
}

#[derive(Clone, Debug, Eq, PartialEq)]
@@ -231,7 +269,7 @@ pub struct UninstallFlags {
  pub kind: UninstallKind,
}

-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct LintFlags {
  pub files: FileFlags,
  pub rules: bool,
@@ -319,7 +357,7 @@ pub struct TaskFlags {
  pub task: Option<String>,
}

-#[derive(Clone, Debug, Default, Eq, PartialEq)]
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum TestReporterConfig {
  #[default]
  Pretty,
@@ -476,6 +514,30 @@ pub enum CaData {
  Bytes(Vec<u8>),
}

+// Info needed to run NPM lifecycle scripts
+#[derive(Clone, Debug, Eq, PartialEq, Default)]
+pub struct LifecycleScriptsConfig {
+  pub allowed: PackagesAllowedScripts,
+  pub initial_cwd: Option<PathBuf>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, Default)]
+/// The set of npm packages that are allowed to run lifecycle scripts.
+pub enum PackagesAllowedScripts {
+  All,
+  Some(Vec<String>),
+  #[default]
+  None,
+}
+
+fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
+  if !s.starts_with("npm:") {
+    bail!("Invalid package for --allow-scripts: '{}'. An 'npm:' specifier is required", s);
+  } else {
+    Ok(s.into())
+  }
+}
+
#[derive(
  Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize,
)]
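The `--allow-scripts` validation above only inspects the specifier prefix. A self-contained sketch of that behavior (swapping `deno_core`'s `AnyError` for a plain `String` error so the example compiles on its own — an assumption made for illustration):

```rust
// Minimal re-statement of parse_packages_allowed_scripts' prefix check.
// The real function returns Result<String, AnyError>; a String error is
// used here only to keep the sketch dependency-free.
fn parse_packages_allowed_scripts(s: &str) -> Result<String, String> {
  if !s.starts_with("npm:") {
    Err(format!(
      "Invalid package for --allow-scripts: '{}'. An 'npm:' specifier is required",
      s
    ))
  } else {
    Ok(s.to_string())
  }
}

fn main() {
  // npm: specifiers pass through unchanged...
  assert_eq!(
    parse_packages_allowed_scripts("npm:esbuild").as_deref(),
    Ok("npm:esbuild")
  );
  // ...anything else (e.g. a jsr: specifier) is rejected.
  assert!(parse_packages_allowed_scripts("jsr:@std/fs").is_err());
}
```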
@@ -494,6 +556,7 @@ pub struct Flags {
  pub argv: Vec<String>,
  pub subcommand: DenoSubcommand,

+ pub frozen_lockfile: bool,
  pub ca_stores: Option<Vec<String>>,
  pub ca_data: Option<CaData>,
  pub cache_blocklist: Vec<String>,
@@ -515,6 +578,7 @@ pub struct Flags {
  pub inspect_wait: Option<SocketAddr>,
  pub inspect: Option<SocketAddr>,
  pub location: Option<Url>,
+ // TODO(bartlomieju): deprecated, to be removed in Deno 2.
  pub lock_write: bool,
  pub lock: Option<String>,
  pub log_level: Option<Level>,
@@ -529,6 +593,7 @@ pub struct Flags {
  pub v8_flags: Vec<String>,
  pub code_cache_enabled: bool,
  pub permissions: PermissionFlags,
+ pub allow_scripts: PackagesAllowedScripts,
}

#[derive(Clone, Debug, Eq, PartialEq, Default, Serialize, Deserialize)]
@@ -833,30 +898,54 @@ impl Flags {
    args
  }

- /// Extract path arguments for config search paths.
- /// If it returns Some(vec), the config should be discovered
- /// from the passed `current_dir` after trying to discover from each entry in
- /// the returned vector.
- /// If it returns None, the config file shouldn't be discovered at all.
+ /// Extract the paths the config file should be discovered from.
+ ///
+ /// Returns `None` if the config file should not be auto-discovered.
  pub fn config_path_args(&self, current_dir: &Path) -> Option<Vec<PathBuf>> {
-   use DenoSubcommand::*;
+   fn resolve_multiple_files(
+     files_or_dirs: &[String],
+     current_dir: &Path,
+   ) -> Vec<PathBuf> {
+     let mut seen = HashSet::with_capacity(files_or_dirs.len());
+     let result = files_or_dirs
+       .iter()
+       .filter_map(|p| {
+         let path = normalize_path(current_dir.join(p));
+         if seen.insert(path.clone()) {
+           Some(path)
+         } else {
+           None
+         }
+       })
+       .collect::<Vec<_>>();
+     if result.is_empty() {
+       vec![current_dir.to_path_buf()]
+     } else {
+       result
+     }
+   }
+
+   use DenoSubcommand::*;
    match &self.subcommand {
      Fmt(FmtFlags { files, .. }) => {
-       Some(files.include.iter().map(|p| current_dir.join(p)).collect())
+       Some(resolve_multiple_files(&files.include, current_dir))
      }
      Lint(LintFlags { files, .. }) => {
-       Some(files.include.iter().map(|p| current_dir.join(p)).collect())
+       Some(resolve_multiple_files(&files.include, current_dir))
      }
-     Run(RunFlags { script, .. }) => {
+     Run(RunFlags { script, .. })
+     | Compile(CompileFlags {
+       source_file: script,
+       ..
+     }) => {
        if let Ok(module_specifier) = resolve_url_or_path(script, current_dir) {
          if module_specifier.scheme() == "file"
            || module_specifier.scheme() == "npm"
          {
            if let Ok(p) = module_specifier.to_file_path() {
-             Some(vec![p])
+             Some(vec![p.parent().unwrap().to_path_buf()])
            } else {
-             Some(vec![])
+             Some(vec![current_dir.to_path_buf()])
            }
          } else {
            // When the entrypoint doesn't have file: scheme (it's the remote
@@ -864,7 +953,7 @@ impl Flags {
            None
          }
        } else {
-         Some(vec![])
+         Some(vec![current_dir.to_path_buf()])
        }
      }
      Task(TaskFlags {
@@ -875,57 +964,10 @@ impl Flags {
        // `--cwd` when specified
        match canonicalize_path(&PathBuf::from(path)) {
          Ok(path) => Some(vec![path]),
-         Err(_) => Some(vec![]),
-       }
-     }
-     _ => Some(vec![]),
-   }
- }
-
- /// Extract path argument for `package.json` search paths.
- /// If it returns Some(path), the `package.json` should be discovered
- /// from the `path` dir.
- /// If it returns None, the `package.json` file shouldn't be discovered at
- /// all.
- pub fn package_json_search_dir(&self, current_dir: &Path) -> Option<PathBuf> {
-   use DenoSubcommand::*;
-
-   match &self.subcommand {
-     Run(RunFlags { script, .. }) | Serve(ServeFlags { script, .. }) => {
-       let module_specifier = resolve_url_or_path(script, current_dir).ok()?;
-       if module_specifier.scheme() == "file" {
-         let p = module_specifier
-           .to_file_path()
-           .unwrap()
-           .parent()?
-           .to_owned();
-         Some(p)
-       } else if module_specifier.scheme() == "npm" {
-         Some(current_dir.to_path_buf())
-       } else {
-         None
-       }
-     }
-     Task(TaskFlags { cwd: Some(cwd), .. }) => {
-       resolve_url_or_path(cwd, current_dir)
-         .ok()?
-         .to_file_path()
-         .ok()
-     }
-     Task(_) | Check(_) | Coverage(_) | Cache(_) | Info(_) | Eval(_)
-     | Test(_) | Bench(_) | Repl(_) | Compile(_) | Publish(_) => {
-       Some(current_dir.to_path_buf())
-     }
-     Add(_) | Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_)
-     | Uninstall(_) | Jupyter(_) | Lsp | Lint(_) | Types | Upgrade(_)
-     | Vendor(_) => None,
-     Install(_) => {
-       if *DENO_FUTURE {
-         Some(current_dir.to_path_buf())
-       } else {
-         None
+         Err(_) => Some(vec![current_dir.to_path_buf()]),
        }
      }
+     _ => Some(vec![current_dir.to_path_buf()]),
    }
  }
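Since `resolve_multiple_files` is the behavioral core of the rewritten `config_path_args`, here is a self-contained sketch of what it guarantees — duplicate arguments collapse and an empty argument list falls back to the current directory. `normalize_path` is replaced with a plain `join`, so this is an illustrative approximation, not the exact function:

```rust
use std::collections::HashSet;
use std::path::{Path, PathBuf};

// Simplified stand-in for the helper above; the real code also normalizes
// each joined path (collapsing `.` and `..` segments) via normalize_path.
fn resolve_multiple_files(files_or_dirs: &[String], current_dir: &Path) -> Vec<PathBuf> {
  let mut seen = HashSet::with_capacity(files_or_dirs.len());
  let result: Vec<PathBuf> = files_or_dirs
    .iter()
    .filter_map(|p| {
      let path = current_dir.join(p);
      // HashSet::insert returns false for duplicates, filtering them out.
      seen.insert(path.clone()).then_some(path)
    })
    .collect();
  if result.is_empty() {
    // No file arguments at all: discover config from the current directory.
    vec![current_dir.to_path_buf()]
  } else {
    result
  }
}

fn main() {
  let cwd = Path::new("/work");
  // Duplicate arguments collapse to a single search path.
  let paths = resolve_multiple_files(&["src".to_string(), "src".to_string()], cwd);
  assert_eq!(paths, vec![PathBuf::from("/work/src")]);
  // An empty list falls back to the current directory.
  assert_eq!(resolve_multiple_files(&[], cwd), vec![cwd.to_path_buf()]);
}
```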
@@ -1483,12 +1525,16 @@ Future runs of this module will trigger no downloads or compilation unless
--reload is specified.",
  )
  .defer(|cmd| {
-   compile_args(cmd).arg(check_arg(false)).arg(
-     Arg::new("file")
-       .num_args(1..)
-       .required(true)
-       .value_hint(ValueHint::FilePath),
-   )
+   compile_args(cmd)
+     .arg(check_arg(false))
+     .arg(
+       Arg::new("file")
+         .num_args(1..)
+         .required(true)
+         .value_hint(ValueHint::FilePath),
+     )
+     .arg(frozen_lockfile_arg())
+     .arg(allow_scripts_arg())
  })
}

@@ -1794,6 +1840,37 @@ Show documentation for runtime built-ins:
      .action(ArgAction::Set)
      .require_equals(true)
    )
+   .arg(
+     Arg::new("category-docs")
+       .long("category-docs")
+       .help("Path to a JSON file keyed by category and an optional value of a markdown doc")
+       .requires("html")
+       .action(ArgAction::Set)
+       .require_equals(true)
+   )
+   .arg(
+     Arg::new("symbol-redirect-map")
+       .long("symbol-redirect-map")
+       .help("Path to a JSON file keyed by file, with an inner map of symbol to an external link")
+       .requires("html")
+       .action(ArgAction::Set)
+       .require_equals(true)
+   )
+   .arg(
+     Arg::new("strip-trailing-html")
+       .long("strip-trailing-html")
+       .help("Remove trailing .html from various links. Will still generate files with a .html extension.")
+       .requires("html")
+       .action(ArgAction::SetTrue)
+   )
+   .arg(
+     Arg::new("default-symbol-map")
+       .long("default-symbol-map")
+       .help("Uses the provided mapping of default name to wanted name for usage blocks.")
+       .requires("html")
+       .action(ArgAction::Set)
+       .require_equals(true)
+   )
    .arg(
      Arg::new("output")
        .long("output")
@@ -2009,11 +2086,18 @@ fn init_subcommand() -> Command {
  Command::new("init")
    .about("Initialize a new project")
    .defer(|cmd| {
-     cmd.arg(
-       Arg::new("dir")
-         .required(false)
-         .value_hint(ValueHint::DirPath),
-     )
+     cmd
+       .arg(
+         Arg::new("dir")
+           .required(false)
+           .value_hint(ValueHint::DirPath),
+       )
+       .arg(
+         Arg::new("lib")
+           .long("lib")
+           .required(false)
+           .action(ArgAction::SetTrue),
+       )
    })
}

@@ -2162,7 +2246,7 @@ The installation root is determined, in order of precedence:

These must be added to the path manually if required.")
  .defer(|cmd| {
-   let cmd = runtime_args(cmd, true, true).arg(check_arg(true));
+   let cmd = runtime_args(cmd, true, true).arg(check_arg(true)).arg(allow_scripts_arg());
    install_args(cmd, true)
  })
}
@@ -2548,7 +2632,7 @@ report results to standard output:

  deno test src/fetch_test.ts src/signal_test.ts

Directory arguments are expanded to all contained files matching the glob
-{*_,*.,}test.{js,mjs,ts,mts,jsx,tsx}:
+{*_,*.,}test.{js,mjs,ts,mts,jsx,tsx} or **/__tests__/**:

  deno test src/",
)
@@ -2635,7 +2719,7 @@ Directory arguments are expanded to all contained files matching the glob
  Arg::new("clean")
    .long("clean")
    .help("Empty the temporary coverage profile data directory before running tests.

Note: running multiple `deno test --clean` calls in series or parallel for the same coverage directory may cause race conditions.")
    .action(ArgAction::SetTrue),
)
@@ -2753,11 +2837,16 @@ update to a different location, use the --output flag
  })
}

+// TODO(bartlomieju): this subcommand is now deprecated, remove it in Deno 2.
fn vendor_subcommand() -> Command {
  Command::new("vendor")
+   .hide(true)
    .about("Vendor remote modules into a local directory")
    .long_about(
-     "Vendor remote modules into a local directory.
+     "⚠️ Warning: `deno vendor` is deprecated and will be removed in Deno 2.0.
+Add `\"vendor\": true` to your `deno.json` or use the `--vendor` flag instead.
+
+Vendor remote modules into a local directory.
|
||||
|
||||
Analyzes the provided modules along with their dependencies, downloads
|
||||
remote modules to the output directory, and produces an import map that
|
||||
|
@ -3236,6 +3325,7 @@ fn runtime_args(
|
|||
app
|
||||
};
|
||||
app
|
||||
.arg(frozen_lockfile_arg())
|
||||
.arg(cached_only_arg())
|
||||
.arg(location_arg())
|
||||
.arg(v8_flags_arg())
|
||||
|
@ -3349,6 +3439,17 @@ fn cached_only_arg() -> Arg {
|
|||
.help("Require that remote dependencies are already cached")
|
||||
}
|
||||
|
||||
fn frozen_lockfile_arg() -> Arg {
|
||||
Arg::new("frozen")
|
||||
.long("frozen")
|
||||
.alias("frozen-lockfile")
|
||||
.value_parser(value_parser!(bool))
|
||||
.num_args(0..=1)
|
||||
.require_equals(true)
|
||||
.default_missing_value("true")
|
||||
.help("Error out if lockfile is out of date")
|
||||
}
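A minimal usage sketch, consistent with the `run_with_frozen_lockfile` test later in this diff: because the flag uses `num_args(0..=1)` with `default_missing_value("true")`, a bare `--frozen` behaves like `--frozen=true`, while a value must be attached with `=`.

    // Sketch in the style of this file's tests; `flags_from_vec` and
    // `svec!` are the existing test helpers.
    let r = flags_from_vec(svec!["deno", "run", "--frozen", "script.ts"]);
    assert!(r.unwrap().frozen_lockfile);
    let r = flags_from_vec(svec!["deno", "run", "--frozen=false", "script.ts"]);
    assert!(!r.unwrap().frozen_lockfile);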

/// Used for subcommands that operate on executable scripts only.
/// `deno fmt` has its own `--ext` arg because its possible values differ.
/// If --ext is not provided and the script doesn't have a file extension,

@@ -3569,12 +3670,14 @@ If value is not provided, defaults to \"deno.lock\" in the current working direc
    .value_hint(ValueHint::FilePath)
}

// TODO(bartlomieju): deprecated, to be removed in Deno 2.
fn lock_write_arg() -> Arg {
  Arg::new("lock-write")
    .action(ArgAction::SetTrue)
    .long("lock-write")
    .help("Force overwriting the lock file.")
    .conflicts_with("no-lock")
    .hide(true)
}

fn no_lock_arg() -> Arg {

@@ -3658,6 +3761,28 @@ fn unsafely_ignore_certificate_errors_arg() -> Arg {
    .value_parser(flags_net::validator)
}

fn allow_scripts_arg() -> Arg {
  Arg::new("allow-scripts")
    .long("allow-scripts")
    .num_args(0..)
    .use_value_delimiter(true)
    .require_equals(true)
    .value_name("PACKAGE")
    .value_parser(parse_packages_allowed_scripts)
    .help("Allow running npm lifecycle scripts for the given packages. Note: Scripts will only be executed when using a node_modules directory (`--node-modules-dir`)")
}

fn allow_scripts_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  let Some(parts) = matches.remove_many::<String>("allow-scripts") else {
    return;
  };
  if parts.len() == 0 {
    flags.allow_scripts = PackagesAllowedScripts::All;
  } else {
    flags.allow_scripts = PackagesAllowedScripts::Some(parts.collect());
  }
}
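A bare `--allow-scripts` (no `=...` value) therefore enables lifecycle scripts for all packages, while an explicit list restricts them; a minimal sketch in this file's test style (mirroring the `allow_scripts` test near the end of this diff):

    let r = flags_from_vec(svec!["deno", "cache", "--allow-scripts", "script.ts"]);
    assert_eq!(r.unwrap().allow_scripts, PackagesAllowedScripts::All);
    let r = flags_from_vec(svec!["deno", "cache", "--allow-scripts=npm:foo", "script.ts"]);
    assert_eq!(
      r.unwrap().allow_scripts,
      PackagesAllowedScripts::Some(svec!["npm:foo"])
    );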

fn add_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  flags.subcommand = DenoSubcommand::Add(add_parse_inner(matches, None));
}

@@ -3739,6 +3864,8 @@ fn bundle_parse(flags: &mut Flags, matches: &mut ArgMatches) {

fn cache_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  compile_args_parse(flags, matches);
  frozen_lockfile_arg_parse(flags, matches);
  allow_scripts_arg_parse(flags, matches);
  let files = matches.remove_many::<String>("file").unwrap().collect();
  flags.subcommand = DenoSubcommand::Cache(CacheFlags { files });
}

@@ -3879,10 +4006,23 @@ fn doc_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  let filter = matches.remove_one::<String>("filter");
  let html = if matches.get_flag("html") {
    let name = matches.remove_one::<String>("name");
    let category_docs_path = matches.remove_one::<String>("category-docs");
    let symbol_redirect_map_path =
      matches.remove_one::<String>("symbol-redirect-map");
    let strip_trailing_html = matches.get_flag("strip-trailing-html");
    let default_symbol_map_path =
      matches.remove_one::<String>("default-symbol-map");
    let output = matches
      .remove_one::<String>("output")
      .unwrap_or(String::from("./docs/"));
    Some(DocHtmlFlag { name, output })
    Some(DocHtmlFlag {
      name,
      category_docs_path,
      symbol_redirect_map_path,
      default_symbol_map_path,
      strip_trailing_html,
      output,
    })
  } else {
    None
  };

@@ -3962,6 +4102,7 @@ fn fmt_parse(flags: &mut Flags, matches: &mut ArgMatches) {
fn init_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  flags.subcommand = DenoSubcommand::Init(InitFlags {
    dir: matches.remove_one::<String>("dir"),
    lib: matches.get_flag("lib"),
  });
}

@@ -4011,6 +4152,7 @@ fn install_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  let local_flags = matches
    .remove_many("cmd")
    .map(|packages| add_parse_inner(matches, Some(packages)));
  allow_scripts_arg_parse(flags, matches);
  flags.subcommand = DenoSubcommand::Install(InstallFlags {
    global,
    kind: InstallKind::Local(local_flags),

@@ -4111,6 +4253,53 @@ fn run_parse(
  matches: &mut ArgMatches,
  app: Command,
) -> clap::error::Result<()> {
  // todo(dsherret): remove this in Deno 2.0
  // This is a hack to make https://github.com/netlify/build/pull/5767 work
  // for old versions of @netlify/edge-bundler with new versions of Deno
  // where Deno has gotten smarter at resolving config files.
  //
  // It's an unfortunate scenario, but Netlify has the version at least
  // pinned to 1.x in old versions so we can remove this in Deno 2.0 in
  // a few months.
  fn temp_netlify_deno_1_hack(flags: &mut Flags, script_arg: &str) {
    fn is_netlify_edge_bundler_entrypoint(
      flags: &Flags,
      script_arg: &str,
    ) -> bool {
      // based on diff here: https://github.com/netlify/edge-bundler/blame/f1d33b74ca7aeec19a7c2149316d4547a94e43fb/node/config.ts#L85
      if flags.permissions.allow_read.is_none()
        || flags.permissions.allow_write.is_none()
        || flags.config_flag != ConfigFlag::Discover
      {
        return false;
      }
      if !script_arg.contains("@netlify") {
        return false;
      }
      let path = PathBuf::from(script_arg);
      if !path.ends_with("deno/config.ts") {
        return false;
      }
      let mut found_node_modules = false;
      for component in path.components().filter_map(|c| c.as_os_str().to_str())
      {
        if !found_node_modules {
          found_node_modules = component == "node_modules";
        } else {
          // make this work with pnpm and other package managers
          if component.contains("@netlify") {
            return true;
          }
        }
      }
      false
    }

    if is_netlify_edge_bundler_entrypoint(flags, script_arg) {
      flags.config_flag = ConfigFlag::Disabled;
    }
  }

  runtime_args_parse(flags, matches, true, true);

  flags.code_cache_enabled = !matches.get_flag("no-code-cache");

@@ -4129,6 +4318,7 @@ fn run_parse(
  flags.argv.extend(script_arg);

  ext_arg_parse(flags, matches);
  temp_netlify_deno_1_hack(flags, &script);

  flags.subcommand = DenoSubcommand::Run(RunFlags {
    script,

@@ -4528,6 +4718,7 @@ fn runtime_args_parse(
) {
  compile_args_parse(flags, matches);
  cached_only_arg_parse(flags, matches);
  frozen_lockfile_arg_parse(flags, matches);
  if include_perms {
    permission_args_parse(flags, matches);
  }

@@ -4619,6 +4810,12 @@ fn cached_only_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  }
}

fn frozen_lockfile_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  if let Some(&v) = matches.get_one::<bool>("frozen") {
    flags.frozen_lockfile = v;
  }
}

fn ext_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  flags.ext = matches.remove_one::<String>("ext");
}

@@ -4672,6 +4869,7 @@ fn check_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
fn lock_args_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  lock_arg_parse(flags, matches);
  no_lock_arg_parse(flags, matches);
  // TODO(bartlomieju): deprecated, to be removed in Deno 2.
  if matches.get_flag("lock-write") {
    flags.lock_write = true;
  }

@@ -7780,7 +7978,7 @@ mod tests {
    let r = flags_from_vec(svec![
      "deno",
      "run",
      "--unsafely-ignore-certificate-errors=deno.land,localhost,::,127.0.0.1,[::1],1.2.3.4",
      "--unsafely-ignore-certificate-errors=deno.land,localhost,[::],127.0.0.1,[::1],1.2.3.4",
      "script.ts"
    ]);
    assert_eq!(

@@ -7792,7 +7990,7 @@ mod tests {
        unsafely_ignore_certificate_errors: Some(svec![
          "deno.land",
          "localhost",
          "::",
          "[::]",
          "127.0.0.1",
          "[::1]",
          "1.2.3.4"

@@ -7808,7 +8006,7 @@ mod tests {
    let r = flags_from_vec(svec![
      "deno",
      "repl",
      "--unsafely-ignore-certificate-errors=deno.land,localhost,::,127.0.0.1,[::1],1.2.3.4"]);
      "--unsafely-ignore-certificate-errors=deno.land,localhost,[::],127.0.0.1,[::1],1.2.3.4"]);
    assert_eq!(
      r.unwrap(),
      Flags {

@@ -7820,7 +8018,7 @@ mod tests {
        unsafely_ignore_certificate_errors: Some(svec![
          "deno.land",
          "localhost",
          "::",
          "[::]",
          "127.0.0.1",
          "[::1]",
          "1.2.3.4"

@@ -8012,7 +8210,7 @@ mod tests {
    let r = flags_from_vec(svec![
      "deno",
      "run",
      "--allow-net=deno.land,deno.land:80,::,127.0.0.1,[::1],1.2.3.4:5678,:5678,[::1]:8080",
      "--allow-net=deno.land,deno.land:80,[::],127.0.0.1,[::1],1.2.3.4:5678,:5678,[::1]:8080",
      "script.ts"
    ]);
    assert_eq!(

@@ -8025,7 +8223,7 @@ mod tests {
        allow_net: Some(svec![
          "deno.land",
          "deno.land:80",
          "::",
          "[::]",
          "127.0.0.1",
          "[::1]",
          "1.2.3.4:5678",

@@ -8047,7 +8245,7 @@ mod tests {
    let r = flags_from_vec(svec![
      "deno",
      "run",
      "--deny-net=deno.land,deno.land:80,::,127.0.0.1,[::1],1.2.3.4:5678,:5678,[::1]:8080",
      "--deny-net=deno.land,deno.land:80,[::],127.0.0.1,[::1],1.2.3.4:5678,:5678,[::1]:8080",
      "script.ts"
    ]);
    assert_eq!(

@@ -8060,7 +8258,7 @@ mod tests {
        deny_net: Some(svec![
          "deno.land",
          "deno.land:80",
          "::",
          "[::]",
          "127.0.0.1",
          "[::1]",
          "1.2.3.4:5678",

@@ -8789,6 +8987,10 @@ mod tests {
        lint: false,
        html: Some(DocHtmlFlag {
          name: Some("My library".to_string()),
          category_docs_path: None,
          symbol_redirect_map_path: None,
          default_symbol_map_path: None,
          strip_trailing_html: false,
          output: String::from("./docs/"),
        }),
        source_files: DocSourceFileFlag::Paths(svec!["path/to/module.ts"]),

@@ -8815,6 +9017,10 @@ mod tests {
        json: false,
        html: Some(DocHtmlFlag {
          name: Some("My library".to_string()),
          category_docs_path: None,
          symbol_redirect_map_path: None,
          default_symbol_map_path: None,
          strip_trailing_html: false,
          output: String::from("./foo"),
        }),
        lint: true,

@@ -9191,7 +9397,15 @@ mod tests {
  fn test_config_path_args() {
    let flags = flags_from_vec(svec!["deno", "run", "foo.js"]).unwrap();
    let cwd = std::env::current_dir().unwrap();
    assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.join("foo.js")]));

    assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));

    let flags = flags_from_vec(svec!["deno", "run", "sub_dir/foo.js"]).unwrap();
    let cwd = std::env::current_dir().unwrap();
    assert_eq!(
      flags.config_path_args(&cwd),
      Some(vec![cwd.join("sub_dir").clone()])
    );

    let flags =
      flags_from_vec(svec!["deno", "run", "https://example.com/foo.js"])

@@ -9199,20 +9413,31 @@ mod tests {
    assert_eq!(flags.config_path_args(&cwd), None);

    let flags =
      flags_from_vec(svec!["deno", "lint", "dir/a.js", "dir/b.js"]).unwrap();
      flags_from_vec(svec!["deno", "lint", "dir/a/a.js", "dir/b/b.js"])
        .unwrap();
    assert_eq!(
      flags.config_path_args(&cwd),
      Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")])
      Some(vec![cwd.join("dir/a/a.js"), cwd.join("dir/b/b.js")])
    );

    let flags = flags_from_vec(svec!["deno", "lint"]).unwrap();
    assert!(flags.config_path_args(&cwd).unwrap().is_empty());
    assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));

    let flags =
      flags_from_vec(svec!["deno", "fmt", "dir/a.js", "dir/b.js"]).unwrap();
    let flags = flags_from_vec(svec![
      "deno",
      "fmt",
      "dir/a/a.js",
      "dir/a/a2.js",
      "dir/b.js"
    ])
    .unwrap();
    assert_eq!(
      flags.config_path_args(&cwd),
      Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")])
      Some(vec![
        cwd.join("dir/a/a.js"),
        cwd.join("dir/a/a2.js"),
        cwd.join("dir/b.js")
      ])
    );
  }

@@ -9647,7 +9872,10 @@ mod tests {
    assert_eq!(
      r.unwrap(),
      Flags {
        subcommand: DenoSubcommand::Init(InitFlags { dir: None }),
        subcommand: DenoSubcommand::Init(InitFlags {
          dir: None,
          lib: false
        }),
        ..Flags::default()
      }
    );

@@ -9658,6 +9886,7 @@ mod tests {
      Flags {
        subcommand: DenoSubcommand::Init(InitFlags {
          dir: Some(String::from("foo")),
          lib: false
        }),
        ..Flags::default()
      }

@@ -9667,11 +9896,38 @@ mod tests {
    assert_eq!(
      r.unwrap(),
      Flags {
        subcommand: DenoSubcommand::Init(InitFlags { dir: None }),
        subcommand: DenoSubcommand::Init(InitFlags {
          dir: None,
          lib: false
        }),
        log_level: Some(Level::Error),
        ..Flags::default()
      }
    );

    let r = flags_from_vec(svec!["deno", "init", "--lib"]);
    assert_eq!(
      r.unwrap(),
      Flags {
        subcommand: DenoSubcommand::Init(InitFlags {
          dir: None,
          lib: true
        }),
        ..Flags::default()
      }
    );

    let r = flags_from_vec(svec!["deno", "init", "foo", "--lib"]);
    assert_eq!(
      r.unwrap(),
      Flags {
        subcommand: DenoSubcommand::Init(InitFlags {
          dir: Some(String::from("foo")),
          lib: true
        }),
        ..Flags::default()
      }
    );
  }

  #[test]

@@ -9789,4 +10045,79 @@ mod tests {
      }
    );
  }

  #[test]
  fn run_with_frozen_lockfile() {
    let cases = [
      (Some("--frozen"), true),
      (Some("--frozen=true"), true),
      (Some("--frozen=false"), false),
      (None, false),
    ];
    for (flag, frozen) in cases {
      let mut args = svec!["deno", "run"];
      if let Some(f) = flag {
        args.push(f.into());
      }
      args.push("script.ts".into());
      let r = flags_from_vec(args);
      assert_eq!(
        r.unwrap(),
        Flags {
          subcommand: DenoSubcommand::Run(RunFlags::new_default(
            "script.ts".to_string(),
          )),
          frozen_lockfile: frozen,
          code_cache_enabled: true,
          ..Flags::default()
        }
      );
    }
  }

  #[test]
  fn allow_scripts() {
    let cases = [
      (Some("--allow-scripts"), Ok(PackagesAllowedScripts::All)),
      (None, Ok(PackagesAllowedScripts::None)),
      (
        Some("--allow-scripts=npm:foo"),
        Ok(PackagesAllowedScripts::Some(svec!["npm:foo"])),
      ),
      (
        Some("--allow-scripts=npm:foo,npm:bar"),
        Ok(PackagesAllowedScripts::Some(svec!["npm:foo", "npm:bar"])),
      ),
      (Some("--allow-scripts=foo"), Err("Invalid package")),
    ];
    for (flag, value) in cases {
      let mut args = svec!["deno", "cache"];
      if let Some(flag) = flag {
        args.push(flag.into());
      }
      args.push("script.ts".into());
      let r = flags_from_vec(args);
      match value {
        Ok(value) => {
          assert_eq!(
            r.unwrap(),
            Flags {
              subcommand: DenoSubcommand::Cache(CacheFlags {
                files: svec!["script.ts"],
              }),
              allow_scripts: value,
              ..Flags::default()
            }
          );
        }
        Err(e) => {
          let err = r.unwrap_err();
          assert!(
            err.to_string().contains(e),
            "expected to contain '{e}' got '{err}'"
          );
        }
      }
    }
  }
}
@@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::url::Url;
use deno_runtime::deno_permissions::NetDescriptor;
use std::net::IpAddr;
use std::str::FromStr;

@@ -42,21 +43,17 @@ pub fn validator(host_and_port: &str) -> Result<String, String> {
/// `127.0.0.1:port` and `localhost:port`.
pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
  let mut out: Vec<String> = vec![];
  for host_and_port in paths.iter() {
    if Url::parse(&format!("internal://{host_and_port}")).is_ok()
      || host_and_port.parse::<IpAddr>().is_ok()
    {
      out.push(host_and_port.to_owned())
    } else if let Ok(port) = host_and_port.parse::<BarePort>() {
  for host_and_port in paths.into_iter() {
    if let Ok(port) = host_and_port.parse::<BarePort>() {
      // we got bare port, let's add default hosts
      for host in ["0.0.0.0", "127.0.0.1", "localhost"].iter() {
        out.push(format!("{}:{}", host, port.0));
      }
    } else {
      return Err(clap::Error::raw(
        clap::error::ErrorKind::InvalidValue,
        format!("Bad host:port pair: {host_and_port}"),
      ));
      host_and_port.parse::<NetDescriptor>().map_err(|e| {
        clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}"))
      })?;
      out.push(host_and_port)
    }
  }
  Ok(out)
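A bare port is still expanded to the three default hosts; everything else is now validated by `NetDescriptor` instead of ad-hoc URL parsing. A minimal sketch of the expansion (assuming `BarePort` accepts the leading-colon form exercised by the `--allow-net=...,:5678,...` tests earlier in this diff):

    // ":8000" expands to the default hosts, in this order:
    let out = parse(svec![":8000"]).unwrap();
    assert_eq!(out, svec!["0.0.0.0:8000", "127.0.0.1:8000", "localhost:8000"]);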
@@ -121,8 +118,8 @@ mod tests {
    let entries = svec![
      "deno.land",
      "deno.land:80",
      "::",
      "::1",
      "[::]",
      "[::1]",
      "127.0.0.1",
      "[::1]",
      "1.2.3.4:5678",

@@ -142,8 +139,8 @@ mod tests {
    let expected = svec![
      "deno.land",
      "deno.land:80",
      "::",
      "::1",
      "[::]",
      "[::1]",
      "127.0.0.1",
      "[::1]",
      "1.2.3.4:5678",

@@ -174,10 +171,8 @@ mod tests {

  #[test]
  fn parse_net_args_ipv6() {
    let entries =
      svec!["::", "::1", "[::1]", "[::]:5678", "[::1]:5678", "::cafe"];
    let expected =
      svec!["::", "::1", "[::1]", "[::]:5678", "[::1]:5678", "::cafe"];
    let entries = svec!["[::1]", "[::]:5678", "[::1]:5678"];
    let expected = svec!["[::1]", "[::]:5678", "[::1]:5678"];
    let actual = parse(entries).unwrap();
    assert_eq!(actual, expected);
  }

@@ -190,12 +185,36 @@ mod tests {

  #[test]
  fn parse_net_args_ipv6_error2() {
    let entries = svec!["0123:4567:890a:bcde:fg::"];
    let entries = svec!["::1"];
    assert!(parse(entries).is_err());
  }

  #[test]
  fn parse_net_args_ipv6_error3() {
    let entries = svec!["::"];
    assert!(parse(entries).is_err());
  }

  #[test]
  fn parse_net_args_ipv6_error4() {
    let entries = svec!["::cafe"];
    assert!(parse(entries).is_err());
  }

  #[test]
  fn parse_net_args_ipv6_error5() {
    let entries = svec!["1::1"];
    assert!(parse(entries).is_err());
  }

  #[test]
  fn parse_net_args_ipv6_error6() {
    let entries = svec!["0123:4567:890a:bcde:fg::"];
    assert!(parse(entries).is_err());
  }

  #[test]
  fn parse_net_args_ipv6_error7() {
    let entries = svec!["[::q]:8080"];
    assert!(parse(entries).is_err());
  }
@@ -1,127 +1,25 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_runtime::deno_permissions::PermissionsContainer;
use import_map::ImportMap;
use import_map::ImportMapDiagnostic;
use log::warn;

use super::ConfigFile;
use crate::file_fetcher::FileFetcher;

pub async fn resolve_import_map(
  specified_specifier: Option<&Url>,
  maybe_config_file: Option<&ConfigFile>,
pub async fn resolve_import_map_value_from_specifier(
  specifier: &Url,
  file_fetcher: &FileFetcher,
) -> Result<Option<ImportMap>, AnyError> {
  if let Some(specifier) = specified_specifier {
    resolve_import_map_from_specifier(specifier.clone(), file_fetcher)
      .await
      .with_context(|| format!("Unable to load '{}' import map", specifier))
      .map(Some)
  } else if let Some(config_file) = maybe_config_file {
    let maybe_url_and_value = config_file
      .to_import_map_value(|specifier| {
        let specifier = specifier.clone();
        async move {
          let file = file_fetcher
            .fetch(&specifier, &PermissionsContainer::allow_all())
            .await?
            .into_text_decoded()?;
          Ok(file.source.to_string())
        }
      })
      .await
      .with_context(|| {
        format!(
          "Unable to resolve import map in '{}'",
          config_file.specifier
        )
      })?;
    match maybe_url_and_value {
      Some((url, value)) => {
        import_map_from_value(url.into_owned(), value).map(Some)
      }
      None => Ok(None),
    }
  } else {
    Ok(None)
  }
}

async fn resolve_import_map_from_specifier(
  specifier: Url,
  file_fetcher: &FileFetcher,
) -> Result<ImportMap, AnyError> {
  let value: serde_json::Value = if specifier.scheme() == "data" {
) -> Result<serde_json::Value, AnyError> {
  if specifier.scheme() == "data" {
    let data_url_text =
      deno_graph::source::RawDataUrl::parse(&specifier)?.decode()?;
    serde_json::from_str(&data_url_text)?
      deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
    Ok(serde_json::from_str(&data_url_text)?)
  } else {
    let file = file_fetcher
      .fetch(&specifier, &PermissionsContainer::allow_all())
      .fetch(specifier, &PermissionsContainer::allow_all())
      .await?
      .into_text_decoded()?;
    serde_json::from_str(&file.source)?
  };
  import_map_from_value(specifier, value)
}

pub fn import_map_from_value(
  specifier: Url,
  json_value: serde_json::Value,
) -> Result<ImportMap, AnyError> {
  debug_assert!(
    !specifier.as_str().contains("../"),
    "Import map specifier incorrectly contained ../: {}",
    specifier.as_str()
  );
  let result = import_map::parse_from_value(specifier, json_value)?;
  print_import_map_diagnostics(&result.diagnostics);
  Ok(result.import_map)
}

fn print_import_map_diagnostics(diagnostics: &[ImportMapDiagnostic]) {
  if !diagnostics.is_empty() {
    warn!(
      "Import map diagnostics:\n{}",
      diagnostics
        .iter()
        .map(|d| format!("  - {d}"))
        .collect::<Vec<_>>()
        .join("\n")
    );
    Ok(serde_json::from_str(&file.source)?)
  }
}
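The slimmed-down function now yields the raw JSON value and keeps accepting `data:` URLs. A hedged sketch of the `data:`-URL path, with a hypothetical inline import map and a `file_fetcher` assumed to be in scope:

    // Inside an async fn returning Result<_, AnyError>:
    let url = Url::parse(
      "data:application/json,{\"imports\":{\"std/\":\"https://deno.land/std/\"}}",
    )?;
    let value =
      resolve_import_map_value_from_specifier(&url, &file_fetcher).await?;
    assert!(value.get("imports").is_some());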

pub fn enhance_import_map_value_with_workspace_members(
  mut import_map_value: serde_json::Value,
  workspace_members: &[deno_config::WorkspaceMemberConfig],
) -> serde_json::Value {
  let mut imports =
    if let Some(imports) = import_map_value.get("imports").as_ref() {
      imports.as_object().unwrap().clone()
    } else {
      serde_json::Map::new()
    };

  for workspace_member in workspace_members {
    let name = &workspace_member.package_name;
    let version = &workspace_member.package_version;
    // Don't override existing, explicit imports
    if imports.contains_key(name) {
      continue;
    }

    imports.insert(
      name.to_string(),
      serde_json::Value::String(format!("jsr:{}@^{}", name, version)),
    );
  }

  import_map_value["imports"] = serde_json::Value::Object(imports);
  ::import_map::ext::expand_import_map_value(import_map_value)
}
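Each workspace member that is not already mapped gets an entry pointing at its jsr package. A minimal sketch (the member name and version here are hypothetical):

    // Given a member "@scope/foo" at version "1.0.0" with no explicit mapping:
    let enhanced = enhance_import_map_value_with_workspace_members(
      serde_json::json!({ "imports": {} }),
      &workspace_members,
    );
    // enhanced["imports"]["@scope/foo"] is now "jsr:@scope/foo@^1.0.0"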

@@ -1,91 +1,254 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::BTreeSet;
use std::path::PathBuf;

use deno_config::deno_json::ConfigFile;
use deno_config::workspace::Workspace;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_node::PackageJson;

use crate::args::ConfigFile;
use crate::cache;
use crate::util::fs::atomic_write_file_with_retries;
use crate::Flags;

use super::DenoSubcommand;
use super::InstallFlags;
use super::InstallKind;
use crate::args::DenoSubcommand;
use crate::args::InstallFlags;
use crate::args::InstallKind;

pub use deno_lockfile::Lockfile;
use deno_lockfile::Lockfile;

pub fn discover(
  flags: &Flags,
  maybe_config_file: Option<&ConfigFile>,
  maybe_package_json: Option<&PackageJson>,
) -> Result<Option<Lockfile>, AnyError> {
  if flags.no_lock
    || matches!(
      flags.subcommand,
      DenoSubcommand::Install(InstallFlags {
        kind: InstallKind::Global(..),
        ..
      }) | DenoSubcommand::Uninstall(_)
    )
  {
    return Ok(None);
#[derive(Debug)]
pub struct CliLockfile {
  lockfile: Mutex<Lockfile>,
  pub filename: PathBuf,
  pub frozen: bool,
}

pub struct Guard<'a, T> {
  guard: MutexGuard<'a, T>,
}

impl<'a, T> std::ops::Deref for Guard<'a, T> {
  type Target = T;

  fn deref(&self) -> &Self::Target {
    &self.guard
  }
}

impl<'a, T> std::ops::DerefMut for Guard<'a, T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.guard
  }
}

impl CliLockfile {
  pub fn new(lockfile: Lockfile, frozen: bool) -> Self {
    let filename = lockfile.filename.clone();
    Self {
      lockfile: Mutex::new(lockfile),
      filename,
      frozen,
    }
  }

  let filename = match flags.lock {
    Some(ref lock) => PathBuf::from(lock),
    None => match maybe_config_file {
      Some(config_file) => {
        if config_file.specifier.scheme() == "file" {
          match config_file.resolve_lockfile_path()? {
            Some(path) => path,
            None => return Ok(None),
          }
        } else {
          return Ok(None);
        }
      }
      None => match maybe_package_json {
        Some(package_json) => {
          package_json.path.parent().unwrap().join("deno.lock")
        }
  /// Get the inner deno_lockfile::Lockfile.
  pub fn lock(&self) -> Guard<Lockfile> {
    Guard {
      guard: self.lockfile.lock(),
    }
  }

  pub fn set_workspace_config(
    &self,
    options: deno_lockfile::SetWorkspaceConfigOptions,
  ) {
    self.lockfile.lock().set_workspace_config(options);
  }

  pub fn overwrite(&self) -> bool {
    self.lockfile.lock().overwrite
  }

  pub fn write_if_changed(&self) -> Result<(), AnyError> {
    self.error_if_changed()?;
    let mut lockfile = self.lockfile.lock();
    let Some(bytes) = lockfile.resolve_write_bytes() else {
      return Ok(()); // nothing to do
    };
    // do an atomic write to reduce the chance of multiple deno
    // processes corrupting the file
    atomic_write_file_with_retries(
      &lockfile.filename,
      bytes,
      cache::CACHE_PERM,
    )
    .context("Failed writing lockfile.")?;
    lockfile.has_content_changed = false;
    Ok(())
  }
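A minimal usage sketch of the wrapper's API as shown above (the path is hypothetical): read a lockfile, then persist any accumulated changes atomically; with `frozen == true` the same call errors instead of rewriting an out-of-date lockfile.

    let lockfile =
      CliLockfile::read_from_path(PathBuf::from("deno.lock"), false)?;
    // ... resolution may mutate the inner lockfile via lockfile.lock() ...
    lockfile.write_if_changed()?;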

  pub fn discover(
    flags: &Flags,
    workspace: &Workspace,
  ) -> Result<Option<CliLockfile>, AnyError> {
    fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> {
      let Some(pkg_json) = maybe_pkg_json else {
        return Default::default();
      };
      pkg_json
        .resolve_local_package_json_deps()
        .values()
        .filter_map(|dep| dep.as_ref().ok())
        .filter_map(|dep| match dep {
          PackageJsonDepValue::Req(req) => Some(req),
          PackageJsonDepValue::Workspace(_) => None,
        })
        .map(|r| format!("npm:{}", r))
        .collect()
    }

    fn deno_json_deps(
      maybe_deno_json: Option<&ConfigFile>,
    ) -> BTreeSet<String> {
      maybe_deno_json
        .map(|c| {
          crate::args::deno_json::deno_json_deps(c)
            .into_iter()
            .map(|req| req.to_string())
            .collect()
        })
        .unwrap_or_default()
    }

    if flags.no_lock
      || matches!(
        flags.subcommand,
        DenoSubcommand::Install(InstallFlags {
          kind: InstallKind::Global(..),
          ..
        }) | DenoSubcommand::Uninstall(_)
      )
    {
      return Ok(None);
    }

    let filename = match flags.lock {
      Some(ref lock) => PathBuf::from(lock),
      None => match workspace.resolve_lockfile_path()? {
        Some(path) => path,
        None => return Ok(None),
      },
      },
    };
    };

    let lockfile = if flags.lock_write {
      Lockfile::new_empty(filename, true)
    } else {
      read_lockfile_at_path(filename)?
    };
    Ok(Some(lockfile))
  }
    let lockfile = if flags.lock_write {
      log::warn!(
        "{} \"--lock-write\" flag is deprecated and will be removed in Deno 2.",
        crate::colors::yellow("Warning")
      );
      CliLockfile::new(
        Lockfile::new_empty(filename, true),
        flags.frozen_lockfile,
      )
    } else {
      Self::read_from_path(filename, flags.frozen_lockfile)?
    };

pub fn read_lockfile_at_path(filename: PathBuf) -> Result<Lockfile, AnyError> {
  match std::fs::read_to_string(&filename) {
    Ok(text) => Ok(Lockfile::with_lockfile_content(filename, &text, false)?),
    Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
      Ok(Lockfile::new_empty(filename, false))
    // initialize the lockfile with the workspace's configuration
    let root_url = workspace.root_dir();
    let root_folder = workspace.root_folder_configs();
    let config = deno_lockfile::WorkspaceConfig {
      root: WorkspaceMemberConfig {
        package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
        dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
      },
      members: workspace
        .config_folders()
        .iter()
        .filter(|(folder_url, _)| *folder_url != root_url)
        .filter_map(|(folder_url, folder)| {
          Some((
            {
              // should never be None here, but just ignore members that
              // do fail for this
              let mut relative_path = root_url.make_relative(folder_url)?;
              if relative_path.ends_with('/') {
                // make it slightly cleaner by removing the trailing slash
                relative_path.pop();
              }
              relative_path
            },
            {
              let config = WorkspaceMemberConfig {
                package_json_deps: pkg_json_deps(folder.pkg_json.as_deref()),
                dependencies: deno_json_deps(folder.deno_json.as_deref()),
              };
              if config.package_json_deps.is_empty()
                && config.dependencies.is_empty()
              {
                // exclude empty workspace members
                return None;
              }
              config
            },
          ))
        })
        .collect(),
    };
    lockfile.set_workspace_config(deno_lockfile::SetWorkspaceConfigOptions {
      no_npm: flags.no_npm,
      no_config: flags.config_flag == super::ConfigFlag::Disabled,
      config,
    });

    Ok(Some(lockfile))
  }
  pub fn read_from_path(
    filename: PathBuf,
    frozen: bool,
  ) -> Result<CliLockfile, AnyError> {
    match std::fs::read_to_string(&filename) {
      Ok(text) => Ok(CliLockfile::new(
        Lockfile::with_lockfile_content(filename, &text, false)?,
        frozen,
      )),
      Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(
        CliLockfile::new(Lockfile::new_empty(filename, false), frozen),
      ),
      Err(err) => Err(err).with_context(|| {
        format!("Failed reading lockfile '{}'", filename.display())
      }),
    }
  }
  pub fn error_if_changed(&self) -> Result<(), AnyError> {
    if !self.frozen {
      return Ok(());
    }
    let lockfile = self.lockfile.lock();
    if lockfile.has_content_changed {
      let suggested = if *super::DENO_FUTURE {
        "`deno cache --frozen=false`, `deno install --frozen=false`,"
      } else {
        "`deno cache --frozen=false`"
      };

      let contents =
        std::fs::read_to_string(&lockfile.filename).unwrap_or_default();
      let new_contents = lockfile.as_json_string();
      let diff = crate::util::diff::diff(&contents, &new_contents);
      // has an extra newline at the end
      let diff = diff.trim_end();
      Err(deno_core::anyhow::anyhow!(
        "The lockfile is out of date. Run {suggested} or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
      ))
    } else {
      Ok(())
    }
    Err(err) => Err(err).with_context(|| {
      format!("Failed reading lockfile '{}'", filename.display())
    }),
  }
}

pub fn write_lockfile_if_has_changes(
  lockfile: &mut Lockfile,
) -> Result<(), AnyError> {
  let Some(bytes) = lockfile.resolve_write_bytes() else {
    return Ok(()); // nothing to do
  };
  // do an atomic write to reduce the chance of multiple deno
  // processes corrupting the file
  atomic_write_file_with_retries(&lockfile.filename, bytes, cache::CACHE_PERM)
    .context("Failed writing lockfile.")?;
  lockfile.has_content_changed = false;
  Ok(())
}
cli/args/mod.rs (1408 changed lines)
File diff suppressed because it is too large
@@ -1,292 +1,105 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_npm::registry::parse_dep_entry_name_and_raw_version;
use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmVersionReqParseError;
use deno_config::workspace::Workspace;
use deno_package_json::PackageJsonDepValue;
use deno_semver::package::PackageReq;
use deno_semver::VersionReq;
use indexmap::IndexMap;
use thiserror::Error;

#[derive(Debug, Error, Clone)]
pub enum PackageJsonDepValueParseError {
  #[error(transparent)]
  VersionReq(#[from] NpmVersionReqParseError),
  #[error("Not implemented scheme '{scheme}'")]
  Unsupported { scheme: String },
#[derive(Debug)]
pub struct InstallNpmRemotePkg {
  pub alias: String,
  // todo(24419): use this when setting up the node_modules dir
  #[allow(dead_code)]
  pub base_dir: PathBuf,
  pub req: PackageReq,
}

pub type PackageJsonDeps =
  IndexMap<String, Result<PackageReq, PackageJsonDepValueParseError>>;
#[derive(Debug)]
pub struct InstallNpmWorkspacePkg {
  pub alias: String,
  // todo(24419): use this when setting up the node_modules dir
  #[allow(dead_code)]
  pub base_dir: PathBuf,
  pub target_dir: PathBuf,
}

#[derive(Debug, Default)]
pub struct PackageJsonDepsProvider(Option<PackageJsonDeps>);

impl PackageJsonDepsProvider {
  pub fn new(deps: Option<PackageJsonDeps>) -> Self {
    Self(deps)
  }

  pub fn deps(&self) -> Option<&PackageJsonDeps> {
    self.0.as_ref()
  }

  pub fn reqs(&self) -> Option<Vec<&PackageReq>> {
    match &self.0 {
      Some(deps) => {
        let mut package_reqs = deps
          .values()
          .filter_map(|r| r.as_ref().ok())
          .collect::<Vec<_>>();
        package_reqs.sort(); // deterministic resolution
        Some(package_reqs)
      }
      None => None,
    }
  }
pub struct PackageJsonInstallDepsProvider {
  remote_pkgs: Vec<InstallNpmRemotePkg>,
  workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
}

/// Gets an application level package.json's npm package requirements.
///
/// Note that this function is not general purpose. It is specifically for
/// parsing the application level package.json that the user has control
/// over. This is a design limitation to allow mapping these dependency
/// entries to npm specifiers which can then be used in the resolver.
pub fn get_local_package_json_version_reqs(
  package_json: &PackageJson,
) -> PackageJsonDeps {
  fn parse_entry(
    key: &str,
    value: &str,
  ) -> Result<PackageReq, PackageJsonDepValueParseError> {
    if value.starts_with("workspace:")
      || value.starts_with("file:")
      || value.starts_with("git:")
      || value.starts_with("http:")
      || value.starts_with("https:")
    {
      return Err(PackageJsonDepValueParseError::Unsupported {
        scheme: value.split(':').next().unwrap().to_string(),
      });
    }
    let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value);
    let result = VersionReq::parse_from_npm(version_req);
    match result {
      Ok(version_req) => Ok(PackageReq {
        name: name.to_string(),
        version_req,
      }),
      Err(err) => Err(PackageJsonDepValueParseError::VersionReq(err)),
    }
impl PackageJsonInstallDepsProvider {
  pub fn empty() -> Self {
    Self::default()
  }

  fn insert_deps(
    deps: Option<&IndexMap<String, String>>,
    result: &mut PackageJsonDeps,
  ) {
    if let Some(deps) = deps {
      for (key, value) in deps {
        result
          .entry(key.to_string())
          .or_insert_with(|| parse_entry(key, value));
      }
    }
  }
  pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
    let mut workspace_pkgs = Vec::new();
    let mut remote_pkgs = Vec::new();
    let workspace_npm_pkgs = workspace.npm_packages();
    for pkg_json in workspace.package_jsons() {
      let deps = pkg_json.resolve_local_package_json_deps();
      let mut pkg_pkgs = Vec::with_capacity(deps.len());
      for (alias, dep) in deps {
        let Ok(dep) = dep else {
          continue;
        };
        match dep {
          PackageJsonDepValue::Req(pkg_req) => {
            let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
              pkg.matches_req(&pkg_req)
                // do not resolve to the current package
                && pkg.pkg_json.path != pkg_json.path
            });

  let deps = package_json.dependencies.as_ref();
  let dev_deps = package_json.dev_dependencies.as_ref();
  let mut result = IndexMap::new();

  // favors the deps over dev_deps
  insert_deps(deps, &mut result);
  insert_deps(dev_deps, &mut result);

  result
}

/// Attempts to discover the package.json file, maybe stopping when it
/// reaches the specified `maybe_stop_at` directory.
pub fn discover_from(
  start: &Path,
  maybe_stop_at: Option<PathBuf>,
) -> Result<Option<PackageJson>, AnyError> {
  const PACKAGE_JSON_NAME: &str = "package.json";

  // note: ancestors() includes the `start` path
  for ancestor in start.ancestors() {
    let path = ancestor.join(PACKAGE_JSON_NAME);

    let source = match std::fs::read_to_string(&path) {
      Ok(source) => source,
      Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
        if let Some(stop_at) = maybe_stop_at.as_ref() {
          if ancestor == stop_at {
            break;
            if let Some(pkg) = workspace_pkg {
              workspace_pkgs.push(InstallNpmWorkspacePkg {
                alias,
                base_dir: pkg_json.dir_path().to_path_buf(),
                target_dir: pkg.pkg_json.dir_path().to_path_buf(),
              });
            } else {
              pkg_pkgs.push(InstallNpmRemotePkg {
                alias,
                base_dir: pkg_json.dir_path().to_path_buf(),
                req: pkg_req,
              });
            }
          }
          PackageJsonDepValue::Workspace(version_req) => {
            if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
              pkg.matches_name_and_version_req(&alias, &version_req)
            }) {
              workspace_pkgs.push(InstallNpmWorkspacePkg {
                alias,
                base_dir: pkg_json.dir_path().to_path_buf(),
                target_dir: pkg.pkg_json.dir_path().to_path_buf(),
              });
            }
          }
        }
        continue;
      }
      Err(err) => bail!(
        "Error loading package.json at {}. {:#}",
        path.display(),
        err
      ),
    };
    // sort within each package
    pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));

    let package_json = PackageJson::load_from_string(path.clone(), source)?;
    log::debug!("package.json file found at '{}'", path.display());
    return Ok(Some(package_json));
    remote_pkgs.extend(pkg_pkgs);
  }
  remote_pkgs.shrink_to_fit();
  workspace_pkgs.shrink_to_fit();
  Self {
    remote_pkgs,
    workspace_pkgs,
  }
}

  log::debug!("No package.json file found");
  Ok(None)
}

#[cfg(test)]
mod test {
  use pretty_assertions::assert_eq;
  use std::path::PathBuf;

  use super::*;

  fn get_local_package_json_version_reqs_for_tests(
    package_json: &PackageJson,
  ) -> IndexMap<String, Result<PackageReq, String>> {
    get_local_package_json_version_reqs(package_json)
      .into_iter()
      .map(|(k, v)| {
        (
          k,
          match v {
            Ok(v) => Ok(v),
            Err(err) => Err(err.to_string()),
          },
        )
      })
      .collect::<IndexMap<_, _>>()
  pub fn remote_pkgs(&self) -> &Vec<InstallNpmRemotePkg> {
    &self.remote_pkgs
  }

  #[test]
  fn test_get_local_package_json_version_reqs() {
    let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
    package_json.dependencies = Some(IndexMap::from([
      ("test".to_string(), "^1.2".to_string()),
      ("other".to_string(), "npm:package@~1.3".to_string()),
    ]));
    package_json.dev_dependencies = Some(IndexMap::from([
      ("package_b".to_string(), "~2.2".to_string()),
      // should be ignored
      ("other".to_string(), "^3.2".to_string()),
    ]));
    let deps = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(
      deps,
      IndexMap::from([
        (
          "test".to_string(),
          Ok(PackageReq::from_str("test@^1.2").unwrap())
        ),
        (
          "other".to_string(),
          Ok(PackageReq::from_str("package@~1.3").unwrap())
        ),
        (
          "package_b".to_string(),
          Ok(PackageReq::from_str("package_b@~2.2").unwrap())
        )
      ])
    );
  }

  #[test]
  fn test_get_local_package_json_version_reqs_errors_non_npm_specifier() {
    let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
    package_json.dependencies = Some(IndexMap::from([(
      "test".to_string(),
      "%*(#$%()".to_string(),
    )]));
    let map = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(
      map,
      IndexMap::from([(
        "test".to_string(),
        Err(
          concat!(
            "Invalid npm version requirement. Unexpected character.\n",
            "  %*(#$%()\n",
            "  ~"
          )
          .to_string()
        )
      )])
    );
  }

  #[test]
  fn test_get_local_package_json_version_reqs_range() {
    let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
    package_json.dependencies = Some(IndexMap::from([(
      "test".to_string(),
      "1.x - 1.3".to_string(),
    )]));
    let map = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(
      map,
      IndexMap::from([(
        "test".to_string(),
        Ok(PackageReq {
          name: "test".to_string(),
          version_req: VersionReq::parse_from_npm("1.x - 1.3").unwrap()
        })
      )])
    );
  }

  #[test]
  fn test_get_local_package_json_version_reqs_skips_certain_specifiers() {
    let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
    package_json.dependencies = Some(IndexMap::from([
      ("test".to_string(), "1".to_string()),
      ("work-test".to_string(), "workspace:1.1.1".to_string()),
      ("file-test".to_string(), "file:something".to_string()),
      ("git-test".to_string(), "git:something".to_string()),
      ("http-test".to_string(), "http://something".to_string()),
      ("https-test".to_string(), "https://something".to_string()),
    ]));
    let result = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(
      result,
      IndexMap::from([
        (
          "file-test".to_string(),
          Err("Not implemented scheme 'file'".to_string()),
        ),
        (
          "git-test".to_string(),
          Err("Not implemented scheme 'git'".to_string()),
        ),
        (
          "http-test".to_string(),
          Err("Not implemented scheme 'http'".to_string()),
        ),
        (
          "https-test".to_string(),
          Err("Not implemented scheme 'https'".to_string()),
        ),
        (
          "test".to_string(),
          Ok(PackageReq::from_str("test@1").unwrap())
        ),
        (
          "work-test".to_string(),
          Err("Not implemented scheme 'workspace'".to_string())
        )
      ])
    );
  pub fn workspace_pkgs(&self) -> &Vec<InstallNpmWorkspacePkg> {
    &self.workspace_pkgs
  }
}
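A minimal sketch of the new provider's surface as shown above: collect the npm requirements a workspace's package.json files contribute, split into remote packages and locally linked workspace packages (`workspace` is assumed to be an `Arc<Workspace>` in scope).

    let provider = PackageJsonInstallDepsProvider::from_workspace(&workspace);
    for pkg in provider.remote_pkgs() {
      println!("{} -> npm:{}", pkg.alias, pkg.req);
    }
    for pkg in provider.workspace_pkgs() {
      println!("{} -> {}", pkg.alias, pkg.target_dir.display());
    }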
@@ -124,6 +124,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
    "check",
    "--reload",
    "--unstable",
    "--config",
    "tests/config/deno.json",
    "tests/util/std/http/file_server_test.ts",
  ],
  None,

@@ -135,6 +137,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
    "--reload",
    "--no-check",
    "--unstable",
    "--config",
    "tests/config/deno.json",
    "tests/util/std/http/file_server_test.ts",
  ],
  None,

@@ -144,6 +148,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
  &[
    "bundle",
    "--unstable",
    "--config",
    "tests/config/deno.json",
    "tests/util/std/http/file_server_test.ts",
  ],
  None,

@@ -154,6 +160,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
    "bundle",
    "--no-check",
    "--unstable",
    "--config",
    "tests/config/deno.json",
    "tests/util/std/http/file_server_test.ts",
  ],
  None,

@@ -320,6 +328,8 @@ fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
    deno_exe.to_str().unwrap(),
    "bundle",
    "--unstable",
    "--config",
    "tests/config/deno.json",
    url,
    &path,
  ],
cli/build.rs (11 changed lines)
@@ -235,6 +235,7 @@ mod ts {
    "es2023",
    "es2023.array",
    "es2023.collection",
    "es2023.intl",
    "esnext",
    "esnext.array",
    "esnext.collection",

@@ -243,6 +244,8 @@ mod ts {
    "esnext.intl",
    "esnext.object",
    "esnext.promise",
    "esnext.regexp",
    "esnext.string",
  ];

  let path_dts = cwd.join("tsc/dts");

@@ -311,7 +314,7 @@ mod ts {

  pub(crate) fn version() -> String {
    let file_text = std::fs::read_to_string("tsc/00_typescript.js").unwrap();
    let version_text = " version = \"";
    let version_text = " version = \"";
    for line in file_text.lines() {
      if let Some(index) = line.find(version_text) {
        let remaining_line = &line[index + version_text.len()..];

@@ -322,7 +325,7 @@ mod ts {
    }
  }

  #[cfg(not(feature = "__runtime_js_sources"))]
  #[cfg(not(feature = "hmr"))]
  fn create_cli_snapshot(snapshot_path: PathBuf) {
    use deno_runtime::ops::bootstrap::SnapshotOptions;

@@ -453,7 +456,7 @@ fn main() {
  );

  let ts_version = ts::version();
  debug_assert_eq!(ts_version, "5.4.5"); // bump this assertion when it changes
  debug_assert_eq!(ts_version, "5.5.2"); // bump this assertion when it changes
  println!("cargo:rustc-env=TS_VERSION={}", ts_version);
  println!("cargo:rerun-if-env-changed=TS_VERSION");

@@ -466,7 +469,7 @@ fn main() {
  let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin");
  ts::create_compiler_snapshot(compiler_snapshot_path, &c);

  #[cfg(not(feature = "__runtime_js_sources"))]
  #[cfg(not(feature = "hmr"))]
  {
    let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
    create_cli_snapshot(cli_snapshot_path);
cli/cache/deno_dir.rs (vendored, 10 changed lines)
@@ -169,7 +169,7 @@ impl DenoDir {

/// To avoid the poorly managed dirs crate
#[cfg(not(windows))]
mod dirs {
pub mod dirs {
  use std::path::PathBuf;

  pub fn cache_dir() -> Option<PathBuf> {

@@ -227,7 +227,7 @@ mod dirs {
// https://github.com/dirs-dev/dirs-sys-rs/blob/ec7cee0b3e8685573d847f0a0f60aae3d9e07fa2/src/lib.rs#L140-L164
// MIT license. Copyright (c) 2018-2019 dirs-rs contributors
#[cfg(windows)]
mod dirs {
pub mod dirs {
  use std::ffi::OsString;
  use std::os::windows::ffi::OsStringExt;
  use std::path::PathBuf;

@@ -266,6 +266,12 @@ mod dirs {
  }

  pub fn home_dir() -> Option<PathBuf> {
    if let Some(userprofile) = std::env::var_os("USERPROFILE") {
      if !userprofile.is_empty() {
        return Some(PathBuf::from(userprofile));
      }
    }

    known_folder(&knownfolders::FOLDERID_Profile)
  }
}
cli/cache/mod.rs (vendored, 1 changed line)
@@ -43,6 +43,7 @@ pub use caches::Caches;
pub use check::TypeCheckCache;
pub use code_cache::CodeCache;
pub use common::FastInsecureHasher;
pub use deno_dir::dirs::home_dir;
pub use deno_dir::DenoDir;
pub use deno_dir::DenoDirProvider;
pub use disk_cache::DiskCache;
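A minimal sketch of what the re-export enables (the `dirs` module had to become `pub` in the previous file for this path to resolve): callers can reach the vendored helper as `crate::cache::home_dir()`, which on Windows now prefers a non-empty `USERPROFILE` before falling back to the known-folder API.

    // Hypothetical call site elsewhere in the CLI crate:
    if let Some(home) = crate::cache::home_dir() {
      log::debug!("home directory: {}", home.display());
    }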
cli/factory.rs (492 changed lines)
@ -1,11 +1,11 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::deno_json::deno_json_deps;
use crate::args::get_root_cert_store;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::Lockfile;
use crate::args::PackageJsonDepsProvider;
use crate::args::PackageJsonInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::args::TsConfigType;
use crate::cache::Caches;

@ -45,6 +45,7 @@ use crate::resolver::SloppyImportsResolver;
use crate::standalone::DenoCompileBinaryWriter;
use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector;
use crate::tools::lint::LintRuleProvider;
use crate::tools::run::hmr::HmrRunner;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path_maybe_not_exists;

@ -54,53 +55,59 @@ use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions;
use std::path::PathBuf;

use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::FeatureChecker;

use deno_lockfile::WorkspaceMemberConfig;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer;
use import_map::ImportMap;
use log::warn;
use node_resolver::analyze::NodeCodeTranslator;
use once_cell::sync::OnceCell;
use std::future::Future;
use std::sync::Arc;

pub struct CliFactoryBuilder {
watcher_communicator: Option<Arc<WatcherCommunicator>>,
struct CliRootCertStoreProvider {
cell: OnceCell<RootCertStore>,
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
}

impl CliFactoryBuilder {
pub fn new() -> Self {
impl CliRootCertStoreProvider {
pub fn new(
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
) -> Self {
Self {
watcher_communicator: None,
cell: Default::default(),
maybe_root_path,
maybe_ca_stores,
maybe_ca_data,
}
}
}

pub fn build_from_flags(self, flags: Flags) -> Result<CliFactory, AnyError> {
Ok(self.build_from_cli_options(Arc::new(CliOptions::from_flags(flags)?)))
}

pub fn build_from_flags_for_watcher(
mut self,
flags: Flags,
watcher_communicator: Arc<WatcherCommunicator>,
) -> Result<CliFactory, AnyError> {
self.watcher_communicator = Some(watcher_communicator);
self.build_from_flags(flags)
}

pub fn build_from_cli_options(self, options: Arc<CliOptions>) -> CliFactory {
CliFactory {
watcher_communicator: self.watcher_communicator,
options,
services: Default::default(),
}
impl RootCertStoreProvider for CliRootCertStoreProvider {
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
self
.cell
.get_or_try_init(|| {
get_root_cert_store(
self.maybe_root_path.clone(),
self.maybe_ca_stores.clone(),
self.maybe_ca_data.clone(),
)
})
.map_err(|e| e.into())
}
}

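The new `CliRootCertStoreProvider` above builds the root certificate store at most once and hands every caller a shared reference, propagating any build error. A minimal freestanding sketch of that pattern with `once_cell` (the store type is a stand-in; assumes the `once_cell` crate):

  use once_cell::sync::OnceCell;

  struct RootStore; // stand-in for deno_runtime's RootCertStore

  struct Provider {
      cell: OnceCell<RootStore>,
  }

  impl Provider {
      fn get_or_try_init(&self) -> Result<&RootStore, String> {
          // the closure runs only for the first caller; later calls reuse the value
          self.cell.get_or_try_init(|| Ok(RootStore))
      }
  }

  fn main() {
      let p = Provider { cell: OnceCell::new() };
      assert!(p.get_or_try_init().is_ok());
  }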
@ -113,6 +120,10 @@ impl<T> Default for Deferred<T> {
}

impl<T> Deferred<T> {
pub fn from_value(value: T) -> Self {
Self(once_cell::unsync::OnceCell::from(value))
}

#[inline(always)]
pub fn get_or_try_init(
&self,
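`Deferred::from_value` above lets a service cell start out already populated, which is what `from_cli_options` later uses to skip flag parsing. A compact sketch of the same cell, assuming only the `once_cell` crate:

  struct Deferred<T>(once_cell::unsync::OnceCell<T>);

  impl<T> Deferred<T> {
      fn from_value(value: T) -> Self {
          // pre-seeded: later get_or_init calls never run their closure
          Self(once_cell::unsync::OnceCell::from(value))
      }

      fn get_or_init(&self, init: impl FnOnce() -> T) -> &T {
          self.0.get_or_init(init)
      }
  }

  fn main() {
      let cell = Deferred::from_value(42);
      assert_eq!(*cell.get_or_init(|| unreachable!()), 42);
  }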
@ -146,6 +157,7 @@ impl<T> Deferred<T> {

#[derive(Default)]
struct CliFactoryServices {
cli_options: Deferred<Arc<CliOptions>>,
deno_dir_provider: Deferred<Arc<DenoDirProvider>>,
caches: Deferred<Arc<Caches>>,
file_fetcher: Deferred<Arc<FileFetcher>>,

@ -156,8 +168,6 @@ struct CliFactoryServices {
emitter: Deferred<Arc<Emitter>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
lockfile: Deferred<Option<Arc<Mutex<Lockfile>>>>,
maybe_import_map: Deferred<Option<Arc<ImportMap>>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,
@ -171,51 +181,78 @@ struct CliFactoryServices {
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
package_json_deps_provider: Deferred<Arc<PackageJsonDepsProvider>>,
sloppy_imports_resolver: Deferred<Option<Arc<SloppyImportsResolver>>>,
text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
code_cache: Deferred<Arc<CodeCache>>,
workspace_resolver: Deferred<Arc<WorkspaceResolver>>,
}

pub struct CliFactory {
watcher_communicator: Option<Arc<WatcherCommunicator>>,
options: Arc<CliOptions>,
flags: Arc<Flags>,
services: CliFactoryServices,
}

impl CliFactory {
pub fn from_flags(flags: Flags) -> Result<Self, AnyError> {
CliFactoryBuilder::new().build_from_flags(flags)
pub fn from_flags(flags: Arc<Flags>) -> Self {
Self {
flags,
watcher_communicator: None,
services: Default::default(),
}
}

pub fn from_cli_options(options: Arc<CliOptions>) -> Self {
CliFactoryBuilder::new().build_from_cli_options(options)
pub fn from_cli_options(cli_options: Arc<CliOptions>) -> Self {
let (cli_options, flags) = cli_options.into_self_and_flags();
CliFactory {
watcher_communicator: None,
flags,
services: CliFactoryServices {
cli_options: Deferred::from_value(cli_options),
..Default::default()
},
}
}

pub fn cli_options(&self) -> &Arc<CliOptions> {
&self.options
pub fn from_flags_for_watcher(
flags: Arc<Flags>,
watcher_communicator: Arc<WatcherCommunicator>,
) -> Self {
CliFactory {
watcher_communicator: Some(watcher_communicator),
flags,
services: Default::default(),
}
}

pub fn deno_dir_provider(&self) -> &Arc<DenoDirProvider> {
self.services.deno_dir_provider.get_or_init(|| {
Arc::new(DenoDirProvider::new(
self.options.maybe_custom_root().clone(),
))
pub fn cli_options(&self) -> Result<&Arc<CliOptions>, AnyError> {
self.services.cli_options.get_or_try_init(|| {
CliOptions::from_flags(self.flags.clone()).map(Arc::new)
})
}

pub fn deno_dir_provider(&self) -> Result<&Arc<DenoDirProvider>, AnyError> {
self.services.deno_dir_provider.get_or_try_init(|| {
Ok(Arc::new(DenoDirProvider::new(
self.cli_options()?.maybe_custom_root().clone(),
)))
})
}

pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> {
Ok(self.deno_dir_provider().get_or_create()?)
Ok(self.deno_dir_provider()?.get_or_create()?)
}

pub fn caches(&self) -> Result<&Arc<Caches>, AnyError> {
self.services.caches.get_or_try_init(|| {
let caches = Arc::new(Caches::new(self.deno_dir_provider().clone()));
let cli_options = self.cli_options()?;
let caches = Arc::new(Caches::new(self.deno_dir_provider()?.clone()));
// Warm up the caches we know we'll likely need based on the CLI mode
match self.options.sub_command() {
match cli_options.sub_command() {
DenoSubcommand::Run(_)
| DenoSubcommand::Serve(_)
| DenoSubcommand::Bench(_)
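The hunk above replaces `CliFactoryBuilder` with direct constructors: `CliFactory::from_flags` is now infallible, holding `Arc<Flags>` and deferring `CliOptions` parsing until first use. A generic freestanding sketch of that shape (names mirror the diff; the field types are stand-ins):

  use std::sync::Arc;

  #[derive(Default)]
  struct Flags; // CLI flags live here in the real crate

  #[derive(Default)]
  struct Services; // stand-in for CliFactoryServices

  struct Factory {
      flags: Arc<Flags>,
      services: Services,
  }

  impl Factory {
      fn from_flags(flags: Arc<Flags>) -> Self {
          // no parsing happens here, so construction cannot fail
          Self { flags, services: Services::default() }
      }
  }

  fn main() {
      let factory = Factory::from_flags(Arc::new(Flags));
      let _ = (&factory.flags, &factory.services);
  }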
@ -223,11 +260,11 @@ impl CliFactory {
| DenoSubcommand::Check(_) => {
_ = caches.dep_analysis_db();
_ = caches.node_analysis_db();
if self.options.type_check_mode().is_true() {
if cli_options.type_check_mode().is_true() {
_ = caches.fast_check_db();
_ = caches.type_checking_cache_db();
}
if self.options.code_cache_enabled() {
if cli_options.code_cache_enabled() {
_ = caches.code_cache_db();
}
}

@ -242,10 +279,13 @@ impl CliFactory {
}

pub fn root_cert_store_provider(&self) -> &Arc<dyn RootCertStoreProvider> {
self
.services
.root_cert_store_provider
.get_or_init(|| self.options.resolve_root_cert_store_provider())
self.services.root_cert_store_provider.get_or_init(|| {
Arc::new(CliRootCertStoreProvider::new(
None,
self.flags.ca_stores.clone(),
self.flags.ca_data.clone(),
))
})
}

pub fn text_only_progress_bar(&self) -> &ProgressBar {
@ -267,7 +307,7 @@ impl CliFactory {
pub fn http_cache(&self) -> Result<&Arc<dyn HttpCache>, AnyError> {
self.services.http_cache.get_or_try_init(|| {
let global_cache = self.global_http_cache()?.clone();
match self.options.vendor_dir_path() {
match self.cli_options()?.vendor_dir_path() {
Some(local_path) => {
let local_cache =
LocalHttpCache::new(local_path.clone(), global_cache);

@ -282,17 +322,18 @@ impl CliFactory {
self.services.http_client_provider.get_or_init(|| {
Arc::new(HttpClientProvider::new(
Some(self.root_cert_store_provider().clone()),
self.options.unsafely_ignore_certificate_errors().clone(),
self.flags.unsafely_ignore_certificate_errors.clone(),
))
})
}

pub fn file_fetcher(&self) -> Result<&Arc<FileFetcher>, AnyError> {
self.services.file_fetcher.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(FileFetcher::new(
self.http_cache()?.clone(),
self.options.cache_setting(),
!self.options.no_remote(),
cli_options.cache_setting(),
!cli_options.no_remote(),
self.http_client_provider().clone(),
self.blob_store().clone(),
Some(self.text_only_progress_bar().clone()),
@ -304,100 +345,6 @@ impl CliFactory {
self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs))
}

pub fn maybe_lockfile(&self) -> &Option<Arc<Mutex<Lockfile>>> {
fn check_no_npm(lockfile: &Mutex<Lockfile>, options: &CliOptions) -> bool {
if options.no_npm() {
return true;
}
// Deno doesn't yet understand npm workspaces and the package.json resolution
// may be in a different folder than the deno.json/lockfile. So for now, ignore
// any package.jsons that are in different folders
options
.maybe_package_json()
.as_ref()
.map(|package_json| {
package_json.path.parent() != lockfile.lock().filename.parent()
})
.unwrap_or(false)
}

self.services.lockfile.get_or_init(|| {
let maybe_lockfile = self.options.maybe_lockfile();

// initialize the lockfile with the workspace's configuration
if let Some(lockfile) = &maybe_lockfile {
let no_npm = check_no_npm(lockfile, &self.options);
let package_json_deps = (!no_npm)
.then(|| {
self
.package_json_deps_provider()
.reqs()
.map(|reqs| {
reqs.into_iter().map(|s| format!("npm:{}", s)).collect()
})
.unwrap_or_default()
})
.unwrap_or_default();
let mut lockfile = lockfile.lock();
let config = match self.options.maybe_workspace_config() {
Some(workspace_config) => deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps,
dependencies: deno_json_deps(
self.options.maybe_config_file().as_ref().unwrap(),
)
.into_iter()
.map(|req| req.to_string())
.collect(),
},
members: workspace_config
.members
.iter()
.map(|member| {
(
member.package_name.clone(),
WorkspaceMemberConfig {
package_json_deps: Default::default(),
dependencies: deno_json_deps(&member.config_file)
.into_iter()
.map(|req| req.to_string())
.collect(),
},
)
})
.collect(),
},
None => deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps,
dependencies: self
.options
.maybe_config_file()
.as_ref()
.map(|config| {
deno_json_deps(config)
.into_iter()
.map(|req| req.to_string())
.collect()
})
.unwrap_or_default(),
},
members: Default::default(),
},
};
lockfile.set_workspace_config(
deno_lockfile::SetWorkspaceConfigOptions {
no_npm,
no_config: self.options.no_config(),
config,
},
);
}

maybe_lockfile
})
}

pub async fn npm_resolver(
&self,
) -> Result<&Arc<dyn CliNpmResolver>, AnyError> {
@ -406,25 +353,26 @@ impl CliFactory {
.npm_resolver
.get_or_try_init_async(async {
let fs = self.fs();
let cli_options = self.cli_options()?;
// For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory.
create_cli_npm_resolver(if self.options.use_byonm() && !matches!(self.options.sub_command(), DenoSubcommand::Install(_)) {
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_)) {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: fs.clone(),
root_node_modules_dir: match self.options.node_modules_dir_path() {
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() {
Some(node_modules_path) => node_modules_path.to_path_buf(),
// path needs to be canonicalized for node resolution
// (node_modules_dir_path above is already canonicalized)
None => canonicalize_path_maybe_not_exists(self.options.initial_cwd())?
None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())?
.join("node_modules"),
},
}),
})
} else {
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions {
snapshot: match self.options.resolve_npm_resolution_snapshot()? {
snapshot: match cli_options.resolve_npm_resolution_snapshot()? {
Some(snapshot) => {
CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot))
}
None => match self.maybe_lockfile().as_ref() {
None => match cli_options.maybe_lockfile() {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
@ -433,46 +381,74 @@ impl CliFactory {
None => CliNpmResolverManagedSnapshotOption::Specified(None),
},
},
maybe_lockfile: self.maybe_lockfile().as_ref().cloned(),
maybe_lockfile: cli_options.maybe_lockfile().cloned(),
fs: fs.clone(),
http_client_provider: self.http_client_provider().clone(),
npm_global_cache_dir: self.deno_dir()?.npm_folder_path(),
cache_setting: self.options.cache_setting(),
cache_setting: cli_options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: self.options.node_modules_dir_path().cloned(),
package_json_deps_provider:
self.package_json_deps_provider().clone(),
npm_system_info: self.options.npm_system_info(),
npmrc: self.options.npmrc().clone()
maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(),
package_json_deps_provider: Arc::new(PackageJsonInstallDepsProvider::from_workspace(
cli_options.workspace(),
)),
npm_system_info: cli_options.npm_system_info(),
npmrc: cli_options.npmrc().clone(),
lifecycle_scripts: cli_options.lifecycle_scripts_config(),
})
}).await
}.boxed_local())
.await
}

pub fn package_json_deps_provider(&self) -> &Arc<PackageJsonDepsProvider> {
self.services.package_json_deps_provider.get_or_init(|| {
Arc::new(PackageJsonDepsProvider::new(
self.options.maybe_package_json_deps(),
))
})
}

pub async fn maybe_import_map(
pub fn sloppy_imports_resolver(
&self,
) -> Result<&Option<Arc<ImportMap>>, AnyError> {
) -> Result<Option<&Arc<SloppyImportsResolver>>, AnyError> {
self
.services
.maybe_import_map
.get_or_try_init_async(async {
.sloppy_imports_resolver
.get_or_try_init(|| {
Ok(
self
.options
.resolve_import_map(self.file_fetcher()?)
.await?
.map(Arc::new),
.cli_options()?
.unstable_sloppy_imports()
.then(|| Arc::new(SloppyImportsResolver::new(self.fs().clone()))),
)
})
.map(|maybe| maybe.as_ref())
}

pub async fn workspace_resolver(
&self,
) -> Result<&Arc<WorkspaceResolver>, AnyError> {
self
.services
.workspace_resolver
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
let resolver = cli_options
.create_workspace_resolver(
self.file_fetcher()?,
if cli_options.use_byonm() {
PackageJsonDepResolution::Disabled
} else {
// todo(dsherret): this should be false for nodeModulesDir: true
PackageJsonDepResolution::Enabled
},
)
.await?;
if !resolver.diagnostics().is_empty() {
warn!(
"Import map diagnostics:\n{}",
resolver
.diagnostics()
.iter()
.map(|d| format!(" - {d}"))
.collect::<Vec<_>>()
.join("\n")
);
}
Ok(Arc::new(resolver))
})
.await
}

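Several factory methods above memoize async construction through `get_or_try_init_async`. A freestanding sketch of that idea, using tokio's `OnceCell` as a stand-in for the crate's `Deferred` helper (assumes the `tokio` crate with the `sync` and `macros`/`rt` features):

  use tokio::sync::OnceCell;

  async fn build_resolver() -> Result<String, std::io::Error> {
      // imagine workspace discovery and import-map loading here
      Ok("workspace resolver".to_string())
  }

  #[tokio::main]
  async fn main() -> Result<(), std::io::Error> {
      let cell: OnceCell<String> = OnceCell::new();
      // the first caller runs build_resolver; later callers get the cached value
      let v1 = cell.get_or_try_init(build_resolver).await?;
      let v2 = cell.get_or_try_init(build_resolver).await?;
      assert!(std::ptr::eq(v1, v2));
      Ok(())
  }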
@ -482,29 +458,22 @@ impl CliFactory {
.resolver
.get_or_try_init_async(
async {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
sloppy_imports_resolver: if self.options.unstable_sloppy_imports() {
Some(SloppyImportsResolver::new(self.fs().clone()))
} else {
None
},
sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
node_resolver: Some(self.cli_node_resolver().await?.clone()),
npm_resolver: if self.options.no_npm() {
npm_resolver: if cli_options.no_npm() {
None
} else {
Some(self.npm_resolver().await?.clone())
},
package_json_deps_provider: self
.package_json_deps_provider()
.clone(),
maybe_jsx_import_source_config: self
.options
.to_maybe_jsx_import_source_config()?,
maybe_import_map: self.maybe_import_map().await?.clone(),
maybe_vendor_dir: self.options.vendor_dir_path(),
bare_node_builtins_enabled: self
.options
workspace_resolver: self.workspace_resolver().await?.clone(),
bare_node_builtins_enabled: cli_options
.unstable_bare_node_builtins(),
maybe_jsx_import_source_config: cli_options
.workspace()
.to_maybe_jsx_import_source_config()?,
maybe_vendor_dir: cli_options.vendor_dir_path(),
})))
}
.boxed_local(),
@ -552,9 +521,9 @@ impl CliFactory {

pub fn emitter(&self) -> Result<&Arc<Emitter>, AnyError> {
self.services.emitter.get_or_try_init(|| {
let ts_config_result = self
.options
.resolve_ts_config_for_emit(TsConfigType::Emit)?;
let cli_options = self.cli_options()?;
let ts_config_result =
cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
warn!("{}", ignored_options);
}

@ -571,6 +540,13 @@ impl CliFactory {
})
}

pub async fn lint_rule_provider(&self) -> Result<LintRuleProvider, AnyError> {
Ok(LintRuleProvider::new(
self.sloppy_imports_resolver()?.cloned(),
Some(self.workspace_resolver().await?.clone()),
))
}

pub async fn node_resolver(&self) -> Result<&Arc<NodeResolver>, AnyError> {
self
.services
@ -578,7 +554,7 @@ impl CliFactory {
.get_or_try_init_async(
async {
Ok(Arc::new(NodeResolver::new(
self.fs().clone(),
DenoFsNodeResolverEnv::new(self.fs().clone()),
self.npm_resolver().await?.clone().into_npm_resolver(),
)))
}

@ -602,7 +578,7 @@ impl CliFactory {

Ok(Arc::new(NodeCodeTranslator::new(
cjs_esm_analyzer,
self.fs().clone(),
DenoFsNodeResolverEnv::new(self.fs().clone()),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone().into_npm_resolver(),
)))

@ -615,9 +591,10 @@ impl CliFactory {
.services
.type_checker
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(TypeChecker::new(
self.caches()?.clone(),
self.options.clone(),
cli_options.clone(),
self.module_graph_builder().await?.clone(),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),

@ -633,15 +610,16 @@ impl CliFactory {
.services
.module_graph_builder
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphBuilder::new(
self.options.clone(),
cli_options.clone(),
self.caches()?.clone(),
self.fs().clone(),
self.resolver().await?.clone(),
self.npm_resolver().await?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
self.maybe_lockfile().clone(),
cli_options.maybe_lockfile().cloned(),
self.maybe_file_watcher_reporter().clone(),
self.emit_cache()?.clone(),
self.file_fetcher()?.clone(),

@ -658,8 +636,9 @@ impl CliFactory {
.services
.module_graph_creator
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphCreator::new(
self.options.clone(),
cli_options.clone(),
self.npm_resolver().await?.clone(),
self.module_graph_builder().await?.clone(),
self.type_checker().await?.clone(),

@ -676,7 +655,7 @@ impl CliFactory {
.main_graph_container
.get_or_try_init_async(async {
Ok(Arc::new(MainModuleGraphContainer::new(
self.cli_options().clone(),
self.cli_options()?.clone(),
self.module_load_preparer().await?.clone(),
)))
})

@ -687,7 +666,8 @@ impl CliFactory {
&self,
) -> Result<&Option<Arc<InspectorServer>>, AnyError> {
self.services.maybe_inspector_server.get_or_try_init(|| {
match self.options.resolve_inspector_server() {
let cli_options = self.cli_options()?;
match cli_options.resolve_inspector_server() {
Ok(server) => Ok(server.map(Arc::new)),
Err(err) => Err(err),
}

@ -701,9 +681,10 @@ impl CliFactory {
.services
.module_load_preparer
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleLoadPreparer::new(
self.options.clone(),
self.maybe_lockfile().clone(),
cli_options.clone(),
cli_options.maybe_lockfile().cloned(),
self.module_graph_builder().await?.clone(),
self.text_only_progress_bar().clone(),
self.type_checker().await?.clone(),
@ -724,7 +705,7 @@ impl CliFactory {
.cli_node_resolver
.get_or_try_init_async(async {
Ok(Arc::new(CliNodeResolver::new(
Some(self.cjs_resolutions().clone()),
self.cjs_resolutions().clone(),
self.fs().clone(),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),

@ -733,61 +714,64 @@ impl CliFactory {
.await
}

pub fn feature_checker(&self) -> &Arc<FeatureChecker> {
self.services.feature_checker.get_or_init(|| {
pub fn feature_checker(&self) -> Result<&Arc<FeatureChecker>, AnyError> {
self.services.feature_checker.get_or_try_init(|| {
let cli_options = self.cli_options()?;
let mut checker = FeatureChecker::default();
checker.set_exit_cb(Box::new(crate::unstable_exit_cb));
checker.set_warn_cb(Box::new(crate::unstable_warn_cb));
if self.options.legacy_unstable_flag() {
if cli_options.legacy_unstable_flag() {
checker.enable_legacy_unstable();
checker.warn_on_legacy_unstable();
}
let unstable_features = self.options.unstable_features();
let unstable_features = cli_options.unstable_features();
for (flag_name, _, _) in crate::UNSTABLE_GRANULAR_FLAGS {
if unstable_features.contains(&flag_name.to_string()) {
checker.enable_feature(flag_name);
}
}

Arc::new(checker)
Ok(Arc::new(checker))
})
}

pub async fn create_compile_binary_writer(
&self,
) -> Result<DenoCompileBinaryWriter, AnyError> {
let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new(
self.deno_dir()?,
self.file_fetcher()?,
self.http_client_provider(),
self.npm_resolver().await?.as_ref(),
self.options.npm_system_info(),
self.package_json_deps_provider(),
self.workspace_resolver().await?.as_ref(),
cli_options.npm_system_info(),
))
}

pub async fn create_cli_main_worker_factory(
&self,
) -> Result<CliMainWorkerFactory, AnyError> {
let cli_options = self.cli_options()?;
let node_resolver = self.node_resolver().await?;
let npm_resolver = self.npm_resolver().await?;
let fs = self.fs();
let cli_node_resolver = self.cli_node_resolver().await?;
let maybe_file_watcher_communicator = if self.options.has_hmr() {
let maybe_file_watcher_communicator = if cli_options.has_hmr() {
Some(self.watcher_communicator.clone().unwrap())
} else {
None
};

Ok(CliMainWorkerFactory::new(
StorageKeyResolver::from_options(&self.options),
self.options.sub_command().clone(),
StorageKeyResolver::from_options(cli_options),
cli_options.sub_command().clone(),
npm_resolver.clone(),
node_resolver.clone(),
self.blob_store().clone(),
Box::new(CliModuleLoaderFactory::new(
&self.options,
if self.options.code_cache_enabled() {
cli_options,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
@ -809,18 +793,18 @@ impl CliFactory {
self.fs().clone(),
maybe_file_watcher_communicator,
self.maybe_inspector_server()?.clone(),
self.maybe_lockfile().clone(),
self.feature_checker().clone(),
cli_options.maybe_lockfile().cloned(),
self.feature_checker()?.clone(),
self.create_cli_main_worker_options()?,
self.options.node_ipc_fd(),
self.options.serve_port(),
self.options.serve_host(),
self.options.enable_future_features(),
cli_options.node_ipc_fd(),
cli_options.serve_port(),
cli_options.serve_host(),
cli_options.enable_future_features(),
// TODO(bartlomieju): temporarily disabled
// self.options.disable_deprecated_api_warning,
// cli_options.disable_deprecated_api_warning,
true,
self.options.verbose_deprecated_api_warning,
if self.options.code_cache_enabled() {
cli_options.verbose_deprecated_api_warning,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None

@ -831,7 +815,8 @@ impl CliFactory {
fn create_cli_main_worker_options(
&self,
) -> Result<CliMainWorkerOptions, AnyError> {
let create_hmr_runner = if self.options.has_hmr() {
let cli_options = self.cli_options()?;
let create_hmr_runner = if cli_options.has_hmr() {
let watcher_communicator = self.watcher_communicator.clone().unwrap();
let emitter = self.emitter()?.clone();
let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| {

@ -846,7 +831,7 @@ impl CliFactory {
None
};
let create_coverage_collector =
if let Some(coverage_dir) = self.options.coverage_dir() {
if let Some(coverage_dir) = cli_options.coverage_dir() {
let coverage_dir = PathBuf::from(coverage_dir);
let fn_: crate::worker::CreateCoverageCollectorCb =
Box::new(move |session| {

@ -858,37 +843,34 @@ impl CliFactory {
};

Ok(CliMainWorkerOptions {
argv: self.options.argv().clone(),
argv: cli_options.argv().clone(),
// This optimization is only available for "run" subcommand
// because we need to register new ops for testing and jupyter
// integration.
skip_op_registration: self.options.sub_command().is_run(),
log_level: self.options.log_level().unwrap_or(log::Level::Info).into(),
enable_op_summary_metrics: self.options.enable_op_summary_metrics(),
enable_testing_features: self.options.enable_testing_features(),
has_node_modules_dir: self.options.has_node_modules_dir(),
hmr: self.options.has_hmr(),
inspect_brk: self.options.inspect_brk().is_some(),
inspect_wait: self.options.inspect_wait().is_some(),
strace_ops: self.options.strace_ops().clone(),
is_inspecting: self.options.is_inspecting(),
is_npm_main: self.options.is_npm_main(),
location: self.options.location_flag().clone(),
skip_op_registration: cli_options.sub_command().is_run(),
log_level: cli_options.log_level().unwrap_or(log::Level::Info).into(),
enable_op_summary_metrics: cli_options.enable_op_summary_metrics(),
enable_testing_features: cli_options.enable_testing_features(),
has_node_modules_dir: cli_options.has_node_modules_dir(),
hmr: cli_options.has_hmr(),
inspect_brk: cli_options.inspect_brk().is_some(),
inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(),
is_inspecting: cli_options.is_inspecting(),
is_npm_main: cli_options.is_npm_main(),
location: cli_options.location_flag().clone(),
// if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno
argv0: self
.options
argv0: cli_options
.take_binary_npm_command_name()
.or(std::env::args().next()),
node_debug: std::env::var("NODE_DEBUG").ok(),
origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()),
seed: self.options.seed(),
unsafely_ignore_certificate_errors: self
.options
seed: cli_options.seed(),
unsafely_ignore_certificate_errors: cli_options
.unsafely_ignore_certificate_errors()
.clone(),
unstable: self.options.legacy_unstable_flag(),
maybe_root_package_json_deps: self.options.maybe_package_json_deps(),
unstable: cli_options.legacy_unstable_flag(),
create_hmr_runner,
create_coverage_collector,
})
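The recurring change throughout cli/factory.rs is that `self.options` (a plain field) becomes `self.cli_options()?` (a lazily parsed `Result`). A tiny freestanding sketch of that accessor shape using std's `OnceCell` (field names and the error type are illustrative):

  struct Options { verbose: bool }

  struct Factory {
      options_cell: std::cell::OnceCell<Options>,
  }

  impl Factory {
      fn cli_options(&self) -> Result<&Options, String> {
          // stands in for CliOptions::from_flags(self.flags.clone())
          Ok(self.options_cell.get_or_init(|| Options { verbose: false }))
      }

      fn is_verbose(&self) -> Result<bool, String> {
          // every read can now surface a flag-parse error
          Ok(self.cli_options()?.verbose)
      }
  }

  fn main() {
      let f = Factory { options_cell: std::cell::OnceCell::new() };
      assert_eq!(f.is_verbose(), Ok(false));
  }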
@ -98,7 +98,7 @@ impl MainModuleGraphContainer {
&self,
files: &[String],
) -> Result<Vec<ModuleSpecifier>, AnyError> {
let excludes = self.cli_options.resolve_config_excludes()?;
let excludes = self.cli_options.workspace().resolve_config_excludes()?;
Ok(
files
.iter()
@ -1,8 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::config_to_deno_graph_workspace_member;
use crate::args::jsr_url;
use crate::args::CliLockfile;
use crate::args::CliOptions;
use crate::args::Lockfile;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::cache;
use crate::cache::GlobalHttpCache;

@ -18,12 +19,13 @@ use crate::tools::check;
use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
use deno_config::workspace::JsrPackageConfig;
use deno_emit::LoaderChecksum;
use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError;
use deno_graph::WorkspaceFastCheckOption;
use deno_runtime::fs_util::specifier_to_file_path;

use deno_config::WorkspaceMemberConfig;
use deno_core::anyhow::bail;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
@ -240,12 +242,12 @@ impl ModuleGraphCreator {

pub async fn create_and_validate_publish_graph(
&self,
packages: &[WorkspaceMemberConfig],
package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> {
let mut roots = Vec::new();
for package in packages {
roots.extend(package.config_file.resolve_export_value_urls()?);
for package_config in package_configs {
roots.extend(package_config.config_file.resolve_export_value_urls()?);
}
let mut graph = self
.create_graph_with_options(CreateGraphOptions {

@ -260,10 +262,16 @@ impl ModuleGraphCreator {
self.type_check_graph(graph.clone()).await?;
}
if build_fast_check_graph {
let fast_check_workspace_members = package_configs
.iter()
.map(|p| config_to_deno_graph_workspace_member(&p.config_file))
.collect::<Result<Vec<_>, _>>()?;
self.module_graph_builder.build_fast_check_graph(
&mut graph,
BuildFastCheckGraphOptions {
workspace_fast_check: true,
workspace_fast_check: WorkspaceFastCheckOption::Enabled(
&fast_check_workspace_members,
),
},
)?;
}
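`BuildFastCheckGraphOptions` stops carrying a bool and instead borrows the workspace members through `WorkspaceFastCheckOption` (the lifetime shows up in the next hunk). A freestanding sketch of an option enum that borrows its payload, with stand-in types; the extra member type is illustrative:

  enum FastCheck<'a> {
      Disabled,
      Enabled(&'a [String]), // deno_graph borrows the workspace members here
  }

  struct BuildOptions<'a> {
      workspace_fast_check: FastCheck<'a>,
  }

  fn wants_fast_check_cache(opts: &BuildOptions) -> bool {
      // mirrors the later `matches!(..., Disabled)` check in this file
      matches!(opts.workspace_fast_check, FastCheck::Disabled)
  }

  fn main() {
      let members = vec!["@scope/pkg".to_string()];
      let opts = BuildOptions {
          workspace_fast_check: FastCheck::Enabled(&members),
      };
      assert!(!wants_fast_check_cache(&opts));
  }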
@ -340,10 +348,10 @@ impl ModuleGraphCreator {
}
}

pub struct BuildFastCheckGraphOptions {
pub struct BuildFastCheckGraphOptions<'a> {
/// Whether to do fast check on workspace members. This
/// is mostly only useful when publishing.
pub workspace_fast_check: bool,
pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>,
}

pub struct ModuleGraphBuilder {

@ -354,7 +362,7 @@ pub struct ModuleGraphBuilder {
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<Mutex<Lockfile>>>,
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache,
file_fetcher: Arc<FileFetcher>,

@ -371,7 +379,7 @@ impl ModuleGraphBuilder {
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<Mutex<Lockfile>>>,
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache,
file_fetcher: Arc<FileFetcher>,

@ -412,7 +420,7 @@ impl ModuleGraphBuilder {
}
}

struct LockfileLocker<'a>(&'a Mutex<Lockfile>);
struct LockfileLocker<'a>(&'a CliLockfile);

impl<'a> deno_graph::source::Locker for LockfileLocker<'a> {
fn get_remote_checksum(
@ -473,7 +481,11 @@ impl ModuleGraphBuilder {
}
}

let maybe_imports = self.options.to_maybe_imports()?;
let maybe_imports = if options.graph_kind.include_types() {
self.options.to_compiler_option_types()?
} else {
Vec::new()
};
let analyzer = self
.module_info_cache
.as_module_analyzer(&self.parsed_source_cache);

@ -622,7 +634,10 @@ impl ModuleGraphBuilder {
}

log::debug!("Building fast check graph");
let fast_check_cache = if !options.workspace_fast_check {
let fast_check_cache = if matches!(
options.workspace_fast_check,
deno_graph::WorkspaceFastCheckOption::Disabled
) {
Some(cache::FastCheckCache::new(self.caches.fast_check_db()))
} else {
None

@ -631,11 +646,6 @@ impl ModuleGraphBuilder {
let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
let workspace_members = if options.workspace_fast_check {
Some(self.options.resolve_deno_graph_workspace_members()?)
} else {
None
};

graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions {

@ -645,11 +655,7 @@ impl ModuleGraphBuilder {
module_parser: Some(&parser),
resolver: Some(graph_resolver),
npm_resolver: Some(&graph_npm_resolver),
workspace_fast_check: if let Some(members) = &workspace_members {
deno_graph::WorkspaceFastCheckOption::Enabled(members)
} else {
deno_graph::WorkspaceFastCheckOption::Disabled
},
workspace_fast_check: options.workspace_fast_check,
},
);
Ok(())
@ -744,8 +750,8 @@ fn enhanced_sloppy_imports_error_message(
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => {
let additional_message = SloppyImportsResolver::new(fs.clone())
.resolve(specifier, ResolutionMode::Execution)
.as_suggestion_message()?;
.resolve(specifier, ResolutionMode::Execution)?
.as_suggestion_message();
Some(format!(
"{} {} or run with --unstable-sloppy-imports",
error,

@ -843,7 +849,7 @@ fn get_resolution_error_bare_specifier(
error: &ResolutionError,
) -> Option<&str> {
if let ResolutionError::InvalidSpecifier {
error: SpecifierError::ImportPrefixMissing(specifier, _),
error: SpecifierError::ImportPrefixMissing { specifier, .. },
..
} = error
{

@ -1065,7 +1071,10 @@ mod test {
start: Position::zeroed(),
end: Position::zeroed(),
},
error: SpecifierError::ImportPrefixMissing(input.to_string(), None),
error: SpecifierError::ImportPrefixMissing {
specifier: input.to_string(),
referrer: None,
},
};
assert_eq!(get_resolution_error_bare_node_specifier(&err), output,);
}
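The hunks above track deno_graph's move of `ImportPrefixMissing` from a tuple variant to a struct variant, which keeps call sites stable when fields are added. A freestanding sketch of matching the new shape (the variant layout is taken from the diff; the second variant is illustrative):

  enum SpecifierError {
      ImportPrefixMissing { specifier: String, referrer: Option<String> },
      Other,
  }

  fn bare_specifier(err: &SpecifierError) -> Option<&str> {
      // `..` keeps the match compiling if more fields appear later
      if let SpecifierError::ImportPrefixMissing { specifier, .. } = err {
          Some(specifier)
      } else {
          None
      }
  }

  fn main() {
      let err = SpecifierError::ImportPrefixMissing {
          specifier: "express".to_string(),
          referrer: None,
      };
      assert_eq!(bare_specifier(&err), Some("express"));
  }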
240
cli/http_util.rs
@ -12,18 +12,22 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::parking_lot::Mutex;
use deno_core::serde;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fetch::reqwest::header::HeaderName;
use deno_runtime::deno_fetch::reqwest::header::HeaderValue;
use deno_runtime::deno_fetch::reqwest::header::ACCEPT;
use deno_runtime::deno_fetch::reqwest::header::AUTHORIZATION;
use deno_runtime::deno_fetch::reqwest::header::IF_NONE_MATCH;
use deno_runtime::deno_fetch::reqwest::header::LOCATION;
use deno_runtime::deno_fetch::reqwest::StatusCode;
use deno_runtime::deno_fetch::CreateHttpClientOptions;
use deno_runtime::deno_tls::RootCertStoreProvider;
use http::header::HeaderName;
use http::header::HeaderValue;
use http::header::ACCEPT;
use http::header::AUTHORIZATION;
use http::header::IF_NONE_MATCH;
use http::header::LOCATION;
use http::StatusCode;
use http_body_util::BodyExt;

use std::collections::HashMap;
use std::sync::Arc;
use std::thread::ThreadId;
@ -208,8 +212,7 @@ pub struct HttpClientProvider {
// it's not safe to share a reqwest::Client across tokio runtimes,
// so we store these Clients keyed by thread id
// https://github.com/seanmonstar/reqwest/issues/1148#issuecomment-910868788
#[allow(clippy::disallowed_types)] // reqwest::Client allowed here
clients_by_thread_id: Mutex<HashMap<ThreadId, reqwest::Client>>,
clients_by_thread_id: Mutex<HashMap<ThreadId, deno_fetch::Client>>,
}

impl std::fmt::Debug for HttpClientProvider {
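The comment above explains the design: clients must not cross tokio runtimes, so the provider keeps one per thread. A freestanding sketch of that per-thread map, with the client type stubbed:

  use std::collections::HashMap;
  use std::sync::Mutex;
  use std::thread::ThreadId;

  #[derive(Clone)]
  struct Client; // stand-in for deno_fetch::Client

  struct Provider {
      clients_by_thread_id: Mutex<HashMap<ThreadId, Client>>,
  }

  impl Provider {
      fn get_or_create(&self) -> Client {
          let id = std::thread::current().id();
          // each runtime thread lazily gets (and reuses) its own client
          self.clients_by_thread_id
              .lock()
              .unwrap()
              .entry(id)
              .or_insert_with(|| Client)
              .clone()
      }
  }

  fn main() {
      let p = Provider { clients_by_thread_id: Mutex::new(HashMap::new()) };
      let _c = p.get_or_create();
  }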
@ -270,9 +273,15 @@ pub struct BadResponseError {
#[derive(Debug, Error)]
pub enum DownloadError {
#[error(transparent)]
Reqwest(#[from] reqwest::Error),
Fetch(AnyError),
#[error(transparent)]
ToStr(#[from] reqwest::header::ToStrError),
UrlParse(#[from] deno_core::url::ParseError),
#[error(transparent)]
HttpParse(#[from] http::Error),
#[error(transparent)]
Json(#[from] serde_json::Error),
#[error(transparent)]
ToStr(#[from] http::header::ToStrError),
#[error("Redirection from '{}' did not provide location header", .request_url)]
NoRedirectHeader { request_url: Url },
#[error("Too many redirects.")]

@ -283,8 +292,7 @@ pub enum DownloadError {

#[derive(Debug)]
pub struct HttpClient {
#[allow(clippy::disallowed_types)] // reqwest::Client allowed here
client: reqwest::Client,
client: deno_fetch::Client,
// don't allow sending this across threads because then
// it might be shared accidentally across tokio runtimes
// which will cause issues
@ -295,22 +303,56 @@ pub struct HttpClient {
impl HttpClient {
// DO NOT make this public. You should always be creating one of these from
// the HttpClientProvider
#[allow(clippy::disallowed_types)] // reqwest::Client allowed here
fn new(client: reqwest::Client) -> Self {
fn new(client: deno_fetch::Client) -> Self {
Self {
client,
_unsend_marker: deno_core::unsync::UnsendMarker::default(),
}
}

// todo(dsherret): don't expose `reqwest::RequestBuilder` because it
// is `Sync` and could accidentally be shared with multiple tokio runtimes
pub fn get(&self, url: impl reqwest::IntoUrl) -> reqwest::RequestBuilder {
self.client.get(url)
pub fn get(&self, url: Url) -> Result<RequestBuilder, http::Error> {
let body = http_body_util::Empty::new()
.map_err(|never| match never {})
.boxed();
let mut req = http::Request::new(body);
*req.uri_mut() = url.as_str().parse()?;
Ok(RequestBuilder {
client: self.client.clone(),
req,
})
}

pub fn post(&self, url: impl reqwest::IntoUrl) -> reqwest::RequestBuilder {
self.client.post(url)
pub fn post(
&self,
url: Url,
body: deno_fetch::ReqBody,
) -> Result<RequestBuilder, http::Error> {
let mut req = http::Request::new(body);
*req.method_mut() = http::Method::POST;
*req.uri_mut() = url.as_str().parse()?;
Ok(RequestBuilder {
client: self.client.clone(),
req,
})
}

pub fn post_json<S>(
&self,
url: Url,
ser: &S,
) -> Result<RequestBuilder, DownloadError>
where
S: serde::Serialize,
{
let json = deno_core::serde_json::to_vec(ser)?;
let body = http_body_util::Full::new(json.into())
.map_err(|never| match never {})
.boxed();
let builder = self.post(url, body)?;
Ok(builder.header(
http::header::CONTENT_TYPE,
"application/json".parse().map_err(http::Error::from)?,
))
}

/// Asynchronously fetches the given HTTP URL one pass only.
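The hunk above replaces `reqwest::RequestBuilder` with a small in-crate builder over `http::Request`. A freestanding sketch of that builder shape (assumes the `http` crate; the body type and URL are illustrative):

  use http::header::{HeaderName, HeaderValue, ACCEPT};

  struct RequestBuilder {
      req: http::Request<Vec<u8>>, // the real code uses deno_fetch::ReqBody
  }

  impl RequestBuilder {
      fn get(url: &str) -> Result<Self, http::Error> {
          let mut req = http::Request::new(Vec::new());
          // parse failures surface as http::Error, like in the diff
          *req.uri_mut() = url.parse()?;
          Ok(Self { req })
      }

      fn header(mut self, name: HeaderName, value: HeaderValue) -> Self {
          self.req.headers_mut().append(name, value);
          self
      }
  }

  fn main() -> Result<(), http::Error> {
      let builder = RequestBuilder::get("http://localhost:4546/x.js")?
          .header(ACCEPT, HeaderValue::from_static("application/json"));
      assert_eq!(builder.req.uri(), "http://localhost:4546/x.js");
      Ok(())
  }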
@ -322,27 +364,35 @@ impl HttpClient {
&self,
args: FetchOnceArgs<'a>,
) -> Result<FetchOnceResult, AnyError> {
let mut request = self.client.get(args.url.clone());
let body = http_body_util::Empty::new()
.map_err(|never| match never {})
.boxed();
let mut request = http::Request::new(body);
*request.uri_mut() = args.url.as_str().parse()?;

if let Some(etag) = args.maybe_etag {
let if_none_match_val = HeaderValue::from_str(&etag)?;
request = request.header(IF_NONE_MATCH, if_none_match_val);
request
.headers_mut()
.insert(IF_NONE_MATCH, if_none_match_val);
}
if let Some(auth_token) = args.maybe_auth_token {
let authorization_val = HeaderValue::from_str(&auth_token.to_string())?;
request = request.header(AUTHORIZATION, authorization_val);
request
.headers_mut()
.insert(AUTHORIZATION, authorization_val);
}
if let Some(accept) = args.maybe_accept {
let accepts_val = HeaderValue::from_str(&accept)?;
request = request.header(ACCEPT, accepts_val);
request.headers_mut().insert(ACCEPT, accepts_val);
}
let response = match request.send().await {
let response = match self.client.clone().send(request).await {
Ok(resp) => resp,
Err(err) => {
if err.is_connect() || err.is_timeout() {
if is_error_connect(&err) {
return Ok(FetchOnceResult::RequestError(err.to_string()));
}
return Err(err.into());
return Err(err);
}
};

@ -406,18 +456,12 @@ impl HttpClient {
Ok(FetchOnceResult::Code(body, result_headers))
}

pub async fn download_text(
&self,
url: impl reqwest::IntoUrl,
) -> Result<String, AnyError> {
pub async fn download_text(&self, url: Url) -> Result<String, AnyError> {
let bytes = self.download(url).await?;
Ok(String::from_utf8(bytes)?)
}

pub async fn download(
&self,
url: impl reqwest::IntoUrl,
) -> Result<Vec<u8>, AnyError> {
pub async fn download(&self, url: Url) -> Result<Vec<u8>, AnyError> {
let maybe_bytes = self.download_inner(url, None, None).await?;
match maybe_bytes {
Some(bytes) => Ok(bytes),

@ -427,7 +471,7 @@ impl HttpClient {

pub async fn download_with_progress(
&self,
url: impl reqwest::IntoUrl,
url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>,
progress_guard: &UpdateGuard,
) -> Result<Option<Vec<u8>>, DownloadError> {
@ -438,26 +482,26 @@ impl HttpClient {

pub async fn get_redirected_url(
&self,
url: impl reqwest::IntoUrl,
url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>,
) -> Result<Url, AnyError> {
let response = self.get_redirected_response(url, maybe_header).await?;
Ok(response.url().clone())
let (_, url) = self.get_redirected_response(url, maybe_header).await?;
Ok(url)
}

async fn download_inner(
&self,
url: impl reqwest::IntoUrl,
url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>,
progress_guard: Option<&UpdateGuard>,
) -> Result<Option<Vec<u8>>, DownloadError> {
let response = self.get_redirected_response(url, maybe_header).await?;
let (response, _) = self.get_redirected_response(url, maybe_header).await?;

if response.status() == 404 {
return Ok(None);
} else if !response.status().is_success() {
let status = response.status();
let maybe_response_text = response.text().await.ok();
let maybe_response_text = body_to_string(response).await.ok();
return Err(DownloadError::BadResponse(BadResponseError {
status_code: status,
response_text: maybe_response_text
@ -469,60 +513,77 @@ impl HttpClient {
get_response_body_with_progress(response, progress_guard)
.await
.map(Some)
.map_err(Into::into)
.map_err(DownloadError::Fetch)
}

async fn get_redirected_response(
&self,
url: impl reqwest::IntoUrl,
mut url: Url,
mut maybe_header: Option<(HeaderName, HeaderValue)>,
) -> Result<reqwest::Response, DownloadError> {
let mut url = url.into_url()?;
let mut builder = self.get(url.clone());
) -> Result<(http::Response<deno_fetch::ResBody>, Url), DownloadError> {
let mut req = self.get(url.clone())?.build();
if let Some((header_name, header_value)) = maybe_header.as_ref() {
builder = builder.header(header_name, header_value);
req.headers_mut().append(header_name, header_value.clone());
}
let mut response = builder.send().await?;
let mut response = self
.client
.clone()
.send(req)
.await
.map_err(DownloadError::Fetch)?;
let status = response.status();
if status.is_redirection() {
for _ in 0..5 {
let new_url = resolve_redirect_from_response(&url, &response)?;
let mut builder = self.get(new_url.clone());
let mut req = self.get(new_url.clone())?.build();

if new_url.origin() == url.origin() {
if let Some((header_name, header_value)) = maybe_header.as_ref() {
builder = builder.header(header_name, header_value);
req.headers_mut().append(header_name, header_value.clone());
}
} else {
maybe_header = None;
}

let new_response = builder.send().await?;
let new_response = self
.client
.clone()
.send(req)
.await
.map_err(DownloadError::Fetch)?;
let status = new_response.status();
if status.is_redirection() {
response = new_response;
url = new_url;
} else {
return Ok(new_response);
return Ok((new_response, new_url));
}
}
Err(DownloadError::TooManyRedirects)
} else {
Ok(response)
Ok((response, url))
}
}
}

fn is_error_connect(err: &AnyError) -> bool {
err
.downcast_ref::<hyper_util::client::legacy::Error>()
.map(|err| err.is_connect())
.unwrap_or(false)
}

async fn get_response_body_with_progress(
response: reqwest::Response,
response: http::Response<deno_fetch::ResBody>,
progress_guard: Option<&UpdateGuard>,
) -> Result<Vec<u8>, reqwest::Error> {
) -> Result<Vec<u8>, AnyError> {
use http_body::Body as _;
if let Some(progress_guard) = progress_guard {
if let Some(total_size) = response.content_length() {
if let Some(total_size) = response.body().size_hint().exact() {
progress_guard.set_total_size(total_size);
let mut current_size = 0;
let mut data = Vec::with_capacity(total_size as usize);
let mut stream = response.bytes_stream();
let mut stream = response.into_body().into_data_stream();
while let Some(item) = stream.next().await {
let bytes = item?;
current_size += bytes.len() as u64;
@ -532,7 +593,7 @@ async fn get_response_body_with_progress(
return Ok(data);
}
}
let bytes = response.bytes().await?;
let bytes = response.collect().await?.to_bytes();
Ok(bytes.into())
}

@ -563,9 +624,9 @@ fn resolve_url_from_location(base_url: &Url, location: &str) -> Url {
}
}

fn resolve_redirect_from_response(
fn resolve_redirect_from_response<B>(
request_url: &Url,
response: &reqwest::Response,
response: &http::Response<B>,
) -> Result<Url, DownloadError> {
debug_assert!(response.status().is_redirection());
if let Some(location) = response.headers().get(LOCATION) {
@ -580,6 +641,49 @@ fn resolve_redirect_from_response(
}
}

pub async fn body_to_string<B>(body: B) -> Result<String, AnyError>
where
B: http_body::Body,
AnyError: From<B::Error>,
{
let bytes = body.collect().await?.to_bytes();
let s = std::str::from_utf8(&bytes)?;
Ok(s.into())
}

pub async fn body_to_json<B, D>(body: B) -> Result<D, AnyError>
where
B: http_body::Body,
AnyError: From<B::Error>,
D: serde::de::DeserializeOwned,
{
let bytes = body.collect().await?.to_bytes();
let val = deno_core::serde_json::from_slice(&bytes)?;
Ok(val)
}

pub struct RequestBuilder {
client: deno_fetch::Client,
req: http::Request<deno_fetch::ReqBody>,
}

impl RequestBuilder {
pub fn header(mut self, name: HeaderName, value: HeaderValue) -> Self {
self.req.headers_mut().append(name, value);
self
}

pub async fn send(
self,
) -> Result<http::Response<deno_fetch::ResBody>, AnyError> {
self.client.send(self.req).await
}

pub fn build(self) -> http::Request<deno_fetch::ReqBody> {
self.req
}
}

#[allow(clippy::print_stdout)]
#[allow(clippy::print_stderr)]
#[cfg(test)]
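The new `body_to_string`/`body_to_json` helpers above replace reqwest's `.text()`/`.json()` conveniences with `http_body` collection. A freestanding sketch of the same collect-then-deserialize step over an in-memory body (assumes the `http-body-util`, `bytes`, `serde_json`, and `tokio` crates):

  use http_body_util::{BodyExt, Full};

  #[tokio::main]
  async fn main() -> Result<(), Box<dyn std::error::Error>> {
      // Full stands in for a real response body
      let body = Full::new(bytes::Bytes::from_static(b"{\"name\":\"deno\"}"));
      let bytes = body.collect().await?.to_bytes();
      let val: serde_json::Value = serde_json::from_slice(&bytes)?;
      assert_eq!(val["name"], "deno");
      Ok(())
  }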
@ -600,14 +704,20 @@ mod test {

// make a request to the redirect server
let text = client
.download_text("http://localhost:4546/subdir/redirects/redirect1.js")
.download_text(
Url::parse("http://localhost:4546/subdir/redirects/redirect1.js")
.unwrap(),
)
.await
.unwrap();
assert_eq!(text, "export const redirect = 1;\n");

// now make one to the infinite redirects server
let err = client
.download_text("http://localhost:4549/subdir/redirects/redirect1.js")
.download_text(
Url::parse("http://localhost:4549/subdir/redirects/redirect1.js")
.unwrap(),
)
.await
.err()
.unwrap();
@ -2,17 +2,17 @@

use log::debug;

#[cfg(not(feature = "__runtime_js_sources"))]
#[cfg(not(feature = "hmr"))]
static CLI_SNAPSHOT: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));

pub fn deno_isolate_init() -> Option<&'static [u8]> {
debug!("Deno isolate init with snapshots.");
#[cfg(not(feature = "__runtime_js_sources"))]
#[cfg(not(feature = "hmr"))]
{
Some(CLI_SNAPSHOT)
}
#[cfg(feature = "__runtime_js_sources")]
#[cfg(feature = "hmr")]
{
None
}
@ -337,7 +337,14 @@ async function formatInner(obj, raw) {
internals.jupyter = { formatInner };

function enableJupyter() {
const { op_jupyter_broadcast } = core.ops;
const { op_jupyter_broadcast, op_jupyter_input } = core.ops;

function input(
prompt,
password,
) {
return op_jupyter_input(prompt, password);
}

async function broadcast(
msgType,
@ -412,6 +419,45 @@ function enableJupyter() {
return;
}

/**
* Prompt for user confirmation (in Jupyter Notebook context)
* Override confirm and prompt because they depend on a tty
* and in the Deno.jupyter environment that doesn't exist.
* @param {string} message - The message to display.
* @returns {Promise<boolean>} User confirmation.
*/
function confirm(message = "Confirm") {
const answer = input(`${message} [y/N] `, false);
return answer === "Y" || answer === "y";
}

/**
* Prompt for user input (in Jupyter Notebook context)
* @param {string} message - The message to display.
* @param {string} defaultValue - The value used if none is provided.
* @param {object} options Options
* @param {boolean} options.password Hide the output characters
* @returns {Promise<string>} The user input.
*/
function prompt(
  message = "Prompt",
  defaultValue = "",
  { password = false } = {},
) {
if (defaultValue != "") {
message += ` [${defaultValue}]`;
}
const answer = input(`${message}`, password);

if (answer === "") {
return defaultValue;
}

return answer;
}

globalThis.confirm = confirm;
globalThis.prompt = prompt;
globalThis.Deno.jupyter = {
broadcast,
display,
@ -8,8 +8,8 @@ use super::resolver::LspResolver;
|
|||
use super::tsc;
|
||||
|
||||
use crate::args::jsr_url;
|
||||
use crate::tools::lint::create_linter;
|
||||
use deno_lint::linter::LintConfig;
|
||||
use crate::tools::lint::CliLinter;
|
||||
use deno_lint::diagnostic::LintDiagnosticRange;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
|
||||
use deno_ast::SourceRange;
|
||||
|
@ -23,9 +23,6 @@ use deno_core::serde::Serialize;
|
|||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_lint::diagnostic::LintDiagnostic;
|
||||
use deno_lint::rules::LintRule;
|
||||
use deno_runtime::deno_node::NpmResolver;
|
||||
use deno_runtime::deno_node::PathClean;
|
||||
use deno_semver::jsr::JsrPackageNvReference;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
|
@ -36,6 +33,7 @@ use deno_semver::package::PackageReq;
|
|||
use deno_semver::package::PackageReqReference;
|
||||
use deno_semver::Version;
|
||||
use import_map::ImportMap;
|
||||
use node_resolver::NpmResolver;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use std::cmp::Ordering;
|
||||
|
@ -73,8 +71,9 @@ static PREFERRED_FIXES: Lazy<HashMap<&'static str, (u32, bool)>> =
|
|||
.collect()
|
||||
});
|
||||
|
||||
static IMPORT_SPECIFIER_RE: Lazy<Regex> =
|
||||
lazy_regex::lazy_regex!(r#"\sfrom\s+["']([^"']*)["']"#);
|
||||
static IMPORT_SPECIFIER_RE: Lazy<Regex> = lazy_regex::lazy_regex!(
|
||||
r#"\sfrom\s+["']([^"']*)["']|import\s*\(\s*["']([^"']*)["']\s*\)"#
|
||||
);
|
||||
|
||||
const SUPPORTED_EXTENSIONS: &[&str] = &[
|
||||
".ts", ".tsx", ".js", ".jsx", ".mjs", ".mts", ".cjs", ".cts", ".d.ts",
|
||||
|
@ -148,8 +147,10 @@ impl Reference {
|
|||
}
|
||||
}
|
||||
|
||||
fn as_lsp_range_from_diagnostic(diagnostic: &LintDiagnostic) -> Range {
|
||||
as_lsp_range(diagnostic.range, &diagnostic.text_info)
|
||||
fn as_lsp_range_from_lint_diagnostic(
|
||||
diagnostic_range: &LintDiagnosticRange,
|
||||
) -> Range {
|
||||
as_lsp_range(diagnostic_range.range, &diagnostic_range.text_info)
|
||||
}
|
||||
|
||||
fn as_lsp_range(
|
||||
|
@ -172,37 +173,39 @@ fn as_lsp_range(
|
|||
|
||||
pub fn get_lint_references(
|
||||
parsed_source: &deno_ast::ParsedSource,
|
||||
lint_rules: Vec<&'static dyn LintRule>,
|
||||
lint_config: LintConfig,
|
||||
linter: &CliLinter,
|
||||
) -> Result<Vec<Reference>, AnyError> {
|
||||
let linter = create_linter(lint_rules);
|
||||
let lint_diagnostics = linter.lint_with_ast(parsed_source, lint_config);
|
||||
let lint_diagnostics = linter.lint_with_ast(parsed_source);
|
||||
|
||||
Ok(
|
||||
lint_diagnostics
|
||||
.into_iter()
|
||||
.map(|d| Reference {
|
||||
range: as_lsp_range_from_diagnostic(&d),
|
||||
category: Category::Lint {
|
||||
message: d.message,
|
||||
code: d.code,
|
||||
hint: d.hint,
|
||||
quick_fixes: d
|
||||
.fixes
|
||||
.into_iter()
|
||||
.map(|f| DataQuickFix {
|
||||
description: f.description.to_string(),
|
||||
changes: f
|
||||
.changes
|
||||
.into_iter()
|
||||
.map(|change| DataQuickFixChange {
|
||||
range: as_lsp_range(change.range, &d.text_info),
|
||||
new_text: change.new_text.to_string(),
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
.filter_map(|d| {
|
||||
let range = d.range.as_ref()?;
|
||||
Some(Reference {
|
||||
range: as_lsp_range_from_lint_diagnostic(range),
|
||||
category: Category::Lint {
|
||||
message: d.details.message,
|
||||
code: d.details.code.to_string(),
|
||||
hint: d.details.hint,
|
||||
quick_fixes: d
|
||||
.details
|
||||
.fixes
|
||||
.into_iter()
|
||||
.map(|f| DataQuickFix {
|
||||
description: f.description.to_string(),
|
||||
changes: f
|
||||
.changes
|
||||
.into_iter()
|
||||
.map(|change| DataQuickFixChange {
|
||||
range: as_lsp_range(change.range, &range.text_info),
|
||||
new_text: change.new_text.to_string(),
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
|
@ -528,7 +531,8 @@ pub fn fix_ts_import_changes(
|
|||
.map(|line| {
|
||||
// This assumes that there's only one import per line.
|
||||
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) {
|
||||
let specifier = captures.get(1).unwrap().as_str();
|
||||
let specifier =
|
||||
captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
|
||||
if let Some(new_specifier) =
|
||||
import_mapper.check_unresolved_specifier(specifier, referrer)
|
||||
{
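The two changes above belong together: `IMPORT_SPECIFIER_RE` gains an alternative that matches dynamic `import("...")` calls, so the specifier can land in either capture group, and `fix_ts_import_changes` therefore scans all groups instead of hardcoding group 1. A self-contained sketch of that behavior (a plain `Regex::new` stands in for the repo's `lazy_regex!` macro):

```rust
use regex::Regex;

fn main() {
    let re = Regex::new(
        r#"\sfrom\s+["']([^"']*)["']|import\s*\(\s*["']([^"']*)["']\s*\)"#,
    )
    .unwrap();
    for line in [
        r#"import { a } from "./static.ts";"#,
        r#"const b = await import("./dynamic.ts");"#,
    ] {
        let captures = re.captures(line).unwrap();
        // A static import fills group 1 and a dynamic import fills group 2,
        // so take whichever group matched rather than assuming group 1.
        let specifier =
            captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
        assert!(specifier.ends_with(".ts"));
    }
}
```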
@@ -11,6 +11,7 @@ use deno_runtime::fs_util::specifier_to_file_path;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use std::collections::BTreeMap;
use std::fs;
use std::path::Path;
use std::sync::Arc;
@@ -29,13 +30,14 @@ pub const LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY: deno_cache_dir::GlobalToLocalCopy =
pub fn calculate_fs_version(
cache: &LspCache,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<String> {
match specifier.scheme() {
"npm" | "node" | "data" | "blob" => None,
"file" => specifier_to_file_path(specifier)
.ok()
.and_then(|path| calculate_fs_version_at_path(&path)),
_ => calculate_fs_version_in_cache(cache, specifier),
_ => calculate_fs_version_in_cache(cache, specifier, file_referrer),
}
}
@@ -56,8 +58,9 @@ pub fn calculate_fs_version_at_path(path: &Path) -> Option<String> {
fn calculate_fs_version_in_cache(
cache: &LspCache,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<String> {
let http_cache = cache.root_vendor_or_global();
let http_cache = cache.for_specifier(file_referrer);
let Ok(cache_key) = http_cache.cache_item_key(specifier) else {
return Some("1".to_string());
};
@@ -77,7 +80,7 @@ fn calculate_fs_version_in_cache(
pub struct LspCache {
deno_dir: DenoDir,
global: Arc<GlobalHttpCache>,
root_vendor: Option<Arc<LocalLspHttpCache>>,
vendors_by_scope: BTreeMap<ModuleSpecifier, Option<Arc<LocalLspHttpCache>>>,
}
impl Default for LspCache {
@@ -107,18 +110,24 @@ impl LspCache {
Self {
deno_dir,
global,
root_vendor: None,
vendors_by_scope: Default::default(),
}
}
pub fn update_config(&mut self, config: &Config) {
self.root_vendor = config.tree.root_data().and_then(|data| {
let vendor_dir = data.vendor_dir.as_ref()?;
Some(Arc::new(LocalLspHttpCache::new(
vendor_dir.clone(),
self.global.clone(),
)))
});
self.vendors_by_scope = config
.tree
.data_by_scope()
.iter()
.map(|(scope, config_data)| {
(
scope.clone(),
config_data.vendor_dir.as_ref().map(|v| {
Arc::new(LocalLspHttpCache::new(v.clone(), self.global.clone()))
}),
)
})
.collect();
}
pub fn deno_dir(&self) -> &DenoDir {
@@ -129,15 +138,59 @@ impl LspCache {
&self.global
}
pub fn root_vendor(&self) -> Option<&Arc<LocalLspHttpCache>> {
self.root_vendor.as_ref()
}
pub fn root_vendor_or_global(&self) -> Arc<dyn HttpCache> {
pub fn for_specifier(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Arc<dyn HttpCache> {
let Some(file_referrer) = file_referrer else {
return self.global.clone();
};
self
.root_vendor
.as_ref()
.map(|v| v.clone() as _)
.vendors_by_scope
.iter()
.rfind(|(s, _)| file_referrer.as_str().starts_with(s.as_str()))
.and_then(|(_, v)| v.clone().map(|v| v as _))
.unwrap_or(self.global.clone() as _)
}
pub fn vendored_specifier(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<ModuleSpecifier> {
let file_referrer = file_referrer?;
if !matches!(specifier.scheme(), "http" | "https") {
return None;
}
let vendor = self
.vendors_by_scope
.iter()
.rfind(|(s, _)| file_referrer.as_str().starts_with(s.as_str()))?
.1
.as_ref()?;
vendor.get_file_url(specifier)
}
pub fn unvendored_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Option<ModuleSpecifier> {
let path = specifier_to_file_path(specifier).ok()?;
let vendor = self
.vendors_by_scope
.iter()
.rfind(|(s, _)| specifier.as_str().starts_with(s.as_str()))?
.1
.as_ref()?;
vendor.get_remote_url(&path)
}
pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool {
if let Ok(path) = specifier_to_file_path(specifier) {
if !path.starts_with(&self.deno_dir().root) {
return true;
}
}
false
}
}
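`for_specifier`, `vendored_specifier`, and `unvendored_specifier` all lean on the same property: `vendors_by_scope` is a `BTreeMap`, so its keys iterate in sorted order and `rfind` on a prefix match yields the longest, most specific scope. A hedged, self-contained sketch with plain strings standing in for `ModuleSpecifier` and the cache types:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut vendors_by_scope: BTreeMap<String, Option<&str>> = BTreeMap::new();
    vendors_by_scope.insert("file:///work/".into(), None);
    vendors_by_scope.insert("file:///work/member/".into(), Some("member vendor"));

    let file_referrer = "file:///work/member/src/mod.ts";
    // Sorted keys mean the last prefix match is the longest one.
    let cache = vendors_by_scope
        .iter()
        .rfind(|(s, _)| file_referrer.starts_with(s.as_str()))
        .and_then(|(_, v)| *v)
        .unwrap_or("global cache");
    // The nested member scope wins over the workspace root.
    assert_eq!(cache, "member vendor");
}
```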
@@ -1,5 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::lsp::logging::lsp_warn;
use super::analysis::source_range_to_lsp_range;
use super::config::CodeLensSettings;
use super::language_server;
@@ -27,6 +29,7 @@ use std::cell::RefCell;
use std::collections::HashSet;
use std::rc::Rc;
use std::sync::Arc;
use tower_lsp::jsonrpc::Error as LspError;
use tower_lsp::lsp_types as lsp;
static ABSTRACT_MODIFIER: Lazy<Regex> = lazy_regex!(r"\babstract\b");
@@ -260,7 +263,11 @@ async fn resolve_implementation_code_lens(
data.specifier.clone(),
line_index.offset_tsc(code_lens.range.start)?,
)
.await?;
.await
.map_err(|err| {
lsp_warn!("{err}");
LspError::internal_error()
})?;
if let Some(implementations) = maybe_implementations {
let mut locations = Vec::new();
for implementation in implementations {
@@ -340,7 +347,7 @@ async fn resolve_references_code_lens(
locations.push(
reference
.entry
.to_location(asset_or_doc.line_index(), &language_server.url_map),
.to_location(asset_or_doc.line_index(), language_server),
);
}
Ok(locations)
@@ -357,7 +364,11 @@ async fn resolve_references_code_lens(
data.specifier.clone(),
line_index.offset_tsc(code_lens.range.start)?,
)
.await?;
.await
.map_err(|err| {
lsp_warn!("Unable to find references: {err}");
LspError::internal_error()
})?;
let locations = get_locations(maybe_referenced_symbols, language_server)?;
let title = if locations.len() == 1 {
"1 reference".to_string()
@@ -804,7 +804,7 @@ mod tests {
fs_sources: &[(&str, &str)],
) -> Documents {
let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri()));
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap()));
let mut documents = Documents::default();
documents.update_config(
&Default::default(),

cli/lsp/config.rs (1284 lines changed): file diff suppressed because it is too large.

@@ -5,6 +5,7 @@ use super::client::Client;
use super::config::Config;
use super::documents;
use super::documents::Document;
use super::documents::Documents;
use super::documents::DocumentsFilter;
use super::language_server;
use super::language_server::StateSnapshot;
@@ -14,15 +15,18 @@ use super::tsc::TsServer;
use super::urls::LspClientUrl;
use super::urls::LspUrlMap;
use crate::args::LintOptions;
use crate::graph_util;
use crate::graph_util::enhanced_resolution_error_message;
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
use crate::resolver::SloppyImportsResolution;
use crate::resolver::SloppyImportsResolver;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider;
use crate::util::path::to_percent_decoded_str;
use deno_ast::MediaType;
use deno_config::deno_json::LintConfig;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock;
@@ -35,11 +39,10 @@ use deno_core::unsync::spawn_blocking;
use deno_core::unsync::JoinHandle;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_lint::linter::LintConfig;
use deno_lint::rules::LintRule;
use deno_runtime::deno_fs;
use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime;
@@ -47,9 +50,11 @@ use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use import_map::ImportMap;
use import_map::ImportMapError;
use log::error;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::atomic::AtomicUsize;
use std::sync::Arc;
use std::thread;
@@ -120,6 +125,7 @@ impl DiagnosticsPublisher {
source: DiagnosticSource,
diagnostics: DiagnosticVec,
url_map: &LspUrlMap,
documents: &Documents,
token: &CancellationToken,
) -> usize {
let mut diagnostics_by_specifier =
@@ -153,11 +159,12 @@ impl DiagnosticsPublisher {
self
.state
.update(&record.specifier, version, &all_specifier_diagnostics);
let file_referrer = documents.get_file_referrer(&record.specifier);
self
.client
.publish_diagnostics(
url_map
.normalize_specifier(&record.specifier)
.normalize_specifier(&record.specifier, file_referrer.as_deref())
.unwrap_or(LspClientUrl::new(record.specifier)),
all_specifier_diagnostics,
version,
@@ -183,11 +190,12 @@ impl DiagnosticsPublisher {
if let Some(removed_value) = maybe_removed_value {
// clear out any diagnostics for this specifier
self.state.update(specifier, removed_value.version, &[]);
let file_referrer = documents.get_file_referrer(specifier);
self
.client
.publish_diagnostics(
url_map
.normalize_specifier(specifier)
.normalize_specifier(specifier, file_referrer.as_deref())
.unwrap_or_else(|_| LspClientUrl::new(specifier.clone())),
Vec::new(),
removed_value.version,
@@ -519,6 +527,7 @@ impl DiagnosticsServer {
DiagnosticSource::Ts,
diagnostics,
&url_map,
snapshot.documents.as_ref(),
&token,
)
.await;
@@ -556,6 +565,7 @@ impl DiagnosticsServer {
let mark = performance.mark("lsp.update_diagnostics_deps");
let diagnostics = spawn_blocking({
let token = token.clone();
let snapshot = snapshot.clone();
move || generate_deno_diagnostics(&snapshot, &config, token)
})
.await
@@ -568,6 +578,7 @@ impl DiagnosticsServer {
DiagnosticSource::Deno,
diagnostics,
&url_map,
snapshot.documents.as_ref(),
&token,
)
.await;
@@ -605,6 +616,7 @@ impl DiagnosticsServer {
let mark = performance.mark("lsp.update_diagnostics_lint");
let diagnostics = spawn_blocking({
let token = token.clone();
let snapshot = snapshot.clone();
move || generate_lint_diagnostics(&snapshot, &config, token)
})
.await
@@ -617,6 +629,7 @@ impl DiagnosticsServer {
DiagnosticSource::Lint,
diagnostics,
&url_map,
snapshot.documents.as_ref(),
&token,
)
.await;
@@ -805,25 +818,25 @@ fn generate_lint_diagnostics(
continue;
}
let version = document.maybe_lsp_version();
let (lint_options, lint_config, lint_rules) = config
let (lint_config, linter) = config
.tree
.scope_for_specifier(specifier)
.and_then(|s| config_data_by_scope.get(s))
.map(|d| {
(
d.lint_options.clone(),
d.lint_config.clone(),
d.lint_rules.clone(),
)
})
.map(|d| (d.lint_config.clone(), d.linter.clone()))
.unwrap_or_else(|| {
(
Arc::default(),
LintConfig {
default_jsx_factory: None,
default_jsx_fragment_factory: None,
},
Arc::default(),
Arc::new(LintConfig::new_with_base(PathBuf::from("/"))),
Arc::new(CliLinter::new(CliLinterOptions {
configured_rules: {
let lint_rule_provider = LintRuleProvider::new(None, None);
lint_rule_provider.resolve_lint_rules(Default::default(), None)
},
fix: false,
deno_lint_config: deno_lint::linter::LintConfig {
default_jsx_factory: None,
default_jsx_fragment_factory: None,
},
})),
)
});
diagnostics_vec.push(DiagnosticRecord {
@@ -832,9 +845,8 @@ fn generate_lint_diagnostics(
version,
diagnostics: generate_document_lint_diagnostics(
&document,
&lint_options,
lint_config,
lint_rules.rules.clone(),
&lint_config,
&linter,
),
},
});
@@ -844,17 +856,16 @@ fn generate_lint_diagnostics(
fn generate_document_lint_diagnostics(
document: &Document,
lint_options: &LintOptions,
lint_config: LintConfig,
lint_rules: Vec<&'static dyn LintRule>,
lint_config: &LintConfig,
linter: &CliLinter,
) -> Vec<lsp::Diagnostic> {
if !lint_options.files.matches_specifier(document.specifier()) {
if !lint_config.files.matches_specifier(document.specifier()) {
return Vec::new();
}
match document.maybe_parsed_source() {
Some(Ok(parsed_source)) => {
if let Ok(references) =
analysis::get_lint_references(parsed_source, lint_rules, lint_config)
analysis::get_lint_references(parsed_source, linter)
{
references
.into_iter()
@@ -1024,7 +1035,7 @@ impl DenoDiagnostic {
"invalid-local-import"
}
ResolutionError::InvalidSpecifier { error, .. } => match error {
SpecifierError::ImportPrefixMissing(_, _) => {
SpecifierError::ImportPrefixMissing { .. } => {
"import-prefix-missing"
}
SpecifierError::InvalidUrl(_) => "invalid-url",
@@ -1223,16 +1234,14 @@ impl DenoDiagnostic {
pub fn to_lsp_diagnostic(&self, range: &lsp::Range) -> lsp::Diagnostic {
fn no_local_message(
specifier: &ModuleSpecifier,
sloppy_resolution: SloppyImportsResolution,
maybe_sloppy_resolution: Option<&SloppyImportsResolution>,
) -> String {
let mut message = format!(
"Unable to load a local module: {}\n",
to_percent_decoded_str(specifier.as_ref())
);
if let Some(additional_message) =
sloppy_resolution.as_suggestion_message()
{
message.push_str(&additional_message);
if let Some(res) = maybe_sloppy_resolution {
message.push_str(&res.as_suggestion_message());
message.push('.');
} else {
message.push_str("Please check the file path.");
@@ -1249,23 +1258,36 @@ impl DenoDiagnostic {
Self::NoCacheJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing jsr package: {}", pkg_req), Some(json!({ "specifier": specifier }))),
Self::NoCacheNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: {}", pkg_req), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => {
let sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution);
let data = sloppy_resolution.as_lsp_quick_fix_message().map(|message| {
let maybe_sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution);
let data = maybe_sloppy_resolution.as_ref().map(|res| {
json!({
"specifier": specifier,
"to": sloppy_resolution.as_specifier(),
"message": message,
"to": res.as_specifier(),
"message": res.as_quick_fix_message(),
})
});
(lsp::DiagnosticSeverity::ERROR, no_local_message(specifier, sloppy_resolution), data)
(lsp::DiagnosticSeverity::ERROR, no_local_message(specifier, maybe_sloppy_resolution.as_ref()), data)
},
Self::Redirect { from, to } => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{from}\" was redirected to \"{to}\"."), Some(json!({ "specifier": from, "redirect": to }))),
Self::ResolutionError(err) => (
Self::ResolutionError(err) => {
let mut message;
message = enhanced_resolution_error_message(err);
if let deno_graph::ResolutionError::ResolverError { error, .. } = err {
if let ResolveError::Other(resolve_error, ..) = (*error).as_ref() {
if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) = resolve_error.downcast_ref::<ImportMapError>() {
if specifier.chars().next().unwrap_or('\0') == '@' {
let hint = format!("\nHint: Use [deno add {}] to add the dependency.", specifier);
message.push_str(hint.as_str());
}
}
}
}
(
lsp::DiagnosticSeverity::ERROR,
enhanced_resolution_error_message(err),
message,
graph_util::get_resolution_error_bare_node_specifier(err)
.map(|specifier| json!({ "specifier": specifier }))
),
)},
Self::InvalidNodeSpecifier(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Unknown Node built-in module: {}", specifier.path()), None),
Self::BareNodeSpecifier(specifier) => (lsp::DiagnosticSeverity::WARNING, format!("\"{}\" is resolved to \"node:{}\". If you want to use a built-in Node module, add a \"node:\" prefix.", specifier, specifier), Some(json!({ "specifier": specifier }))),
};
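The `ResolutionError` arm above grows a `deno add` hint when the failure is an unmapped bare specifier that looks like a registry package. A simplified sketch of just that string-building step; the real code goes through `deno_graph`/`import_map` error types that are stubbed out here:

```rust
fn add_hint(mut message: String, unmapped_specifier: Option<&str>) -> String {
    // Mirror the condition in the hunk: only specifiers starting with '@'
    // (e.g. "@std/assert") get the `deno add` suggestion.
    if let Some(specifier) = unmapped_specifier {
        if specifier.starts_with('@') {
            message.push_str(&format!(
                "\nHint: Use [deno add {}] to add the dependency.",
                specifier
            ));
        }
    }
    message
}

fn main() {
    let msg = add_hint("Unmapped bare specifier.".into(), Some("@std/assert"));
    assert!(
        msg.ends_with("Hint: Use [deno add @std/assert] to add the dependency.")
    );
}
```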
@@ -1466,7 +1488,11 @@ fn diagnose_dependency(
return; // ignore, surface typescript errors instead
}
let import_map = snapshot.config.tree.root_import_map();
let import_map = snapshot
.config
.tree
.data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer))
.and_then(|d| d.resolver.maybe_import_map());
if let Some(import_map) = import_map {
if let Resolution::Ok(resolved) = &dependency.maybe_code {
if let Some(to) = import_map.lookup(&resolved.specifier, referrer) {
@@ -1517,7 +1543,7 @@ fn diagnose_dependency(
dependency.is_dynamic,
dependency.maybe_attribute_type.as_deref(),
referrer_doc,
import_map.map(|i| i.as_ref()),
import_map,
)
.iter()
.flat_map(|diag| {
@@ -1541,7 +1567,7 @@ fn diagnose_dependency(
dependency.is_dynamic,
dependency.maybe_attribute_type.as_deref(),
referrer_doc,
import_map.map(|i| i.as_ref()),
import_map,
)
.iter()
.map(|diag| diag.to_lsp_diagnostic(&range)),
@@ -1593,7 +1619,6 @@ fn generate_deno_diagnostics(
#[cfg(test)]
mod tests {
use super::*;
use crate::lsp::cache::LspCache;
use crate::lsp::config::Config;
@@ -1603,7 +1628,8 @@ mod tests {
use crate::lsp::documents::LanguageId;
use crate::lsp::language_server::StateSnapshot;
use crate::lsp::resolver::LspResolver;
use deno_config::ConfigFile;
use deno_config::deno_json::ConfigFile;
use pretty_assertions::assert_eq;
use std::sync::Arc;
use test_util::TempDir;
@@ -1633,16 +1659,17 @@ mod tests {
async fn setup(
sources: &[(&str, &str, i32, LanguageId)],
maybe_import_map: Option<(&str, &str)>,
) -> StateSnapshot {
) -> (TempDir, StateSnapshot) {
let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri()));
let mut config = Config::new_with_roots([resolve_url("file:///").unwrap()]);
if let Some((base_url, json_string)) = maybe_import_map {
let base_url = resolve_url(base_url).unwrap();
let root_uri = temp_dir.uri();
let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap()));
let mut config = Config::new_with_roots([root_uri.clone()]);
if let Some((relative_path, json_string)) = maybe_import_map {
let base_url = root_uri.join(relative_path).unwrap();
let config_file = ConfigFile::new(
json_string,
base_url,
&deno_config::ParseOptions::default(),
&deno_config::deno_json::ConfigParseOptions::default(),
)
.unwrap();
config.tree.inject_config_file(config_file).await;
@@ -1651,9 +1678,8 @@ mod tests {
Arc::new(LspResolver::from_config(&config, &cache, None).await);
let mut documents = Documents::default();
documents.update_config(&config, &resolver, &cache, &Default::default());
for (specifier, source, version, language_id) in sources {
let specifier =
resolve_url(specifier).expect("failed to create specifier");
for (relative_path, source, version, language_id) in sources {
let specifier = root_uri.join(relative_path).unwrap();
documents.open(
specifier.clone(),
*version,
@@ -1662,20 +1688,23 @@ mod tests {
None,
);
}
StateSnapshot {
project_version: 0,
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
resolver,
}
(
temp_dir,
StateSnapshot {
project_version: 0,
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
resolver,
},
)
}
#[tokio::test]
async fn test_enabled_then_disabled_specifier() {
let snapshot = setup(
let (_, snapshot) = setup(
&[(
"file:///a.ts",
"a.ts",
r#"import * as b from "./b.ts";
let a: any = "a";
let c: number = "a";
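Why `setup` now returns the `TempDir` alongside the snapshot: the guard deletes its directory on drop, so tests that reference files under it must keep it alive for the duration of the test. The same lifetime rule, shown with the `tempfile` crate rather than the repo's `test_util::TempDir`:

```rust
use tempfile::TempDir;

fn main() {
    let dir = TempDir::new().unwrap();
    let path = dir.path().to_path_buf();
    assert!(path.exists());
    // Dropping the guard removes the directory, which is exactly what the
    // tests must avoid while a snapshot still points into it.
    drop(dir);
    assert!(!path.exists());
}
```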
@@ -1768,23 +1797,23 @@ let c: number = "a";
#[tokio::test]
async fn test_deno_diagnostics_with_import_map() {
let snapshot = setup(
let (temp_dir, snapshot) = setup(
&[
(
"file:///std/assert/mod.ts",
"std/assert/mod.ts",
"export function assert() {}",
1,
LanguageId::TypeScript,
),
(
"file:///a/file.ts",
"a/file.ts",
"import { assert } from \"../std/assert/mod.ts\";\n\nassert();\n",
1,
LanguageId::TypeScript,
),
],
Some((
"file:///a/import-map.json",
"a/deno.json",
r#"{
"imports": {
"/~/std/": "../std/"
@@ -1798,11 +1827,13 @@ let c: number = "a";
let actual = generate_deno_diagnostics(&snapshot, &config, token);
assert_eq!(actual.len(), 2);
for record in actual {
match record.specifier.as_str() {
"file:///std/assert/mod.ts" => {
let relative_specifier =
temp_dir.uri().make_relative(&record.specifier).unwrap();
match relative_specifier.as_str() {
"std/assert/mod.ts" => {
assert_eq!(json!(record.versioned.diagnostics), json!([]))
}
"file:///a/file.ts" => assert_eq!(
"a/file.ts" => assert_eq!(
json!(record.versioned.diagnostics),
json!([
{
@@ -1904,9 +1935,9 @@ let c: number = "a";
#[tokio::test]
async fn duplicate_diagnostics_for_duplicate_imports() {
let snapshot = setup(
let (_, snapshot) = setup(
&[(
"file:///a.ts",
"a.ts",
r#"
// @deno-types="bad.d.ts"
import "bad.js";
@@ -1980,9 +2011,9 @@ let c: number = "a";
#[tokio::test]
async fn unable_to_load_a_local_module() {
let snapshot = setup(
let (temp_dir, snapshot) = setup(
&[(
"file:///a.ts",
"a.ts",
r#"
import { 東京 } from "./🦕.ts";
"#,
@@ -2014,7 +2045,10 @@ let c: number = "a";
"severity": 1,
"code": "no-local",
"source": "deno",
"message": "Unable to load a local module: file:///🦕.ts\nPlease check the file path.",
"message": format!(
"Unable to load a local module: {}🦕.ts\nPlease check the file path.",
temp_dir.uri(),
),
}
])
);

@@ -144,6 +144,20 @@ impl AssetOrDocument {
}
}
pub fn file_referrer(&self) -> Option<&ModuleSpecifier> {
match self {
AssetOrDocument::Asset(_) => None,
AssetOrDocument::Document(doc) => doc.file_referrer(),
}
}
pub fn scope(&self) -> Option<&ModuleSpecifier> {
match self {
AssetOrDocument::Asset(asset_doc) => Some(asset_doc.specifier()),
AssetOrDocument::Document(doc) => doc.scope(),
}
}
pub fn maybe_semantic_tokens(&self) -> Option<lsp::SemanticTokens> {
match self {
AssetOrDocument::Asset(_) => None,
@@ -303,6 +317,10 @@ impl Document {
cache: &Arc<LspCache>,
file_referrer: Option<ModuleSpecifier>,
) -> Arc<Self> {
let file_referrer = Some(&specifier)
.filter(|s| cache.is_valid_file_referrer(s))
.cloned()
.or(file_referrer);
let media_type = resolve_media_type(
&specifier,
maybe_headers.as_ref(),
@@ -336,9 +354,13 @@ impl Document {
Arc::new(Self {
config,
dependencies,
file_referrer: file_referrer.filter(|_| specifier.scheme() != "file"),
maybe_fs_version: calculate_fs_version(
cache,
&specifier,
file_referrer.as_ref(),
),
file_referrer,
maybe_types_dependency,
maybe_fs_version: calculate_fs_version(cache, &specifier),
line_index,
maybe_language_id,
maybe_headers,
@@ -540,7 +562,11 @@ impl Document {
config: self.config.clone(),
specifier: self.specifier.clone(),
file_referrer: self.file_referrer.clone(),
maybe_fs_version: calculate_fs_version(cache, &self.specifier),
maybe_fs_version: calculate_fs_version(
cache,
&self.specifier,
self.file_referrer.as_ref(),
),
maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(),
maybe_types_dependency: self.maybe_types_dependency.clone(),
@@ -563,7 +589,11 @@ impl Document {
config: self.config.clone(),
specifier: self.specifier.clone(),
file_referrer: self.file_referrer.clone(),
maybe_fs_version: calculate_fs_version(cache, &self.specifier),
maybe_fs_version: calculate_fs_version(
cache,
&self.specifier,
self.file_referrer.as_ref(),
),
maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(),
maybe_types_dependency: self.maybe_types_dependency.clone(),
@@ -589,6 +619,13 @@ impl Document {
self.file_referrer.as_ref()
}
pub fn scope(&self) -> Option<&ModuleSpecifier> {
self
.file_referrer
.as_ref()
.and_then(|r| self.config.tree.scope_for_specifier(r))
}
pub fn content(&self) -> &Arc<str> {
&self.text
}
@@ -766,7 +803,10 @@ impl FileSystemDocuments {
cache: &Arc<LspCache>,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<Arc<Document>> {
let new_fs_version = calculate_fs_version(cache, specifier);
let file_referrer = Some(specifier)
.filter(|s| cache.is_valid_file_referrer(s))
.or(file_referrer);
let new_fs_version = calculate_fs_version(cache, specifier, file_referrer);
let old_doc = self.docs.get(specifier).map(|v| v.value().clone());
let dirty = match &old_doc {
None => true,
@@ -830,7 +870,7 @@ impl FileSystemDocuments {
file_referrer.cloned(),
)
} else {
let http_cache = cache.root_vendor_or_global();
let http_cache = cache.for_specifier(file_referrer);
let cache_key = http_cache.cache_item_key(specifier).ok()?;
let bytes = http_cache
.read_file_bytes(&cache_key, None, LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY)
@@ -907,9 +947,9 @@ pub struct Documents {
/// The npm package requirements found in npm specifiers.
npm_reqs_by_scope:
Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
/// Gets if any document had a node: specifier such that a @types/node package
/// should be injected.
has_injected_types_node_package: bool,
/// Config scopes that contain a node: specifier such that a @types/node
/// package should be injected.
scopes_with_node_specifier: Arc<HashSet<Option<ModuleSpecifier>>>,
}
impl Documents {
@@ -1011,13 +1051,16 @@ impl Documents {
&self,
specifier: &'a ModuleSpecifier,
) -> Option<Cow<'a, ModuleSpecifier>> {
if specifier.scheme() == "file" {
Some(Cow::Borrowed(specifier))
} else {
self
.get(specifier)
.and_then(|d| d.file_referrer().cloned().map(Cow::Owned))
if self.is_valid_file_referrer(specifier) {
return Some(Cow::Borrowed(specifier));
}
self
.get(specifier)
.and_then(|d| d.file_referrer().cloned().map(Cow::Owned))
}
pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool {
self.cache.is_valid_file_referrer(specifier)
}
/// Return `true` if the provided specifier can be resolved to a document,
@@ -1089,7 +1132,7 @@ impl Documents {
.map(|p| p.is_file())
.unwrap_or(false);
}
if self.cache.root_vendor_or_global().contains(&specifier) {
if self.cache.for_specifier(file_referrer).contains(&specifier) {
return true;
}
}
@@ -1103,10 +1146,10 @@ impl Documents {
self.npm_reqs_by_scope.clone()
}
/// Returns if a @types/node package was injected into the npm
/// resolver based on the state of the documents.
pub fn has_injected_types_node_package(&self) -> bool {
self.has_injected_types_node_package
pub fn scopes_with_node_specifier(
&self,
) -> &Arc<HashSet<Option<ModuleSpecifier>>> {
&self.scopes_with_node_specifier
}
/// Return a document for the specifier.
@@ -1203,9 +1246,13 @@ impl Documents {
&self,
specifiers: &[String],
referrer: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
let document = self.get(referrer);
let file_referrer = document.as_ref().and_then(|d| d.file_referrer());
let file_referrer = document
.as_ref()
.and_then(|d| d.file_referrer())
.or(file_referrer);
let dependencies = document.as_ref().map(|d| d.dependencies());
let mut results = Vec::new();
for specifier in specifiers {
@@ -1327,21 +1374,18 @@ impl Documents {
/// document.
fn calculate_npm_reqs_if_dirty(&mut self) {
let mut npm_reqs_by_scope: BTreeMap<_, BTreeSet<_>> = Default::default();
let mut scopes_with_node_builtin_specifier = HashSet::new();
let mut scopes_with_specifier = HashSet::new();
let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
if !is_fs_docs_dirty && !self.dirty {
return;
}
let mut visit_doc = |doc: &Arc<Document>| {
let scope = doc
.file_referrer()
.and_then(|r| self.config.tree.scope_for_specifier(r))
.or(self.config.tree.root_scope());
let scope = doc.scope();
let reqs = npm_reqs_by_scope.entry(scope.cloned()).or_default();
for dependency in doc.dependencies().values() {
if let Some(dep) = dependency.get_code() {
if dep.scheme() == "node" {
scopes_with_node_builtin_specifier.insert(scope.cloned());
scopes_with_specifier.insert(scope.cloned());
}
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
reqs.insert(reference.into_inner().req);
@@ -1367,21 +1411,15 @@ impl Documents {
}
// fill the reqs from the lockfile
// TODO(nayeemrmn): Iterate every lockfile here for multi-deno.json.
if let Some(lockfile) = self
.config
.tree
.root_data()
.and_then(|d| d.lockfile.as_ref())
{
let reqs = npm_reqs_by_scope
.entry(self.config.tree.root_scope().cloned())
.or_default();
let lockfile = lockfile.lock();
for key in lockfile.content.packages.specifiers.keys() {
if let Some(key) = key.strip_prefix("npm:") {
if let Ok(req) = PackageReq::from_str(key) {
reqs.insert(req);
for (scope, config_data) in self.config.tree.data_by_scope().as_ref() {
if let Some(lockfile) = config_data.lockfile.as_ref() {
let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
let lockfile = lockfile.lock();
for key in lockfile.content.packages.specifiers.keys() {
if let Some(key) = key.strip_prefix("npm:") {
if let Ok(req) = PackageReq::from_str(key) {
reqs.insert(req);
}
}
}
}
@@ -1390,15 +1428,15 @@ impl Documents {
// Ensure a @types/node package exists when any module uses a node: specifier.
// Unlike on the command line, here we just add @types/node to the npm package
// requirements since this won't end up in the lockfile.
for scope in scopes_with_node_builtin_specifier {
let reqs = npm_reqs_by_scope.entry(scope).or_default();
for scope in &scopes_with_specifier {
let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default();
if !reqs.iter().any(|r| r.name == "@types/node") {
self.has_injected_types_node_package = true;
reqs.insert(PackageReq::from_str("@types/node").unwrap());
}
}
self.npm_reqs_by_scope = Arc::new(npm_reqs_by_scope);
self.scopes_with_node_specifier = Arc::new(scopes_with_specifier);
self.dirty = false;
}
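The rewritten `calculate_npm_reqs_if_dirty` keys everything by scope: requirements are grouped per (optional) scope, and `@types/node` is injected into exactly the scopes that saw a `node:` specifier, replacing the old global `has_injected_types_node_package` flag. A hedged sketch of that bookkeeping with plain strings in place of `ModuleSpecifier` and `PackageReq`:

```rust
use std::collections::{BTreeMap, BTreeSet, HashSet};

fn main() {
    let mut npm_reqs_by_scope: BTreeMap<Option<String>, BTreeSet<String>> =
        BTreeMap::new();
    let mut scopes_with_specifier: HashSet<Option<String>> = HashSet::new();

    // A document in scope "file:///work/" imports "node:path".
    let scope = Some("file:///work/".to_string());
    npm_reqs_by_scope.entry(scope.clone()).or_default();
    scopes_with_specifier.insert(scope);

    // Inject @types/node only into scopes that used a node: specifier.
    for scope in &scopes_with_specifier {
        let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default();
        if !reqs.iter().any(|r| r.starts_with("@types/node")) {
            reqs.insert("@types/node".to_string());
        }
    }
    assert_eq!(npm_reqs_by_scope.values().next().unwrap().len(), 1);
}
```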
@@ -1416,20 +1454,25 @@ impl Documents {
return Some((specifier.clone(), MediaType::Dts));
}
}
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
return self
.resolver
.npm_to_file_url(&npm_ref, referrer, file_referrer);
let mut specifier = specifier.clone();
let mut media_type = None;
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) {
let (s, mt) =
self
.resolver
.npm_to_file_url(&npm_ref, referrer, file_referrer)?;
specifier = s;
media_type = Some(mt);
}
let Some(doc) = self.get_or_load(specifier, referrer) else {
return Some((specifier.clone(), MediaType::from_specifier(specifier)));
let Some(doc) = self.get_or_load(&specifier, referrer) else {
let media_type =
media_type.unwrap_or_else(|| MediaType::from_specifier(&specifier));
return Some((specifier, media_type));
};
if let Some(types) = doc.maybe_types_dependency().maybe_specifier() {
self.resolve_dependency(types, specifier, file_referrer)
self.resolve_dependency(types, &specifier, file_referrer)
} else {
let media_type = doc.media_type();
Some((doc.specifier().clone(), media_type))
Some((doc.specifier().clone(), doc.media_type()))
}
}
}
@@ -1554,7 +1597,9 @@ fn analyze_module(
mod tests {
use super::*;
use crate::lsp::cache::LspCache;
use deno_config::ConfigFile;
use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::ConfigParseOptions;
use deno_core::serde_json;
use deno_core::serde_json::json;
use pretty_assertions::assert_eq;
@@ -1562,7 +1607,8 @@ mod tests {
async fn setup() -> (Documents, LspCache, TempDir) {
let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri()));
temp_dir.create_dir_all(".deno_dir");
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap()));
let config = Config::default();
let resolver =
Arc::new(LspResolver::from_config(&config, &cache, None).await);
@@ -1707,7 +1753,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -1751,7 +1797,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&ConfigParseOptions::default(),
)
.unwrap(),
)

@@ -24,7 +24,6 @@ use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
use super::config::Config;
use super::config::ConfigData;
use super::search::PackageSearchApi;
@@ -44,26 +43,31 @@ impl JsrCacheResolver {
pub fn new(
cache: Arc<dyn HttpCache>,
config_data: Option<&ConfigData>,
config: &Config,
) -> Self {
let nv_by_req = DashMap::new();
let info_by_nv = DashMap::new();
let info_by_name = DashMap::new();
let mut workspace_scope_by_name = HashMap::new();
if let Some(config_data) = config_data {
let config_data_by_scope = config.tree.data_by_scope();
for member_scope in config_data.workspace_members.as_ref() {
let Some(member_data) = config_data_by_scope.get(member_scope) else {
for jsr_pkg_config in config_data.member_dir.workspace.jsr_packages() {
let Some(exports) = &jsr_pkg_config.config_file.json.exports else {
continue;
};
let Some(package_config) = member_data.package_config.as_ref() else {
let Some(version) = &jsr_pkg_config.config_file.json.version else {
continue;
};
let Ok(version) = Version::parse_standard(version) else {
continue;
};
let nv = PackageNv {
name: jsr_pkg_config.name.clone(),
version: version.clone(),
};
info_by_name.insert(
package_config.nv.name.clone(),
nv.name.clone(),
Some(Arc::new(JsrPackageInfo {
versions: [(
package_config.nv.version.clone(),
nv.version.clone(),
JsrPackageInfoVersion { yanked: false },
)]
.into_iter()
@@ -71,16 +75,21 @@ impl JsrCacheResolver {
})),
);
info_by_nv.insert(
package_config.nv.clone(),
nv.clone(),
Some(Arc::new(JsrPackageVersionInfo {
exports: package_config.exports.clone(),
exports: exports.clone(),
module_graph_1: None,
module_graph_2: None,
manifest: Default::default(),
})),
);
workspace_scope_by_name
.insert(package_config.nv.name.clone(), member_scope.clone());
workspace_scope_by_name.insert(
nv.name.clone(),
ModuleSpecifier::from_directory_path(
jsr_pkg_config.config_file.dir_path(),
)
.unwrap(),
);
}
}
if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) {

File diff suppressed because it is too large.

@@ -1,12 +1,15 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::create_default_npmrc;
use crate::args::package_json;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::PackageJsonInstallDepsProvider;
use crate::args::DENO_FUTURE;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
@@ -14,25 +17,24 @@ use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::HttpCache;
use deno_core::error::AnyError;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::url::Url;
use deno_graph::source::Resolver;
use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolution;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path;
@@ -41,30 +43,32 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use package_json::PackageJsonDepsProvider;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode;
use node_resolver::NpmResolver;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::rc::Rc;
use std::sync::Arc;
use super::cache::LspCache;
use super::jsr::JsrCacheResolver;
#[derive(Debug, Clone)]
pub struct LspResolver {
struct LspScopeResolver {
graph_resolver: Arc<CliGraphResolver>,
jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>,
redirect_resolver: Option<Arc<RedirectResolver>>,
graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
config: Arc<Config>,
config_data: Option<Arc<ConfigData>>,
}
impl Default for LspResolver {
impl Default for LspScopeResolver {
fn default() -> Self {
Self {
graph_resolver: create_graph_resolver(None, None, None),
@@ -73,43 +77,44 @@ impl Default for LspResolver {
node_resolver: None,
redirect_resolver: None,
graph_imports: Default::default(),
config: Default::default(),
config_data: None,
}
}
}
impl LspResolver {
pub async fn from_config(
config: &Config,
impl LspScopeResolver {
async fn from_config_data(
config_data: Option<&Arc<ConfigData>>,
cache: &LspCache,
http_client_provider: Option<&Arc<HttpClientProvider>>,
) -> Self {
let config_data = config.tree.root_data();
let mut npm_resolver = None;
let mut node_resolver = None;
if let (Some(http_client), Some(config_data)) =
(http_client_provider, config_data)
{
npm_resolver = create_npm_resolver(config_data, cache, http_client).await;
if let Some(http_client) = http_client_provider {
npm_resolver = create_npm_resolver(
config_data.map(|d| d.as_ref()),
cache,
http_client,
)
.await;
node_resolver = create_node_resolver(npm_resolver.as_ref());
}
let graph_resolver = create_graph_resolver(
config_data,
config_data.map(|d| d.as_ref()),
npm_resolver.as_ref(),
node_resolver.as_ref(),
);
let jsr_resolver = Some(Arc::new(JsrCacheResolver::new(
cache.root_vendor_or_global(),
config_data,
config,
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.map(|d| d.as_ref()),
)));
let redirect_resolver = Some(Arc::new(RedirectResolver::new(
cache.root_vendor_or_global(),
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()),
)));
let npm_graph_resolver = graph_resolver.create_graph_npm_resolver();
let graph_imports = config_data
.and_then(|d| d.config_file.as_ref())
.and_then(|cf| cf.to_maybe_imports().ok())
.and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok())
.map(|imports| {
Arc::new(
imports
@@ -135,16 +140,16 @@ impl LspResolver {
node_resolver,
redirect_resolver,
graph_imports,
config: Arc::new(config.clone()),
config_data: config_data.cloned(),
}
}
pub fn snapshot(&self) -> Arc<Self> {
fn snapshot(&self) -> Arc<Self> {
let npm_resolver =
self.npm_resolver.as_ref().map(|r| r.clone_snapshotted());
let node_resolver = create_node_resolver(npm_resolver.as_ref());
let graph_resolver = create_graph_resolver(
self.config.tree.root_data(),
self.config_data.as_deref(),
npm_resolver.as_ref(),
node_resolver.as_ref(),
);
@@ -155,58 +160,121 @@ impl LspResolver {
node_resolver,
redirect_resolver: self.redirect_resolver.clone(),
graph_imports: self.graph_imports.clone(),
config: self.config.clone(),
config_data: self.config_data.clone(),
})
}
}
#[derive(Debug, Default, Clone)]
pub struct LspResolver {
unscoped: Arc<LspScopeResolver>,
by_scope: BTreeMap<ModuleSpecifier, Arc<LspScopeResolver>>,
}
impl LspResolver {
pub async fn from_config(
config: &Config,
cache: &LspCache,
http_client_provider: Option<&Arc<HttpClientProvider>>,
) -> Self {
let mut by_scope = BTreeMap::new();
for (scope, config_data) in config.tree.data_by_scope().as_ref() {
by_scope.insert(
scope.clone(),
Arc::new(
LspScopeResolver::from_config_data(
Some(config_data),
cache,
http_client_provider,
)
.await,
),
);
}
Self {
unscoped: Arc::new(
LspScopeResolver::from_config_data(None, cache, http_client_provider)
.await,
),
by_scope,
}
}
pub fn snapshot(&self) -> Arc<Self> {
Arc::new(Self {
unscoped: self.unscoped.snapshot(),
by_scope: self
.by_scope
.iter()
.map(|(s, r)| (s.clone(), r.snapshot()))
.collect(),
})
}
pub fn did_cache(&self) {
self.jsr_resolver.as_ref().inspect(|r| r.did_cache());
for resolver in
std::iter::once(&self.unscoped).chain(self.by_scope.values())
{
resolver.jsr_resolver.as_ref().inspect(|r| r.did_cache());
resolver
.redirect_resolver
.as_ref()
.inspect(|r| r.did_cache());
}
}
pub async fn set_npm_reqs(
&self,
reqs: &BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>,
) -> Result<(), AnyError> {
let reqs = reqs
.values()
.flatten()
.collect::<BTreeSet<_>>()
) {
for (scope, resolver) in [(None, &self.unscoped)]
.into_iter()
.cloned()
.collect::<Vec<_>>();
if let Some(npm_resolver) = self.npm_resolver.as_ref() {
if let Some(npm_resolver) = npm_resolver.as_managed() {
return npm_resolver.set_package_reqs(&reqs).await;
.chain(self.by_scope.iter().map(|(s, r)| (Some(s), r)))
{
if let Some(npm_resolver) = resolver.npm_resolver.as_ref() {
if let Some(npm_resolver) = npm_resolver.as_managed() {
let reqs = reqs
.get(&scope.cloned())
.map(|reqs| reqs.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default();
if let Err(err) = npm_resolver.set_package_reqs(&reqs).await {
lsp_warn!("Could not set npm package requirements: {:#}", err);
}
}
}
}
Ok(())
}
pub fn as_graph_resolver(
&self,
_file_referrer: Option<&ModuleSpecifier>,
file_referrer: Option<&ModuleSpecifier>,
) -> &dyn Resolver {
self.graph_resolver.as_ref()
let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.as_ref()
}
pub fn create_graph_npm_resolver(
&self,
_file_referrer: Option<&ModuleSpecifier>,
file_referrer: Option<&ModuleSpecifier>,
) -> WorkerCliNpmGraphResolver {
self.graph_resolver.create_graph_npm_resolver()
let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.create_graph_npm_resolver()
}
pub fn maybe_managed_npm_resolver(
&self,
_file_referrer: Option<&ModuleSpecifier>,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&ManagedCliNpmResolver> {
self.npm_resolver.as_ref().and_then(|r| r.as_managed())
let resolver = self.get_scope_resolver(file_referrer);
resolver.npm_resolver.as_ref().and_then(|r| r.as_managed())
}
pub fn graph_imports_by_referrer(
&self,
file_referrer: &ModuleSpecifier,
) -> IndexMap<&ModuleSpecifier, Vec<&ModuleSpecifier>> {
self
let resolver = self.get_scope_resolver(Some(file_referrer));
resolver
.graph_imports
.iter()
.map(|(s, i)| {
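`get_scope_resolver` is referenced throughout these hunks but its body is not shown in this diff. A plausible sketch of its contract, inferred from the call sites: pick the most specific scope whose URL prefixes the file referrer, falling back to the unscoped resolver. All types here are illustrative stand-ins:

```rust
use std::collections::BTreeMap;

struct ScopeResolver(&'static str);

struct Resolvers {
    unscoped: ScopeResolver,
    by_scope: BTreeMap<String, ScopeResolver>,
}

impl Resolvers {
    fn get_scope_resolver(&self, file_referrer: Option<&str>) -> &ScopeResolver {
        let Some(file_referrer) = file_referrer else {
            // No referrer means no scope information: use the unscoped resolver.
            return &self.unscoped;
        };
        self.by_scope
            .iter()
            .rfind(|(s, _)| file_referrer.starts_with(s.as_str()))
            .map(|(_, r)| r)
            .unwrap_or(&self.unscoped)
    }
}

fn main() {
    let resolvers = Resolvers {
        unscoped: ScopeResolver("unscoped"),
        by_scope: BTreeMap::from([
            ("file:///work/".to_string(), ScopeResolver("root")),
            ("file:///work/member/".to_string(), ScopeResolver("member")),
        ]),
    };
    let r = resolvers.get_scope_resolver(Some("file:///work/member/a.ts"));
    assert_eq!(r.0, "member");
}
```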
@@ -224,35 +292,42 @@ impl LspResolver {
pub fn jsr_to_resource_url(
&self,
req_ref: &JsrPackageReqReference,
_file_referrer: Option<&ModuleSpecifier>,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<ModuleSpecifier> {
self.jsr_resolver.as_ref()?.jsr_to_resource_url(req_ref)
let resolver = self.get_scope_resolver(file_referrer);
resolver.jsr_resolver.as_ref()?.jsr_to_resource_url(req_ref)
}
pub fn jsr_lookup_export_for_path(
&self,
nv: &PackageNv,
path: &str,
_file_referrer: Option<&ModuleSpecifier>,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<String> {
self.jsr_resolver.as_ref()?.lookup_export_for_path(nv, path)
let resolver = self.get_scope_resolver(file_referrer);
resolver
.jsr_resolver
.as_ref()?
.lookup_export_for_path(nv, path)
}
pub fn jsr_lookup_req_for_nv(
&self,
nv: &PackageNv,
_file_referrer: Option<&ModuleSpecifier>,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<PackageReq> {
self.jsr_resolver.as_ref()?.lookup_req_for_nv(nv)
let resolver = self.get_scope_resolver(file_referrer);
resolver.jsr_resolver.as_ref()?.lookup_req_for_nv(nv)
}
pub fn npm_to_file_url(
&self,
req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier,
_file_referrer: Option<&ModuleSpecifier>,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> {
let node_resolver = self.node_resolver.as_ref()?;
let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?;
Some(NodeResolution::into_specifier_and_media_type(
node_resolver
.resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types)
@@ -261,17 +336,37 @@ impl LspResolver {
}
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
if let Some(npm_resolver) = &self.npm_resolver {
return npm_resolver.in_npm_package(specifier);
fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
// consider any /node_modules/ directory as being in the node_modules
// folder for the LSP because it's pretty complicated to deal with multiple scopes
specifier.scheme() == "file"
&& specifier
.path()
.to_ascii_lowercase()
.contains("/node_modules/")
}
false
let global_npm_resolver = self
.get_scope_resolver(Some(specifier))
.npm_resolver
.as_ref()
.and_then(|npm_resolver| npm_resolver.as_managed())
.filter(|r| r.root_node_modules_path().is_none());
if let Some(npm_resolver) = &global_npm_resolver {
if npm_resolver.in_npm_package(specifier) {
return true;
}
}
has_node_modules_dir(specifier)
}
pub fn node_media_type(
&self,
specifier: &ModuleSpecifier,
) -> Option<MediaType> {
let node_resolver = self.node_resolver.as_ref()?;
let resolver = self.get_scope_resolver(Some(specifier));
let node_resolver = resolver.node_resolver.as_ref()?;
let resolution = node_resolver
.url_to_node_resolution(specifier.clone())
.ok()?;
|
||||
|
@ -281,22 +376,21 @@ impl LspResolver {
|
|||
pub fn get_closest_package_json(
|
||||
&self,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<Option<Rc<PackageJson>>, AnyError> {
|
||||
let Some(node_resolver) = self.node_resolver.as_ref() else {
|
||||
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
|
||||
let resolver = self.get_scope_resolver(Some(referrer));
|
||||
let Some(node_resolver) = resolver.node_resolver.as_ref() else {
|
||||
return Ok(None);
|
||||
};
|
||||
node_resolver.get_closest_package_json(
|
||||
referrer,
|
||||
&mut deno_runtime::deno_node::AllowAllNodePermissions,
|
||||
)
|
||||
node_resolver.get_closest_package_json(referrer)
|
||||
}
|
||||
|
||||
pub fn resolve_redirects(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
_file_referrer: Option<&ModuleSpecifier>,
|
||||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Option<ModuleSpecifier> {
|
||||
let Some(redirect_resolver) = self.redirect_resolver.as_ref() else {
|
||||
let resolver = self.get_scope_resolver(file_referrer);
|
||||
let Some(redirect_resolver) = resolver.redirect_resolver.as_ref() else {
|
||||
return Some(specifier.clone());
|
||||
};
|
||||
redirect_resolver.resolve(specifier)
|
||||
|
@ -305,9 +399,10 @@ impl LspResolver {
|
|||
pub fn redirect_chain_headers(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
_file_referrer: Option<&ModuleSpecifier>,
|
||||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Vec<(ModuleSpecifier, Arc<HashMap<String, String>>)> {
|
||||
let Some(redirect_resolver) = self.redirect_resolver.as_ref() else {
|
||||
let resolver = self.get_scope_resolver(file_referrer);
|
||||
let Some(redirect_resolver) = resolver.redirect_resolver.as_ref() else {
|
||||
return vec![];
|
||||
};
|
||||
redirect_resolver
|
||||
|
@ -316,26 +411,44 @@ impl LspResolver {
|
|||
.map(|(s, e)| (s, e.headers.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn get_scope_resolver(
|
||||
&self,
|
||||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> &LspScopeResolver {
|
||||
let Some(file_referrer) = file_referrer else {
|
||||
return self.unscoped.as_ref();
|
||||
};
|
||||
self
|
||||
.by_scope
|
||||
.iter()
|
||||
.rfind(|(s, _)| file_referrer.as_str().starts_with(s.as_str()))
|
||||
.map(|(_, r)| r.as_ref())
|
||||
.unwrap_or(self.unscoped.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
async fn create_npm_resolver(
|
||||
config_data: &ConfigData,
|
||||
config_data: Option<&ConfigData>,
|
||||
cache: &LspCache,
|
||||
http_client_provider: &Arc<HttpClientProvider>,
|
||||
) -> Option<Arc<dyn CliNpmResolver>> {
|
||||
let node_modules_dir = config_data
|
||||
.node_modules_dir
|
||||
.clone()
|
||||
.or_else(|| specifier_to_file_path(&config_data.scope).ok())?;
|
||||
let options = if config_data.byonm {
|
||||
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(*DENO_FUTURE);
|
||||
let options = if enable_byonm {
|
||||
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
|
||||
fs: Arc::new(deno_fs::RealFs),
|
||||
root_node_modules_dir: node_modules_dir.clone(),
|
||||
root_node_modules_dir: config_data.and_then(|config_data| {
|
||||
config_data.node_modules_dir.clone().or_else(|| {
|
||||
specifier_to_file_path(&config_data.scope)
|
||||
.ok()
|
||||
.map(|p| p.join("node_modules/"))
|
||||
})
|
||||
}),
|
||||
})
|
||||
} else {
|
||||
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions {
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
snapshot: match config_data.lockfile.as_ref() {
|
||||
snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) {
|
||||
Some(lockfile) => {
|
||||
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
|
||||
lockfile.clone(),
|
||||
|
@ -354,17 +467,17 @@ async fn create_npm_resolver(
|
|||
// the user is typing.
|
||||
cache_setting: CacheSetting::Only,
|
||||
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly),
|
||||
maybe_node_modules_path: config_data.node_modules_dir.clone(),
|
||||
package_json_deps_provider: Arc::new(PackageJsonDepsProvider::new(
|
||||
config_data.package_json.as_ref().map(|package_json| {
|
||||
package_json::get_local_package_json_version_reqs(package_json)
|
||||
}),
|
||||
)),
|
||||
maybe_node_modules_path: config_data
|
||||
.and_then(|d| d.node_modules_dir.clone()),
|
||||
// only used for top level install, so we can ignore this
|
||||
package_json_deps_provider: Arc::new(
|
||||
PackageJsonInstallDepsProvider::empty(),
|
||||
),
|
||||
npmrc: config_data
|
||||
.npmrc
|
||||
.clone()
|
||||
.and_then(|d| d.npmrc.clone())
|
||||
.unwrap_or_else(create_default_npmrc),
|
||||
npm_system_info: NpmSystemInfo::default(),
|
||||
lifecycle_scripts: Default::default(),
|
||||
})
|
||||
};
|
||||
Some(create_cli_npm_resolver_for_lsp(options).await)
|
||||
|
@ -373,14 +486,21 @@ async fn create_npm_resolver(
|
|||
fn create_node_resolver(
|
||||
npm_resolver: Option<&Arc<dyn CliNpmResolver>>,
|
||||
) -> Option<Arc<CliNodeResolver>> {
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
// it's not ideal to share this across all scopes and to
|
||||
// never clear it, but it's fine for the time being
|
||||
static CJS_RESOLUTIONS: Lazy<Arc<CjsResolutionStore>> =
|
||||
Lazy::new(Default::default);
|
||||
|
||||
let npm_resolver = npm_resolver?;
|
||||
let fs = Arc::new(deno_fs::RealFs);
|
||||
let node_resolver_inner = Arc::new(NodeResolver::new(
|
||||
fs.clone(),
|
||||
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
|
||||
npm_resolver.clone().into_npm_resolver(),
|
||||
));
|
||||
Some(Arc::new(CliNodeResolver::new(
|
||||
None,
|
||||
CJS_RESOLUTIONS.clone(),
|
||||
fs,
|
||||
node_resolver_inner,
|
||||
npm_resolver.clone(),
|
||||
|
@ -392,29 +512,29 @@ fn create_graph_resolver(
|
|||
npm_resolver: Option<&Arc<dyn CliNpmResolver>>,
|
||||
node_resolver: Option<&Arc<CliNodeResolver>>,
|
||||
) -> Arc<CliGraphResolver> {
|
||||
let config_file = config_data.and_then(|d| d.config_file.as_deref());
|
||||
let unstable_sloppy_imports =
|
||||
config_file.is_some_and(|cf| cf.has_unstable("sloppy-imports"));
|
||||
let workspace = config_data.map(|d| &d.member_dir.workspace);
|
||||
Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
|
||||
node_resolver: node_resolver.cloned(),
|
||||
npm_resolver: npm_resolver.cloned(),
|
||||
package_json_deps_provider: Arc::new(PackageJsonDepsProvider::new(
|
||||
config_data
|
||||
.and_then(|d| d.package_json.as_ref())
|
||||
.map(|package_json| {
|
||||
package_json::get_local_package_json_version_reqs(package_json)
|
||||
}),
|
||||
)),
|
||||
maybe_jsx_import_source_config: config_file
|
||||
.and_then(|cf| cf.to_maybe_jsx_import_source_config().ok().flatten()),
|
||||
maybe_import_map: config_data.and_then(|d| d.import_map.clone()),
|
||||
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
|
||||
bare_node_builtins_enabled: config_file
|
||||
.map(|cf| cf.has_unstable("bare-node-builtins"))
|
||||
.unwrap_or(false),
|
||||
sloppy_imports_resolver: unstable_sloppy_imports.then(|| {
|
||||
SloppyImportsResolver::new_without_stat_cache(Arc::new(deno_fs::RealFs))
|
||||
workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else(
|
||||
|| {
|
||||
Arc::new(WorkspaceResolver::new_raw(
|
||||
// this is fine because this is only used before initialization
|
||||
Arc::new(ModuleSpecifier::parse("file:///").unwrap()),
|
||||
None,
|
||||
Vec::new(),
|
||||
PackageJsonDepResolution::Disabled,
|
||||
))
|
||||
},
|
||||
),
|
||||
maybe_jsx_import_source_config: workspace.and_then(|workspace| {
|
||||
workspace.to_maybe_jsx_import_source_config().ok().flatten()
|
||||
}),
|
||||
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
|
||||
bare_node_builtins_enabled: workspace
|
||||
.is_some_and(|workspace| workspace.has_unstable("bare-node-builtins")),
|
||||
sloppy_imports_resolver: config_data
|
||||
.and_then(|d| d.sloppy_imports_resolver.clone()),
|
||||
}))
|
||||
}
|
||||
|
||||
|
@ -443,13 +563,36 @@ impl std::fmt::Debug for RedirectResolver {
|
|||
}
|
||||
|
||||
impl RedirectResolver {
|
||||
fn new(cache: Arc<dyn HttpCache>) -> Self {
|
||||
fn new(
|
||||
cache: Arc<dyn HttpCache>,
|
||||
lockfile: Option<Arc<CliLockfile>>,
|
||||
) -> Self {
|
||||
let entries = DashMap::new();
|
||||
if let Some(lockfile) = lockfile {
|
||||
for (source, destination) in &lockfile.lock().content.redirects {
|
||||
let Ok(source) = ModuleSpecifier::parse(source) else {
|
||||
continue;
|
||||
};
|
||||
let Ok(destination) = ModuleSpecifier::parse(destination) else {
|
||||
continue;
|
||||
};
|
||||
entries.insert(
|
||||
source,
|
||||
Some(Arc::new(RedirectEntry {
|
||||
headers: Default::default(),
|
||||
target: destination.clone(),
|
||||
destination: Some(destination.clone()),
|
||||
})),
|
||||
);
|
||||
entries.insert(destination, None);
|
||||
}
|
||||
}
|
||||
Self {
|
||||
get_headers: Box::new(move |specifier| {
|
||||
let cache_key = cache.cache_item_key(specifier).ok()?;
|
||||
cache.read_headers(&cache_key).ok().flatten()
|
||||
}),
|
||||
entries: Default::default(),
|
||||
entries,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -529,6 +672,10 @@ impl RedirectResolver {
|
|||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn did_cache(&self) {
|
||||
self.entries.retain(|_, entry| entry.is_some());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
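The hunks above thread an optional `file_referrer` through every `LspResolver` entry point so each call lands on the resolver for the most specific workspace scope; `get_scope_resolver` does this with an `rfind` over scopes ordered from shortest to longest URL prefix. A minimal standalone sketch of that selection rule, using stand-in types rather than the real `LspScopeResolver`:

// Minimal sketch (not the actual LspResolver): pick the most specific
// scope for a file by longest matching URL prefix. Assumes `by_scope`
// is ordered from shortest to longest scope URL, mirroring the diff's
// `rfind` over an ordered map.
fn pick_scope<'a>(
  by_scope: &'a [(String, &'a str)], // (scope URL prefix, resolver name)
  unscoped: &'a str,
  file_referrer: Option<&str>,
) -> &'a str {
  let Some(file_referrer) = file_referrer else {
    return unscoped;
  };
  by_scope
    .iter()
    .rfind(|(s, _)| file_referrer.starts_with(s.as_str()))
    .map(|(_, r)| *r)
    .unwrap_or(unscoped)
}

fn main() {
  let scopes = vec![
    ("file:///project/".to_string(), "project resolver"),
    ("file:///project/member/".to_string(), "member resolver"),
  ];
  assert_eq!(
    pick_scope(&scopes, "unscoped", Some("file:///project/member/mod.ts")),
    "member resolver"
  );
  assert_eq!(pick_scope(&scopes, "unscoped", None), "unscoped");
}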
@@ -212,13 +212,15 @@ impl TestRun {
   ) -> Result<(), AnyError> {
     let args = self.get_args();
     lsp_log!("Executing test run with arguments: {}", args.join(" "));
-    let flags = flags_from_vec(args.into_iter().map(From::from).collect())?;
-    let factory = CliFactory::from_flags(flags)?;
+    let flags =
+      Arc::new(flags_from_vec(args.into_iter().map(From::from).collect())?);
+    let factory = CliFactory::from_flags(flags);
+    let cli_options = factory.cli_options()?;
     // Various test files should not share the same permissions in terms of
     // `PermissionsContainer` - otherwise granting/revoking permissions in one
     // file would have impact on other files, which is undesirable.
     let permissions =
-      Permissions::from_options(&factory.cli_options().permissions_options()?)?;
+      Permissions::from_options(&cli_options.permissions_options()?)?;
     let main_graph_container = factory.main_module_graph_container().await?;
     test::check_specifiers(
       factory.file_fetcher()?,
@@ -231,19 +233,18 @@ impl TestRun {
     )
     .await?;

-    let (concurrent_jobs, fail_fast) = if let DenoSubcommand::Test(test_flags) =
-      factory.cli_options().sub_command()
-    {
-      (
-        test_flags
-          .concurrent_jobs
-          .unwrap_or_else(|| NonZeroUsize::new(1).unwrap())
-          .into(),
-        test_flags.fail_fast,
-      )
-    } else {
-      unreachable!("Should always be Test subcommand.");
-    };
+    let (concurrent_jobs, fail_fast) =
+      if let DenoSubcommand::Test(test_flags) = cli_options.sub_command() {
+        (
+          test_flags
+            .concurrent_jobs
+            .unwrap_or_else(|| NonZeroUsize::new(1).unwrap())
+            .into(),
+          test_flags.fail_fast,
+        )
+      } else {
+        unreachable!("Should always be Test subcommand.");
+      };

     // TODO(mmastrac): Temporarily limit concurrency in windows testing to avoid named pipe issue:
     // *** Unexpected server pipe failure '"\\\\.\\pipe\\deno_pipe_e30f45c9df61b1e4.1198.222\\0"': 3
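The rewritten test run keeps the old concurrency default: an unset `concurrent_jobs` falls back to 1, with `NonZeroUsize` ruling out a zero-job configuration. A small sketch of that fallback in isolation:

use std::num::NonZeroUsize;

// Sketch of the concurrency default used in the test runner: an optional
// jobs flag falls back to 1, and `NonZeroUsize` makes a "zero jobs"
// state unrepresentable.
fn effective_jobs(requested: Option<NonZeroUsize>) -> usize {
  requested
    .unwrap_or_else(|| NonZeroUsize::new(1).unwrap())
    .into()
}

fn main() {
  assert_eq!(effective_jobs(None), 1);
  assert_eq!(effective_jobs(NonZeroUsize::new(8)), 8);
}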
965	cli/lsp/tsc.rs
File diff suppressed because it is too large.
@@ -1,7 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

-use crate::cache::LocalLspHttpCache;
-
 use deno_ast::MediaType;
 use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
@@ -12,6 +10,8 @@ use once_cell::sync::Lazy;
 use std::collections::HashMap;
 use std::sync::Arc;

+use super::cache::LspCache;
+
 /// Used in situations where a default URL needs to be used where otherwise a
 /// panic is undesired.
 pub static INVALID_SPECIFIER: Lazy<ModuleSpecifier> =
@@ -156,13 +156,13 @@ pub enum LspUrlKind {
 /// to allow the Deno language server to manage these as virtual documents.
 #[derive(Debug, Default, Clone)]
 pub struct LspUrlMap {
-  local_http_cache: Option<Arc<LocalLspHttpCache>>,
+  cache: LspCache,
   inner: Arc<Mutex<LspUrlMapInner>>,
 }

 impl LspUrlMap {
-  pub fn set_cache(&mut self, http_cache: Option<Arc<LocalLspHttpCache>>) {
-    self.local_http_cache = http_cache;
+  pub fn set_cache(&mut self, cache: &LspCache) {
+    self.cache = cache.clone();
   }

   /// Normalize a specifier that is used internally within Deno (or tsc) to a
@@ -170,13 +170,12 @@ impl LspUrlMap {
   pub fn normalize_specifier(
     &self,
     specifier: &ModuleSpecifier,
+    file_referrer: Option<&ModuleSpecifier>,
   ) -> Result<LspClientUrl, AnyError> {
-    if let Some(cache) = &self.local_http_cache {
-      if matches!(specifier.scheme(), "http" | "https") {
-        if let Some(file_url) = cache.get_file_url(specifier) {
-          return Ok(LspClientUrl(file_url));
-        }
-      }
+    if let Some(file_url) =
+      self.cache.vendored_specifier(specifier, file_referrer)
+    {
+      return Ok(LspClientUrl(file_url));
     }
     let mut inner = self.inner.lock();
     if let Some(url) = inner.get_url(specifier).cloned() {
@@ -220,14 +219,8 @@ impl LspUrlMap {
   /// so we need to force it to in the mapping and need to explicitly state whether
   /// this is a file or directory url.
   pub fn normalize_url(&self, url: &Url, kind: LspUrlKind) -> ModuleSpecifier {
-    if let Some(cache) = &self.local_http_cache {
-      if url.scheme() == "file" {
-        if let Ok(path) = url.to_file_path() {
-          if let Some(remote_url) = cache.get_remote_url(&path) {
-            return remote_url;
-          }
-        }
-      }
+    if let Some(remote_url) = self.cache.unvendored_specifier(url) {
+      return remote_url;
     }
     let mut inner = self.inner.lock();
     if let Some(specifier) = inner.get_specifier(url).cloned() {
@@ -296,7 +289,7 @@ mod tests {
     let map = LspUrlMap::default();
     let fixture = resolve_url("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap();
     let actual_url = map
-      .normalize_specifier(&fixture)
+      .normalize_specifier(&fixture, None)
       .expect("could not handle specifier");
     let expected_url =
       Url::parse("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap();
@@ -318,7 +311,7 @@ mod tests {
     assert_eq!(&actual_specifier, &expected_specifier);

     let actual_url = map
-      .normalize_specifier(&actual_specifier)
+      .normalize_specifier(&actual_specifier, None)
       .unwrap()
       .as_url()
       .clone();
@@ -331,7 +324,7 @@ mod tests {
     let map = LspUrlMap::default();
     let fixture = resolve_url("https://cdn.skypack.dev/-/postcss@v8.2.9-E4SktPp9c0AtxrJHp8iV/dist=es2020,mode=types/lib/postcss.d.ts").unwrap();
     let actual_url = map
-      .normalize_specifier(&fixture)
+      .normalize_specifier(&fixture, None)
       .expect("could not handle specifier");
     let expected_url = Url::parse("deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts").unwrap();
     assert_eq!(actual_url.as_url(), &expected_url);
@@ -346,7 +339,7 @@ mod tests {
     let map = LspUrlMap::default();
     let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap();
     let actual_url = map
-      .normalize_specifier(&fixture)
+      .normalize_specifier(&fixture, None)
       .expect("could not handle specifier");
     let expected_url = Url::parse("deno:/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37/data_url.ts").unwrap();
     assert_eq!(actual_url.as_url(), &expected_url);
@@ -361,7 +354,7 @@ mod tests {
     let map = LspUrlMap::default();
     let fixture = resolve_url("http://localhost:8000/mod.ts").unwrap();
     let actual_url = map
-      .normalize_specifier(&fixture)
+      .normalize_specifier(&fixture, None)
       .expect("could not handle specifier");
     let expected_url =
       Url::parse("deno:/http/localhost%3A8000/mod.ts").unwrap();
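The test expectations above show the shape of the `deno:` virtual-document URLs: scheme and host become leading path segments and reserved characters are percent-encoded. A rough, hand-rolled approximation of that mapping (not the actual `LspUrlMap` logic, which also handles data URLs and hashing):

// Approximation of the deno:/ virtual URL shape exercised by the tests.
// Only the two escapes the fixtures demonstrate are handled here.
fn to_deno_url(scheme: &str, host: &str, path: &str) -> String {
  let encoded = path.replace('@', "%40").replace(':', "%3A");
  format!("deno:/{scheme}/{host}{encoded}")
}

fn main() {
  assert_eq!(
    to_deno_url("https", "deno.land", "/x/pkg@1.0.0/mod.ts"),
    "deno:/https/deno.land/x/pkg%401.0.0/mod.ts"
  );
}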
26	cli/main.rs
@@ -21,6 +21,7 @@ mod npm;
 mod ops;
 mod resolver;
 mod standalone;
+mod task_runner;
 mod tools;
 mod tsc;
 mod util;
@@ -52,6 +53,7 @@ use factory::CliFactory;
 use std::env;
 use std::future::Future;
 use std::path::PathBuf;
+use std::sync::Arc;

 /// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type.
 trait SubcommandOutput {
@@ -89,7 +91,7 @@ fn spawn_subcommand<F: Future<Output = T> + 'static, T: SubcommandOutput>(
   )
 }

-async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
+async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
   let handle = match flags.subcommand.clone() {
     DenoSubcommand::Add(add_flags) => spawn_subcommand(async {
       tools::registry::add(flags, add_flags).await
@@ -111,7 +113,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
       tools::run::eval_command(flags, eval_flags).await
     }),
     DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move {
-      let factory = CliFactory::from_flags(flags)?;
+      let factory = CliFactory::from_flags(flags);
       let emitter = factory.emitter()?;
       let main_graph_container =
         factory.main_module_graph_container().await?;
@@ -121,7 +123,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
       emitter.cache_module_emits(&main_graph_container.graph()).await
     }),
     DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
-      let factory = CliFactory::from_flags(flags)?;
+      let factory = CliFactory::from_flags(flags);
       let main_graph_container =
         factory.main_module_graph_container().await?;
       main_graph_container
@@ -230,7 +232,6 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
     DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async {
       tools::vendor::vendor(flags, vendor_flags).await
     }),
-    // TODO:
     DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
       tools::registry::publish(flags, publish_flags).await
     }),
@@ -329,7 +330,7 @@ pub fn main() {
     // initialize the V8 platform on a parent thread of all threads that will spawn
     // V8 isolates.
     let flags = resolve_flags_and_init(args)?;
-    run_subcommand(flags).await
+    run_subcommand(Arc::new(flags)).await
   };

   match create_and_run_current_thread_with_maybe_metrics(future) {
@@ -347,7 +348,8 @@ fn resolve_flags_and_init(
       if err.kind() == clap::error::ErrorKind::DisplayHelp
         || err.kind() == clap::error::ErrorKind::DisplayVersion =>
     {
-      err.print().unwrap();
+      // Ignore results to avoid BrokenPipe errors.
+      let _ = err.print();
       std::process::exit(0);
     }
     Err(err) => exit_for_error(AnyError::from(err)),
@@ -384,10 +386,14 @@ fn resolve_flags_and_init(
       // TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast
       vec!["--no-harmony-import-assertions".to_string()]
     } else {
-      // If we're still in v1.X version we want to support import assertions.
-      // V8 12.6 unshipped the support by default, so force it by passing a
-      // flag.
-      vec!["--harmony-import-assertions".to_string()]
+      vec![
+        // If we're still in v1.X version we want to support import assertions.
+        // V8 12.6 unshipped the support by default, so force it by passing a
+        // flag.
+        "--harmony-import-assertions".to_string(),
+        // Verify with DENO_FUTURE for now.
+        "--no-maglev".to_string(),
+      ]
     }
   }
 };
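The final hunk chooses V8 flags by version branch: 2.0 builds turn import assertions off, while 1.x builds force them on and, per the diff, also pass `--no-maglev`. A sketch of that selection as a pure function:

// Sketch of the V8 flag selection in `main.rs`: pre-2.0 builds keep
// import assertions alive (V8 12.6 unshipped them by default), and the
// diff additionally disables Maglev behind the same branch.
fn v8_import_assertion_flags(is_v2: bool) -> Vec<String> {
  if is_v2 {
    vec!["--no-harmony-import-assertions".to_string()]
  } else {
    vec![
      "--harmony-import-assertions".to_string(),
      "--no-maglev".to_string(),
    ]
  }
}

fn main() {
  assert_eq!(v8_import_assertion_flags(false).len(), 2);
  assert_eq!(v8_import_assertion_flags(true).len(), 1);
}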
@@ -18,6 +18,7 @@ mod js;
 mod node;
 mod npm;
 mod resolver;
+mod task_runner;
 mod util;
 mod version;
 mod worker;
@@ -31,6 +32,7 @@ pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
 use deno_terminal::colors;

 use std::borrow::Cow;
+use std::collections::HashMap;
 use std::env;
 use std::env::current_exe;
@@ -70,6 +72,14 @@ fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T {
   }
 }

+fn load_env_vars(env_vars: &HashMap<String, String>) {
+  env_vars.iter().for_each(|env_var| {
+    if env::var(env_var.0).is_err() {
+      std::env::set_var(env_var.0, env_var.1);
+    }
+  })
+}
+
 fn main() {
   let args: Vec<_> = env::args_os().collect();
   let current_exe_path = current_exe().unwrap();
@@ -79,6 +89,8 @@ fn main() {
   match standalone {
     Ok(Some(future)) => {
       let (metadata, eszip) = future.await?;
+      util::logger::init(metadata.log_level);
+      load_env_vars(&metadata.env_vars_from_env_file);
       let exit_code = standalone::run(eszip, metadata).await?;
       std::process::exit(exit_code);
     }
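`load_env_vars` gives variables already present in the process environment precedence over values baked into a compiled binary's metadata: a variable is only set when `env::var` reports it missing. A self-contained version of the helper with a usage example:

use std::collections::HashMap;
use std::env;

// Standalone version of the `load_env_vars` helper added in the diff:
// existing process variables win over baked-in values.
fn load_env_vars(env_vars: &HashMap<String, String>) {
  env_vars.iter().for_each(|env_var| {
    if env::var(env_var.0).is_err() {
      env::set_var(env_var.0, env_var.1);
    }
  })
}

fn main() {
  env::set_var("ALREADY_SET", "user value");
  let mut vars = HashMap::new();
  vars.insert("ALREADY_SET".to_string(), "baked value".to_string());
  vars.insert("FROM_ENV_FILE".to_string(), "baked value".to_string());
  load_env_vars(&vars);
  assert_eq!(env::var("ALREADY_SET").unwrap(), "user value");
  assert_eq!(env::var("FROM_ENV_FILE").unwrap(), "baked value");
}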
@@ -9,7 +9,7 @@ use std::str;
 use std::sync::Arc;

 use crate::args::jsr_url;
-use crate::args::write_lockfile_if_has_changes;
+use crate::args::CliLockfile;
 use crate::args::CliOptions;
 use crate::args::DenoSubcommand;
 use crate::args::TsTypeLib;
@@ -45,7 +45,6 @@ use deno_core::error::generic_error;
 use deno_core::error::AnyError;
 use deno_core::futures::future::FutureExt;
 use deno_core::futures::Future;
-use deno_core::parking_lot::Mutex;
 use deno_core::resolve_url;
 use deno_core::ModuleCodeString;
 use deno_core::ModuleLoader;
@@ -56,7 +55,6 @@ use deno_core::ModuleType;
 use deno_core::RequestedModuleType;
 use deno_core::ResolutionKind;
 use deno_core::SourceCodeCacheInfo;
-use deno_core::SourceMapGetter;
 use deno_graph::source::ResolutionMode;
 use deno_graph::source::Resolver;
 use deno_graph::GraphKind;
@@ -65,22 +63,26 @@ use deno_graph::JsonModule;
 use deno_graph::Module;
 use deno_graph::ModuleGraph;
 use deno_graph::Resolution;
-use deno_lockfile::Lockfile;
 use deno_runtime::code_cache;
-use deno_runtime::deno_node::NodeResolutionMode;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_semver::npm::NpmPackageReqReference;
+use node_resolver::NodeResolutionMode;

 pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
   let npm_resolver = factory.npm_resolver().await?;
+  let cli_options = factory.cli_options()?;
   if let Some(npm_resolver) = npm_resolver.as_managed() {
-    npm_resolver.ensure_top_level_package_json_install().await?;
-    // TODO(nathanwhit): we call `cache_packages` if the lockfile is modified,
-    // so by calling it here it's possible we end up calling it twice
-    npm_resolver.cache_packages().await?;
+    if !npm_resolver.ensure_top_level_package_json_install().await? {
+      if let Some(lockfile) = cli_options.maybe_lockfile() {
+        lockfile.error_if_changed()?;
+      }
+
+      npm_resolver.cache_packages().await?;
+    }
   }
   // cache as many entries in the import map as we can
-  if let Some(import_map) = factory.maybe_import_map().await? {
+  let resolver = factory.workspace_resolver().await?;
+  if let Some(import_map) = resolver.maybe_import_map() {
     let roots = import_map
       .imports()
       .entries()
@@ -105,7 +107,7 @@ pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
     graph,
     &roots,
     false,
-    factory.cli_options().ts_type_lib_window(),
+    factory.cli_options()?.ts_type_lib_window(),
     deno_runtime::deno_permissions::PermissionsContainer::allow_all(),
   )
   .await?;
@@ -116,7 +118,7 @@ pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {

 pub struct ModuleLoadPreparer {
   options: Arc<CliOptions>,
-  lockfile: Option<Arc<Mutex<Lockfile>>>,
+  lockfile: Option<Arc<CliLockfile>>,
   module_graph_builder: Arc<ModuleGraphBuilder>,
   progress_bar: ProgressBar,
   type_checker: Arc<TypeChecker>,
@@ -126,7 +128,7 @@ impl ModuleLoadPreparer {
   #[allow(clippy::too_many_arguments)]
   pub fn new(
     options: Arc<CliOptions>,
-    lockfile: Option<Arc<Mutex<Lockfile>>>,
+    lockfile: Option<Arc<CliLockfile>>,
     module_graph_builder: Arc<ModuleGraphBuilder>,
     progress_bar: ProgressBar,
     type_checker: Arc<TypeChecker>,
@@ -177,7 +179,7 @@ impl ModuleLoadPreparer {

     // write the lockfile if there is one
     if let Some(lockfile) = &self.lockfile {
-      write_lockfile_if_has_changes(&mut lockfile.lock())?;
+      lockfile.write_if_changed()?;
     }

     drop(_pb_clear_guard);
@@ -291,8 +293,7 @@ impl CliModuleLoaderFactory {
       shared: self.shared.clone(),
     })));
     ModuleLoaderAndSourceMapGetter {
-      module_loader: loader.clone(),
-      source_map_getter: Some(loader),
+      module_loader: loader,
     }
   }
 }
@@ -445,15 +446,14 @@ impl<TGraphContainer: ModuleGraphContainer>
     specifier: &str,
     referrer: &ModuleSpecifier,
   ) -> Result<ModuleSpecifier, AnyError> {
-    if let Some(result) = self.shared.node_resolver.resolve_if_in_npm_package(
-      specifier,
-      referrer,
-      NodeResolutionMode::Execution,
-    ) {
-      return match result? {
-        Some(res) => Ok(res.into_url()),
-        None => Err(generic_error("not found")),
-      };
+    if self.shared.node_resolver.in_npm_package(referrer) {
+      return Ok(
+        self
+          .shared
+          .node_resolver
+          .resolve(specifier, referrer, NodeResolutionMode::Execution)?
+          .into_url(),
+      );
     }

     let graph = self.graph_container.graph();
@@ -509,22 +509,19 @@ impl<TGraphContainer: ModuleGraphContainer>
           .as_managed()
           .unwrap() // byonm won't create a Module::Npm
           .resolve_pkg_folder_from_deno_module(module.nv_reference.nv())?;
-        let maybe_resolution = self
+        self
           .shared
           .node_resolver
           .resolve_package_sub_path_from_deno_module(
             &package_folder,
             module.nv_reference.sub_path(),
-            referrer,
+            Some(referrer),
             NodeResolutionMode::Execution,
           )
           .with_context(|| {
             format!("Could not resolve '{}'.", module.nv_reference)
-          })?;
-        match maybe_resolution {
-          Some(res) => res.into_url(),
-          None => return Err(generic_error("not found")),
-        }
+          })?
+          .into_url()
       }
       Some(Module::Node(module)) => module.specifier.clone(),
       Some(Module::Js(module)) => module.specifier.clone(),
@@ -615,7 +612,8 @@ impl<TGraphContainer: ModuleGraphContainer>
     maybe_referrer: Option<&ModuleSpecifier>,
   ) -> Result<CodeOrDeferredEmit<'graph>, AnyError> {
     if specifier.scheme() == "node" {
-      unreachable!(); // Node built-in modules should be handled internally.
+      // Node built-in modules should be handled internally.
+      unreachable!("Deno bug. {} was misconfigured internally.", specifier);
     }

     match graph.get(specifier) {
@@ -829,11 +827,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
     }
     std::future::ready(()).boxed_local()
   }
-}

-impl<TGraphContainer: ModuleGraphContainer> SourceMapGetter
-  for CliModuleLoader<TGraphContainer>
-{
   fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
     let specifier = resolve_url(file_name).ok()?;
     match specifier.scheme() {
@@ -846,7 +840,7 @@ impl<TGraphContainer: ModuleGraphContainer> SourceMapGetter
     source_map_from_code(source.code.as_bytes())
   }

-  fn get_source_line(
+  fn get_source_mapped_source_line(
     &self,
     file_name: &str,
     line_number: usize,
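The new `load_top_level_deps` ordering only validates the lockfile and caches packages when the top-level package.json install reports it did no work, which avoids the duplicate `cache_packages` call the removed TODO complained about. A simplified sketch of that control flow with stand-in types:

// Simplified control-flow sketch of the new load_top_level_deps ordering;
// `Lockfile` and the closures are stand-ins, not the CLI's real types.
struct Lockfile;
impl Lockfile {
  fn error_if_changed(&self) -> Result<(), String> {
    Ok(())
  }
}

fn load_top_level_deps(
  install_did_work: bool,
  lockfile: Option<&Lockfile>,
  cache_packages: impl Fn() -> Result<(), String>,
) -> Result<(), String> {
  if !install_did_work {
    // install did nothing, so verify the lockfile then cache exactly once
    if let Some(lockfile) = lockfile {
      lockfile.error_if_changed()?;
    }
    cache_packages()?;
  }
  Ok(())
}

fn main() {
  let lockfile = Lockfile;
  load_top_level_deps(false, Some(&lockfile), || Ok(())).unwrap();
}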
@@ -1180,27 +1180,57 @@ fn napi_create_string_utf16(
 #[napi_sym]
 fn node_api_create_external_string_latin1(
   env_ptr: *mut Env,
-  _string: *const c_char,
-  _length: usize,
-  _nogc_finalize_callback: napi_finalize,
-  _finalize_hint: *mut c_void,
-  _result: *mut napi_value,
-  _copied: *mut bool,
+  string: *const c_char,
+  length: usize,
+  nogc_finalize_callback: Option<napi_finalize>,
+  finalize_hint: *mut c_void,
+  result: *mut napi_value,
+  copied: *mut bool,
 ) -> napi_status {
-  return napi_set_last_error(env_ptr, napi_generic_failure);
+  let status =
+    unsafe { napi_create_string_latin1(env_ptr, string, length, result) };
+
+  if status == napi_ok {
+    unsafe {
+      *copied = true;
+    }
+
+    if let Some(finalize) = nogc_finalize_callback {
+      unsafe {
+        finalize(env_ptr as napi_env, string as *mut c_void, finalize_hint);
+      }
+    }
+  }
+
+  status
 }

 #[napi_sym]
 fn node_api_create_external_string_utf16(
   env_ptr: *mut Env,
-  _string: *const u16,
-  _length: usize,
-  _nogc_finalize_callback: napi_finalize,
-  _finalize_hint: *mut c_void,
-  _result: *mut napi_value,
-  _copied: *mut bool,
+  string: *const u16,
+  length: usize,
+  nogc_finalize_callback: Option<napi_finalize>,
+  finalize_hint: *mut c_void,
+  result: *mut napi_value,
+  copied: *mut bool,
 ) -> napi_status {
-  return napi_set_last_error(env_ptr, napi_generic_failure);
+  let status =
+    unsafe { napi_create_string_utf16(env_ptr, string, length, result) };
+
+  if status == napi_ok {
+    unsafe {
+      *copied = true;
+    }
+
+    if let Some(finalize) = nogc_finalize_callback {
+      unsafe {
+        finalize(env_ptr as napi_env, string as *mut c_void, finalize_hint);
+      }
+    }
+  }
+
+  status
 }

 #[napi_sym]
@@ -2793,8 +2823,8 @@ fn napi_instanceof(
   unsafe {
     napi_throw_type_error(
       env,
-      "ERR_NAPI_CONS_FUNCTION\0".as_ptr() as _,
-      "Constructor must be a function\0".as_ptr() as _,
+      c"ERR_NAPI_CONS_FUNCTION".as_ptr(),
+      c"Constructor must be a function".as_ptr(),
     );
   }
   return napi_function_expected;
@@ -3147,8 +3177,8 @@ fn napi_create_dataview<'s>(
   unsafe {
     return napi_throw_range_error(
      env,
-      "ERR_NAPI_INVALID_DATAVIEW_ARGS\0".as_ptr() as _,
-      "byte_offset + byte_length should be less than or equal to the size in bytes of the array passed in\0".as_ptr() as _,
+      c"ERR_NAPI_INVALID_DATAVIEW_ARGS".as_ptr(),
+      c"byte_offset + byte_length should be less than or equal to the size in bytes of the array passed in".as_ptr(),
     );
   }
 }
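The two `node_api_create_external_string_*` stubs above stop failing unconditionally and instead fall back to copying: they create an ordinary engine string, report `copied = true`, and run the caller's finalizer immediately since the caller's buffer is never retained. A safe-Rust analog of that contract:

// Safe-Rust analog of the external-string fallback implemented above:
// when an engine cannot adopt caller-owned memory, it copies the data,
// reports `copied = true`, and immediately runs the caller's finalizer
// because the caller's buffer is no longer referenced.
fn create_external_string(
  data: &str,
  finalizer: Option<Box<dyn FnOnce()>>,
) -> (String, bool) {
  let copied = data.to_string(); // fallback: always copy
  if let Some(finalize) = finalizer {
    finalize(); // the original buffer can be reclaimed right away
  }
  (copied, true)
}

fn main() {
  let (s, copied) =
    create_external_string("hello", Some(Box::new(|| println!("freed"))));
  assert!(copied);
  assert_eq!(s, "hello");
}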
@@ -426,22 +426,14 @@ fn napi_get_buffer_info(
   let env = check_env!(env);
   check_arg!(env, value);

+  // NB: Any TypedArray instance seems to be accepted by this function
+  // in Node.js.
   let Some(ta) =
     value.and_then(|v| v8::Local::<v8::TypedArray>::try_from(v).ok())
   else {
     return napi_set_last_error(env, napi_invalid_arg);
   };

-  let buffer_constructor =
-    v8::Local::new(&mut env.scope(), &env.buffer_constructor);
-
-  if !ta
-    .instance_of(&mut env.scope(), buffer_constructor.into())
-    .unwrap_or(false)
-  {
-    return napi_set_last_error(env, napi_invalid_arg);
-  }
-
   if !data.is_null() {
     unsafe {
       *data = ta.data();
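The buffer-info hunk relaxes the argument check to accept any TypedArray, matching the Node.js behavior noted in the added comment, instead of insisting on a `Buffer` instance. A toy illustration of the looser acceptance rule:

// Toy illustration of the relaxed check: any TypedArray-like value is
// accepted; only non-TypedArray values are rejected as invalid.
enum Value {
  TypedArray { byte_length: usize },
  Other,
}

fn get_buffer_info(value: &Value) -> Result<usize, &'static str> {
  match value {
    Value::TypedArray { byte_length } => Ok(*byte_length),
    Value::Other => Err("invalid argument"),
  }
}

fn main() {
  assert_eq!(get_buffer_info(&Value::TypedArray { byte_length: 16 }), Ok(16));
  assert!(get_buffer_info(&Value::Other).is_err());
}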
@@ -2,7 +2,7 @@

 [package]
 name = "napi_sym"
-version = "0.86.0"
+version = "0.93.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
12	cli/node.rs
@@ -6,10 +6,11 @@ use deno_ast::MediaType;
 use deno_ast::ModuleSpecifier;
 use deno_core::error::AnyError;
 use deno_runtime::deno_fs;
-use deno_runtime::deno_node::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
-use deno_runtime::deno_node::analyze::CjsAnalysisExports;
-use deno_runtime::deno_node::analyze::CjsCodeAnalyzer;
-use deno_runtime::deno_node::analyze::NodeCodeTranslator;
+use deno_runtime::deno_node::DenoFsNodeResolverEnv;
+use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
+use node_resolver::analyze::CjsAnalysisExports;
+use node_resolver::analyze::CjsCodeAnalyzer;
+use node_resolver::analyze::NodeCodeTranslator;
 use serde::Deserialize;
 use serde::Serialize;
@@ -17,7 +18,8 @@ use crate::cache::CacheDBHash;
 use crate::cache::NodeAnalysisCache;
 use crate::util::fs::canonicalize_path_maybe_not_exists;

-pub type CliNodeCodeTranslator = NodeCodeTranslator<CliCjsCodeAnalyzer>;
+pub type CliNodeCodeTranslator =
+  NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>;

 /// Resolves a specifier that is pointing into a node_modules folder.
 ///
281	cli/npm/byonm.rs
@@ -3,20 +3,27 @@
 use std::borrow::Cow;
 use std::path::Path;
 use std::path::PathBuf;
-use std::rc::Rc;
 use std::sync::Arc;

 use deno_ast::ModuleSpecifier;
 use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
 use deno_core::serde_json;
+use deno_package_json::PackageJsonDepValue;
 use deno_runtime::deno_fs::FileSystem;
+use deno_runtime::deno_node::DenoPkgJsonFsAdapter;
 use deno_runtime::deno_node::NodePermissions;
-use deno_runtime::deno_node::NpmResolver;
+use deno_runtime::deno_node::NodeRequireResolver;
+use deno_runtime::deno_node::NpmProcessStateProvider;
 use deno_runtime::deno_node::PackageJson;
 use deno_semver::package::PackageReq;
+use node_resolver::errors::PackageFolderResolveError;
+use node_resolver::errors::PackageFolderResolveIoError;
+use node_resolver::errors::PackageJsonLoadError;
+use node_resolver::errors::PackageNotFoundError;
+use node_resolver::load_pkg_json;
+use node_resolver::NpmResolver;

-use crate::args::package_json::get_local_package_json_version_reqs;
 use crate::args::NpmProcessState;
 use crate::args::NpmProcessStateKind;
 use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
@@ -27,7 +34,8 @@ use super::InnerCliNpmResolverRef;

 pub struct CliNpmResolverByonmCreateOptions {
   pub fs: Arc<dyn FileSystem>,
-  pub root_node_modules_dir: PathBuf,
+  // todo(dsherret): investigate removing this
+  pub root_node_modules_dir: Option<PathBuf>,
 }

 pub fn create_byonm_npm_resolver(
@@ -42,7 +50,16 @@ pub fn create_byonm_npm_resolver(
 #[derive(Debug)]
 pub struct ByonmCliNpmResolver {
   fs: Arc<dyn FileSystem>,
-  root_node_modules_dir: PathBuf,
+  root_node_modules_dir: Option<PathBuf>,
+}
+
+impl ByonmCliNpmResolver {
+  fn load_pkg_json(
+    &self,
+    path: &Path,
+  ) -> Result<Option<Arc<PackageJson>>, PackageJsonLoadError> {
+    load_pkg_json(&DenoPkgJsonFsAdapter(self.fs.as_ref()), path)
+  }
 }

 impl ByonmCliNpmResolver {
@@ -51,14 +68,12 @@ impl ByonmCliNpmResolver {
     &self,
     dep_name: &str,
     referrer: &ModuleSpecifier,
-  ) -> Option<Rc<PackageJson>> {
+  ) -> Option<Arc<PackageJson>> {
     let referrer_path = referrer.to_file_path().ok()?;
     let mut current_folder = referrer_path.parent()?;
     loop {
       let pkg_json_path = current_folder.join("package.json");
-      if let Ok(pkg_json) =
-        PackageJson::load_skip_read_permission(self.fs.as_ref(), pkg_json_path)
-      {
+      if let Ok(Some(pkg_json)) = self.load_pkg_json(&pkg_json_path) {
         if let Some(deps) = &pkg_json.dependencies {
           if deps.contains_key(dep_name) {
             return Some(pkg_json);
@@ -78,59 +93,126 @@ impl ByonmCliNpmResolver {
       }
     }
   }
+
+  fn resolve_pkg_json_and_alias_for_req(
+    &self,
+    req: &PackageReq,
+    referrer: &ModuleSpecifier,
+  ) -> Result<(Arc<PackageJson>, String), AnyError> {
+    fn resolve_alias_from_pkg_json(
+      req: &PackageReq,
+      pkg_json: &PackageJson,
+    ) -> Option<String> {
+      let deps = pkg_json.resolve_local_package_json_deps();
+      for (key, value) in deps {
+        if let Ok(value) = value {
+          match value {
+            PackageJsonDepValue::Req(dep_req) => {
+              if dep_req.name == req.name
+                && dep_req.version_req.intersects(&req.version_req)
+              {
+                return Some(key);
+              }
+            }
+            PackageJsonDepValue::Workspace(_workspace) => {
+              if key == req.name && req.version_req.tag() == Some("workspace") {
+                return Some(key);
+              }
+            }
+          }
+        }
+      }
+      None
+    }
+
+    // attempt to resolve the npm specifier from the referrer's package.json,
+    if let Ok(file_path) = specifier_to_file_path(referrer) {
+      let mut current_path = file_path.as_path();
+      while let Some(dir_path) = current_path.parent() {
+        let package_json_path = dir_path.join("package.json");
+        if let Some(pkg_json) = self.load_pkg_json(&package_json_path)? {
+          if let Some(alias) =
+            resolve_alias_from_pkg_json(req, pkg_json.as_ref())
+          {
+            return Ok((pkg_json, alias));
+          }
+        }
+        current_path = dir_path;
+      }
+    }
+
+    // otherwise, fall back to the project's package.json
+    if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
+      let root_pkg_json_path =
+        root_node_modules_dir.parent().unwrap().join("package.json");
+      if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? {
+        if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref())
+        {
+          return Ok((pkg_json, alias));
+        }
+      }
+    }
+
+    bail!(
+      concat!(
+        "Could not find a matching package for 'npm:{}' in a package.json file. ",
+        "You must specify this as a package.json dependency when the ",
+        "node_modules folder is not managed by Deno.",
+      ),
+      req,
+    );
+  }
 }

 impl NpmResolver for ByonmCliNpmResolver {
-  fn get_npm_process_state(&self) -> String {
-    serde_json::to_string(&NpmProcessState {
-      kind: NpmProcessStateKind::Byonm,
-      local_node_modules_path: Some(
-        self.root_node_modules_dir.to_string_lossy().to_string(),
-      ),
-    })
-    .unwrap()
-  }
-
   fn resolve_package_folder_from_package(
     &self,
     name: &str,
     referrer: &ModuleSpecifier,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PackageFolderResolveError> {
     fn inner(
       fs: &dyn FileSystem,
       name: &str,
       referrer: &ModuleSpecifier,
-    ) -> Result<PathBuf, AnyError> {
-      let referrer_file = specifier_to_file_path(referrer)?;
-      let mut current_folder = referrer_file.parent().unwrap();
-      loop {
-        let node_modules_folder = if current_folder.ends_with("node_modules") {
-          Cow::Borrowed(current_folder)
-        } else {
-          Cow::Owned(current_folder.join("node_modules"))
-        };
+    ) -> Result<PathBuf, PackageFolderResolveError> {
+      let maybe_referrer_file = specifier_to_file_path(referrer).ok();
+      let maybe_start_folder =
+        maybe_referrer_file.as_ref().and_then(|f| f.parent());
+      if let Some(start_folder) = maybe_start_folder {
+        for current_folder in start_folder.ancestors() {
+          let node_modules_folder = if current_folder.ends_with("node_modules")
+          {
+            Cow::Borrowed(current_folder)
+          } else {
+            Cow::Owned(current_folder.join("node_modules"))
+          };

-        let sub_dir = join_package_name(&node_modules_folder, name);
-        if fs.is_dir_sync(&sub_dir) {
-          return Ok(sub_dir);
-        }
-
-        if let Some(parent) = current_folder.parent() {
-          current_folder = parent;
-        } else {
-          break;
+          let sub_dir = join_package_name(&node_modules_folder, name);
+          if fs.is_dir_sync(&sub_dir) {
+            return Ok(sub_dir);
+          }
         }
       }

-      bail!(
-        "could not find package '{}' from referrer '{}'.",
-        name,
-        referrer
-      );
+      Err(
+        PackageNotFoundError {
+          package_name: name.to_string(),
+          referrer: referrer.clone(),
+          referrer_extra: None,
+        }
+        .into(),
+      )
     }

     let path = inner(&*self.fs, name, referrer)?;
-    Ok(self.fs.realpath_sync(&path)?)
+    self.fs.realpath_sync(&path).map_err(|err| {
+      PackageFolderResolveIoError {
+        package_name: name.to_string(),
+        referrer: referrer.clone(),
+        source: err.into_io_error(),
+      }
+      .into()
+    })
   }

   fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
@@ -140,7 +222,9 @@ impl NpmResolver for ByonmCliNpmResolver {
       .to_ascii_lowercase()
       .contains("/node_modules/")
   }
+}

+impl NodeRequireResolver for ByonmCliNpmResolver {
   fn ensure_read_permission(
     &self,
     permissions: &mut dyn NodePermissions,
@@ -156,11 +240,34 @@ impl NodeRequireResolver for ByonmCliNpmResolver {
   }
 }

+impl NpmProcessStateProvider for ByonmCliNpmResolver {
+  fn get_npm_process_state(&self) -> String {
+    serde_json::to_string(&NpmProcessState {
+      kind: NpmProcessStateKind::Byonm,
+      local_node_modules_path: self
+        .root_node_modules_dir
+        .as_ref()
+        .map(|p| p.to_string_lossy().to_string()),
+    })
+    .unwrap()
+  }
+}
+
 impl CliNpmResolver for ByonmCliNpmResolver {
   fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
     self
   }

+  fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
+    self
+  }
+
+  fn into_process_state_provider(
+    self: Arc<Self>,
+  ) -> Arc<dyn NpmProcessStateProvider> {
+    self
+  }
+
   fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
     Arc::new(Self {
       fs: self.fs.clone(),
@@ -173,7 +280,7 @@ impl CliNpmResolver for ByonmCliNpmResolver {
   }

   fn root_node_modules_path(&self) -> Option<&PathBuf> {
-    Some(&self.root_node_modules_dir)
+    self.root_node_modules_dir.as_ref()
   }

   fn resolve_pkg_folder_from_deno_module_req(
@@ -181,68 +288,34 @@ impl CliNpmResolver for ByonmCliNpmResolver {
     req: &PackageReq,
     referrer: &ModuleSpecifier,
   ) -> Result<PathBuf, AnyError> {
-    fn resolve_from_package_json(
-      req: &PackageReq,
-      fs: &dyn FileSystem,
-      path: PathBuf,
-    ) -> Result<PathBuf, AnyError> {
-      let package_json = PackageJson::load_skip_read_permission(fs, path)?;
-      let deps = get_local_package_json_version_reqs(&package_json);
-      for (key, value) in deps {
-        if let Ok(value) = value {
-          if value.name == req.name
-            && value.version_req.intersects(&req.version_req)
-          {
-            let package_path = package_json
-              .path
-              .parent()
-              .unwrap()
-              .join("node_modules")
-              .join(key);
-            return Ok(canonicalize_path_maybe_not_exists_with_fs(
-              &package_path,
-              fs,
-            )?);
-          }
-        }
-      }
-      bail!(
-        concat!(
-          "Could not find a matching package for 'npm:{}' in '{}'. ",
-          "You must specify this as a package.json dependency when the ",
-          "node_modules folder is not managed by Deno.",
-        ),
-        req,
-        package_json.path.display()
-      );
-    }
-
-    // attempt to resolve the npm specifier from the referrer's package.json,
-    // but otherwise fall back to the project's package.json
-    if let Ok(file_path) = specifier_to_file_path(referrer) {
-      let mut current_path = file_path.as_path();
-      while let Some(dir_path) = current_path.parent() {
-        let package_json_path = dir_path.join("package.json");
-        if self.fs.exists_sync(&package_json_path) {
-          return resolve_from_package_json(
-            req,
-            self.fs.as_ref(),
-            package_json_path,
-          );
-        }
-        current_path = dir_path;
-      }
-    }
-
-    resolve_from_package_json(
-      req,
-      self.fs.as_ref(),
-      self
-        .root_node_modules_dir
-        .parent()
-        .unwrap()
-        .join("package.json"),
-    )
+    // resolve the pkg json and alias
+    let (pkg_json, alias) =
+      self.resolve_pkg_json_and_alias_for_req(req, referrer)?;
+    // now try node resolution
+    for ancestor in pkg_json.path.parent().unwrap().ancestors() {
+      let node_modules_folder = ancestor.join("node_modules");
+      let sub_dir = join_package_name(&node_modules_folder, &alias);
+      if self.fs.is_dir_sync(&sub_dir) {
+        return Ok(canonicalize_path_maybe_not_exists_with_fs(
+          &sub_dir,
+          self.fs.as_ref(),
+        )?);
+      }
+    }
+
+    bail!(
+      concat!(
+        "Could not find \"{}\" in a node_modules folder. ",
+        "Deno expects the node_modules/ directory to be up to date. ",
+        "Did you forget to run `{}`?"
+      ),
+      alias,
+      if *crate::args::DENO_FUTURE {
+        "deno install"
+      } else {
+        "npm install"
+      }
    );
   }

   fn check_state_hash(&self) -> Option<u64> {
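`resolve_package_folder_from_package` now walks the referrer's ancestor directories with `Path::ancestors()` instead of a manual loop, probing each level's `node_modules` for the package. A standalone sketch of that walk with the filesystem check injected so it runs anywhere:

use std::path::{Path, PathBuf};

// Standalone sketch of the node_modules ancestor walk the byonm resolver
// uses: starting at the referrer's directory, check each ancestor's
// node_modules for the package. `is_dir` is injected so the example runs
// without touching the real filesystem.
fn resolve_package_folder(
  referrer_dir: &Path,
  name: &str,
  is_dir: impl Fn(&Path) -> bool,
) -> Option<PathBuf> {
  for current_folder in referrer_dir.ancestors() {
    let node_modules_folder = if current_folder.ends_with("node_modules") {
      current_folder.to_path_buf()
    } else {
      current_folder.join("node_modules")
    };
    let sub_dir = node_modules_folder.join(name);
    if is_dir(&sub_dir) {
      return Some(sub_dir);
    }
  }
  None
}

fn main() {
  let found = resolve_package_folder(
    Path::new("/project/src/feature"),
    "chalk",
    |p| p == Path::new("/project/node_modules/chalk"),
  );
  assert_eq!(found, Some(PathBuf::from("/project/node_modules/chalk")));
}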
@@ -1,7 +1,7 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

 use deno_npm::npm_rc::RegistryConfig;
-use reqwest::header;
+use http::header;

 // TODO(bartlomieju): support more auth methods besides token and basic auth
 pub fn maybe_auth_header_for_npm_registry(
4	cli/npm/managed/cache/tarball.rs
@@ -11,12 +11,12 @@ use deno_core::error::AnyError;
 use deno_core::futures::future::LocalBoxFuture;
 use deno_core::futures::FutureExt;
 use deno_core::parking_lot::Mutex;
+use deno_core::url::Url;
 use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::registry::NpmPackageVersionDistInfo;
 use deno_runtime::deno_fs::FileSystem;
 use deno_semver::package::PackageNv;
-use reqwest::StatusCode;
-use reqwest::Url;
+use http::StatusCode;

 use crate::args::CacheSetting;
 use crate::http_util::DownloadError;
@ -9,7 +9,6 @@ use cache::TarballCache;
|
|||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::serde_json;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_npm::registry::NpmPackageInfo;
|
||||
|
@ -22,15 +21,20 @@ use deno_npm::NpmResolutionPackage;
|
|||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NpmResolver;
|
||||
use deno_runtime::deno_node::NodeRequireResolver;
|
||||
use deno_runtime::deno_node::NpmProcessStateProvider;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use node_resolver::errors::PackageFolderResolveError;
|
||||
use node_resolver::errors::PackageFolderResolveIoError;
|
||||
use node_resolver::NpmResolver;
|
||||
use resolution::AddPkgReqsResult;
|
||||
|
||||
use crate::args::Lockfile;
|
||||
use crate::args::CliLockfile;
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
|
@ -53,13 +57,13 @@ mod resolution;
|
|||
mod resolvers;
|
||||
|
||||
pub enum CliNpmResolverManagedSnapshotOption {
|
||||
ResolveFromLockfile(Arc<Mutex<Lockfile>>),
|
||||
ResolveFromLockfile(Arc<CliLockfile>),
|
||||
Specified(Option<ValidSerializedNpmResolutionSnapshot>),
|
||||
}
|
||||
|
||||
pub struct CliNpmResolverManagedCreateOptions {
|
||||
pub snapshot: CliNpmResolverManagedSnapshotOption,
|
||||
pub maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
pub maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
||||
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
|
||||
pub npm_global_cache_dir: PathBuf,
|
||||
|
@ -67,8 +71,9 @@ pub struct CliNpmResolverManagedCreateOptions {
|
|||
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
||||
pub maybe_node_modules_path: Option<PathBuf>,
|
||||
pub npm_system_info: NpmSystemInfo,
|
||||
pub package_json_deps_provider: Arc<PackageJsonDepsProvider>,
|
||||
pub package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
pub npmrc: Arc<ResolvedNpmRc>,
|
||||
pub lifecycle_scripts: LifecycleScriptsConfig,
|
||||
}
|
||||
|
||||
pub async fn create_managed_npm_resolver_for_lsp(
|
||||
|
@ -97,6 +102,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
|
|||
options.maybe_node_modules_path,
|
||||
options.npm_system_info,
|
||||
snapshot,
|
||||
options.lifecycle_scripts,
|
||||
)
|
||||
})
|
||||
.await
|
||||
|
@ -121,6 +127,7 @@ pub async fn create_managed_npm_resolver(
|
|||
options.maybe_node_modules_path,
|
||||
options.npm_system_info,
|
||||
snapshot,
|
||||
options.lifecycle_scripts,
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -128,15 +135,16 @@ pub async fn create_managed_npm_resolver(
|
|||
fn create_inner(
|
||||
fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||
npm_api: Arc<CliNpmRegistryApi>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
npm_rc: Arc<ResolvedNpmRc>,
|
||||
package_json_deps_provider: Arc<PackageJsonDepsProvider>,
|
||||
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
||||
node_modules_dir_path: Option<PathBuf>,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
) -> Arc<dyn CliNpmResolver> {
|
||||
let resolution = Arc::new(NpmResolution::from_serialized(
|
||||
npm_api.clone(),
|
||||
|
@ -153,11 +161,13 @@ fn create_inner(
|
|||
let fs_resolver = create_npm_fs_resolver(
|
||||
fs.clone(),
|
||||
npm_cache.clone(),
|
||||
&package_json_deps_provider,
|
||||
&text_only_progress_bar,
|
||||
resolution.clone(),
|
||||
tarball_cache.clone(),
|
||||
node_modules_dir_path,
|
||||
npm_system_info.clone(),
|
||||
lifecycle_scripts.clone(),
|
||||
);
|
||||
Arc::new(ManagedCliNpmResolver::new(
|
||||
fs,
|
||||
|
@ -170,6 +180,7 @@ fn create_inner(
|
|||
tarball_cache,
|
||||
text_only_progress_bar,
|
||||
npm_system_info,
|
||||
lifecycle_scripts,
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -205,14 +216,11 @@ async fn resolve_snapshot(
|
|||
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, AnyError> {
|
||||
match snapshot {
|
||||
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
|
||||
if !lockfile.lock().overwrite {
|
||||
if !lockfile.overwrite() {
|
||||
let snapshot = snapshot_from_lockfile(lockfile.clone(), api)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed reading lockfile '{}'",
|
||||
lockfile.lock().filename.display()
|
||||
)
|
||||
format!("failed reading lockfile '{}'", lockfile.filename.display())
|
||||
})?;
|
||||
Ok(Some(snapshot))
|
||||
} else {
|
||||
|
@ -224,7 +232,7 @@ async fn resolve_snapshot(
|
|||
}
|
||||
|
||||
async fn snapshot_from_lockfile(
|
||||
lockfile: Arc<Mutex<Lockfile>>,
|
||||
lockfile: Arc<CliLockfile>,
|
||||
api: &dyn NpmRegistryApi,
|
||||
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
|
||||
let (incomplete_snapshot, skip_integrity_check) = {
|
||||
|
@@ -250,15 +258,16 @@ async fn snapshot_from_lockfile(
pub struct ManagedCliNpmResolver {
fs: Arc<dyn FileSystem>,
fs_resolver: Arc<dyn NpmPackageFsResolver>,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
package_json_deps_provider: Arc<PackageJsonDepsProvider>,
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
top_level_install_flag: AtomicFlag,
lifecycle_scripts: LifecycleScriptsConfig,
}

impl std::fmt::Debug for ManagedCliNpmResolver {

@@ -274,14 +283,15 @@ impl ManagedCliNpmResolver {
pub fn new(
fs: Arc<dyn FileSystem>,
fs_resolver: Arc<dyn NpmPackageFsResolver>,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
package_json_deps_provider: Arc<PackageJsonDepsProvider>,
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self {
Self {
fs,

@@ -295,6 +305,7 @@ impl ManagedCliNpmResolver {
tarball_cache,
npm_system_info,
top_level_install_flag: Default::default(),
lifecycle_scripts,
}
}
@@ -365,12 +376,15 @@ impl ManagedCliNpmResolver {
}

/// Adds package requirements to the resolver and ensures everything is set up.
/// This includes setting up the `node_modules` directory, if applicable.
pub async fn add_package_reqs(
&self,
packages: &[PackageReq],
) -> Result<(), AnyError> {
let result = self.add_package_reqs_raw(packages).await;
result.dependencies_result
self
.add_package_reqs_raw(packages)
.await
.dependencies_result
}

pub async fn add_package_reqs_raw(

@@ -385,6 +399,12 @@ impl ManagedCliNpmResolver {
}

let mut result = self.resolution.add_package_reqs(packages).await;

if result.dependencies_result.is_ok() {
if let Some(lockfile) = self.maybe_lockfile.as_ref() {
result.dependencies_result = lockfile.error_if_changed();
}
}
if result.dependencies_result.is_ok() {
result.dependencies_result =
self.cache_packages().await.map_err(AnyError::from);
@@ -446,29 +466,41 @@ impl ManagedCliNpmResolver {
self.resolution.resolve_pkg_id_from_pkg_req(req)
}

/// Ensures that the top level `package.json` dependencies are installed.
/// This may set up the `node_modules` directory.
///
/// Returns `true` if any changes (such as caching packages) were made.
/// If this returns `false`, `node_modules` has _not_ been set up.
pub async fn ensure_top_level_package_json_install(
&self,
) -> Result<(), AnyError> {
let Some(reqs) = self.package_json_deps_provider.reqs() else {
return Ok(());
};
) -> Result<bool, AnyError> {
if !self.top_level_install_flag.raise() {
return Ok(()); // already did this
return Ok(false); // already did this
}
let pkg_json_remote_pkgs = self.package_json_deps_provider.remote_pkgs();
if pkg_json_remote_pkgs.is_empty() {
return Ok(false);
}

// check if something needs resolving before bothering to load all
// the package information (which is slow)
if reqs
.iter()
.all(|req| self.resolution.resolve_pkg_id_from_pkg_req(req).is_ok())
{
if pkg_json_remote_pkgs.iter().all(|pkg| {
self
.resolution
.resolve_pkg_id_from_pkg_req(&pkg.req)
.is_ok()
}) {
log::debug!(
"All package.json deps resolvable. Skipping top level install."
);
return Ok(()); // everything is already resolvable
return Ok(false); // everything is already resolvable
}

let reqs = reqs.into_iter().cloned().collect::<Vec<_>>();
self.add_package_reqs(&reqs).await
let pkg_reqs = pkg_json_remote_pkgs
.iter()
.map(|pkg| pkg.req.clone())
.collect::<Vec<_>>();
self.add_package_reqs(&pkg_reqs).await.map(|_| true)
}
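The return type change above (from `Result<(), _>` to `Result<bool, _>`) lets callers tell whether an install actually happened. A hypothetical call-site sketch, with illustrative names, showing how the flag would typically be consumed:

```rust
// Assumed context: `resolver` is the ManagedCliNpmResolver from the hunk
// above and AnyError is deno_core's error type.
async fn prepare(resolver: &ManagedCliNpmResolver) -> Result<(), AnyError> {
  let changed = resolver.ensure_top_level_package_json_install().await?;
  if changed {
    // packages were cached / node_modules was set up on this call
    log::debug!("top level package.json deps were installed");
  }
  Ok(())
}
```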
pub async fn cache_package_info(

@@ -488,34 +520,34 @@ impl ManagedCliNpmResolver {
}
}

impl NpmResolver for ManagedCliNpmResolver {
/// Gets the state of npm for the process.
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(
self
.resolution
.serialized_valid_snapshot()
.into_serialized(),
),
local_node_modules_path: self
.fs_resolver
.node_modules_path()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
impl NpmResolver for ManagedCliNpmResolver {
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
) -> Result<PathBuf, PackageFolderResolveError> {
let path = self
.fs_resolver
.resolve_package_folder_from_package(name, referrer)?;
let path =
canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())?;
canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())
.map_err(|err| PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err,
})?;
log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
Ok(path)
}

@@ -525,7 +557,9 @@ impl NpmResolver for ManagedCliNpmResolver {
debug_assert!(root_dir_url.as_str().ends_with('/'));
specifier.as_ref().starts_with(root_dir_url.as_str())
}
}

impl NodeRequireResolver for ManagedCliNpmResolver {
fn ensure_read_permission(
&self,
permissions: &mut dyn NodePermissions,

@@ -535,11 +569,30 @@ impl NpmResolver for ManagedCliNpmResolver {
}
}

impl NpmProcessStateProvider for ManagedCliNpmResolver {
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.resolution.serialized_valid_snapshot(),
self.fs_resolver.node_modules_path().map(|p| p.as_path()),
)
}
}
impl CliNpmResolver for ManagedCliNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
self
}

fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
self
}

fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
self
}

fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
// create a new snapshotted npm resolution and resolver
let npm_resolution = Arc::new(NpmResolution::new(

@@ -553,11 +606,13 @@ impl CliNpmResolver for ManagedCliNpmResolver {
create_npm_fs_resolver(
self.fs.clone(),
self.npm_cache.clone(),
&self.package_json_deps_provider,
&self.text_only_progress_bar,
npm_resolution.clone(),
self.tarball_cache.clone(),
self.root_node_modules_path().map(ToOwned::to_owned),
self.npm_system_info.clone(),
self.lifecycle_scripts.clone(),
),
self.maybe_lockfile.clone(),
self.npm_api.clone(),

@@ -567,6 +622,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.tarball_cache.clone(),
self.text_only_progress_bar.clone(),
self.npm_system_info.clone(),
self.lifecycle_scripts.clone(),
))
}

@@ -5,7 +5,6 @@ use std::collections::HashSet;
use std::sync::Arc;

use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
use deno_npm::registry::NpmRegistryApi;

@@ -27,7 +26,7 @@ use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::VersionReq;

use crate::args::Lockfile;
use crate::args::CliLockfile;
use crate::util::sync::SyncReadAsyncWriteLock;

use super::CliNpmRegistryApi;

@@ -50,7 +49,7 @@ pub struct AddPkgReqsResult {
pub struct NpmResolution {
api: Arc<CliNpmRegistryApi>,
snapshot: SyncReadAsyncWriteLock<NpmResolutionSnapshot>,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
}

impl std::fmt::Debug for NpmResolution {

@@ -66,7 +65,7 @@ impl NpmResolution {
pub fn from_serialized(
api: Arc<CliNpmRegistryApi>,
initial_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
) -> Self {
let snapshot =
NpmResolutionSnapshot::new(initial_snapshot.unwrap_or_default());

@@ -76,7 +75,7 @@ impl NpmResolution {
pub fn new(
api: Arc<CliNpmRegistryApi>,
initial_snapshot: NpmResolutionSnapshot,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
) -> Self {
Self {
api,

@@ -262,7 +261,7 @@ impl NpmResolution {
async fn add_package_reqs_to_snapshot(
api: &CliNpmRegistryApi,
package_reqs: &[PackageReq],
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
) -> deno_npm::resolution::AddPkgReqsResult {
let snapshot = get_new_snapshot();

@@ -301,9 +300,8 @@ async fn add_package_reqs_to_snapshot(
};

if let Ok(snapshot) = &result.dep_graph_result {
if let Some(lockfile_mutex) = maybe_lockfile {
let mut lockfile = lockfile_mutex.lock();
populate_lockfile_from_snapshot(&mut lockfile, snapshot);
if let Some(lockfile) = maybe_lockfile {
populate_lockfile_from_snapshot(&lockfile, snapshot);
}
}

@@ -326,9 +324,10 @@ fn get_npm_pending_resolver(
}

fn populate_lockfile_from_snapshot(
lockfile: &mut Lockfile,
lockfile: &CliLockfile,
snapshot: &NpmResolutionSnapshot,
) {
let mut lockfile = lockfile.lock();
for (package_req, nv) in snapshot.package_reqs() {
lockfile.insert_package_specifier(
format!("npm:{}", package_req),
@@ -19,6 +19,7 @@ use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;

use crate::npm::managed::cache::TarballCache;

@@ -31,16 +32,25 @@ pub trait NpmPackageFsResolver: Send + Sync {
/// The local node_modules folder if it is applicable to the implementation.
fn node_modules_path(&self) -> Option<&PathBuf>;

fn maybe_package_folder(&self, package_id: &NpmPackageId) -> Option<PathBuf>;

fn package_folder(
&self,
package_id: &NpmPackageId,
) -> Result<PathBuf, AnyError>;
) -> Result<PathBuf, AnyError> {
self.maybe_package_folder(package_id).ok_or_else(|| {
deno_core::anyhow::anyhow!(
"Package folder not found for '{}'",
package_id.as_serialized()
)
})
}

fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError>;
) -> Result<PathBuf, PackageFolderResolveError>;

fn resolve_package_cache_folder_id_from_specifier(
&self,
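The hunk above turns `package_folder` into a provided trait method built on the new, infallible `maybe_package_folder`. The pattern in isolation, as a minimal sketch with simplified types (not the real `NpmPackageFsResolver` signatures):

```rust
use std::path::PathBuf;

// Implementors write the lookup once; the fallible accessor is derived.
trait PackageFolders {
  fn maybe_package_folder(&self, id: &str) -> Option<PathBuf>;

  // Default method: turn the Option into an error with context, mirroring
  // the anyhow! message in the diff.
  fn package_folder(&self, id: &str) -> Result<PathBuf, String> {
    self
      .maybe_package_folder(id)
      .ok_or_else(|| format!("Package folder not found for '{id}'"))
  }
}
```

This removes the duplicated "not found" error construction from each implementation (global and local resolvers both benefit in the hunks that follow).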

@@ -8,7 +8,6 @@ use std::sync::Arc;

use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId;

@@ -16,6 +15,9 @@ use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;

use super::super::cache::NpmCache;
use super::super::cache::TarballCache;

@@ -65,29 +67,71 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
None
}

fn package_folder(&self, id: &NpmPackageId) -> Result<PathBuf, AnyError> {
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let folder_id = self
.resolution
.resolve_pkg_cache_folder_id_from_pkg_id(id)
.unwrap();
Ok(self.cache.package_folder_for_id(&folder_id))
.resolve_pkg_cache_folder_id_from_pkg_id(id)?;
Some(self.cache.package_folder_for_id(&folder_id))
}

fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
let Some(referrer_pkg_id) = self
) -> Result<PathBuf, PackageFolderResolveError> {
use deno_npm::resolution::PackageNotFoundFromReferrerError;
let Some(referrer_cache_folder_id) = self
.cache
.resolve_package_folder_id_from_specifier(referrer)
else {
bail!("could not find npm package for '{}'", referrer);
return Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
);
};
let pkg = self
let resolve_result = self
.resolution
.resolve_package_from_package(name, &referrer_pkg_id)?;
self.package_folder(&pkg.id)
.resolve_package_from_package(name, &referrer_cache_folder_id);
match resolve_result {
Ok(pkg) => match self.maybe_package_folder(&pkg.id) {
Some(folder) => Ok(folder),
None => Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: Some(format!(
"{} -> {}",
referrer_cache_folder_id,
pkg.id.as_serialized()
)),
}
.into(),
),
},
Err(err) => match *err {
PackageNotFoundFromReferrerError::Referrer(cache_folder_id) => Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: Some(cache_folder_id.to_string()),
}
.into(),
),
PackageNotFoundFromReferrerError::Package {
name,
referrer: cache_folder_id_referrer,
} => Err(
PackageNotFoundError {
package_name: name,
referrer: referrer.clone(),
referrer_extra: Some(cache_folder_id_referrer.to_string()),
}
.into(),
),
},
}
}

fn resolve_package_cache_folder_id_from_specifier(
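The error handling above replaces a single anyhow `bail!` with structured error types. A simplified stand-in for the split being adopted (the real `node_resolver` errors carry more fields, such as `referrer_extra`):

```rust
use std::fmt;

// Two failure modes, distinguished instead of collapsed into one string:
// the referrer itself is not a known npm package, or the referrer is known
// but the requested package cannot be resolved from it.
#[derive(Debug)]
enum ResolveError {
  ReferrerNotFound { referrer: String },
  PackageNotFound { package_name: String, referrer: String },
}

impl fmt::Display for ResolveError {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      Self::ReferrerNotFound { referrer } => {
        write!(f, "could not find referrer npm package for '{referrer}'")
      }
      Self::PackageNotFound { package_name, referrer } => {
        write!(f, "could not find package '{package_name}' from '{referrer}'")
      }
    }
  }
}
```

Callers can then react differently to each case, which a formatted `bail!` string did not allow.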

@@ -7,26 +7,20 @@ mod bin_entries;
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;

use crate::cache::CACHE_PERM;
use crate::npm::cache_dir::mixed_case_package_name_decode;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use crate::util::fs::clone_dir_recursive;
use crate::util::fs::symlink_dir;
use crate::util::fs::LaxSingleProcessFsFlag;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressMessagePrompt;
use crate::args::LifecycleScriptsConfig;
use crate::args::PackagesAllowedScripts;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;

@@ -40,10 +34,24 @@ use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodePermissions;
use deno_semver::package::PackageNv;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use serde::Deserialize;
use serde::Serialize;

use crate::args::PackageJsonInstallDepsProvider;
use crate::cache::CACHE_PERM;
use crate::npm::cache_dir::mixed_case_package_name_decode;
use crate::npm::cache_dir::mixed_case_package_name_encode;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use crate::util::fs::clone_dir_recursive;
use crate::util::fs::symlink_dir;
use crate::util::fs::LaxSingleProcessFsFlag;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressMessagePrompt;

use super::super::cache::NpmCache;
use super::super::cache::TarballCache;

@@ -57,6 +65,7 @@ use super::common::RegistryReadPermissionChecker;
pub struct LocalNpmPackageResolver {
cache: Arc<NpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,

@@ -64,21 +73,26 @@ pub struct LocalNpmPackageResolver {
root_node_modules_url: Url,
system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker,
lifecycle_scripts: LifecycleScriptsConfig,
}

impl LocalNpmPackageResolver {
#[allow(clippy::too_many_arguments)]
pub fn new(
cache: Arc<NpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
node_modules_folder: PathBuf,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self {
Self {
cache,
fs: fs.clone(),
pkg_json_deps_provider,
progress_bar,
resolution,
tarball_cache,

@@ -90,6 +104,7 @@ impl LocalNpmPackageResolver {
.unwrap(),
root_node_modules_path: node_modules_folder,
system_info,
lifecycle_scripts,
}
}

@@ -108,7 +123,7 @@ impl LocalNpmPackageResolver {
fn resolve_folder_for_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<PathBuf>, AnyError> {
) -> Result<Option<PathBuf>, std::io::Error> {
let Some(relative_url) =
self.root_node_modules_url.make_relative(specifier)
else {

@@ -125,7 +140,6 @@ impl LocalNpmPackageResolver {
// in `node_modules` directory of the referrer.
canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())
.map(Some)
.map_err(|err| err.into())
}

fn resolve_package_folder_from_specifier(

@@ -150,32 +164,42 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
Some(&self.root_node_modules_path)
}

fn package_folder(&self, id: &NpmPackageId) -> Result<PathBuf, AnyError> {
match self.resolution.resolve_pkg_cache_folder_id_from_pkg_id(id) {
// package is stored at:
// node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
Some(cache_folder_id) => Ok(
self
.root_node_modules_path
.join(".deno")
.join(get_package_folder_id_folder_name(&cache_folder_id))
.join("node_modules")
.join(&cache_folder_id.nv.name),
),
None => bail!(
"Could not find package information for '{}'",
id.as_serialized()
),
}
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let cache_folder_id = self
.resolution
.resolve_pkg_cache_folder_id_from_pkg_id(id)?;
// package is stored at:
// node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
Some(
self
.root_node_modules_path
.join(".deno")
.join(get_package_folder_id_folder_name(&cache_folder_id))
.join("node_modules")
.join(&cache_folder_id.nv.name),
)
}

fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
let Some(local_path) = self.resolve_folder_for_specifier(referrer)? else {
bail!("could not find npm package for '{}'", referrer);
) -> Result<PathBuf, PackageFolderResolveError> {
let maybe_local_path = self
.resolve_folder_for_specifier(referrer)
.map_err(|err| PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err,
})?;
let Some(local_path) = maybe_local_path else {
return Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
);
};
let package_root_path = self.resolve_package_root(&local_path);
let mut current_folder = package_root_path.as_path();

@@ -197,11 +221,14 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
}
}

bail!(
"could not find package '{}' from referrer '{}'.",
name,
referrer
);
Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
)
}

fn resolve_package_cache_folder_id_from_specifier(

@@ -221,10 +248,12 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
sync_resolution_with_fs(
&self.resolution.snapshot(),
&self.cache,
&self.pkg_json_deps_provider,
&self.progress_bar,
&self.tarball_cache,
&self.root_node_modules_path,
&self.system_info,
&self.lifecycle_scripts,
)
.await
}

@@ -240,16 +269,157 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
}
}

// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
fn resolve_baseline_custom_commands(
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
custom_commands
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));

custom_commands
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));

custom_commands
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));

custom_commands.insert(
"node-gyp".to_string(),
Rc::new(crate::task_runner::NodeGypCommand),
);

// TODO: this recreates the bin entries which could be redoing some work, but the ones
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
// doing it for packages that are set up already).
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
resolve_custom_commands_from_packages(
custom_commands,
snapshot,
packages,
local_registry_dir,
)
}
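The baseline commands above are layered with the per-package bins resolved later by `resolve_custom_commands_from_deps`. The precedence rule is simply last-insert-wins on the command map; a self-contained sketch of that behavior (plain `HashMap` standing in for `TaskCustomCommands`):

```rust
use std::collections::HashMap;

fn main() {
  let mut commands: HashMap<&str, &str> = HashMap::new();
  // baseline entry, as installed by resolve_baseline_custom_commands
  commands.insert("node-gyp", "baseline shim");
  // a package-local bin inserted later replaces it
  commands.insert("node-gyp", "package-local bin");
  assert_eq!(commands["node-gyp"], "package-local bin");
}
```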

// resolves the custom commands from an iterator of packages
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands
fn resolve_custom_commands_from_packages<
'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>,
>(
mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot,
packages: P,
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = bin_entries::BinEntries::new();
for package in packages {
let package_path =
local_node_modules_package_path(local_registry_dir, package);

if package.bin.is_some() {
bin_entries.add(package.clone(), package_path);
}
}
let bins = bin_entries.into_bin_files(snapshot);
for (bin_name, script_path) in bins {
commands.insert(
bin_name.clone(),
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
command_name: bin_name,
path: script_path,
}),
);
}

Ok(commands)
}

fn local_node_modules_package_path(
local_registry_dir: &Path,
package: &NpmResolutionPackage,
) -> PathBuf {
local_registry_dir
.join(get_package_folder_id_folder_name(
&package.get_package_cache_folder_id(),
))
.join("node_modules")
.join(&package.id.nv.name)
}

// resolves the custom commands from the dependencies of a package
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands.
fn resolve_custom_commands_from_deps(
baseline: crate::task_runner::TaskCustomCommands,
package: &NpmResolutionPackage,
snapshot: &NpmResolutionSnapshot,
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
resolve_custom_commands_from_packages(
baseline,
snapshot,
package
.dependencies
.values()
.map(|id| snapshot.package_from_id(id).unwrap()),
local_registry_dir,
)
}

fn can_run_scripts(
allow_scripts: &PackagesAllowedScripts,
package_nv: &PackageNv,
) -> bool {
match allow_scripts {
PackagesAllowedScripts::All => true,
// TODO: make this more correct
PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
let s = s.strip_prefix("npm:").unwrap_or(s);
s == package_nv.name || s == package_nv.to_string()
}),
PackagesAllowedScripts::None => false,
}
}
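The matching rule in `can_run_scripts` is worth spelling out: an allow-list entry may be a bare package name or an exact `name@version`, optionally prefixed with `npm:`. A self-contained sketch of just that comparison (helper name is illustrative):

```rust
fn matches_allow_entry(entry: &str, name: &str, name_and_version: &str) -> bool {
  // an optional "npm:" prefix is tolerated on the flag value
  let entry = entry.strip_prefix("npm:").unwrap_or(entry);
  entry == name || entry == name_and_version
}

fn main() {
  // "npm:esbuild" and bare "esbuild" allow every version of the package;
  // "esbuild@0.21.0" allows only that exact name@version.
  assert!(matches_allow_entry("npm:esbuild", "esbuild", "esbuild@0.21.0"));
  assert!(matches_allow_entry("esbuild@0.21.0", "esbuild", "esbuild@0.21.0"));
  assert!(!matches_allow_entry("esbuild@0.20.0", "esbuild", "esbuild@0.21.0"));
}
```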

// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
// but it always fails if the package excludes the `binding.gyp` file when they publish.
// (for example, `fsevents` hits this)
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
}

fn has_lifecycle_scripts(
package: &NpmResolutionPackage,
package_path: &Path,
) -> bool {
if let Some(install) = package.scripts.get("install") {
// default script
if !is_broken_default_install_script(install, package_path) {
return true;
}
}
package.scripts.contains_key("preinstall")
|| package.scripts.contains_key("postinstall")
}
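A worked example of the node-gyp heuristic above, copied into a standalone snippet and exercised against a package directory that ships no `binding.gyp` (the fsevents case): the declared default install script is treated as a no-op rather than run and failed.

```rust
use std::path::Path;

// Same predicate as in the diff, reproduced here so the example runs alone.
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
  script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
}

fn main() {
  // hypothetical package folder with no binding.gyp
  let pkg = Path::new("/nonexistent/fsevents");
  assert!(is_broken_default_install_script("node-gyp rebuild", pkg));
  // a custom install script is never treated as broken by this rule
  assert!(!is_broken_default_install_script("node mine.js", pkg));
}
```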

/// Creates a pnpm style folder structure.
#[allow(clippy::too_many_arguments)]
async fn sync_resolution_with_fs(
snapshot: &NpmResolutionSnapshot,
cache: &Arc<NpmCache>,
pkg_json_deps_provider: &PackageJsonInstallDepsProvider,
progress_bar: &ProgressBar,
tarball_cache: &Arc<TarballCache>,
root_node_modules_dir_path: &Path,
system_info: &NpmSystemInfo,
lifecycle_scripts: &LifecycleScriptsConfig,
) -> Result<(), AnyError> {
if snapshot.is_empty() {
if snapshot.is_empty() && pkg_json_deps_provider.workspace_pkgs().is_empty() {
return Ok(()); // don't create the directory
}

@@ -286,6 +456,8 @@ async fn sync_resolution_with_fs(
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
HashMap::with_capacity(package_partitions.packages.len());
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
let mut packages_with_scripts = Vec::with_capacity(2);
let mut packages_with_scripts_not_run = Vec::new();
for package in &package_partitions.packages {
if let Some(current_pkg) =
newest_packages_by_name.get_mut(&package.id.nv.name)

@@ -310,6 +482,7 @@ async fn sync_resolution_with_fs(
// are forced to be recreated
setup_cache.remove_dep(&package_folder_name);

let folder_path = folder_path.clone();
let bin_entries_to_setup = bin_entries.clone();
cache_futures.push(async move {
tarball_cache

@@ -347,6 +520,25 @@ async fn sync_resolution_with_fs(
Ok::<_, AnyError>(())
});
}

let sub_node_modules = folder_path.join("node_modules");
let package_path =
join_package_name(&sub_node_modules, &package.id.nv.name);
if has_lifecycle_scripts(package, &package_path) {
let scripts_run = folder_path.join(".scripts-run");
let has_warned = folder_path.join(".scripts-warned");
if can_run_scripts(&lifecycle_scripts.allowed, &package.id.nv) {
if !scripts_run.exists() {
packages_with_scripts.push((
package.clone(),
package_path,
scripts_run,
));
}
} else if !scripts_run.exists() && !has_warned.exists() {
packages_with_scripts_not_run.push((has_warned, package.id.nv.clone()));
}
}
}

while let Some(result) = cache_futures.next().await {

@@ -415,16 +607,81 @@ async fn sync_resolution_with_fs(
}
}

// 4. Create all the top level packages in the node_modules folder, which are symlinks.
//
// Symlink node_modules/<package_name> to
// node_modules/.deno/<package_id>/node_modules/<package_name>
let mut found_names = HashSet::new();
let mut ids = snapshot.top_level_packages().collect::<Vec<_>>();
let mut found_names: HashMap<&String, &PackageNv> = HashMap::new();

// 4. Create symlinks for package json dependencies
{
for remote in pkg_json_deps_provider.remote_pkgs() {
let Some(remote_id) = snapshot
.resolve_best_package_id(&remote.req.name, &remote.req.version_req)
else {
continue; // skip, package not found
};
let remote_pkg = snapshot.package_from_id(&remote_id).unwrap();
let alias_clashes = remote.req.name != remote.alias
&& newest_packages_by_name.contains_key(&remote.alias);
let install_in_child = {
// we'll install in the child if the alias is taken by another package, or
// if there's already a package with the same name but different version
// linked into the root
match found_names.entry(&remote.alias) {
Entry::Occupied(nv) => {
alias_clashes
|| remote.req.name != nv.get().name // alias to a different package (in case of duplicate aliases)
|| !remote.req.version_req.matches(&nv.get().version) // incompatible version
}
Entry::Vacant(entry) => {
entry.insert(&remote_pkg.id.nv);
alias_clashes
}
}
};
let target_folder_name = get_package_folder_id_folder_name(
&remote_pkg.get_package_cache_folder_id(),
);
let local_registry_package_path = join_package_name(
&deno_local_registry_dir
.join(&target_folder_name)
.join("node_modules"),
&remote_pkg.id.nv.name,
);
if install_in_child {
// symlink the dep into the package's child node_modules folder
let dest_path =
remote.base_dir.join("node_modules").join(&remote.alias);

symlink_package_dir(&local_registry_package_path, &dest_path)?;
} else {
// symlink the package into `node_modules/<alias>`
if setup_cache
.insert_root_symlink(&remote_pkg.id.nv.name, &target_folder_name)
{
symlink_package_dir(
&local_registry_package_path,
&join_package_name(root_node_modules_dir_path, &remote.alias),
)?;
}
}
}
}
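The root-versus-child decision in step 4 above can be distilled into a small predicate. A sketch with illustrative types (the real code works with `PackageNv` and a semver `VersionReq`): a package.json dependency goes into its package's own `node_modules` when the alias is already taken by a different package or by an incompatible version; otherwise it is linked at the root.

```rust
struct RootEntry<'a> {
  name: &'a str,
  version_ok: bool, // stand-in for `version_req.matches(&nv.version)`
}

fn install_in_child(
  alias_clashes: bool,
  existing_root_link: Option<&RootEntry>,
  dep_name: &str,
) -> bool {
  match existing_root_link {
    // alias already linked at the root: fall back to the child if it
    // points at a different package or an incompatible version
    Some(entry) => {
      alias_clashes || dep_name != entry.name || !entry.version_ok
    }
    // alias free: the root link is claimed, unless the alias clashes
    None => alias_clashes,
  }
}
```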

// 5. Create symlinks for the remaining top level packages in the node_modules folder.
// (These may be present if they are not in the package.json dependencies.)
// Symlink node_modules/.deno/<package_id>/node_modules/<package_name> to
// node_modules/<package_name>
let mut ids = snapshot
.top_level_packages()
.filter(|f| !found_names.contains_key(&f.nv.name))
.collect::<Vec<_>>();
ids.sort_by(|a, b| b.cmp(a)); // create determinism and only include the latest version
for id in ids {
if !found_names.insert(&id.nv.name) {
continue; // skip, already handled
match found_names.entry(&id.nv.name) {
Entry::Occupied(_) => {
continue; // skip, already handled
}
Entry::Vacant(entry) => {
entry.insert(&id.nv);
}
}
let package = snapshot.package_from_id(id).unwrap();
let target_folder_name =

@@ -444,11 +701,16 @@ async fn sync_resolution_with_fs(
}
}

// 5. Create a node_modules/.deno/node_modules/<package-name> directory with
// 6. Create a node_modules/.deno/node_modules/<package-name> directory with
// the remaining packages
for package in newest_packages_by_name.values() {
if !found_names.insert(&package.id.nv.name) {
continue; // skip, already handled
match found_names.entry(&package.id.nv.name) {
Entry::Occupied(_) => {
continue; // skip, already handled
}
Entry::Vacant(entry) => {
entry.insert(&package.id.nv);
}
}

let target_folder_name =

@@ -469,12 +731,109 @@ async fn sync_resolution_with_fs(
}
}

// 6. Set up `node_modules/.bin` entries for packages that need it.
// 7. Set up `node_modules/.bin` entries for packages that need it.
{
let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut());
bin_entries.finish(snapshot, &bin_node_modules_dir_path)?;
}

// 8. Create symlinks for the workspace packages
{
// todo(#24419): this is not exactly correct because it should
// install correctly for a workspace (potentially in sub directories),
// but this is good enough for a first pass
for workspace in pkg_json_deps_provider.workspace_pkgs() {
symlink_package_dir(
&workspace.target_dir,
&root_node_modules_dir_path.join(&workspace.alias),
)?;
}
}
if !packages_with_scripts.is_empty() {
// get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`)
let base = resolve_baseline_custom_commands(
snapshot,
&package_partitions.packages,
&deno_local_registry_dir,
)?;
let init_cwd = lifecycle_scripts.initial_cwd.as_deref().unwrap();
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
Some(root_node_modules_dir_path),
);

let mut env_vars = crate::task_runner::real_env_vars();
env_vars.insert(
crate::args::NPM_RESOLUTION_STATE_ENV_VAR_NAME.to_string(),
process_state,
);
for (package, package_path, scripts_run_path) in packages_with_scripts {
// add custom commands for binaries from the package's dependencies. this will take precedence over the
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
// correct bin will be used.
let custom_commands = resolve_custom_commands_from_deps(
base.clone(),
&package,
snapshot,
&deno_local_registry_dir,
)?;
for script_name in ["preinstall", "install", "postinstall"] {
if let Some(script) = package.scripts.get(script_name) {
if script_name == "install"
&& is_broken_default_install_script(script, &package_path)
{
continue;
}
let exit_code =
crate::task_runner::run_task(crate::task_runner::RunTaskOptions {
task_name: script_name,
script,
cwd: &package_path,
env_vars: env_vars.clone(),
custom_commands: custom_commands.clone(),
init_cwd,
argv: &[],
root_node_modules_dir: Some(root_node_modules_dir_path),
})
.await?;
if exit_code != 0 {
anyhow::bail!(
"script '{}' in '{}' failed with exit code {}",
script_name,
package.id.nv,
exit_code,
);
}
}
}
fs::write(scripts_run_path, "")?;
}
}

if !packages_with_scripts_not_run.is_empty() {
let (maybe_install, maybe_install_example) = if *crate::args::DENO_FUTURE {
(
" or `deno install`",
" or `deno install --allow-scripts=pkg1,pkg2`",
)
} else {
("", "")
};
let packages = packages_with_scripts_not_run
.iter()
.map(|(_, p)| format!("npm:{p}"))
.collect::<Vec<_>>()
.join(", ");
log::warn!("{}: Packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed.
This may cause the packages to not work correctly. To run them, use the `--allow-scripts` flag with `deno cache`{maybe_install}
(e.g. `deno cache --allow-scripts=pkg1,pkg2 <entrypoint>`{maybe_install_example}):\n {packages}", crate::colors::yellow("warning"));
for (scripts_warned_path, _) in packages_with_scripts_not_run {
let _ignore_err = fs::write(scripts_warned_path, "");
}
}

setup_cache.save();
drop(single_process_lock);
drop(pb_clear_guard);

@@ -482,10 +841,13 @@ async fn sync_resolution_with_fs(
Ok(())
}

// Uses BTreeMap to preserve the ordering of the elements in memory, to ensure
// the file generated from this datastructure is deterministic.
// See: https://github.com/denoland/deno/issues/24479
/// Represents a dependency at `node_modules/.deno/<package_id>/`
struct SetupCacheDep<'a> {
previous: Option<&'a HashMap<String, String>>,
current: &'a mut HashMap<String, String>,
previous: Option<&'a BTreeMap<String, String>>,
current: &'a mut BTreeMap<String, String>,
}

impl<'a> SetupCacheDep<'a> {

@@ -501,11 +863,14 @@ impl<'a> SetupCacheDep<'a> {
}
}

// Uses BTreeMap to preserve the ordering of the elements in memory, to ensure
// the file generated from this datastructure is deterministic.
// See: https://github.com/denoland/deno/issues/24479
#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct SetupCacheData {
root_symlinks: HashMap<String, String>,
deno_symlinks: HashMap<String, String>,
dep_symlinks: HashMap<String, HashMap<String, String>>,
root_symlinks: BTreeMap<String, String>,
deno_symlinks: BTreeMap<String, String>,
dep_symlinks: BTreeMap<String, BTreeMap<String, String>>,
}
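The reason for the `HashMap` to `BTreeMap` switch above (issue #24479) is determinism: `BTreeMap` iterates, and therefore serializes, in sorted key order, so the generated cache file is byte-for-byte stable across runs, whereas `HashMap` iteration order is randomized. A quick demonstration:

```rust
use std::collections::BTreeMap;

fn main() {
  let mut m = BTreeMap::new();
  m.insert("zebra", 1);
  m.insert("alpha", 2);
  // Keys always come out sorted, regardless of insertion order.
  let keys: Vec<_> = m.keys().copied().collect();
  assert_eq!(keys, ["alpha", "zebra"]);
}
```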

/// It is very slow to try to re-setup the symlinks each time, so this will

@@ -676,6 +1041,7 @@ fn junction_or_symlink_dir(
old_path: &Path,
new_path: &Path,
) -> Result<(), AnyError> {
use deno_core::anyhow::bail;
// Use junctions because they're supported on ntfs file systems without
// needing to elevate privileges on Windows

@@ -71,19 +71,16 @@ impl BinEntries {
self.entries.push((package, package_path));
}

/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub(super) fn finish(
mut self,
fn for_each_entry(
&mut self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
mut f: impl FnMut(
&NpmResolutionPackage,
&Path,
&str, // bin name
&str, // bin script
) -> Result<(), AnyError>,
) -> Result<(), AnyError> {
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
|| format!("Creating '{}'", bin_node_modules_dir_path.display()),
)?;
}

if !self.collisions.is_empty() {
// walking the dependency tree to find out the depth of each package
// is sort of expensive, so we only do it if there's a collision

@@ -101,13 +98,7 @@ impl BinEntries {
// we already set up a bin entry with this name
continue;
}
set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)?;
f(package, package_path, name, script)?;
}
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
for (name, script) in entries {

@@ -115,13 +106,7 @@ impl BinEntries {
// we already set up a bin entry with this name
continue;
}
set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)?;
f(package, package_path, name, script)?;
}
}
}

@@ -130,6 +115,47 @@ impl BinEntries {

Ok(())
}
/// Collect the bin entries into a vec of (name, script path)
|
||||
pub(super) fn into_bin_files(
|
||||
mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
) -> Vec<(String, PathBuf)> {
|
||||
let mut bins = Vec::new();
|
||||
self
|
||||
.for_each_entry(snapshot, |_, package_path, name, script| {
|
||||
bins.push((name.to_string(), package_path.join(script)));
|
||||
Ok(())
|
||||
})
|
||||
.unwrap();
|
||||
bins
|
||||
}
|
||||
|
||||
/// Finish setting up the bin entries, writing the necessary files
|
||||
/// to disk.
|
||||
pub(super) fn finish(
|
||||
mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
bin_node_modules_dir_path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
|
||||
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
|
||||
|| format!("Creating '{}'", bin_node_modules_dir_path.display()),
|
||||
)?;
|
||||
}
|
||||
|
||||
self.for_each_entry(snapshot, |package, package_path, name, script| {
|
||||
set_up_bin_entry(
|
||||
package,
|
||||
name,
|
||||
script,
|
||||
package_path,
|
||||
bin_node_modules_dir_path,
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
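The shape of this `BinEntries` refactor is one internal iterator feeding two consumers, so the entry-walking logic is written once. A sketch with simplified types (the real callback also receives the package and its path):

```rust
// (bin name, script) pairs stand in for the real package/bin bookkeeping.
struct Entries(Vec<(String, String)>);

impl Entries {
  // the single traversal, like `for_each_entry` above
  fn for_each_entry(
    &mut self,
    mut f: impl FnMut(&str, &str) -> Result<(), String>,
  ) -> Result<(), String> {
    for (name, script) in &self.0 {
      f(name, script)?;
    }
    Ok(())
  }

  // consumer 1: collect into a list (like `into_bin_files`);
  // consumer 2 in the diff, `finish`, writes each entry to disk instead.
  fn into_files(mut self) -> Vec<(String, String)> {
    let mut out = Vec::new();
    self
      .for_each_entry(|name, script| {
        out.push((name.to_string(), script.to_string()));
        Ok(())
      })
      .unwrap();
    out
  }
}
```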

// walk the dependency tree to find out the depth of each package

@@ -10,6 +10,8 @@ use std::sync::Arc;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;

use crate::args::LifecycleScriptsConfig;
use crate::args::PackageJsonInstallDepsProvider;
use crate::util::progress_bar::ProgressBar;

pub use self::common::NpmPackageFsResolver;

@@ -21,24 +23,29 @@ use super::cache::NpmCache;
use super::cache::TarballCache;
use super::resolution::NpmResolution;

#[allow(clippy::too_many_arguments)]
pub fn create_npm_fs_resolver(
fs: Arc<dyn FileSystem>,
npm_cache: Arc<NpmCache>,
pkg_json_deps_provider: &Arc<PackageJsonInstallDepsProvider>,
progress_bar: &ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
maybe_node_modules_path: Option<PathBuf>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Arc<dyn NpmPackageFsResolver> {
match maybe_node_modules_path {
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
npm_cache,
fs,
pkg_json_deps_provider.clone(),
progress_bar.clone(),
resolution,
tarball_cache,
node_modules_folder,
system_info,
lifecycle_scripts,
)),
None => Arc::new(GlobalNpmPackageResolver::new(
npm_cache,
@@ -13,10 +13,12 @@ use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_npm::registry::NpmPackageInfo;
use deno_runtime::deno_node::NpmResolver;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::NpmResolver;

use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher;

@@ -63,6 +65,10 @@ pub enum InnerCliNpmResolverRef<'a> {

pub trait CliNpmResolver: NpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver>;
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver>;
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider>;

fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>;
@@ -1,9 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

// NOTE(bartlomieju): unfortunately it appears that clippy is broken
// and can't allow a single line ignore for `await_holding_lock`.
#![allow(clippy::await_holding_lock)]

use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;

use jupyter_runtime::InputRequest;
use jupyter_runtime::JupyterMessage;
use jupyter_runtime::JupyterMessageContent;
use jupyter_runtime::KernelIoPubConnection;

@@ -11,14 +16,17 @@ use jupyter_runtime::StreamContent;

use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::OpState;
use tokio::sync::mpsc;
use tokio::sync::Mutex;

use crate::tools::jupyter::server::StdinConnectionProxy;

deno_core::extension!(deno_jupyter,
ops = [
op_jupyter_broadcast,
op_jupyter_input,
],
options = {
sender: mpsc::UnboundedSender<StreamContent>,

@@ -32,6 +40,61 @@ deno_core::extension!(deno_jupyter,
},
);

#[op2]
#[string]
pub fn op_jupyter_input(
state: &mut OpState,
#[string] prompt: String,
is_password: bool,
) -> Result<Option<String>, AnyError> {
let (last_execution_request, stdin_connection_proxy) = {
(
state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(),
state.borrow::<Arc<Mutex<StdinConnectionProxy>>>().clone(),
)
};

let maybe_last_request = last_execution_request.lock().clone();
if let Some(last_request) = maybe_last_request {
let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content
else {
return Ok(None);
};

if !msg.allow_stdin {
return Ok(None);
}

let content = InputRequest {
prompt,
password: is_password,
};

let msg = JupyterMessage::new(content, Some(&last_request));

let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else {
return Ok(None);
};

// Need to spawn a separate thread here, because `blocking_recv()` can't
// be used from the Tokio runtime context.
let join_handle = std::thread::spawn(move || {
stdin_connection_proxy.lock().rx.blocking_recv()
});
let Ok(Some(response)) = join_handle.join() else {
return Ok(None);
};

let JupyterMessageContent::InputReply(msg) = response.content else {
return Ok(None);
};

return Ok(Some(msg.value));
}

Ok(None)
}
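The thread hop in `op_jupyter_input` is a general Tokio pattern: `blocking_recv()` panics if called from within a runtime context, so the blocking receive is moved onto a plain OS thread and joined. A self-contained sketch of just that pattern (the channel contents here are illustrative):

```rust
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
  let (tx, mut rx) = mpsc::unbounded_channel::<String>();
  // a value is already buffered, so the receive below returns immediately
  tx.send("reply".to_string()).unwrap();

  // blocking_recv() must not run on a Tokio worker thread; hop to an OS
  // thread and join, as the synchronous op above does.
  let handle = std::thread::spawn(move || rx.blocking_recv());
  let reply = handle.join().expect("receiver thread panicked");
  assert_eq!(reply.as_deref(), Some("reply"));
}
```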

#[op2(async)]
pub async fn op_jupyter_broadcast(
state: Rc<RefCell<OpState>>,

@@ -45,11 +108,11 @@ pub async fn op_jupyter_broadcast(

(
s.borrow::<Arc<Mutex<KernelIoPubConnection>>>().clone(),
s.borrow::<Rc<RefCell<Option<JupyterMessage>>>>().clone(),
s.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(),
)
};

let maybe_last_request = last_execution_request.borrow().clone();
let maybe_last_request = last_execution_request.lock().clone();
if let Some(last_request) = maybe_last_request {
let content = JupyterMessageContent::from_type_and_content(
&message_type,

@@ -69,9 +132,7 @@ pub async fn op_jupyter_broadcast(
.with_metadata(metadata)
.with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect());

(iopub_connection.lock().await)
.send(jupyter_message)
.await?;
iopub_connection.lock().send(jupyter_message).await?;
}

Ok(())

@@ -86,13 +147,13 @@ pub fn op_print(
let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>();

if is_err {
if let Err(err) = sender.send(StreamContent::stderr(msg.into())) {
if let Err(err) = sender.send(StreamContent::stderr(msg)) {
log::error!("Failed to send stderr message: {}", err);
}
return Ok(());
}

if let Err(err) = sender.send(StreamContent::stdout(msg.into())) {
if let Err(err) = sender.send(StreamContent::stdout(msg)) {
log::error!("Failed to send stdout message: {}", err);
}
Ok(())
896	cli/resolver.rs
File diff suppressed because it is too large
@@ -604,12 +604,28 @@
}
]
},
"workspaces": {
"type": "array",
"items": {
"type": "string"
},
"description": "The members of this workspace."
"workspace": {
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
},
"description": "The members of this workspace."
},
{
"type": "object",
"properties": {
"members": {
"type": "array",
"items": {
"type": "string"
},
"description": "The members of this workspace."
}
}
}
]
}
}
}
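The schema change above renames `workspaces` to `workspace` and accepts two shapes: a bare array of member patterns, or an object with a `members` array. A Rust-side sketch of how such a union deserializes (type names here are illustrative, not deno_config's actual types):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum WorkspaceField {
  // shape 1: "workspace": ["./packages/*"]
  Members(Vec<String>),
  // shape 2: "workspace": { "members": ["./packages/*"] }
  Detailed { members: Vec<String> },
}

fn main() {
  let a: WorkspaceField = serde_json::from_str(r#"["./packages/*"]"#).unwrap();
  let b: WorkspaceField =
    serde_json::from_str(r#"{ "members": ["./packages/*"] }"#).unwrap();
  println!("{a:?} {b:?}");
}
```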

@@ -2,6 +2,8 @@

use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::env::current_exe;
use std::ffi::OsString;
use std::fs;

@@ -15,6 +17,9 @@ use std::path::PathBuf;
use std::process::Command;

use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;

@@ -24,19 +29,20 @@ use deno_core::futures::AsyncSeekExt;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::VersionReqSpecifierParseError;
use eszip::EszipRelativeFileBaseUrl;
use indexmap::IndexMap;
use log::Level;
use serde::Deserialize;
use serde::Serialize;

use crate::args::package_json::PackageJsonDepValueParseError;
use crate::args::package_json::PackageJsonDeps;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::CompileFlags;
use crate::args::PackageJsonDepsProvider;
use crate::args::PackageJsonInstallDepsProvider;
use crate::args::PermissionFlags;
use crate::args::UnstableConfig;
use crate::cache::DenoDir;

@@ -44,6 +50,8 @@ use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::standalone::virtual_fs::VfsEntry;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@@ -54,81 +62,30 @@ use super::virtual_fs::VirtualDirectory;

const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd";

#[derive(Serialize, Deserialize)]
enum SerializablePackageJsonDepValueParseError {
VersionReq(String),
Unsupported { scheme: String },
}

impl SerializablePackageJsonDepValueParseError {
pub fn from_err(err: PackageJsonDepValueParseError) -> Self {
match err {
PackageJsonDepValueParseError::VersionReq(err) => {
Self::VersionReq(err.source.to_string())
}
PackageJsonDepValueParseError::Unsupported { scheme } => {
Self::Unsupported { scheme }
}
}
}

pub fn into_err(self) -> PackageJsonDepValueParseError {
match self {
SerializablePackageJsonDepValueParseError::VersionReq(source) => {
PackageJsonDepValueParseError::VersionReq(NpmVersionReqParseError {
source: monch::ParseErrorFailureError::new(source),
})
}
SerializablePackageJsonDepValueParseError::Unsupported { scheme } => {
PackageJsonDepValueParseError::Unsupported { scheme }
}
}
}
}

#[derive(Serialize, Deserialize)]
pub struct SerializablePackageJsonDeps(
BTreeMap<
String,
Result<PackageReq, SerializablePackageJsonDepValueParseError>,
>,
);

impl SerializablePackageJsonDeps {
pub fn from_deps(deps: PackageJsonDeps) -> Self {
Self(
deps
.into_iter()
.map(|(name, req)| {
let res =
req.map_err(SerializablePackageJsonDepValueParseError::from_err);
(name, res)
})
.collect(),
)
}

pub fn into_deps(self) -> PackageJsonDeps {
self
.0
.into_iter()
.map(|(name, res)| (name, res.map_err(|err| err.into_err())))
.collect()
}
}

#[derive(Deserialize, Serialize)]
pub enum NodeModules {
Managed {
/// Whether this uses a node_modules directory (true) or the global cache (false).
node_modules_dir: bool,
package_json_deps: Option<SerializablePackageJsonDeps>,
/// Relative path for the node_modules directory in the vfs.
node_modules_dir: Option<String>,
},
Byonm {
package_json_deps: Option<SerializablePackageJsonDeps>,
root_node_modules_dir: Option<String>,
},
}

#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolverImportMap {
pub specifier: String,
pub json: String,
}

#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
pub package_jsons: BTreeMap<String, serde_json::Value>,
pub pkg_json_resolution: PackageJsonDepResolution,
}

#[derive(Deserialize, Serialize)]
pub struct Metadata {
pub argv: Vec<String>,

@@ -140,8 +97,9 @@ pub struct Metadata {
pub ca_stores: Option<Vec<String>>,
pub ca_data: Option<Vec<u8>>,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub maybe_import_map: Option<(Url, String)>,
pub entrypoint: ModuleSpecifier,
pub env_vars_from_env_file: HashMap<String, String>,
pub workspace_resolver: SerializedWorkspaceResolver,
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub disable_deprecated_api_warning: bool,
pub unstable_config: UnstableConfig,

@@ -415,13 +373,14 @@ pub fn unpack_into_dir(
fs::remove_file(&archive_path)?;
Ok(exe_path)
}

pub struct DenoCompileBinaryWriter<'a> {
deno_dir: &'a DenoDir,
file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
npm_system_info: NpmSystemInfo,
package_json_deps_provider: &'a PackageJsonDepsProvider,
}

impl<'a> DenoCompileBinaryWriter<'a> {

@@ -431,16 +390,16 @@ impl<'a> DenoCompileBinaryWriter<'a> {
file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
npm_system_info: NpmSystemInfo,
package_json_deps_provider: &'a PackageJsonDepsProvider,
) -> Self {
Self {
deno_dir,
file_fetcher,
http_client_provider,
npm_resolver,
workspace_resolver,
npm_system_info,
package_json_deps_provider,
}
}
@ -448,7 +407,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
&self,
|
||||
writer: &mut impl Write,
|
||||
eszip: eszip::EszipV2,
|
||||
module_specifier: &ModuleSpecifier,
|
||||
root_dir_url: EszipRelativeFileBaseUrl<'_>,
|
||||
entrypoint: &ModuleSpecifier,
|
||||
compile_flags: &CompileFlags,
|
||||
cli_options: &CliOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
|
@ -465,17 +425,15 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
}
|
||||
set_windows_binary_to_gui(&mut original_binary)?;
|
||||
}
|
||||
|
||||
self
|
||||
.write_standalone_binary(
|
||||
writer,
|
||||
original_binary,
|
||||
eszip,
|
||||
module_specifier,
|
||||
cli_options,
|
||||
compile_flags,
|
||||
)
|
||||
.await
|
||||
self.write_standalone_binary(
|
||||
writer,
|
||||
original_binary,
|
||||
eszip,
|
||||
root_dir_url,
|
||||
entrypoint,
|
||||
cli_options,
|
||||
compile_flags,
|
||||
)
|
||||
}
|
||||
|
||||
async fn get_base_binary(
|
||||
|
@ -537,7 +495,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
self
|
||||
.http_client_provider
|
||||
.get_or_create()?
|
||||
.download_with_progress(download_url, None, &progress)
|
||||
.download_with_progress(download_url.parse()?, None, &progress)
|
||||
.await?
|
||||
};
|
||||
let bytes = match maybe_bytes {
|
||||
|
@ -557,11 +515,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
|
||||
/// This functions creates a standalone deno binary by appending a bundle
|
||||
/// and magic trailer to the currently executing binary.
|
||||
async fn write_standalone_binary(
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn write_standalone_binary(
|
||||
&self,
|
||||
writer: &mut impl Write,
|
||||
original_bin: Vec<u8>,
|
||||
mut eszip: eszip::EszipV2,
|
||||
root_dir_url: EszipRelativeFileBaseUrl<'_>,
|
||||
entrypoint: &ModuleSpecifier,
|
||||
cli_options: &CliOptions,
|
||||
compile_flags: &CompileFlags,
|
||||
|
@ -574,48 +534,66 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
|
||||
None => None,
|
||||
};
|
||||
let maybe_import_map = cli_options
|
||||
.resolve_import_map(self.file_fetcher)
|
||||
.await?
|
||||
.map(|import_map| (import_map.base_url().clone(), import_map.to_json()));
|
||||
let (npm_vfs, npm_files, node_modules) =
|
||||
match self.npm_resolver.as_inner() {
|
||||
InnerCliNpmResolverRef::Managed(managed) => {
|
||||
let snapshot =
|
||||
managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
|
||||
if !snapshot.as_serialized().packages.is_empty() {
|
||||
let (root_dir, files) = self.build_vfs()?.into_dir_and_files();
|
||||
eszip.add_npm_snapshot(snapshot);
|
||||
(
|
||||
Some(root_dir),
|
||||
files,
|
||||
Some(NodeModules::Managed {
|
||||
node_modules_dir: self
|
||||
.npm_resolver
|
||||
.root_node_modules_path()
|
||||
.is_some(),
|
||||
package_json_deps: self.package_json_deps_provider.deps().map(
|
||||
|deps| SerializablePackageJsonDeps::from_deps(deps.clone()),
|
||||
),
|
||||
}),
|
||||
)
|
||||
} else {
|
||||
(None, Vec::new(), None)
|
||||
}
|
||||
}
|
||||
InnerCliNpmResolverRef::Byonm(_) => {
|
||||
let (root_dir, files) = self.build_vfs()?.into_dir_and_files();
|
||||
let root_path = root_dir_url.inner().to_file_path().unwrap();
|
||||
let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner()
|
||||
{
|
||||
InnerCliNpmResolverRef::Managed(managed) => {
|
||||
let snapshot =
|
||||
managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
|
||||
if !snapshot.as_serialized().packages.is_empty() {
|
||||
let (root_dir, files) = self
|
||||
.build_vfs(&root_path, cli_options)?
|
||||
.into_dir_and_files();
|
||||
eszip.add_npm_snapshot(snapshot);
|
||||
(
|
||||
Some(root_dir),
|
||||
files,
|
||||
Some(NodeModules::Byonm {
|
||||
package_json_deps: self.package_json_deps_provider.deps().map(
|
||||
|deps| SerializablePackageJsonDeps::from_deps(deps.clone()),
|
||||
Some(NodeModules::Managed {
|
||||
node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|
||||
|path| {
|
||||
root_dir_url
|
||||
.specifier_key(
|
||||
&ModuleSpecifier::from_directory_path(path).unwrap(),
|
||||
)
|
||||
.into_owned()
|
||||
},
|
||||
),
|
||||
}),
|
||||
)
|
||||
} else {
|
||||
(None, Vec::new(), None)
|
||||
}
|
||||
};
|
||||
}
|
||||
InnerCliNpmResolverRef::Byonm(resolver) => {
|
||||
let (root_dir, files) = self
|
||||
.build_vfs(&root_path, cli_options)?
|
||||
.into_dir_and_files();
|
||||
(
|
||||
Some(root_dir),
|
||||
files,
|
||||
Some(NodeModules::Byonm {
|
||||
root_node_modules_dir: resolver.root_node_modules_path().map(
|
||||
|node_modules_dir| {
|
||||
root_dir_url
|
||||
.specifier_key(
|
||||
&ModuleSpecifier::from_directory_path(node_modules_dir)
|
||||
.unwrap(),
|
||||
)
|
||||
.into_owned()
|
||||
},
|
||||
),
|
||||
}),
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let env_vars_from_env_file = match cli_options.env_file_name() {
|
||||
Some(env_filename) => {
|
||||
log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename);
|
||||
get_file_env_vars(env_filename.to_string())?
|
||||
}
|
||||
None => Default::default(),
|
||||
};
|
||||
|
||||
let metadata = Metadata {
|
||||
argv: compile_flags.args.clone(),
|
||||
|
@ -629,8 +607,34 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
log_level: cli_options.log_level(),
|
||||
ca_stores: cli_options.ca_stores().clone(),
|
||||
ca_data,
|
||||
entrypoint: entrypoint.clone(),
|
||||
maybe_import_map,
|
||||
env_vars_from_env_file,
|
||||
entrypoint_key: root_dir_url.specifier_key(entrypoint).into_owned(),
|
||||
workspace_resolver: SerializedWorkspaceResolver {
|
||||
import_map: self.workspace_resolver.maybe_import_map().map(|i| {
|
||||
SerializedWorkspaceResolverImportMap {
|
||||
specifier: if i.base_url().scheme() == "file" {
|
||||
root_dir_url.specifier_key(i.base_url()).into_owned()
|
||||
} else {
|
||||
// just make a remote url local
|
||||
"deno.json".to_string()
|
||||
},
|
||||
json: i.to_json(),
|
||||
}
|
||||
}),
|
||||
package_jsons: self
|
||||
.workspace_resolver
|
||||
.package_jsons()
|
||||
.map(|pkg_json| {
|
||||
(
|
||||
root_dir_url
|
||||
.specifier_key(&pkg_json.specifier())
|
||||
.into_owned(),
|
||||
serde_json::to_value(pkg_json).unwrap(),
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
pkg_json_resolution: self.workspace_resolver.pkg_json_dep_resolution(),
|
||||
},
|
||||
node_modules,
|
||||
disable_deprecated_api_warning: cli_options
|
||||
.disable_deprecated_api_warning,
|
||||
|
@ -653,7 +657,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
)
|
||||
}
|
||||
|
||||
fn build_vfs(&self) -> Result<VfsBuilder, AnyError> {
|
||||
fn build_vfs(
|
||||
&self,
|
||||
root_path: &Path,
|
||||
cli_options: &CliOptions,
|
||||
) -> Result<VfsBuilder, AnyError> {
|
||||
fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
|
||||
if system_info != &NpmSystemInfo::default() {
|
||||
log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
|
||||
|
@ -664,7 +672,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
InnerCliNpmResolverRef::Managed(npm_resolver) => {
|
||||
if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
|
||||
maybe_warn_different_system(&self.npm_system_info);
|
||||
let mut builder = VfsBuilder::new(node_modules_path.clone())?;
|
||||
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
|
||||
builder.add_dir_recursive(node_modules_path)?;
|
||||
Ok(builder)
|
||||
} else {
|
||||
|
@ -678,23 +686,77 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?;
|
||||
builder.add_dir_recursive(&folder)?;
|
||||
}
|
||||
// overwrite the root directory's name to obscure the user's registry url
|
||||
builder.set_root_dir_name("node_modules".to_string());
|
||||
|
||||
// Flatten all the registries folders into a single "node_modules/localhost" folder
|
||||
// that will be used by denort when loading the npm cache. This avoids us exposing
|
||||
// the user's private registry information and means we don't have to bother
|
||||
// serializing all the different registry config into the binary.
|
||||
builder.with_root_dir(|root_dir| {
|
||||
root_dir.name = "node_modules".to_string();
|
||||
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
|
||||
let mut localhost_entries = IndexMap::new();
|
||||
for entry in std::mem::take(&mut root_dir.entries) {
|
||||
match entry {
|
||||
VfsEntry::Dir(dir) => {
|
||||
for entry in dir.entries {
|
||||
log::debug!(
|
||||
"Flattening {} into node_modules",
|
||||
entry.name()
|
||||
);
|
||||
if let Some(existing) =
|
||||
localhost_entries.insert(entry.name().to_string(), entry)
|
||||
{
|
||||
panic!(
|
||||
"Unhandled scenario where a duplicate entry was found: {:?}",
|
||||
existing
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
VfsEntry::File(_) | VfsEntry::Symlink(_) => {
|
||||
new_entries.push(entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
new_entries.push(VfsEntry::Dir(VirtualDirectory {
|
||||
name: "localhost".to_string(),
|
||||
entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
|
||||
}));
|
||||
// needs to be sorted by name
|
||||
new_entries.sort_by(|a, b| a.name().cmp(b.name()));
|
||||
root_dir.entries = new_entries;
|
||||
});
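          // Sketch of the flattening above (registry hosts are hypothetical):
          // entries such as <cache>/registry.npmjs.org/<pkg> and
          // <cache>/npm.example.com/<pkg> all end up merged under a single
          // node_modules/localhost/<pkg> directory in the embedded vfs.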

          Ok(builder)
        }
      }
      InnerCliNpmResolverRef::Byonm(npm_resolver) => {
      InnerCliNpmResolverRef::Byonm(_) => {
        maybe_warn_different_system(&self.npm_system_info);
        // the root_node_modules directory will always exist for byonm
        let node_modules_path = npm_resolver.root_node_modules_path().unwrap();
        let parent_path = node_modules_path.parent().unwrap();
        let mut builder = VfsBuilder::new(parent_path.to_path_buf())?;
        let package_json_path = parent_path.join("package.json");
        if package_json_path.exists() {
          builder.add_file_at_path(&package_json_path)?;
        let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
        for pkg_json in cli_options.workspace().package_jsons() {
          builder.add_file_at_path(&pkg_json.path)?;
        }
        if node_modules_path.exists() {
          builder.add_dir_recursive(node_modules_path)?;
        // traverse and add all the node_modules directories in the workspace
        let mut pending_dirs = VecDeque::new();
        pending_dirs.push_back(
          cli_options.workspace().root_dir().to_file_path().unwrap(),
        );
        while let Some(pending_dir) = pending_dirs.pop_front() {
          let entries = fs::read_dir(&pending_dir).with_context(|| {
            format!("Failed reading: {}", pending_dir.display())
          })?;
          for entry in entries {
            let entry = entry?;
            let path = entry.path();
            if !path.is_dir() {
              continue;
            }
            if path.ends_with("node_modules") {
              builder.add_dir_recursive(&path)?;
            } else {
              pending_dirs.push_back(path);
            }
          }
        }
        Ok(builder)
      }

@@ -702,6 +764,21 @@ impl<'a> DenoCompileBinaryWriter<'a> {
  }
}

/// This function returns the environment variables specified
/// in the passed environment file.
fn get_file_env_vars(
  filename: String,
) -> Result<HashMap<String, String>, dotenvy::Error> {
  let mut file_env_vars = HashMap::new();
  for item in dotenvy::from_filename_iter(filename)? {
    let Ok((key, val)) = item else {
      continue; // this failure will be warned about on load
    };
    file_env_vars.insert(key, val);
  }
  Ok(file_env_vars)
}
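
// Sketch (hypothetical .env contents): a file containing `FOO=bar` plus one
// malformed line yields {"FOO": "bar"} here; the malformed line is skipped
// and is only warned about when the compiled program loads the file.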

/// This function sets the subsystem field in the PE header to 2 (GUI subsystem)
/// For more information about the PE header: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format
fn set_windows_binary_to_gui(bin: &mut [u8]) -> Result<(), AnyError> {

@@ -145,6 +145,26 @@ impl FileSystem for DenoCompileFileSystem {
    RealFs.chown_async(path, uid, gid).await
  }

  fn lchown_sync(
    &self,
    path: &Path,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.lchown_sync(path, uid, gid)
  }

  async fn lchown_async(
    &self,
    path: PathBuf,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.lchown_async(path, uid, gid).await
  }

  fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.remove_sync(path, recursive)

@@ -349,4 +369,29 @@ impl FileSystem for DenoCompileFileSystem {
      .utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
      .await
  }

  fn lutime_sync(
    &self,
    path: &Path,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.lutime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
  }

  async fn lutime_async(
    &self,
    path: PathBuf,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs
      .lutime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
      .await
  }
}

@@ -5,12 +5,48 @@
#![allow(dead_code)]
#![allow(unused_imports)]

use deno_ast::MediaType;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionError;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::v8_set_flags;
use deno_core::FeatureChecker;
use deno_core::ModuleLoader;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use eszip::EszipRelativeFileBaseUrl;
use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::NodeResolutionMode;
use std::borrow::Cow;
use std::rc::Rc;
use std::sync::Arc;

use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::npm_pkg_req_ref_to_binary_command;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::PackageJsonDepsProvider;
use crate::args::PackageJsonInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::cache::Caches;
use crate::cache::DenoDirProvider;

@@ -25,7 +61,6 @@ use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::NpmCacheDir;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliNodeResolver;
use crate::resolver::MappedSpecifierResolver;
use crate::resolver::NpmModuleLoader;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;

@@ -34,34 +69,6 @@ use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions;
use crate::worker::ModuleLoaderAndSourceMapGetter;
use crate::worker::ModuleLoaderFactory;
use deno_ast::MediaType;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::v8_set_flags;
use deno_core::FeatureChecker;
use deno_core::ModuleLoader;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use import_map::parse_from_json;
use std::rc::Rc;
use std::sync::Arc;

pub mod binary;
mod file_system;

@@ -75,9 +82,43 @@ use self::binary::load_npm_vfs;
use self::binary::Metadata;
use self::file_system::DenoCompileFileSystem;

struct SharedModuleLoaderState {
struct WorkspaceEszipModule {
  specifier: ModuleSpecifier,
  inner: eszip::Module,
}

struct WorkspaceEszip {
  eszip: eszip::EszipV2,
  mapped_specifier_resolver: MappedSpecifierResolver,
  root_dir_url: Arc<ModuleSpecifier>,
}

impl WorkspaceEszip {
  pub fn get_module(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<WorkspaceEszipModule> {
    if specifier.scheme() == "file" {
      let specifier_key = EszipRelativeFileBaseUrl::new(&self.root_dir_url)
        .specifier_key(specifier);
      let module = self.eszip.get_module(&specifier_key)?;
      let specifier = self.root_dir_url.join(&module.specifier).unwrap();
      Some(WorkspaceEszipModule {
        specifier,
        inner: module,
      })
    } else {
      let module = self.eszip.get_module(specifier.as_str())?;
      Some(WorkspaceEszipModule {
        specifier: ModuleSpecifier::parse(&module.specifier).unwrap(),
        inner: module,
      })
    }
  }
}
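
// Sketch: with a root_dir_url of `file:///project/` (hypothetical), a module
// stored in the eszip under the relative key `src/main.ts` resolves back to
// the absolute specifier `file:///project/src/main.ts`; non-file specifiers
// are looked up verbatim.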

struct SharedModuleLoaderState {
  eszip: WorkspaceEszip,
  workspace_resolver: WorkspaceResolver,
  node_resolver: Arc<CliNodeResolver>,
  npm_module_loader: Arc<NpmModuleLoader>,
}

@@ -111,55 +152,119 @@ impl ModuleLoader for EmbeddedModuleLoader {
      })?
    };

    if let Some(result) = self.shared.node_resolver.resolve_if_in_npm_package(
      specifier,
      &referrer,
      NodeResolutionMode::Execution,
    ) {
      return match result? {
        Some(res) => Ok(res.into_url()),
        None => Err(generic_error("not found")),
      };
    if self.shared.node_resolver.in_npm_package(&referrer) {
      return Ok(
        self
          .shared
          .node_resolver
          .resolve(specifier, &referrer, NodeResolutionMode::Execution)?
          .into_url(),
      );
    }

    let maybe_mapped = self
      .shared
      .mapped_specifier_resolver
      .resolve(specifier, &referrer)?
      .into_specifier();
    let mapped_resolution =
      self.shared.workspace_resolver.resolve(specifier, &referrer);

    // npm specifier
    let specifier_text = maybe_mapped
      .as_ref()
      .map(|r| r.as_str())
      .unwrap_or(specifier);
    if let Ok(reference) = NpmPackageReqReference::from_str(specifier_text) {
      return self
        .shared
        .node_resolver
        .resolve_req_reference(
          &reference,
    match mapped_resolution {
      Ok(MappedResolution::WorkspaceNpmPackage {
        target_pkg_json: pkg_json,
        sub_path,
        ..
      }) => Ok(
        self
          .shared
          .node_resolver
          .resolve_package_sub_path_from_deno_module(
            pkg_json.dir_path(),
            sub_path.as_deref(),
            Some(&referrer),
            NodeResolutionMode::Execution,
          )?
          .into_url(),
      ),
      Ok(MappedResolution::PackageJson {
        dep_result,
        sub_path,
        alias,
        ..
      }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? {
        PackageJsonDepValue::Req(req) => self
          .shared
          .node_resolver
          .resolve_req_with_sub_path(
            req,
            sub_path.as_deref(),
            &referrer,
            NodeResolutionMode::Execution,
          )
          .map(|res| res.into_url()),
        PackageJsonDepValue::Workspace(version_req) => {
          let pkg_folder = self
            .shared
            .workspace_resolver
            .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
              alias,
              version_req,
            )?;
          Ok(
            self
              .shared
              .node_resolver
              .resolve_package_sub_path_from_deno_module(
                pkg_folder,
                sub_path.as_deref(),
                Some(&referrer),
                NodeResolutionMode::Execution,
              )?
              .into_url(),
          )
        }
      },
      Ok(MappedResolution::Normal(specifier))
      | Ok(MappedResolution::ImportMap(specifier)) => {
        if let Ok(reference) =
          NpmPackageReqReference::from_specifier(&specifier)
        {
          return self
            .shared
            .node_resolver
            .resolve_req_reference(
              &reference,
              &referrer,
              NodeResolutionMode::Execution,
            )
            .map(|res| res.into_url());
        }

        if specifier.scheme() == "jsr" {
          if let Some(module) = self.shared.eszip.get_module(&specifier) {
            return Ok(module.specifier);
          }
        }

        Ok(
          self
            .shared
            .node_resolver
            .handle_if_in_node_modules(&specifier)?
            .unwrap_or(specifier),
        )
      }
      Err(err)
        if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
      {
        let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg(
          specifier,
          &referrer,
          NodeResolutionMode::Execution,
        )
        .map(|res| res.into_url());
    }

    let specifier = match maybe_mapped {
      Some(resolved) => resolved,
      None => deno_core::resolve_import(specifier, referrer.as_str())?,
    };

    if specifier.scheme() == "jsr" {
      if let Some(module) = self.shared.eszip.get_module(specifier.as_str()) {
        return Ok(ModuleSpecifier::parse(&module.specifier).unwrap());
        )?;
        if let Some(res) = maybe_res {
          return Ok(res.into_url());
        }
        Err(err.into())
      }
      Err(err) => Err(err.into()),
    }

    self
      .shared
      .node_resolver
      .handle_if_in_node_modules(specifier)
  }

  fn load(

@@ -215,27 +320,23 @@ impl ModuleLoader for EmbeddedModuleLoader {
      );
    }

    let Some(module) =
      self.shared.eszip.get_module(original_specifier.as_str())
    else {
    let Some(module) = self.shared.eszip.get_module(original_specifier) else {
      return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!(
        "Module not found: {}",
        original_specifier
      ))));
    };
    let original_specifier = original_specifier.clone();
    let found_specifier =
      ModuleSpecifier::parse(&module.specifier).expect("invalid url in eszip");

    deno_core::ModuleLoadResponse::Async(
      async move {
        let code = module.source().await.ok_or_else(|| {
        let code = module.inner.source().await.ok_or_else(|| {
          type_error(format!("Module not found: {}", original_specifier))
        })?;
        let code = arc_u8_to_arc_str(code)
          .map_err(|_| type_error("Module source is not utf-8"))?;
        Ok(deno_core::ModuleSource::new_with_redirect(
          match module.kind {
          match module.inner.kind {
            eszip::ModuleKind::JavaScript => ModuleType::JavaScript,
            eszip::ModuleKind::Json => ModuleType::Json,
            eszip::ModuleKind::Jsonc => {

@@ -247,7 +348,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
          },
          ModuleSourceCode::String(code.into()),
          &original_specifier,
          &found_specifier,
          &module.specifier,
          None,
        ))
      }

@@ -285,7 +386,6 @@ impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
      root_permissions,
      dynamic_permissions,
    }),
    source_map_getter: None,
  }
}

@@ -300,7 +400,6 @@ impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
      root_permissions,
      dynamic_permissions,
    }),
    source_map_getter: None,
    }
  }
}

@@ -324,10 +423,10 @@ pub async fn run(
  mut eszip: eszip::EszipV2,
  metadata: Metadata,
) -> Result<i32, AnyError> {
  let main_module = &metadata.entrypoint;
  let current_exe_path = std::env::current_exe().unwrap();
  let current_exe_name =
    current_exe_path.file_name().unwrap().to_string_lossy();
  let maybe_cwd = std::env::current_dir().ok();
  let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
  let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
    ca_stores: metadata.ca_stores,

@@ -341,123 +440,117 @@ pub async fn run(
  ));
  // use a dummy npm registry url
  let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
  let root_path = std::env::temp_dir()
    .join(format!("deno-compile-{}", current_exe_name))
    .join("node_modules");
  let npm_cache_dir =
    NpmCacheDir::new(root_path.clone(), vec![npm_registry_url.clone()]);
  let root_path =
    std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name));
  let root_dir_url =
    Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap());
  let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
  let root_node_modules_path = root_path.join("node_modules");
  let npm_cache_dir = NpmCacheDir::new(
    root_node_modules_path.clone(),
    vec![npm_registry_url.clone()],
  );
  let npm_global_cache_dir = npm_cache_dir.get_cache_location();
  let cache_setting = CacheSetting::Only;
  let (package_json_deps_provider, fs, npm_resolver, maybe_vfs_root) =
    match metadata.node_modules {
      Some(binary::NodeModules::Managed {
        node_modules_dir,
        package_json_deps,
      }) => {
        // this will always have a snapshot
        let snapshot = eszip.take_npm_snapshot().unwrap();
        let vfs_root_dir_path = if node_modules_dir {
          root_path
        } else {
          npm_cache_dir.root_dir().to_owned()
        };
        let vfs = load_npm_vfs(vfs_root_dir_path.clone())
          .context("Failed to load npm vfs.")?;
        let maybe_node_modules_path = if node_modules_dir {
          Some(vfs.root().to_path_buf())
        } else {
          None
        };
        let package_json_deps_provider =
          Arc::new(PackageJsonDepsProvider::new(
            package_json_deps.map(|serialized| serialized.into_deps()),
          ));
        let fs = Arc::new(DenoCompileFileSystem::new(vfs))
          as Arc<dyn deno_fs::FileSystem>;
        let npm_resolver =
          create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
            CliNpmResolverManagedCreateOptions {
              snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
                snapshot,
              )),
              maybe_lockfile: None,
              fs: fs.clone(),
              http_client_provider: http_client_provider.clone(),
              npm_global_cache_dir,
              cache_setting,
              text_only_progress_bar: progress_bar,
              maybe_node_modules_path,
              package_json_deps_provider: package_json_deps_provider.clone(),
              npm_system_info: Default::default(),
              // Packages from different registries are already inlined in the ESZip,
              // so no need to create actual `.npmrc` configuration.
              npmrc: create_default_npmrc(),
            },
          ))
          .await?;
        (
          package_json_deps_provider,
          fs,
          npm_resolver,
          Some(vfs_root_dir_path),
        )
      }
      Some(binary::NodeModules::Byonm { package_json_deps }) => {
        let vfs_root_dir_path = root_path;
        let vfs = load_npm_vfs(vfs_root_dir_path.clone())
          .context("Failed to load npm vfs.")?;
        let node_modules_path = vfs.root().join("node_modules");
        let package_json_deps_provider =
          Arc::new(PackageJsonDepsProvider::new(
            package_json_deps.map(|serialized| serialized.into_deps()),
          ));
        let fs = Arc::new(DenoCompileFileSystem::new(vfs))
          as Arc<dyn deno_fs::FileSystem>;
        let npm_resolver =
          create_cli_npm_resolver(CliNpmResolverCreateOptions::Byonm(
            CliNpmResolverByonmCreateOptions {
              fs: fs.clone(),
              root_node_modules_dir: node_modules_path,
            },
          ))
          .await?;
        (
          package_json_deps_provider,
          fs,
          npm_resolver,
          Some(vfs_root_dir_path),
        )
      }
      None => {
        let package_json_deps_provider =
          Arc::new(PackageJsonDepsProvider::new(None));
        let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
        let npm_resolver =
          create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
            CliNpmResolverManagedCreateOptions {
              snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
              maybe_lockfile: None,
              fs: fs.clone(),
              http_client_provider: http_client_provider.clone(),
              npm_global_cache_dir,
              cache_setting,
              text_only_progress_bar: progress_bar,
              maybe_node_modules_path: None,
              package_json_deps_provider: package_json_deps_provider.clone(),
              npm_system_info: Default::default(),
              // Packages from different registries are already inlined in the ESZip,
              // so no need to create actual `.npmrc` configuration.
              npmrc: create_default_npmrc(),
            },
          ))
          .await?;
        (package_json_deps_provider, fs, npm_resolver, None)
      }
    };
  let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules {
    Some(binary::NodeModules::Managed { node_modules_dir }) => {
      // this will always have a snapshot
      let snapshot = eszip.take_npm_snapshot().unwrap();
      let vfs_root_dir_path = if node_modules_dir.is_some() {
        root_path.clone()
      } else {
        npm_cache_dir.root_dir().to_owned()
      };
      let vfs = load_npm_vfs(vfs_root_dir_path.clone())
        .context("Failed to load npm vfs.")?;
      let maybe_node_modules_path = node_modules_dir
        .map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir));
      let fs = Arc::new(DenoCompileFileSystem::new(vfs))
        as Arc<dyn deno_fs::FileSystem>;
      let npm_resolver =
        create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
          CliNpmResolverManagedCreateOptions {
            snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
              snapshot,
            )),
            maybe_lockfile: None,
            fs: fs.clone(),
            http_client_provider: http_client_provider.clone(),
            npm_global_cache_dir,
            cache_setting,
            text_only_progress_bar: progress_bar,
            maybe_node_modules_path,
            npm_system_info: Default::default(),
            package_json_deps_provider: Arc::new(
              // this is only used for installing packages, which isn't necessary with deno compile
              PackageJsonInstallDepsProvider::empty(),
            ),
            // create an npmrc that uses the fake npm_registry_url to resolve packages
            npmrc: Arc::new(ResolvedNpmRc {
              default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
                registry_url: npm_registry_url.clone(),
                config: Default::default(),
              },
              scopes: Default::default(),
              registry_configs: Default::default(),
            }),
            lifecycle_scripts: Default::default(),
          },
        ))
        .await?;
      (fs, npm_resolver, Some(vfs_root_dir_path))
    }
    Some(binary::NodeModules::Byonm {
      root_node_modules_dir,
    }) => {
      let vfs_root_dir_path = root_path.clone();
      let vfs = load_npm_vfs(vfs_root_dir_path.clone())
        .context("Failed to load vfs.")?;
      let root_node_modules_dir =
        root_node_modules_dir.map(|p| vfs.root().join(p));
      let fs = Arc::new(DenoCompileFileSystem::new(vfs))
        as Arc<dyn deno_fs::FileSystem>;
      let npm_resolver = create_cli_npm_resolver(
        CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
          fs: fs.clone(),
          root_node_modules_dir,
        }),
      )
      .await?;
      (fs, npm_resolver, Some(vfs_root_dir_path))
    }
    None => {
      let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
      let npm_resolver =
        create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
          CliNpmResolverManagedCreateOptions {
            snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
            maybe_lockfile: None,
            fs: fs.clone(),
            http_client_provider: http_client_provider.clone(),
            npm_global_cache_dir,
            cache_setting,
            text_only_progress_bar: progress_bar,
            maybe_node_modules_path: None,
            npm_system_info: Default::default(),
            package_json_deps_provider: Arc::new(
              // this is only used for installing packages, which isn't necessary with deno compile
              PackageJsonInstallDepsProvider::empty(),
            ),
            // Packages from different registries are already inlined in the ESZip,
            // so no need to create actual `.npmrc` configuration.
            npmrc: create_default_npmrc(),
            lifecycle_scripts: Default::default(),
          },
        ))
        .await?;
      (fs, npm_resolver, None)
    }
  };

  let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
  let node_resolver = Arc::new(NodeResolver::new(
    fs.clone(),
    deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
    npm_resolver.clone().into_npm_resolver(),
  ));
  let cjs_resolutions = Arc::new(CjsResolutionStore::default());

@@ -467,26 +560,60 @@ pub async fn run(
    CliCjsCodeAnalyzer::new(node_analysis_cache, fs.clone());
  let node_code_translator = Arc::new(NodeCodeTranslator::new(
    cjs_esm_code_analyzer,
    fs.clone(),
    deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
    node_resolver.clone(),
    npm_resolver.clone().into_npm_resolver(),
  ));
  let maybe_import_map = metadata.maybe_import_map.map(|(base, source)| {
    Arc::new(parse_from_json(&base, &source).unwrap().import_map)
  });
  let workspace_resolver = {
    let import_map = match metadata.workspace_resolver.import_map {
      Some(import_map) => Some(
        import_map::parse_from_json_with_options(
          root_dir_url.join(&import_map.specifier).unwrap(),
          &import_map.json,
          import_map::ImportMapOptions {
            address_hook: None,
            expand_imports: true,
          },
        )?
        .import_map,
      ),
      None => None,
    };
    let pkg_jsons = metadata
      .workspace_resolver
      .package_jsons
      .into_iter()
      .map(|(relative_path, json)| {
        let path = root_dir_url
          .join(&relative_path)
          .unwrap()
          .to_file_path()
          .unwrap();
        let pkg_json =
          deno_package_json::PackageJson::load_from_value(path, json);
        Arc::new(pkg_json)
      })
      .collect();
    WorkspaceResolver::new_raw(
      root_dir_url.clone(),
      import_map,
      pkg_jsons,
      metadata.workspace_resolver.pkg_json_resolution,
    )
  };
  let cli_node_resolver = Arc::new(CliNodeResolver::new(
    Some(cjs_resolutions.clone()),
    cjs_resolutions.clone(),
    fs.clone(),
    node_resolver.clone(),
    npm_resolver.clone(),
  ));
  let module_loader_factory = StandaloneModuleLoaderFactory {
    shared: Arc::new(SharedModuleLoaderState {
      eszip,
      mapped_specifier_resolver: MappedSpecifierResolver::new(
        maybe_import_map.clone(),
        package_json_deps_provider.clone(),
      ),
      eszip: WorkspaceEszip {
        eszip,
        root_dir_url,
      },
      workspace_resolver,
      node_resolver: cli_node_resolver.clone(),
      npm_module_loader: Arc::new(NpmModuleLoader::new(
        cjs_resolutions,

@@ -498,7 +625,6 @@ pub async fn run(
  };

  let permissions = {
    let maybe_cwd = std::env::current_dir().ok();
    let mut permissions =
      metadata.permissions.to_options(maybe_cwd.as_deref())?;
    // if running with an npm vfs, grant read access to it

@@ -561,7 +687,7 @@ pub async fn run(
    is_npm_main: main_module.scheme() == "npm",
    skip_op_registration: true,
    location: metadata.location,
    argv0: NpmPackageReqReference::from_specifier(main_module)
    argv0: NpmPackageReqReference::from_specifier(&main_module)
      .ok()
      .map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref))
      .or(std::env::args().next()),

@@ -571,7 +697,6 @@ pub async fn run(
    unsafely_ignore_certificate_errors: metadata
      .unsafely_ignore_certificate_errors,
    unstable: metadata.unstable_config.legacy_flag_enabled,
    maybe_root_package_json_deps: package_json_deps_provider.deps().cloned(),
    create_hmr_runner: None,
    create_coverage_collector: None,
  },

@@ -592,11 +717,7 @@ pub async fn run(
  deno_core::JsRuntime::init_platform(None);

  let mut worker = worker_factory
    .create_main_worker(
      WorkerExecutionMode::Run,
      main_module.clone(),
      permissions,
    )
    .create_main_worker(WorkerExecutionMode::Run, main_module, permissions)
    .await?;

  let exit_code = worker.run().await?;

@@ -12,6 +12,7 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;

use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;

@@ -55,9 +56,8 @@ impl VfsBuilder {
      root_dir: VirtualDirectory {
        name: root_path
          .file_stem()
          .unwrap()
          .to_string_lossy()
          .into_owned(),
          .map(|s| s.to_string_lossy().into_owned())
          .unwrap_or("root".to_string()),
        entries: Vec::new(),
      },
      root_path,

@@ -67,13 +67,19 @@ impl VfsBuilder {
    })
  }

  pub fn set_root_dir_name(&mut self, name: String) {
    self.root_dir.name = name;
  pub fn with_root_dir<R>(
    &mut self,
    with_root: impl FnOnce(&mut VirtualDirectory) -> R,
  ) -> R {
    with_root(&mut self.root_dir)
  }

  pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
    let path = canonicalize_path(path)?;
    self.add_dir_recursive_internal(&path)
    let target_path = canonicalize_path(path)?;
    if path != target_path {
      self.add_symlink(path, &target_path)?;
    }
    self.add_dir_recursive_internal(&target_path)
  }

  fn add_dir_recursive_internal(

@@ -92,7 +98,7 @@ impl VfsBuilder {
      if file_type.is_dir() {
        self.add_dir_recursive_internal(&path)?;
      } else if file_type.is_file() {
        self.add_file_at_path(&path)?;
        self.add_file_at_path_not_symlink(&path)?;
      } else if file_type.is_symlink() {
        match util::fs::canonicalize_path(&path) {
          Ok(target) => {

@@ -175,6 +181,17 @@ impl VfsBuilder {
  }

  pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
    let target_path = canonicalize_path(path)?;
    if target_path != path {
      self.add_symlink(path, &target_path)?;
    }
    self.add_file_at_path_not_symlink(&target_path)
  }

  pub fn add_file_at_path_not_symlink(
    &mut self,
    path: &Path,
  ) -> Result<(), AnyError> {
    let file_bytes = std::fs::read(path)
      .with_context(|| format!("Reading {}", path.display()))?;
    self.add_file(path, file_bytes)

@@ -195,7 +212,9 @@ impl VfsBuilder {
    let name = path.file_name().unwrap().to_string_lossy();
    let data_len = data.len();
    match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
      Ok(_) => unreachable!(),
      Ok(_) => {
        // already added, just ignore
      }
      Err(insert_index) => {
        dir.entries.insert(
          insert_index,

@@ -228,6 +247,10 @@ impl VfsBuilder {
      target.display()
    );
    let dest = self.path_relative_root(target)?;
    if dest == self.path_relative_root(path)? {
      // it's the same, ignore
      return Ok(());
    }
    let dir = self.add_dir(path.parent().unwrap())?;
    let name = path.file_name().unwrap().to_string_lossy();
    match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {

506
cli/task_runner.rs
Normal file

@@ -0,0 +1,506 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;

use deno_ast::MediaType;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::future::LocalBoxFuture;
use deno_runtime::deno_node::NodeResolver;
use deno_semver::package::PackageNv;
use deno_task_shell::ExecutableCommand;
use deno_task_shell::ExecuteResult;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;
use lazy_regex::Lazy;
use regex::Regex;
use tokio::task::LocalSet;

use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::npm::ManagedCliNpmResolver;

pub fn get_script_with_args(script: &str, argv: &[String]) -> String {
  let additional_args = argv
    .iter()
    // surround all the additional arguments in double quotes
    // and sanitize any command substitution
    .map(|a| format!("\"{}\"", a.replace('"', "\\\"").replace('$', "\\$")))
    .collect::<Vec<_>>()
    .join(" ");
  let script = format!("{script} {additional_args}");
  script.trim().to_owned()
}
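
// Sketch (hypothetical inputs): get_script_with_args("echo", &["a b".into(),
// "$HOME".into()]) returns `echo "a b" "\$HOME"` - quoting keeps arguments
// with spaces intact, and the escaped `$` blocks variable expansion and
// command substitution inside the shell script.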

pub struct RunTaskOptions<'a> {
  pub task_name: &'a str,
  pub script: &'a str,
  pub cwd: &'a Path,
  pub init_cwd: &'a Path,
  pub env_vars: HashMap<String, String>,
  pub argv: &'a [String],
  pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
  pub root_node_modules_dir: Option<&'a Path>,
}

pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>;

pub async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
  let script = get_script_with_args(opts.script, opts.argv);
  let seq_list = deno_task_shell::parser::parse(&script)
    .with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
  let env_vars =
    prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
  let local = LocalSet::new();
  let future = deno_task_shell::execute(
    seq_list,
    env_vars,
    opts.cwd,
    opts.custom_commands,
  );
  Ok(local.run_until(future).await)
}

fn prepare_env_vars(
  mut env_vars: HashMap<String, String>,
  initial_cwd: &Path,
  node_modules_dir: Option<&Path>,
) -> HashMap<String, String> {
  const INIT_CWD_NAME: &str = "INIT_CWD";
  if !env_vars.contains_key(INIT_CWD_NAME) {
    // if not set, set an INIT_CWD env var that has the cwd
    env_vars.insert(
      INIT_CWD_NAME.to_string(),
      initial_cwd.to_string_lossy().to_string(),
    );
  }
  if let Some(node_modules_dir) = node_modules_dir {
    prepend_to_path(
      &mut env_vars,
      node_modules_dir.join(".bin").to_string_lossy().to_string(),
    );
  }
  env_vars
}
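
// Sketch (hypothetical paths): starting a task from /proj with a node_modules
// dir of /proj/node_modules runs the script with INIT_CWD=/proj and PATH
// prefixed by /proj/node_modules/.bin, mirroring the environment npm sets up
// for lifecycle scripts.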

fn prepend_to_path(env_vars: &mut HashMap<String, String>, value: String) {
  match env_vars.get_mut("PATH") {
    Some(path) => {
      if path.is_empty() {
        *path = value;
      } else {
        *path =
          format!("{}{}{}", value, if cfg!(windows) { ";" } else { ":" }, path);
      }
    }
    None => {
      env_vars.insert("PATH".to_string(), value);
    }
  }
}

pub fn real_env_vars() -> HashMap<String, String> {
  std::env::vars()
    .map(|(k, v)| {
      if cfg!(windows) {
        (k.to_uppercase(), v)
      } else {
        (k, v)
      }
    })
    .collect::<HashMap<String, String>>()
}
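
// Note: Windows treats environment variable names as case-insensitive, so
// they are normalized to uppercase here to make later lookups such as
// env_vars.get_mut("PATH") reliable across platforms.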
|
||||
|
||||
// WARNING: Do not depend on this env var in user code. It's not stable API.
|
||||
pub(crate) const USE_PKG_JSON_HIDDEN_ENV_VAR_NAME: &str =
|
||||
"DENO_INTERNAL_TASK_USE_PKG_JSON";
|
||||
|
||||
pub struct NpmCommand;
|
||||
|
||||
impl ShellCommand for NpmCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
if context.args.first().map(|s| s.as_str()) == Some("run")
|
||||
&& context.args.len() > 2
|
||||
// for now, don't run any npm scripts that have a flag because
|
||||
// we don't handle stuff like `--workspaces` properly
|
||||
&& !context.args.iter().any(|s| s.starts_with('-'))
|
||||
{
|
||||
// run with deno task instead
|
||||
let mut args = Vec::with_capacity(context.args.len());
|
||||
args.push("task".to_string());
|
||||
args.extend(context.args.iter().skip(1).cloned());
|
||||
|
||||
let mut state = context.state;
|
||||
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
|
||||
return ExecutableCommand::new(
|
||||
"deno".to_string(),
|
||||
std::env::current_exe().unwrap(),
|
||||
)
|
||||
.execute(ShellCommandContext {
|
||||
args,
|
||||
state,
|
||||
..context
|
||||
});
|
||||
}
|
||||
|
||||
// fallback to running the real npm command
|
||||
let npm_path = match context.state.resolve_command_path("npm") {
|
||||
Ok(path) => path,
|
||||
Err(err) => {
|
||||
let _ = context.stderr.write_line(&format!("{}", err));
|
||||
return Box::pin(futures::future::ready(
|
||||
ExecuteResult::from_exit_code(err.exit_code()),
|
||||
));
|
||||
}
|
||||
};
|
||||
ExecutableCommand::new("npm".to_string(), npm_path).execute(context)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NodeCommand;
|
||||
|
||||
impl ShellCommand for NodeCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
// run with deno if it's a simple invocation, fall back to node
|
||||
// if there are extra flags
|
||||
let mut args = Vec::with_capacity(context.args.len());
|
||||
if context.args.len() > 1
|
||||
&& (
|
||||
context.args[0].starts_with('-') // has a flag
|
||||
|| !matches!(
|
||||
MediaType::from_str(&context.args[0]),
|
||||
MediaType::Cjs | MediaType::Mjs | MediaType::JavaScript
|
||||
)
|
||||
// not a script file
|
||||
)
|
||||
{
|
||||
return ExecutableCommand::new(
|
||||
"node".to_string(),
|
||||
"node".to_string().into(),
|
||||
)
|
||||
.execute(context);
|
||||
}
|
||||
args.extend(["run", "-A"].into_iter().map(|s| s.to_string()));
|
||||
args.extend(context.args.iter().cloned());
|
||||
|
||||
let mut state = context.state;
|
||||
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
|
||||
ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap())
|
||||
.execute(ShellCommandContext {
|
||||
args,
|
||||
state,
|
||||
..context
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NodeGypCommand;
|
||||
|
||||
impl ShellCommand for NodeGypCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
// at the moment this shell command is just to give a warning if node-gyp is not found
|
||||
// in the future, we could try to run/install node-gyp for the user with deno
|
||||
if which::which("node-gyp").is_err() {
|
||||
log::warn!("{}: node-gyp was used in a script, but was not listed as a dependency. Either add it as a dependency or install it globally (e.g. `npm install -g node-gyp`)", crate::colors::yellow("warning"));
|
||||
}
|
||||
ExecutableCommand::new(
|
||||
"node-gyp".to_string(),
|
||||
"node-gyp".to_string().into(),
|
||||
)
|
||||
.execute(context)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NpxCommand;
|
||||
|
||||
impl ShellCommand for NpxCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
if let Some(first_arg) = context.args.first().cloned() {
|
||||
if let Some(command) = context.state.resolve_custom_command(&first_arg) {
|
||||
let context = ShellCommandContext {
|
||||
args: context.args.iter().skip(1).cloned().collect::<Vec<_>>(),
|
||||
..context
|
||||
};
|
||||
command.execute(context)
|
||||
} else {
|
||||
// can't find the command, so fallback to running the real npx command
|
||||
let npx_path = match context.state.resolve_command_path("npx") {
|
||||
Ok(npx) => npx,
|
||||
Err(err) => {
|
||||
let _ = context.stderr.write_line(&format!("{}", err));
|
||||
return Box::pin(futures::future::ready(
|
||||
ExecuteResult::from_exit_code(err.exit_code()),
|
||||
));
|
||||
}
|
||||
};
|
||||
ExecutableCommand::new("npx".to_string(), npx_path).execute(context)
|
||||
}
|
||||
} else {
|
||||
let _ = context.stderr.write_line("npx: missing command");
|
||||
Box::pin(futures::future::ready(ExecuteResult::from_exit_code(1)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct NpmPackageBinCommand {
|
||||
name: String,
|
||||
npm_package: PackageNv,
|
||||
}
|
||||
|
||||
impl ShellCommand for NpmPackageBinCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
let mut args = vec![
|
||||
"run".to_string(),
|
||||
"-A".to_string(),
|
||||
if self.npm_package.name == self.name {
|
||||
format!("npm:{}", self.npm_package)
|
||||
} else {
|
||||
format!("npm:{}/{}", self.npm_package, self.name)
|
||||
},
|
||||
];
|
||||
|
||||
args.extend(context.args);
|
||||
let executable_command = deno_task_shell::ExecutableCommand::new(
|
||||
"deno".to_string(),
|
||||
std::env::current_exe().unwrap(),
|
||||
);
|
||||
executable_command.execute(ShellCommandContext { args, ..context })
|
||||
}
|
||||
}
|
||||
|
||||
/// Runs a module in the node_modules folder.
|
||||
#[derive(Clone)]
|
||||
pub struct NodeModulesFileRunCommand {
|
||||
pub command_name: String,
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
impl ShellCommand for NodeModulesFileRunCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
let mut args = vec![
|
||||
"run".to_string(),
|
||||
"--ext=js".to_string(),
|
||||
"-A".to_string(),
|
||||
self.path.to_string_lossy().to_string(),
|
||||
];
|
||||
args.extend(context.args);
|
||||
let executable_command = deno_task_shell::ExecutableCommand::new(
|
||||
"deno".to_string(),
|
||||
std::env::current_exe().unwrap(),
|
||||
);
|
||||
// set this environment variable so that the launched process knows the npm command name
|
||||
context
|
||||
.state
|
||||
.apply_env_var("DENO_INTERNAL_NPM_CMD_NAME", &self.command_name);
|
||||
executable_command.execute(ShellCommandContext { args, ..context })
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_custom_commands(
|
||||
npm_resolver: &dyn CliNpmResolver,
|
||||
node_resolver: &NodeResolver,
|
||||
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
|
||||
let mut commands = match npm_resolver.as_inner() {
|
||||
InnerCliNpmResolverRef::Byonm(npm_resolver) => {
|
||||
let node_modules_dir = npm_resolver.root_node_modules_path().unwrap();
|
||||
resolve_npm_commands_from_bin_dir(node_modules_dir)
|
||||
}
|
||||
InnerCliNpmResolverRef::Managed(npm_resolver) => {
|
||||
resolve_managed_npm_commands(npm_resolver, node_resolver)?
|
||||
}
|
||||
};
|
||||
commands.insert("npm".to_string(), Rc::new(NpmCommand));
|
||||
Ok(commands)
|
||||
}
|
||||
|
||||
pub fn resolve_npm_commands_from_bin_dir(
|
||||
node_modules_dir: &Path,
|
||||
) -> HashMap<String, Rc<dyn ShellCommand>> {
|
||||
let mut result = HashMap::<String, Rc<dyn ShellCommand>>::new();
|
||||
let bin_dir = node_modules_dir.join(".bin");
|
||||
log::debug!("Resolving commands in '{}'.", bin_dir.display());
|
||||
match std::fs::read_dir(&bin_dir) {
|
||||
Ok(entries) => {
|
||||
for entry in entries {
|
||||
let Ok(entry) = entry else {
|
||||
continue;
|
||||
};
|
||||
if let Some(command) = resolve_bin_dir_entry_command(entry) {
|
||||
result.insert(command.command_name.clone(), Rc::new(command));
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
log::debug!("Failed read_dir for '{}': {:#}", bin_dir.display(), err);
|
||||
}
|
||||
}
|
||||
result
|
||||
}

fn resolve_bin_dir_entry_command(
  entry: std::fs::DirEntry,
) -> Option<NodeModulesFileRunCommand> {
  if entry.path().extension().is_some() {
    return None; // only look at files without extensions (even on Windows)
  }
  let file_type = entry.file_type().ok()?;
  let path = if file_type.is_file() {
    entry.path()
  } else if file_type.is_symlink() {
    entry.path().canonicalize().ok()?
  } else {
    return None;
  };
  let text = std::fs::read_to_string(&path).ok()?;
  let command_name = entry.file_name().to_string_lossy().to_string();
  if let Some(path) = resolve_execution_path_from_npx_shim(path, &text) {
    log::debug!(
      "Resolved npx command '{}' to '{}'.",
      command_name,
      path.display()
    );
    Some(NodeModulesFileRunCommand { command_name, path })
  } else {
    log::debug!("Failed resolving npx command '{}'.", command_name);
    None
  }
}

/// This is not ideal, but it works ok because it allows us to bypass
/// the shebang and execute the script directly with Deno.
fn resolve_execution_path_from_npx_shim(
  file_path: PathBuf,
  text: &str,
) -> Option<PathBuf> {
  static SCRIPT_PATH_RE: Lazy<Regex> =
    lazy_regex::lazy_regex!(r#""\$basedir\/([^"]+)" "\$@""#);

  if text.starts_with("#!/usr/bin/env node") {
    // launch this file itself because it's a JS file
    Some(file_path)
  } else {
    // Search for...
    // > "$basedir/../next/dist/bin/next" "$@"
    // ...which is what it will look like on Windows
    SCRIPT_PATH_RE
      .captures(text)
      .and_then(|c| c.get(1))
      .map(|relative_path| {
        file_path.parent().unwrap().join(relative_path.as_str())
      })
  }
}
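
// Worked example for the regex branch above (values are illustrative and
// consistent with the unit tests below): given a shim line
//   "$basedir/../example/bin/example" "$@"
// the regex captures `../example/bin/example`, which is joined onto the
// shim's parent directory, e.g.
//   /node_modules/.bin/../example/bin/example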

fn resolve_managed_npm_commands(
  npm_resolver: &ManagedCliNpmResolver,
  node_resolver: &NodeResolver,
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
  let mut result = HashMap::new();
  let snapshot = npm_resolver.snapshot();
  for id in snapshot.top_level_packages() {
    let package_folder = npm_resolver.resolve_pkg_folder_from_pkg_id(id)?;
    let bin_commands =
      node_resolver.resolve_binary_commands(&package_folder)?;
    for bin_command in bin_commands {
      result.insert(
        bin_command.to_string(),
        Rc::new(NpmPackageBinCommand {
          name: bin_command,
          npm_package: id.nv.clone(),
        }) as Rc<dyn ShellCommand>,
      );
    }
  }
  if !result.contains_key("npx") {
    result.insert("npx".to_string(), Rc::new(NpxCommand));
  }
  Ok(result)
}

#[cfg(test)]
mod test {
  use super::*;

  #[test]
  fn test_prepend_to_path() {
    let mut env_vars = HashMap::new();

    prepend_to_path(&mut env_vars, "/example".to_string());
    assert_eq!(
      env_vars,
      HashMap::from([("PATH".to_string(), "/example".to_string())])
    );

    prepend_to_path(&mut env_vars, "/example2".to_string());
    let separator = if cfg!(windows) { ";" } else { ":" };
    assert_eq!(
      env_vars,
      HashMap::from([(
        "PATH".to_string(),
        format!("/example2{}/example", separator)
      )])
    );

    env_vars.get_mut("PATH").unwrap().clear();
    prepend_to_path(&mut env_vars, "/example".to_string());
    assert_eq!(
      env_vars,
      HashMap::from([("PATH".to_string(), "/example".to_string())])
    );
  }
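
  // The assertions above pin the behavior down. A minimal sketch of a
  // function satisfying them -- an assumption for illustration, not
  // necessarily the implementation that lives in this file:
  fn prepend_to_path_sketch(
    env_vars: &mut HashMap<String, String>,
    path: String,
  ) {
    use std::collections::hash_map::Entry;
    let separator = if cfg!(windows) { ";" } else { ":" };
    match env_vars.entry("PATH".to_string()) {
      Entry::Occupied(mut entry) => {
        let current = entry.get_mut();
        if current.is_empty() {
          // an empty PATH is replaced outright, without a leading separator
          *current = path;
        } else {
          *current = format!("{path}{separator}{current}");
        }
      }
      Entry::Vacant(entry) => {
        entry.insert(path);
      }
    }
  }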

  #[test]
  fn test_resolve_execution_path_from_npx_shim() {
    // example shim on unix
    let unix_shim = r#"#!/usr/bin/env node
"use strict";
console.log('Hi!');
"#;
    let path = PathBuf::from("/node_modules/.bin/example");
    assert_eq!(
      resolve_execution_path_from_npx_shim(path.clone(), unix_shim).unwrap(),
      path
    );
    // example shim on windows
    let windows_shim = r#"#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../example/bin/example" "$@"
else
  exec node "$basedir/../example/bin/example" "$@"
fi"#;
    assert_eq!(
      resolve_execution_path_from_npx_shim(path.clone(), windows_shim).unwrap(),
      path.parent().unwrap().join("../example/bin/example")
    );
  }
}

@@ -1,24 +1,22 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::BenchFlags;
-use crate::args::CliOptions;
use crate::args::Flags;
use crate::colors;
use crate::display::write_json_to_stdout;
use crate::factory::CliFactory;
-use crate::factory::CliFactoryBuilder;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops;
use crate::tools::test::format_test_error;
use crate::tools::test::TestFilter;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
-use crate::util::fs::WalkEntry;
use crate::util::path::is_script_ext;
use crate::util::path::matches_pattern_or_exact_path;
use crate::version::get_user_agent;
use crate::worker::CliMainWorkerFactory;

+use deno_config::glob::WalkEntry;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::JsError;

@@ -403,24 +401,34 @@ fn has_supported_bench_path_name(path: &Path) -> bool {
}

pub async fn run_benchmarks(
-  flags: Flags,
+  flags: Arc<Flags>,
  bench_flags: BenchFlags,
) -> Result<(), AnyError> {
-  let cli_options = CliOptions::from_flags(flags)?;
-  let bench_options = cli_options.resolve_bench_options(bench_flags)?;
-  let factory = CliFactory::from_cli_options(Arc::new(cli_options));
-  let cli_options = factory.cli_options();
+  let factory = CliFactory::from_flags(flags);
+  let cli_options = factory.cli_options()?;
+  let workspace_bench_options =
+    cli_options.resolve_workspace_bench_options(&bench_flags);
  // Various bench files should not share the same permissions in terms of
  // `PermissionsContainer` - otherwise granting/revoking permissions in one
  // file would have impact on other files, which is undesirable.
  let permissions =
    Permissions::from_options(&cli_options.permissions_options()?)?;

-  let specifiers = collect_specifiers(
-    bench_options.files,
-    cli_options.vendor_dir_path().map(ToOwned::to_owned),
-    is_supported_bench_path,
-  )?;
+  let members_with_bench_options =
+    cli_options.resolve_bench_options_for_members(&bench_flags)?;
+  let specifiers = members_with_bench_options
+    .iter()
+    .map(|(_, bench_options)| {
+      collect_specifiers(
+        bench_options.files.clone(),
+        cli_options.vendor_dir_path().map(ToOwned::to_owned),
+        is_supported_bench_path,
+      )
+    })
+    .collect::<Result<Vec<_>, _>>()?
+    .into_iter()
+    .flatten()
+    .collect::<Vec<_>>();

  if specifiers.is_empty() {
    return Err(generic_error("No bench modules found"));
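
// The collect-then-flatten idiom introduced above, in isolation: run a
// fallible producer per workspace member, stop at the first error, then
// merge the successes. A generic sketch for illustration -- this helper does
// not exist in the codebase.
fn flatten_results<T, E>(parts: Vec<Result<Vec<T>, E>>) -> Result<Vec<T>, E> {
  Ok(
    parts
      .into_iter()
      .collect::<Result<Vec<Vec<T>>, E>>()?
      .into_iter()
      .flatten()
      .collect(),
  )
}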

@@ -429,7 +437,7 @@ pub async fn run_benchmarks(
  let main_graph_container = factory.main_module_graph_container().await?;
  main_graph_container.check_specifiers(&specifiers).await?;

-  if bench_options.no_run {
+  if workspace_bench_options.no_run {
    return Ok(());
  }

@@ -441,8 +449,8 @@ pub async fn run_benchmarks(
    &permissions,
    specifiers,
    BenchSpecifierOptions {
-      filter: TestFilter::from_flag(&bench_options.filter),
-      json: bench_options.json,
+      filter: TestFilter::from_flag(&workspace_bench_options.filter),
+      json: workspace_bench_options.json,
      log_level,
    },
  )

@@ -453,7 +461,7 @@ pub async fn run_benchmarks(

// TODO(bartlomieju): heavy duplication of code with `cli/tools/test.rs`
pub async fn run_benchmarks_with_watch(
-  flags: Flags,
+  flags: Arc<Flags>,
  bench_flags: BenchFlags,
) -> Result<(), AnyError> {
  file_watcher::watch_func(

@@ -469,27 +477,45 @@ pub async fn run_benchmarks_with_watch(
    move |flags, watcher_communicator, changed_paths| {
      let bench_flags = bench_flags.clone();
      Ok(async move {
-        let factory = CliFactoryBuilder::new()
-          .build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
-        let cli_options = factory.cli_options();
-        let bench_options = cli_options.resolve_bench_options(bench_flags)?;
+        let factory = CliFactory::from_flags_for_watcher(
+          flags,
+          watcher_communicator.clone(),
+        );
+        let cli_options = factory.cli_options()?;
+        let workspace_bench_options =
+          cli_options.resolve_workspace_bench_options(&bench_flags);

-        let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
-        if let Some(set) = &bench_options.files.include {
-          let watch_paths = set.base_paths();
-          if !watch_paths.is_empty() {
-            let _ = watcher_communicator.watch_paths(watch_paths);
-          }
-        }

        let graph_kind = cli_options.type_check_mode().as_graph_kind();
        let module_graph_creator = factory.module_graph_creator().await?;

-        let bench_modules = collect_specifiers(
-          bench_options.files.clone(),
-          cli_options.vendor_dir_path().map(ToOwned::to_owned),
-          is_supported_bench_path,
-        )?;
+        let members_with_bench_options =
+          cli_options.resolve_bench_options_for_members(&bench_flags)?;
+        let watch_paths = members_with_bench_options
+          .iter()
+          .filter_map(|(_, bench_options)| {
+            bench_options
+              .files
+              .include
+              .as_ref()
+              .map(|set| set.base_paths())
+          })
+          .flatten()
+          .collect::<Vec<_>>();
+        let _ = watcher_communicator.watch_paths(watch_paths);
+        let collected_bench_modules = members_with_bench_options
+          .iter()
+          .map(|(_, bench_options)| {
+            collect_specifiers(
+              bench_options.files.clone(),
+              cli_options.vendor_dir_path().map(ToOwned::to_owned),
+              is_supported_bench_path,
+            )
+          })
+          .collect::<Result<Vec<_>, _>>()?
+          .into_iter()
+          .flatten()
+          .collect::<Vec<_>>();

        // Various bench files should not share the same permissions in terms of
        // `PermissionsContainer` - otherwise granting/revoking permissions in one

@@ -498,7 +524,7 @@ pub async fn run_benchmarks_with_watch(
          Permissions::from_options(&cli_options.permissions_options()?)?;

        let graph = module_graph_creator
-          .create_graph(graph_kind, bench_modules)
+          .create_graph(graph_kind, collected_bench_modules.clone())
          .await?;
        module_graph_creator.graph_valid(&graph)?;
        let bench_modules = &graph.roots;

@@ -524,16 +550,10 @@ pub async fn run_benchmarks_with_watch(
        let worker_factory =
          Arc::new(factory.create_cli_main_worker_factory().await?);

-        // todo(dsherret): why are we collecting specifiers twice in a row?
-        // Seems like a perf bug.
-        let specifiers = collect_specifiers(
-          bench_options.files,
-          cli_options.vendor_dir_path().map(ToOwned::to_owned),
-          is_supported_bench_path,
-        )?
-        .into_iter()
-        .filter(|specifier| bench_modules_to_reload.contains(specifier))
-        .collect::<Vec<ModuleSpecifier>>();
+        let specifiers = collected_bench_modules
+          .into_iter()
+          .filter(|specifier| bench_modules_to_reload.contains(specifier))
+          .collect::<Vec<ModuleSpecifier>>();

        factory
          .main_module_graph_container()

@@ -541,7 +561,7 @@ pub async fn run_benchmarks_with_watch(
          .check_specifiers(&specifiers)
          .await?;

-        if bench_options.no_run {
+        if workspace_bench_options.no_run {
          return Ok(());
        }

@@ -551,8 +571,8 @@ pub async fn run_benchmarks_with_watch(
          &permissions,
          specifiers,
          BenchSpecifierOptions {
-            filter: TestFilter::from_flag(&bench_options.filter),
-            json: bench_options.json,
+            filter: TestFilter::from_flag(&workspace_bench_options.filter),
+            json: workspace_bench_options.json,
            log_level,
          },
        )

@@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::path::PathBuf;
+use std::sync::Arc;

use deno_core::error::AnyError;
use deno_graph::Module;

@@ -11,13 +12,12 @@ use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::TsConfigType;
use crate::factory::CliFactory;
-use crate::factory::CliFactoryBuilder;
use crate::graph_util::error_for_any_npm_specifier;
use crate::util;
use crate::util::display;

pub async fn bundle(
-  flags: Flags,
+  flags: Arc<Flags>,
  bundle_flags: BundleFlags,
) -> Result<(), AnyError> {
  log::info!(

@@ -35,11 +35,11 @@ pub async fn bundle(
    move |flags, watcher_communicator, _changed_paths| {
      let bundle_flags = bundle_flags.clone();
      Ok(async move {
-        let factory = CliFactoryBuilder::new().build_from_flags_for_watcher(
+        let factory = CliFactory::from_flags_for_watcher(
          flags,
          watcher_communicator.clone(),
-        )?;
-        let cli_options = factory.cli_options();
+        );
+        let cli_options = factory.cli_options()?;
        let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
        bundle_action(factory, &bundle_flags).await?;

@@ -49,7 +49,7 @@ pub async fn bundle(
    )
    .await?;
  } else {
-    let factory = CliFactory::from_flags(flags)?;
+    let factory = CliFactory::from_flags(flags);
    bundle_action(factory, &bundle_flags).await?;
  }

@@ -60,11 +60,11 @@ async fn bundle_action(
  factory: CliFactory,
  bundle_flags: &BundleFlags,
) -> Result<(), AnyError> {
-  let cli_options = factory.cli_options();
+  let cli_options = factory.cli_options()?;
  let module_specifier = cli_options.resolve_main_module()?;
  log::debug!(">>>>> bundle START");
  let module_graph_creator = factory.module_graph_creator().await?;
-  let cli_options = factory.cli_options();
+  let cli_options = factory.cli_options()?;

  let graph = module_graph_creator
    .create_graph_and_maybe_check(vec![module_specifier.clone()])

@@ -183,7 +183,7 @@ impl TypeChecker {
      self.module_graph_builder.build_fast_check_graph(
        &mut graph,
        BuildFastCheckGraphOptions {
-          workspace_fast_check: false,
+          workspace_fast_check: deno_graph::WorkspaceFastCheckOption::Disabled,
        },
      )?;
    }
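
// The change above replaces a bare `false` with a named variant, so the call
// site documents itself. A generic sketch of the pattern (hypothetical types,
// not deno_graph's actual definitions):
enum WorkspaceFastCheck {
  Disabled,
  Enabled,
}
// Options { workspace_fast_check: WorkspaceFastCheck::Disabled } // vs `false`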

@@ -5,6 +5,7 @@ use crate::args::Flags;
use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::standalone::is_standalone_binary;
+use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;

@@ -12,6 +13,8 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_terminal::colors;
+use eszip::EszipRelativeFileBaseUrl;
+use rand::Rng;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

@@ -19,11 +22,11 @@ use std::sync::Arc;
use super::installer::infer_name_from_url;

pub async fn compile(
-  flags: Flags,
+  flags: Arc<Flags>,
  compile_flags: CompileFlags,
) -> Result<(), AnyError> {
-  let factory = CliFactory::from_flags(flags)?;
-  let cli_options = factory.cli_options();
+  let factory = CliFactory::from_flags(flags);
+  let cli_options = factory.cli_options()?;
  let module_graph_creator = factory.module_graph_creator().await?;
  let parsed_source_cache = factory.parsed_source_cache();
  let binary_writer = factory.create_compile_binary_writer().await?;

@@ -74,19 +77,32 @@ pub async fn compile(
    graph
  };

-  let ts_config_for_emit =
-    cli_options.resolve_ts_config_for_emit(deno_config::TsConfigType::Emit)?;
+  let ts_config_for_emit = cli_options
+    .resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
  let (transpile_options, emit_options) =
    crate::args::ts_config_to_transpile_and_emit_options(
      ts_config_for_emit.ts_config,
    )?;
  let parser = parsed_source_cache.as_capturing_parser();
-  let eszip = eszip::EszipV2::from_graph(
+  let root_dir_url = resolve_root_dir_from_specifiers(
+    cli_options.workspace().root_dir(),
+    graph.specifiers().map(|(s, _)| s).chain(
+      cli_options
+        .node_modules_dir_path()
+        .and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
+        .iter(),
+    ),
+  );
+  log::debug!("Binary root dir: {}", root_dir_url);
+  let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
+  let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
    graph,
-    &parser,
+    parser,
    transpile_options,
    emit_options,
-  )?;
+    // make all the modules relative to the root folder
+    relative_file_base: Some(root_dir_url),
+  })?;

  log::info!(
    "{} {} to {}",
@@ -96,31 +112,62 @@ pub async fn compile(
  );
  validate_output_path(&output_path)?;

-  let mut file = std::fs::File::create(&output_path)
-    .with_context(|| format!("Opening file '{}'", output_path.display()))?;
+  let mut temp_filename = output_path.file_name().unwrap().to_owned();
+  temp_filename.push(format!(
+    ".tmp-{}",
+    faster_hex::hex_encode(
+      &rand::thread_rng().gen::<[u8; 8]>(),
+      &mut [0u8; 16]
+    )
+    .unwrap()
+  ));
+  let temp_path = output_path.with_file_name(temp_filename);
+
+  let mut file = std::fs::File::create(&temp_path).with_context(|| {
+    format!("Opening temporary file '{}'", temp_path.display())
+  })?;
  let write_result = binary_writer
    .write_bin(
      &mut file,
      eszip,
+      root_dir_url,
      &module_specifier,
      &compile_flags,
      cli_options,
    )
    .await
-    .with_context(|| format!("Writing {}", output_path.display()));
+    .with_context(|| {
+      format!("Writing temporary file '{}'", temp_path.display())
+    });
  drop(file);
-  if let Err(err) = write_result {
-    // errored, so attempt to remove the output path
-    let _ = std::fs::remove_file(output_path);
-    return Err(err);
-  }

-  // set it as executable
  #[cfg(unix)]
-  {
+  let write_result = write_result.and_then(|_| {
    use std::os::unix::fs::PermissionsExt;
-    let perms = std::fs::Permissions::from_mode(0o777);
-    std::fs::set_permissions(output_path, perms)?;
+    let perms = std::fs::Permissions::from_mode(0o755);
+    std::fs::set_permissions(&temp_path, perms).with_context(|| {
+      format!(
+        "Setting permissions on temporary file '{}'",
+        temp_path.display()
+      )
+    })
+  });
+
+  let write_result = write_result.and_then(|_| {
+    std::fs::rename(&temp_path, &output_path).with_context(|| {
+      format!(
+        "Renaming temporary file '{}' to '{}'",
+        temp_path.display(),
+        output_path.display()
+      )
+    })
+  });
+
+  if let Err(err) = write_result {
+    // errored, so attempt to remove the temporary file
+    let _ = std::fs::remove_file(temp_path);
+    return Err(err);
  }

  Ok(())

@@ -236,6 +283,68 @@ fn get_os_specific_filepath(
  }
}

+fn resolve_root_dir_from_specifiers<'a>(
+  starting_dir: &ModuleSpecifier,
+  specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
+) -> ModuleSpecifier {
+  fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
+    let min_length = a.len().min(b.len());
+
+    let mut last_slash = 0;
+    for i in 0..min_length {
+      if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
+        last_slash = i;
+      } else if a.as_bytes()[i] != b.as_bytes()[i] {
+        break;
+      }
+    }
+
+    // Return the common root path up to the last common slash.
+    // This returns a slice of the original string 'a', up to and including the last matching '/'.
+    let common = &a[..=last_slash];
+    if cfg!(windows) && common == "file:///" {
+      a
+    } else {
+      common
+    }
+  }
+
+  fn is_file_system_root(url: &str) -> bool {
+    let Some(path) = url.strip_prefix("file:///") else {
+      return false;
+    };
+    if cfg!(windows) {
+      let Some((_drive, path)) = path.split_once('/') else {
+        return true;
+      };
+      path.is_empty()
+    } else {
+      path.is_empty()
+    }
+  }
+
+  let mut found_dir = starting_dir.as_str();
+  if !is_file_system_root(found_dir) {
+    for specifier in specifiers {
+      if specifier.scheme() == "file" {
+        found_dir = select_common_root(found_dir, specifier.as_str());
+      }
+    }
+  }
+  let found_dir = if is_file_system_root(found_dir) {
+    found_dir
+  } else {
+    // include the parent dir name because it helps create some context
+    found_dir
+      .strip_suffix('/')
+      .unwrap_or(found_dir)
+      .rfind('/')
+      .map(|i| &found_dir[..i + 1])
+      .unwrap_or(found_dir)
+  };
+  ModuleSpecifier::parse(found_dir).unwrap()
+}
+
#[cfg(test)]
mod test {
  pub use super::*;

@@ -310,4 +419,38 @@ mod test {
    run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
    run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
  }
+
+  #[test]
+  fn test_resolve_root_dir_from_specifiers() {
+    fn resolve(start: &str, specifiers: &[&str]) -> String {
+      let specifiers = specifiers
+        .iter()
+        .map(|s| ModuleSpecifier::parse(s).unwrap())
+        .collect::<Vec<_>>();
+      resolve_root_dir_from_specifiers(
+        &ModuleSpecifier::parse(start).unwrap(),
+        specifiers.iter(),
+      )
+      .to_string()
+    }
+
+    assert_eq!(resolve("file:///a/b/c", &["file:///a/b/c/d"]), "file:///a/");
+    assert_eq!(
+      resolve("file:///a/b/c/", &["file:///a/b/c/d"]),
+      "file:///a/b/"
+    );
+    assert_eq!(
+      resolve("file:///a/b/c/", &["file:///a/b/c/d", "file:///a/b/c/e"]),
+      "file:///a/b/"
+    );
+    assert_eq!(resolve("file:///", &["file:///a/b/c/d"]), "file:///");
+    if cfg!(windows) {
+      assert_eq!(resolve("file:///c:/", &["file:///c:/test"]), "file:///c:/");
+      // this will ignore the other one because it's on a separate drive
+      assert_eq!(
+        resolve("file:///c:/a/b/c/", &["file:///v:/a/b/c/d"]),
+        "file:///c:/a/b/"
+      );
+    }
+  }
}
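
// A short trace of select_common_root above, on hypothetical inputs:
// select_common_root("file:///a/b/c", "file:///a/b/x") walks matching bytes,
// recording the shared '/' positions (5, 6, 7, 9, 11), stops at the first
// mismatch ('c' vs 'x'), and returns the slice up to and including the last
// shared slash: "file:///a/b/".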

@@ -9,11 +9,11 @@ use crate::factory::CliFactory;
use crate::npm::CliNpmResolver;
use crate::tools::fmt::format_json;
use crate::tools::test::is_supported_test_path;
-use crate::util::fs::FileCollector;
use crate::util::text_encoding::source_map_from_code;

use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
+use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPattern;
use deno_config::glob::PathOrPatternSet;

@@ -32,6 +32,7 @@ use std::io::BufWriter;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
+use std::sync::Arc;
use text_lines::TextLines;
use uuid::Uuid;

@@ -408,7 +409,7 @@ fn collect_coverages(
    .ignore_git_folder()
    .ignore_node_modules()
    .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
-    .collect_file_patterns(file_patterns)?;
+    .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, file_patterns)?;

  let coverage_patterns = FilePatterns {
    base: initial_cwd.to_path_buf(),

@@ -473,17 +474,17 @@ fn filter_coverages(
}

pub async fn cover_files(
-  flags: Flags,
+  flags: Arc<Flags>,
  coverage_flags: CoverageFlags,
) -> Result<(), AnyError> {
  if coverage_flags.files.include.is_empty() {
    return Err(generic_error("No matching coverage profiles found"));
  }

-  let factory = CliFactory::from_flags(flags)?;
+  let factory = CliFactory::from_flags(flags);
+  let cli_options = factory.cli_options()?;
  let npm_resolver = factory.npm_resolver().await?;
  let file_fetcher = factory.file_fetcher()?;
-  let cli_options = factory.cli_options();
  let emitter = factory.emitter()?;

  assert!(!coverage_flags.files.include.is_empty());
214 cli/tools/doc.rs

@@ -29,6 +29,7 @@ use doc::DocDiagnostic;
use indexmap::IndexMap;
use std::collections::BTreeMap;
use std::rc::Rc;
+use std::sync::Arc;

async fn generate_doc_nodes_for_builtin_types(
  doc_flags: DocFlags,

@@ -83,9 +84,12 @@
  Ok(IndexMap::from([(source_file_specifier, nodes)]))
}

-pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
-  let factory = CliFactory::from_flags(flags)?;
-  let cli_options = factory.cli_options();
+pub async fn doc(
+  flags: Arc<Flags>,
+  doc_flags: DocFlags,
+) -> Result<(), AnyError> {
+  let factory = CliFactory::from_flags(flags);
+  let cli_options = factory.cli_options()?;
  let module_info_cache = factory.module_info_cache()?;
  let parsed_source_cache = factory.parsed_source_cache();
  let capturing_parser = parsed_source_cache.as_capturing_parser();

@@ -102,7 +106,7 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
    }
    DocSourceFileFlag::Paths(ref source_files) => {
      let module_graph_creator = factory.module_graph_creator().await?;
-      let maybe_lockfile = factory.maybe_lockfile();
+      let maybe_lockfile = cli_options.maybe_lockfile();

      let module_specifiers = collect_specifiers(
        FilePatterns {

@@ -174,11 +178,12 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
          .into_iter()
          .map(|node| deno_doc::html::DocNodeWithContext {
            origin: short_path.clone(),
-            ns_qualifiers: Rc::new(vec![]),
+            ns_qualifiers: Rc::new([]),
            kind_with_drilldown:
-              deno_doc::html::DocNodeKindWithDrilldown::Other(node.kind),
-            inner: std::sync::Arc::new(node),
+              deno_doc::html::DocNodeKindWithDrilldown::Other(node.kind()),
+            inner: Rc::new(node),
            drilldown_parent_kind: None,
+            parent: None,
          })
          .collect::<Vec<_>>(),
      )

@@ -186,7 +191,38 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
      Default::default()
    };

-    generate_docs_directory(doc_nodes_by_url, html_options, deno_ns)
+    let rewrite_map =
+      if let Some(config_file) = cli_options.start_dir.maybe_deno_json() {
+        let config = config_file.to_exports_config()?;
+
+        let rewrite_map = config
+          .clone()
+          .into_map()
+          .into_keys()
+          .map(|key| {
+            Ok((
+              config.get_resolved(&key)?.unwrap(),
+              key
+                .strip_prefix('.')
+                .unwrap_or(&key)
+                .strip_prefix('/')
+                .unwrap_or(&key)
+                .to_owned(),
+            ))
+          })
+          .collect::<Result<IndexMap<_, _>, AnyError>>()?;
+
+        Some(rewrite_map)
+      } else {
+        None
+      };
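
// Worked example of the key normalization above (hypothetical exports
// entries): "./foo" becomes the rewrite key "foo" -- the '.' prefix and then
// the '/' prefix are stripped in turn -- while a bare "." entry falls
// through both `unwrap_or` calls and stays ".".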

+    generate_docs_directory(
+      doc_nodes_by_url,
+      html_options,
+      deno_ns,
+      rewrite_map,
+    )
  } else {
    let modules_len = doc_nodes_by_url.len();
    let doc_nodes =

@@ -210,6 +246,7 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {

struct DocResolver {
  deno_ns: std::collections::HashSet<Vec<String>>,
+  strip_trailing_html: bool,
}

impl deno_doc::html::HrefResolver for DocResolver {

@@ -218,13 +255,23 @@ impl deno_doc::html::HrefResolver for DocResolver {
    current: UrlResolveKind,
    target: UrlResolveKind,
  ) -> String {
-    deno_doc::html::href_path_resolve(current, target)
+    let path = deno_doc::html::href_path_resolve(current, target);
+    if self.strip_trailing_html {
+      if let Some(path) = path
+        .strip_suffix("index.html")
+        .or_else(|| path.strip_suffix(".html"))
+      {
+        return path.to_owned();
+      }
+    }
+
+    path
  }
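
  // Examples of the suffix stripping above, with strip_trailing_html set and
  // hypothetical resolved paths: "./interfaces/index.html" becomes
  // "./interfaces/", and "./interfaces/Foo.html" becomes "./interfaces/Foo".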

  fn resolve_global_symbol(&self, symbol: &[String]) -> Option<String> {
    if self.deno_ns.contains(symbol) {
      Some(format!(
-        "https://deno.land/api@{}?s={}",
+        "https://deno.land/api@v{}?s={}",
        env!("CARGO_PKG_VERSION"),
        symbol.join(".")
      ))

@@ -253,7 +300,93 @@ impl deno_doc::html::HrefResolver for DocResolver {
  }

  fn resolve_source(&self, location: &deno_doc::Location) -> Option<String> {
-    Some(location.filename.clone())
+    Some(location.filename.to_string())
  }
}

+struct DenoDocResolver(bool);
+
+impl deno_doc::html::HrefResolver for DenoDocResolver {
+  fn resolve_path(
+    &self,
+    current: UrlResolveKind,
+    target: UrlResolveKind,
+  ) -> String {
+    let path = deno_doc::html::href_path_resolve(current, target);
+    if self.0 {
+      if let Some(path) = path
+        .strip_suffix("index.html")
+        .or_else(|| path.strip_suffix(".html"))
+      {
+        return path.to_owned();
+      }
+    }
+
+    path
+  }
+
+  fn resolve_global_symbol(&self, _symbol: &[String]) -> Option<String> {
+    None
+  }
+
+  fn resolve_import_href(
+    &self,
+    _symbol: &[String],
+    _src: &str,
+  ) -> Option<String> {
+    None
+  }
+
+  fn resolve_usage(&self, _current_resolve: UrlResolveKind) -> Option<String> {
+    None
+  }
+
+  fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> {
+    None
+  }
+}
+
+struct NodeDocResolver(bool);
+
+impl deno_doc::html::HrefResolver for NodeDocResolver {
+  fn resolve_path(
+    &self,
+    current: UrlResolveKind,
+    target: UrlResolveKind,
+  ) -> String {
+    let path = deno_doc::html::href_path_resolve(current, target);
+    if self.0 {
+      if let Some(path) = path
+        .strip_suffix("index.html")
+        .or_else(|| path.strip_suffix(".html"))
+      {
+        return path.to_owned();
+      }
+    }
+
+    path
+  }
+
+  fn resolve_global_symbol(&self, _symbol: &[String]) -> Option<String> {
+    None
+  }
+
+  fn resolve_import_href(
+    &self,
+    _symbol: &[String],
+    _src: &str,
+  ) -> Option<String> {
+    None
+  }
+
+  fn resolve_usage(&self, current_resolve: UrlResolveKind) -> Option<String> {
+    current_resolve
+      .get_file()
+      .map(|file| format!("node:{}", file.path))
+  }
+
+  fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> {
+    None
+  }
+}
+

@@ -261,17 +394,68 @@ fn generate_docs_directory(
  doc_nodes_by_url: IndexMap<ModuleSpecifier, Vec<doc::DocNode>>,
  html_options: &DocHtmlFlag,
  deno_ns: std::collections::HashSet<Vec<String>>,
+  rewrite_map: Option<IndexMap<ModuleSpecifier, String>>,
) -> Result<(), AnyError> {
  let cwd = std::env::current_dir().context("Failed to get CWD")?;
  let output_dir_resolved = cwd.join(&html_options.output);

+  let internal_env = std::env::var("DENO_INTERNAL_HTML_DOCS").ok();
+
+  let href_resolver: Rc<dyn deno_doc::html::HrefResolver> = if internal_env
+    .as_ref()
+    .is_some_and(|internal_html_docs| internal_html_docs == "node")
+  {
+    Rc::new(NodeDocResolver(html_options.strip_trailing_html))
+  } else if internal_env
+    .as_ref()
+    .is_some_and(|internal_html_docs| internal_html_docs == "deno")
+    || deno_ns.is_empty()
+  {
+    Rc::new(DenoDocResolver(html_options.strip_trailing_html))
+  } else {
+    Rc::new(DocResolver {
+      deno_ns,
+      strip_trailing_html: html_options.strip_trailing_html,
+    })
+  };
+
+  let category_docs =
+    if let Some(category_docs_path) = &html_options.category_docs_path {
+      let content = std::fs::read(category_docs_path)?;
+      Some(deno_core::serde_json::from_slice(&content)?)
+    } else {
+      None
+    };
+
+  let symbol_redirect_map = if let Some(symbol_redirect_map_path) =
+    &html_options.symbol_redirect_map_path
+  {
+    let content = std::fs::read(symbol_redirect_map_path)?;
+    Some(deno_core::serde_json::from_slice(&content)?)
+  } else {
+    None
+  };
+
+  let default_symbol_map = if let Some(default_symbol_map_path) =
+    &html_options.default_symbol_map_path
+  {
+    let content = std::fs::read(default_symbol_map_path)?;
+    Some(deno_core::serde_json::from_slice(&content)?)
+  } else {
+    None
+  };
+
  let options = deno_doc::html::GenerateOptions {
    package_name: html_options.name.clone(),
    main_entrypoint: None,
-    rewrite_map: None,
-    href_resolver: Rc::new(DocResolver { deno_ns }),
+    rewrite_map,
+    href_resolver,
    usage_composer: None,
    composable_output: false,
+    category_docs,
+    disable_search: internal_env.is_some(),
+    symbol_redirect_map,
+    default_symbol_map,
  };

  let files = deno_doc::html::generate(options, doc_nodes_by_url)

@@ -308,9 +492,9 @@ fn print_docs_to_stdout(
  doc_flags: DocFlags,
  mut doc_nodes: Vec<deno_doc::DocNode>,
) -> Result<(), AnyError> {
-  doc_nodes.retain(|doc_node| doc_node.kind != doc::DocNodeKind::Import);
+  doc_nodes.retain(|doc_node| doc_node.kind() != doc::DocNodeKind::Import);
  let details = if let Some(filter) = doc_flags.filter {
-    let nodes = doc::find_nodes_by_name_recursively(doc_nodes, filter.clone());
+    let nodes = doc::find_nodes_by_name_recursively(doc_nodes, &filter);
    if nodes.is_empty() {
      bail!("Node {} was not found!", filter);
    }
495 cli/tools/fmt.rs

@@ -13,14 +13,16 @@ use crate::args::FmtFlags;
use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap;
+use crate::cache::Caches;
use crate::colors;
use crate::factory::CliFactory;
use crate::util::diff::diff;
use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
-use crate::util::fs::FileCollector;
use crate::util::path::get_extension;
+use async_trait::async_trait;
use deno_ast::ParsedSource;
+use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;

@@ -47,11 +49,18 @@ use std::sync::Arc;
use crate::cache::IncrementalCache;

/// Format JavaScript/TypeScript files.
-pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
+pub async fn format(
+  flags: Arc<Flags>,
+  fmt_flags: FmtFlags,
+) -> Result<(), AnyError> {
  if fmt_flags.is_stdin() {
    let cli_options = CliOptions::from_flags(flags)?;
-    let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
+    let start_dir = &cli_options.start_dir;
+    let fmt_config = start_dir
+      .to_fmt_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
+    let fmt_options = FmtOptions::resolve(fmt_config, &fmt_flags);
    return format_stdin(
+      &fmt_flags,
      fmt_options,
      cli_options
        .ext_flag()

@@ -68,44 +77,44 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
    move |flags, watcher_communicator, changed_paths| {
      let fmt_flags = fmt_flags.clone();
      Ok(async move {
-        let factory = CliFactory::from_flags(flags)?;
-        let cli_options = factory.cli_options();
-        let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
-        let files = collect_fmt_files(cli_options, fmt_options.files.clone())
-          .and_then(|files| {
-            if files.is_empty() {
-              Err(generic_error("No target files found."))
-            } else {
-              Ok(files)
-            }
-          })?;
-        let _ = watcher_communicator.watch_paths(files.clone());
-        let refmt_files = if let Some(paths) = changed_paths {
-          if fmt_options.check {
-            // check all files on any changed (https://github.com/denoland/deno/issues/12446)
-            files
-              .iter()
-              .any(|path| {
-                canonicalize_path(path)
-                  .map(|path| paths.contains(&path))
-                  .unwrap_or(false)
-              })
-              .then_some(files)
-              .unwrap_or_else(|| [].to_vec())
-          } else {
-            files
-              .into_iter()
-              .filter(|path| {
-                canonicalize_path(path)
-                  .map(|path| paths.contains(&path))
-                  .unwrap_or(false)
-              })
-              .collect::<Vec<_>>()
-          }
-        } else {
-          files
-        };
-        format_files(factory, fmt_options, refmt_files).await?;
+        let factory = CliFactory::from_flags(flags);
+        let cli_options = factory.cli_options()?;
+        let caches = factory.caches()?;
+        let mut paths_with_options_batches =
+          resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
+
+        for paths_with_options in &mut paths_with_options_batches {
+          let _ = watcher_communicator
+            .watch_paths(paths_with_options.paths.clone());
+          let files = std::mem::take(&mut paths_with_options.paths);
+          paths_with_options.paths = if let Some(paths) = &changed_paths {
+            if fmt_flags.check {
+              // check all files on any changed (https://github.com/denoland/deno/issues/12446)
+              files
+                .iter()
+                .any(|path| {
+                  canonicalize_path(path)
+                    .map(|path| paths.contains(&path))
+                    .unwrap_or(false)
+                })
+                .then_some(files)
+                .unwrap_or_else(|| [].to_vec())
+            } else {
+              files
+                .into_iter()
+                .filter(|path| {
+                  canonicalize_path(path)
+                    .map(|path| paths.contains(&path))
+                    .unwrap_or(false)
+                })
+                .collect::<Vec<_>>()
+            }
+          } else {
+            files
+          };
+        }
+
+        format_files(caches, &fmt_flags, paths_with_options_batches).await?;

        Ok(())
      })
@@ -113,44 +122,78 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
    )
    .await?;
  } else {
-    let factory = CliFactory::from_flags(flags)?;
-    let cli_options = factory.cli_options();
-    let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
-    let files = collect_fmt_files(cli_options, fmt_options.files.clone())
-      .and_then(|files| {
-        if files.is_empty() {
-          Err(generic_error("No target files found."))
-        } else {
-          Ok(files)
-        }
-      })?;
-    format_files(factory, fmt_options, files).await?;
+    let factory = CliFactory::from_flags(flags);
+    let cli_options = factory.cli_options()?;
+    let caches = factory.caches()?;
+    let paths_with_options_batches =
+      resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
+    format_files(caches, &fmt_flags, paths_with_options_batches).await?;
  }

  Ok(())
}

-async fn format_files(
-  factory: CliFactory,
-  fmt_options: FmtOptions,
-  paths: Vec<PathBuf>,
-) -> Result<(), AnyError> {
-  let caches = factory.caches()?;
-  let check = fmt_options.check;
-  let incremental_cache = Arc::new(IncrementalCache::new(
-    caches.fmt_incremental_cache_db(),
-    &fmt_options.options,
-    &paths,
-  ));
-  if check {
-    check_source_files(paths, fmt_options.options, incremental_cache.clone())
-      .await?;
-  } else {
-    format_source_files(paths, fmt_options.options, incremental_cache.clone())
-      .await?;
-  }
-  incremental_cache.wait_completion().await;
-  Ok(())
-}
+struct PathsWithOptions {
+  base: PathBuf,
+  paths: Vec<PathBuf>,
+  options: FmtOptions,
+}
+
+fn resolve_paths_with_options_batches(
+  cli_options: &CliOptions,
+  fmt_flags: &FmtFlags,
+) -> Result<Vec<PathsWithOptions>, AnyError> {
+  let members_fmt_options =
+    cli_options.resolve_fmt_options_for_members(fmt_flags)?;
+  let mut paths_with_options_batches =
+    Vec::with_capacity(members_fmt_options.len());
+  for (_ctx, member_fmt_options) in members_fmt_options {
+    let files =
+      collect_fmt_files(cli_options, member_fmt_options.files.clone())?;
+    if !files.is_empty() {
+      paths_with_options_batches.push(PathsWithOptions {
+        base: member_fmt_options.files.base.clone(),
+        paths: files,
+        options: member_fmt_options,
+      });
+    }
+  }
+  if paths_with_options_batches.is_empty() {
+    return Err(generic_error("No target files found."));
+  }
+  Ok(paths_with_options_batches)
+}
+
+async fn format_files(
+  caches: &Arc<Caches>,
+  fmt_flags: &FmtFlags,
+  paths_with_options_batches: Vec<PathsWithOptions>,
+) -> Result<(), AnyError> {
+  let formatter: Box<dyn Formatter> = if fmt_flags.check {
+    Box::new(CheckFormatter::default())
+  } else {
+    Box::new(RealFormatter::default())
+  };
+  for paths_with_options in paths_with_options_batches {
+    log::debug!(
+      "Formatting {} file(s) in {}",
+      paths_with_options.paths.len(),
+      paths_with_options.base.display()
+    );
+    let fmt_options = paths_with_options.options;
+    let paths = paths_with_options.paths;
+    let incremental_cache = Arc::new(IncrementalCache::new(
+      caches.fmt_incremental_cache_db(),
+      &fmt_options.options,
+      &paths,
+    ));
+    formatter
+      .handle_files(paths, fmt_options.options, incremental_cache.clone())
+      .await?;
+    incremental_cache.wait_completion().await;
+  }
+
+  formatter.finish()
+}

fn collect_fmt_files(
@@ -161,7 +204,7 @@ fn collect_fmt_files(
    .ignore_git_folder()
    .ignore_node_modules()
    .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
-    .collect_file_patterns(files)
+    .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
}

/// Formats markdown (using <https://github.com/dprint/dprint-plugin-markdown>) and its code blocks

@@ -274,156 +317,190 @@ pub fn format_parsed_source(
  )
}

-async fn check_source_files(
-  paths: Vec<PathBuf>,
-  fmt_options: FmtOptionsConfig,
-  incremental_cache: Arc<IncrementalCache>,
-) -> Result<(), AnyError> {
-  let not_formatted_files_count = Arc::new(AtomicUsize::new(0));
-  let checked_files_count = Arc::new(AtomicUsize::new(0));
-
-  // prevent threads outputting at the same time
-  let output_lock = Arc::new(Mutex::new(0));
-
-  run_parallelized(paths, {
-    let not_formatted_files_count = not_formatted_files_count.clone();
-    let checked_files_count = checked_files_count.clone();
-    move |file_path| {
-      checked_files_count.fetch_add(1, Ordering::Relaxed);
-      let file_text = read_file_contents(&file_path)?.text;
-
-      // skip checking the file if we know it's formatted
-      if incremental_cache.is_file_same(&file_path, &file_text) {
-        return Ok(());
-      }
-
-      match format_file(&file_path, &file_text, &fmt_options) {
-        Ok(Some(formatted_text)) => {
-          not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
-          let _g = output_lock.lock();
-          let diff = diff(&file_text, &formatted_text);
-          info!("");
-          info!("{} {}:", colors::bold("from"), file_path.display());
-          info!("{}", diff);
-        }
-        Ok(None) => {
-          // When checking formatting, only update the incremental cache when
-          // the file is the same since we don't bother checking for stable
-          // formatting here. Additionally, ensure this is done during check
-          // so that CIs that cache the DENO_DIR will get the benefit of
-          // incremental formatting
-          incremental_cache.update_file(&file_path, &file_text);
-        }
-        Err(e) => {
-          not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
-          let _g = output_lock.lock();
-          warn!("Error checking: {}", file_path.to_string_lossy());
-          warn!(
-            "{}",
-            format!("{e}")
-              .split('\n')
-              .map(|l| {
-                if l.trim().is_empty() {
-                  String::new()
-                } else {
-                  format!(" {l}")
-                }
-              })
-              .collect::<Vec<_>>()
-              .join("\n")
-          );
-        }
-      }
-      Ok(())
-    }
-  })
-  .await?;
-
-  let not_formatted_files_count =
-    not_formatted_files_count.load(Ordering::Relaxed);
-  let checked_files_count = checked_files_count.load(Ordering::Relaxed);
-  let checked_files_str =
-    format!("{} {}", checked_files_count, files_str(checked_files_count));
-  if not_formatted_files_count == 0 {
-    info!("Checked {}", checked_files_str);
-    Ok(())
-  } else {
-    let not_formatted_files_str = files_str(not_formatted_files_count);
-    Err(generic_error(format!(
-      "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
-    )))
-  }
-}
+#[async_trait]
+trait Formatter {
+  async fn handle_files(
+    &self,
+    paths: Vec<PathBuf>,
+    fmt_options: FmtOptionsConfig,
+    incremental_cache: Arc<IncrementalCache>,
+  ) -> Result<(), AnyError>;
+
+  fn finish(&self) -> Result<(), AnyError>;
+}
+
+#[derive(Default)]
+struct CheckFormatter {
+  not_formatted_files_count: Arc<AtomicUsize>,
+  checked_files_count: Arc<AtomicUsize>,
+}
+
+#[async_trait]
+impl Formatter for CheckFormatter {
+  async fn handle_files(
+    &self,
+    paths: Vec<PathBuf>,
+    fmt_options: FmtOptionsConfig,
+    incremental_cache: Arc<IncrementalCache>,
+  ) -> Result<(), AnyError> {
+    // prevent threads outputting at the same time
+    let output_lock = Arc::new(Mutex::new(0));
+
+    run_parallelized(paths, {
+      let not_formatted_files_count = self.not_formatted_files_count.clone();
+      let checked_files_count = self.checked_files_count.clone();
+      move |file_path| {
+        checked_files_count.fetch_add(1, Ordering::Relaxed);
+        let file_text = read_file_contents(&file_path)?.text;
+
+        // skip checking the file if we know it's formatted
+        if incremental_cache.is_file_same(&file_path, &file_text) {
+          return Ok(());
+        }
+
+        match format_file(&file_path, &file_text, &fmt_options) {
+          Ok(Some(formatted_text)) => {
+            not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
+            let _g = output_lock.lock();
+            let diff = diff(&file_text, &formatted_text);
+            info!("");
+            info!("{} {}:", colors::bold("from"), file_path.display());
+            info!("{}", diff);
+          }
+          Ok(None) => {
+            // When checking formatting, only update the incremental cache when
+            // the file is the same since we don't bother checking for stable
+            // formatting here. Additionally, ensure this is done during check
+            // so that CIs that cache the DENO_DIR will get the benefit of
+            // incremental formatting
+            incremental_cache.update_file(&file_path, &file_text);
+          }
+          Err(e) => {
+            not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
+            let _g = output_lock.lock();
+            warn!("Error checking: {}", file_path.to_string_lossy());
+            warn!(
+              "{}",
+              format!("{e}")
+                .split('\n')
+                .map(|l| {
+                  if l.trim().is_empty() {
+                    String::new()
+                  } else {
+                    format!(" {l}")
+                  }
+                })
+                .collect::<Vec<_>>()
+                .join("\n")
+            );
+          }
+        }
+        Ok(())
+      }
+    })
+    .await?;
+
+    Ok(())
+  }
+
+  fn finish(&self) -> Result<(), AnyError> {
+    let not_formatted_files_count =
+      self.not_formatted_files_count.load(Ordering::Relaxed);
+    let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
+    let checked_files_str =
+      format!("{} {}", checked_files_count, files_str(checked_files_count));
+    if not_formatted_files_count == 0 {
+      info!("Checked {}", checked_files_str);
+      Ok(())
+    } else {
+      let not_formatted_files_str = files_str(not_formatted_files_count);
+      Err(generic_error(format!(
+        "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
+      )))
+    }
+  }
+}

-async fn format_source_files(
-  paths: Vec<PathBuf>,
-  fmt_options: FmtOptionsConfig,
-  incremental_cache: Arc<IncrementalCache>,
-) -> Result<(), AnyError> {
-  let formatted_files_count = Arc::new(AtomicUsize::new(0));
-  let checked_files_count = Arc::new(AtomicUsize::new(0));
-  let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
-
-  run_parallelized(paths, {
-    let formatted_files_count = formatted_files_count.clone();
-    let checked_files_count = checked_files_count.clone();
-    move |file_path| {
-      checked_files_count.fetch_add(1, Ordering::Relaxed);
-      let file_contents = read_file_contents(&file_path)?;
-
-      // skip formatting the file if we know it's formatted
-      if incremental_cache.is_file_same(&file_path, &file_contents.text) {
-        return Ok(());
-      }
-
-      match format_ensure_stable(
-        &file_path,
-        &file_contents.text,
-        &fmt_options,
-        format_file,
-      ) {
-        Ok(Some(formatted_text)) => {
-          incremental_cache.update_file(&file_path, &formatted_text);
-          write_file_contents(
-            &file_path,
-            FileContents {
-              had_bom: file_contents.had_bom,
-              text: formatted_text,
-            },
-          )?;
-          formatted_files_count.fetch_add(1, Ordering::Relaxed);
-          let _g = output_lock.lock();
-          info!("{}", file_path.to_string_lossy());
-        }
-        Ok(None) => {
-          incremental_cache.update_file(&file_path, &file_contents.text);
-        }
-        Err(e) => {
-          let _g = output_lock.lock();
-          log::error!("Error formatting: {}", file_path.to_string_lossy());
-          log::error!(" {e}");
-        }
-      }
-      Ok(())
-    }
-  })
-  .await?;
-
-  let formatted_files_count = formatted_files_count.load(Ordering::Relaxed);
-  debug!(
-    "Formatted {} {}",
-    formatted_files_count,
-    files_str(formatted_files_count),
-  );
-
-  let checked_files_count = checked_files_count.load(Ordering::Relaxed);
-  info!(
-    "Checked {} {}",
-    checked_files_count,
-    files_str(checked_files_count)
-  );
-
-  Ok(())
-}
+#[derive(Default)]
+struct RealFormatter {
+  formatted_files_count: Arc<AtomicUsize>,
+  checked_files_count: Arc<AtomicUsize>,
+}
+
+#[async_trait]
+impl Formatter for RealFormatter {
+  async fn handle_files(
+    &self,
+    paths: Vec<PathBuf>,
+    fmt_options: FmtOptionsConfig,
+    incremental_cache: Arc<IncrementalCache>,
+  ) -> Result<(), AnyError> {
+    let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
+
+    run_parallelized(paths, {
+      let formatted_files_count = self.formatted_files_count.clone();
+      let checked_files_count = self.checked_files_count.clone();
+      move |file_path| {
+        checked_files_count.fetch_add(1, Ordering::Relaxed);
+        let file_contents = read_file_contents(&file_path)?;
+
+        // skip formatting the file if we know it's formatted
+        if incremental_cache.is_file_same(&file_path, &file_contents.text) {
+          return Ok(());
+        }
+
+        match format_ensure_stable(
+          &file_path,
+          &file_contents.text,
+          &fmt_options,
+          format_file,
+        ) {
+          Ok(Some(formatted_text)) => {
+            incremental_cache.update_file(&file_path, &formatted_text);
+            write_file_contents(
+              &file_path,
+              FileContents {
+                had_bom: file_contents.had_bom,
+                text: formatted_text,
+              },
+            )?;
+            formatted_files_count.fetch_add(1, Ordering::Relaxed);
+            let _g = output_lock.lock();
+            info!("{}", file_path.to_string_lossy());
+          }
+          Ok(None) => {
+            incremental_cache.update_file(&file_path, &file_contents.text);
+          }
+          Err(e) => {
+            let _g = output_lock.lock();
+            log::error!("Error formatting: {}", file_path.to_string_lossy());
+            log::error!(" {e}");
+          }
+        }
+        Ok(())
+      }
+    })
+    .await?;
+    Ok(())
+  }
+
+  fn finish(&self) -> Result<(), AnyError> {
+    let formatted_files_count =
+      self.formatted_files_count.load(Ordering::Relaxed);
+    debug!(
+      "Formatted {} {}",
+      formatted_files_count,
+      files_str(formatted_files_count),
+    );
+
+    let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
+    info!(
+      "Checked {} {}",
+      checked_files_count,
+      files_str(checked_files_count)
+    );
+    Ok(())
+  }
+}

/// When storing any formatted text in the incremental cache, we want
@@ -491,14 +568,18 @@ fn format_ensure_stable(
/// Format stdin and write result to stdout.
/// Treats input as set by `--ext` flag.
/// Compatible with `--check` flag.
-fn format_stdin(fmt_options: FmtOptions, ext: &str) -> Result<(), AnyError> {
+fn format_stdin(
+  fmt_flags: &FmtFlags,
+  fmt_options: FmtOptions,
+  ext: &str,
+) -> Result<(), AnyError> {
  let mut source = String::new();
  if stdin().read_to_string(&mut source).is_err() {
    bail!("Failed to read from stdin");
  }
  let file_path = PathBuf::from(format!("_stdin.{ext}"));
  let formatted_text = format_file(&file_path, &source, &fmt_options.options)?;
-  if fmt_options.check {
+  if fmt_flags.check {
    #[allow(clippy::print_stdout)]
    if formatted_text.is_some() {
      println!("Not formatted stdin");
@ -4,6 +4,7 @@ use std::collections::HashMap;
|
|||
use std::collections::HashSet;
|
||||
use std::fmt;
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::bail;
|
||||
|
@ -25,7 +26,6 @@ use deno_semver::npm::NpmPackageReqReference;
|
|||
use deno_semver::package::PackageNv;
|
||||
use deno_terminal::colors;
|
||||
|
||||
use crate::args::write_lockfile_if_has_changes;
|
||||
use crate::args::Flags;
|
||||
use crate::args::InfoFlags;
|
||||
use crate::display;
|
||||
|
@ -35,27 +35,31 @@ use crate::npm::CliNpmResolver;
|
|||
use crate::npm::ManagedCliNpmResolver;
|
||||
use crate::util::checksum;
|
||||
|
||||
pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
pub async fn info(
|
||||
flags: Arc<Flags>,
|
||||
info_flags: InfoFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
let cli_options = factory.cli_options()?;
|
||||
if let Some(specifier) = info_flags.file {
|
||||
let module_graph_builder = factory.module_graph_builder().await?;
|
||||
let module_graph_creator = factory.module_graph_creator().await?;
|
||||
let npm_resolver = factory.npm_resolver().await?;
|
||||
let maybe_lockfile = factory.maybe_lockfile();
|
||||
let maybe_imports_map = factory.maybe_import_map().await?;
|
||||
let maybe_lockfile = cli_options.maybe_lockfile();
|
||||
let resolver = factory.workspace_resolver().await?;
|
||||
|
||||
let maybe_import_specifier = if let Some(imports_map) = maybe_imports_map {
|
||||
if let Ok(imports_specifier) =
|
||||
imports_map.resolve(&specifier, imports_map.base_url())
|
||||
{
|
||||
Some(imports_specifier)
|
||||
let maybe_import_specifier =
|
||||
if let Some(import_map) = resolver.maybe_import_map() {
|
||||
if let Ok(imports_specifier) =
|
||||
import_map.resolve(&specifier, import_map.base_url())
|
||||
{
|
||||
Some(imports_specifier)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
};
|
||||
|
||||
let specifier = match maybe_import_specifier {
|
||||
Some(specifier) => specifier,
|
||||
|
@@ -71,7 +75,7 @@ pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
|
|||
// write out the lockfile if there is one
|
||||
if let Some(lockfile) = &maybe_lockfile {
|
||||
graph_exit_lock_errors(&graph);
|
||||
write_lockfile_if_has_changes(&mut lockfile.lock())?;
|
||||
lockfile.write_if_changed()?;
|
||||
}
|
||||
|
||||
if info_flags.json {
|
||||
|
|
|
@@ -4,10 +4,151 @@ use crate::args::InitFlags;
|
|||
use crate::colors;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json::json;
|
||||
use log::info;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
|
||||
let cwd =
|
||||
std::env::current_dir().context("Can't read current working directory.")?;
|
||||
let dir = if let Some(dir) = &init_flags.dir {
|
||||
let dir = cwd.join(dir);
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
dir
|
||||
} else {
|
||||
cwd
|
||||
};
|
||||
|
||||
if init_flags.lib {
|
||||
// Extract the directory name to use as the project name
|
||||
let project_name = dir
|
||||
.file_name()
|
||||
.unwrap_or_else(|| dir.as_os_str())
|
||||
.to_str()
|
||||
.unwrap();
|
||||
|
||||
create_file(
|
||||
&dir,
|
||||
"mod.ts",
|
||||
r#"export function add(a: number, b: number): number {
|
||||
return a + b;
|
||||
}
|
||||
"#,
|
||||
)?;
|
||||
create_file(
|
||||
&dir,
|
||||
"mod_test.ts",
|
||||
r#"import { assertEquals } from "@std/assert";
|
||||
import { add } from "./mod.ts";
|
||||
|
||||
Deno.test(function addTest() {
|
||||
assertEquals(add(2, 3), 5);
|
||||
});
|
||||
"#,
|
||||
)?;
|
||||
|
||||
create_json_file(
|
||||
&dir,
|
||||
"deno.json",
|
||||
&json!({
|
||||
"name": project_name,
|
||||
"version": "0.1.0",
|
||||
"tasks": {
|
||||
"dev": "deno test --watch mod.ts"
|
||||
},
|
||||
"imports": {
|
||||
"@std/assert": "jsr:@std/assert@1"
|
||||
},
|
||||
"exports": "./mod.ts"
|
||||
}),
|
||||
)?;
|
||||
} else {
|
||||
create_file(
|
||||
&dir,
|
||||
"main.ts",
|
||||
r#"export function add(a: number, b: number): number {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
// Learn more at https://docs.deno.com/runtime/manual/examples/module_metadata#concepts
|
||||
if (import.meta.main) {
|
||||
console.log("Add 2 + 3 =", add(2, 3));
|
||||
}
|
||||
"#,
|
||||
)?;
|
||||
create_file(
|
||||
&dir,
|
||||
"main_test.ts",
|
||||
r#"import { assertEquals } from "@std/assert";
|
||||
import { add } from "./main.ts";
|
||||
|
||||
Deno.test(function addTest() {
|
||||
assertEquals(add(2, 3), 5);
|
||||
});
|
||||
"#,
|
||||
)?;
|
||||
|
||||
create_json_file(
|
||||
&dir,
|
||||
"deno.json",
|
||||
&json!({
|
||||
"tasks": {
|
||||
"dev": "deno run --watch main.ts"
|
||||
},
|
||||
"imports": {
|
||||
"@std/assert": "jsr:@std/assert@1"
|
||||
}
|
||||
}),
|
||||
)?;
|
||||
}
|
||||
|
||||
info!("✅ {}", colors::green("Project initialized"));
|
||||
info!("");
|
||||
info!("{}", colors::gray("Run these commands to get started"));
|
||||
info!("");
|
||||
if let Some(dir) = init_flags.dir {
|
||||
info!(" cd {}", dir);
|
||||
info!("");
|
||||
}
|
||||
if init_flags.lib {
|
||||
info!(" {}", colors::gray("# Run the tests"));
|
||||
info!(" deno test");
|
||||
info!("");
|
||||
info!(
|
||||
" {}",
|
||||
colors::gray("# Run the tests and watch for file changes")
|
||||
);
|
||||
info!(" deno task dev");
|
||||
info!("");
|
||||
info!(" {}", colors::gray("# Publish to JSR (dry run)"));
|
||||
info!(" deno publish --dry-run");
|
||||
} else {
|
||||
info!(" {}", colors::gray("# Run the program"));
|
||||
info!(" deno run main.ts");
|
||||
info!("");
|
||||
info!(
|
||||
" {}",
|
||||
colors::gray("# Run the program and watch for file changes")
|
||||
);
|
||||
info!(" deno task dev");
|
||||
info!("");
|
||||
info!(" {}", colors::gray("# Run the tests"));
|
||||
info!(" deno test");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_json_file(
|
||||
dir: &Path,
|
||||
filename: &str,
|
||||
value: &deno_core::serde_json::Value,
|
||||
) -> Result<(), AnyError> {
|
||||
let mut text = deno_core::serde_json::to_string_pretty(value)?;
|
||||
text.push('\n');
|
||||
create_file(dir, filename, &text)
|
||||
}
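As a usage sketch, writing one of the configs above through this helper looks like the following (a hypothetical caller with illustrative values; assumes the surrounding module's imports are in scope):

use deno_core::error::AnyError;
use deno_core::serde_json::json;
use std::path::Path;

// Hypothetical caller mirroring how the non-lib template writes deno.json.
fn write_default_config(dir: &Path) -> Result<(), AnyError> {
    create_json_file(
        dir,
        "deno.json",
        &json!({
            "tasks": { "dev": "deno run --watch main.ts" }
        }),
    )
}
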
|
||||
|
||||
fn create_file(
|
||||
dir: &Path,
|
||||
filename: &str,
|
||||
|
@@ -30,46 +171,3 @@ fn create_file(
|
|||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
|
||||
let cwd =
|
||||
std::env::current_dir().context("Can't read current working directory.")?;
|
||||
let dir = if let Some(dir) = &init_flags.dir {
|
||||
let dir = cwd.join(dir);
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
dir
|
||||
} else {
|
||||
cwd
|
||||
};
|
||||
|
||||
let main_ts = include_str!("./templates/main.ts");
|
||||
create_file(&dir, "main.ts", main_ts)?;
|
||||
|
||||
create_file(
|
||||
&dir,
|
||||
"main_test.ts",
|
||||
include_str!("./templates/main_test.ts"),
|
||||
)?;
|
||||
create_file(&dir, "deno.json", include_str!("./templates/deno.json"))?;
|
||||
|
||||
info!("✅ {}", colors::green("Project initialized"));
|
||||
info!("");
|
||||
info!("{}", colors::gray("Run these commands to get started"));
|
||||
info!("");
|
||||
if let Some(dir) = init_flags.dir {
|
||||
info!(" cd {}", dir);
|
||||
info!("");
|
||||
}
|
||||
info!(" {}", colors::gray("# Run the program"));
|
||||
info!(" deno run main.ts");
|
||||
info!("");
|
||||
info!(
|
||||
" {}",
|
||||
colors::gray("# Run the program and watch for file changes")
|
||||
);
|
||||
info!(" deno task dev");
|
||||
info!("");
|
||||
info!(" {}", colors::gray("# Run the tests"));
|
||||
info!(" deno test");
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@@ -1,5 +0,0 @@
{
"tasks": {
"dev": "deno run --watch main.ts"
}
}

@@ -1,8 +0,0 @@
export function add(a: number, b: number): number {
return a + b;
}

// Learn more at https://deno.land/manual/examples/module_metadata#concepts
if (import.meta.main) {
console.log("Add 2 + 3 =", add(2, 3));
}

@@ -1,6 +0,0 @@
import { assertEquals } from "jsr:@std/assert";
import { add } from "./main.ts";

Deno.test(function addTest() {
assertEquals(add(2, 3), 5);
});

@@ -1,9 +1,9 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::resolve_no_prompt;
|
||||
use crate::args::write_lockfile_if_has_changes;
|
||||
use crate::args::AddFlags;
|
||||
use crate::args::CaData;
|
||||
use crate::args::ConfigFlag;
|
||||
use crate::args::Flags;
|
||||
use crate::args::InstallFlags;
|
||||
use crate::args::InstallFlagsGlobal;
|
||||
|
@@ -15,7 +15,6 @@ use crate::factory::CliFactory;
|
|||
use crate::http_util::HttpClientProvider;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
|
||||
use deno_config::ConfigFlag;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
|
@@ -36,6 +35,7 @@ use std::path::PathBuf;
|
|||
|
||||
#[cfg(not(windows))]
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::sync::Arc;
|
||||
|
||||
static EXEC_NAME_RE: Lazy<Regex> = Lazy::new(|| {
|
||||
RegexBuilder::new(r"^[a-z0-9][\w-]*$")
|
||||
|
@@ -262,33 +262,33 @@ pub fn uninstall(uninstall_flags: UninstallFlags) -> Result<(), AnyError> {
|
|||
}
|
||||
|
||||
async fn install_local(
|
||||
flags: Flags,
|
||||
flags: Arc<Flags>,
|
||||
maybe_add_flags: Option<AddFlags>,
|
||||
) -> Result<(), AnyError> {
|
||||
if let Some(add_flags) = maybe_add_flags {
|
||||
return super::registry::add(flags, add_flags).await;
|
||||
}
|
||||
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
crate::module_loader::load_top_level_deps(&factory).await?;
|
||||
|
||||
if let Some(lockfile) = factory.cli_options().maybe_lockfile() {
|
||||
write_lockfile_if_has_changes(&mut lockfile.lock())?;
|
||||
if let Some(lockfile) = factory.cli_options()?.maybe_lockfile() {
|
||||
lockfile.write_if_changed()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn install_command(
|
||||
flags: Flags,
|
||||
flags: Arc<Flags>,
|
||||
install_flags: InstallFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
if !install_flags.global {
|
||||
log::warn!("⚠️ `deno install` behavior will change in Deno 2. To preserve the current behavior use the `-g` or `--global` flag.");
|
||||
}
|
||||
|
||||
match install_flags.kind {
|
||||
InstallKind::Global(global_flags) => {
|
||||
if !install_flags.global {
|
||||
log::warn!("⚠️ `deno install` behavior will change in Deno 2. To preserve the current behavior use the `-g` or `--global` flag.");
|
||||
}
|
||||
|
||||
install_global(flags, global_flags).await
|
||||
}
|
||||
InstallKind::Local(maybe_add_flags) => {
|
||||
|
@@ -298,11 +298,11 @@ pub async fn install_command(
|
|||
}
|
||||
|
||||
async fn install_global(
|
||||
flags: Flags,
|
||||
flags: Arc<Flags>,
|
||||
install_flags_global: InstallFlagsGlobal,
|
||||
) -> Result<(), AnyError> {
|
||||
// ensure the module is cached
|
||||
let factory = CliFactory::from_flags(flags.clone())?;
|
||||
let factory = CliFactory::from_flags(flags.clone());
|
||||
factory
|
||||
.main_module_graph_container()
|
||||
.await?
|
||||
|
@@ -311,16 +311,16 @@ async fn install_global(
|
|||
let http_client = factory.http_client_provider();
|
||||
|
||||
// create the install shim
|
||||
create_install_shim(http_client, flags, install_flags_global).await
|
||||
create_install_shim(http_client, &flags, install_flags_global).await
|
||||
}
|
||||
|
||||
async fn create_install_shim(
|
||||
http_client_provider: &HttpClientProvider,
|
||||
flags: Flags,
|
||||
flags: &Flags,
|
||||
install_flags_global: InstallFlagsGlobal,
|
||||
) -> Result<(), AnyError> {
|
||||
let shim_data =
|
||||
resolve_shim_data(http_client_provider, &flags, &install_flags_global)
|
||||
resolve_shim_data(http_client_provider, flags, &install_flags_global)
|
||||
.await?;
|
||||
|
||||
// ensure directory exists
|
||||
|
@@ -467,6 +467,10 @@ async fn resolve_shim_data(
|
|||
executable_args.push("--cached-only".to_string());
|
||||
}
|
||||
|
||||
if flags.frozen_lockfile {
|
||||
executable_args.push("--frozen".to_string());
|
||||
}
|
||||
|
||||
if resolve_no_prompt(&flags.permissions) {
|
||||
executable_args.push("--no-prompt".to_string());
|
||||
}
|
||||
|
@@ -568,11 +572,11 @@ fn is_in_path(dir: &Path) -> bool {
|
|||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use crate::args::ConfigFlag;
|
||||
use crate::args::PermissionFlags;
|
||||
use crate::args::UninstallFlagsGlobal;
|
||||
use crate::args::UnstableConfig;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use deno_config::ConfigFlag;
|
||||
use std::process::Command;
|
||||
use test_util::testdata_path;
|
||||
use test_util::TempDir;
|
||||
|
@@ -775,7 +779,7 @@ mod tests {
|
|||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags {
|
||||
&Flags {
|
||||
unstable_config: UnstableConfig {
|
||||
legacy_flag_enabled: true,
|
||||
..Default::default()
|
||||
|
@@ -1170,7 +1174,7 @@ mod tests {
|
|||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
&Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: local_module_str.to_string(),
|
||||
args: vec![],
|
||||
|
@@ -1200,7 +1204,7 @@ mod tests {
|
|||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
&Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||
args: vec![],
|
||||
|
@@ -1221,7 +1225,7 @@ mod tests {
|
|||
// No force. Install failed.
|
||||
let no_force_result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
&Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/cat.ts".to_string(), // using a different URL
|
||||
args: vec![],
|
||||
|
@@ -1243,7 +1247,7 @@ mod tests {
|
|||
// Force. Install success.
|
||||
let force_result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
&Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/cat.ts".to_string(), // using a different URL
|
||||
args: vec![],
|
||||
|
@@ -1271,7 +1275,7 @@ mod tests {
|
|||
|
||||
let result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags {
|
||||
&Flags {
|
||||
config_flag: ConfigFlag::Path(config_file_path.to_string()),
|
||||
..Flags::default()
|
||||
},
|
||||
|
@@ -1304,7 +1308,7 @@ mod tests {
|
|||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
&Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||
args: vec!["\"".to_string()],
|
||||
|
@@ -1345,7 +1349,7 @@ mod tests {
|
|||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
&Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: local_module_str.to_string(),
|
||||
args: vec![],
|
||||
|
@@ -1387,7 +1391,7 @@ mod tests {
|
|||
|
||||
let result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags {
|
||||
&Flags {
|
||||
import_map_path: Some(import_map_path.to_string()),
|
||||
..Flags::default()
|
||||
},
|
||||
|
@@ -1433,7 +1437,7 @@ mod tests {
|
|||
|
||||
let result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
&Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: file_module_string.to_string(),
|
||||
args: vec![],
|
||||
|
|
|
@@ -1,19 +1,26 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::args::Flags;
|
||||
use crate::args::JupyterFlags;
|
||||
use crate::cdp;
|
||||
use crate::lsp::ReplCompletionItem;
|
||||
use crate::ops;
|
||||
use crate::tools::repl;
|
||||
use crate::tools::test::create_single_test_event_channel;
|
||||
use crate::tools::test::reporters::PrettyTestReporter;
|
||||
use crate::tools::test::TestEventWorkerSender;
|
||||
use crate::CliFactory;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::located_script_name;
|
||||
use deno_core::resolve_url_or_path;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::url::Url;
|
||||
use deno_runtime::deno_io::Stdio;
|
||||
use deno_runtime::deno_io::StdioPipe;
|
||||
|
@@ -21,17 +28,17 @@ use deno_runtime::deno_permissions::Permissions;
|
|||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_runtime::WorkerExecutionMode;
|
||||
use deno_terminal::colors;
|
||||
|
||||
use jupyter_runtime::jupyter::ConnectionInfo;
|
||||
use jupyter_runtime::messaging::StreamContent;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::mpsc::UnboundedSender;
|
||||
use tokio::sync::oneshot;
|
||||
|
||||
mod install;
|
||||
pub mod server;
|
||||
|
||||
pub async fn kernel(
|
||||
flags: Flags,
|
||||
flags: Arc<Flags>,
|
||||
jupyter_flags: JupyterFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
log::info!(
|
||||
|
@@ -51,8 +58,8 @@ pub async fn kernel(
|
|||
|
||||
let connection_filepath = jupyter_flags.conn_file.unwrap();
|
||||
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
let cli_options = factory.cli_options()?;
|
||||
let main_module =
|
||||
resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd())
|
||||
.unwrap();
|
||||
|
@@ -118,9 +125,7 @@ pub async fn kernel(
|
|||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
self
|
||||
.0
|
||||
.send(StreamContent::stdout(
|
||||
String::from_utf8_lossy(buf).into_owned(),
|
||||
))
|
||||
.send(StreamContent::stdout(&String::from_utf8_lossy(buf)))
|
||||
.ok();
|
||||
Ok(buf.len())
|
||||
}
|
||||
|
@@ -142,7 +147,378 @@ pub async fn kernel(
|
|||
)
|
||||
}));
|
||||
|
||||
server::JupyterServer::start(spec, stdio_rx, repl_session).await?;
|
||||
let (tx1, rx1) = mpsc::unbounded_channel();
|
||||
let (tx2, rx2) = mpsc::unbounded_channel();
|
||||
let (startup_data_tx, startup_data_rx) =
|
||||
oneshot::channel::<server::StartupData>();
|
||||
|
||||
let mut repl_session_proxy = JupyterReplSession {
|
||||
repl_session,
|
||||
rx: rx1,
|
||||
tx: tx2,
|
||||
};
|
||||
let repl_session_proxy_channels = JupyterReplProxy { tx: tx1, rx: rx2 };
|
||||
|
||||
let join_handle = std::thread::spawn(move || {
|
||||
let fut = server::JupyterServer::start(
|
||||
spec,
|
||||
stdio_rx,
|
||||
repl_session_proxy_channels,
|
||||
startup_data_tx,
|
||||
)
|
||||
.boxed_local();
|
||||
deno_runtime::tokio_util::create_and_run_current_thread(fut)
|
||||
});
|
||||
|
||||
let Ok(startup_data) = startup_data_rx.await else {
|
||||
bail!("Failed to acquire startup data");
|
||||
};
|
||||
{
|
||||
let op_state_rc =
|
||||
repl_session_proxy.repl_session.worker.js_runtime.op_state();
|
||||
let mut op_state = op_state_rc.borrow_mut();
|
||||
op_state.put(startup_data.iopub_connection.clone());
|
||||
op_state.put(startup_data.last_execution_request.clone());
|
||||
op_state.put(startup_data.stdin_connection_proxy.clone());
|
||||
}
|
||||
|
||||
repl_session_proxy.start().await;
|
||||
let server_result = join_handle.join();
|
||||
match server_result {
|
||||
Ok(result) => {
|
||||
result?;
|
||||
}
|
||||
Err(e) => {
|
||||
bail!("Jupyter kernel error: {:?}", e);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
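The startup handshake above spawns the Jupyter server on its own thread and then blocks until the thread hands back its startup data over a one-shot channel, bailing if the sender is dropped. A self-contained sketch of that pattern using only std (a plain channel plays the role of the tokio oneshot, and `StartupData` is reduced to a toy field):

use std::sync::mpsc;
use std::thread;

struct StartupData {
    session_id: String, // toy field; the real struct carries connections
}

fn main() {
    let (startup_tx, startup_rx) = mpsc::channel::<StartupData>();

    let join_handle = thread::spawn(move || {
        // ... bind sockets / build connections here ...
        startup_tx
            .send(StartupData { session_id: "abc".into() })
            .expect("receiver dropped");
        // ... then run the server loop ...
    });

    // Mirrors `startup_data_rx.await` plus the bail! on failure above.
    let startup = startup_rx.recv().expect("failed to acquire startup data");
    println!("server ready, session {}", startup.session_id);
    join_handle.join().expect("server thread panicked");
}
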
|
||||
|
||||
pub enum JupyterReplRequest {
|
||||
LspCompletions {
|
||||
line_text: String,
|
||||
position: usize,
|
||||
},
|
||||
JsGetProperties {
|
||||
object_id: String,
|
||||
},
|
||||
JsEvaluate {
|
||||
expr: String,
|
||||
},
|
||||
JsGlobalLexicalScopeNames,
|
||||
JsEvaluateLineWithObjectWrapping {
|
||||
line: String,
|
||||
},
|
||||
JsCallFunctionOnArgs {
|
||||
function_declaration: String,
|
||||
args: Vec<cdp::RemoteObject>,
|
||||
},
|
||||
JsCallFunctionOn {
|
||||
arg0: cdp::CallArgument,
|
||||
arg1: cdp::CallArgument,
|
||||
},
|
||||
}
|
||||
|
||||
pub enum JupyterReplResponse {
|
||||
LspCompletions(Vec<ReplCompletionItem>),
|
||||
JsGetProperties(Option<cdp::GetPropertiesResponse>),
|
||||
JsEvaluate(Option<cdp::EvaluateResponse>),
|
||||
JsGlobalLexicalScopeNames(cdp::GlobalLexicalScopeNamesResponse),
|
||||
JsEvaluateLineWithObjectWrapping(Result<repl::TsEvaluateResponse, AnyError>),
|
||||
JsCallFunctionOnArgs(Result<cdp::CallFunctionOnResponse, AnyError>),
|
||||
JsCallFunctionOn(Option<cdp::CallFunctionOnResponse>),
|
||||
}
|
||||
|
||||
pub struct JupyterReplProxy {
|
||||
tx: mpsc::UnboundedSender<JupyterReplRequest>,
|
||||
rx: mpsc::UnboundedReceiver<JupyterReplResponse>,
|
||||
}
|
||||
|
||||
impl JupyterReplProxy {
|
||||
pub async fn lsp_completions(
|
||||
&mut self,
|
||||
line_text: String,
|
||||
position: usize,
|
||||
) -> Vec<ReplCompletionItem> {
|
||||
let _ = self.tx.send(JupyterReplRequest::LspCompletions {
|
||||
line_text,
|
||||
position,
|
||||
});
|
||||
let Some(JupyterReplResponse::LspCompletions(resp)) = self.rx.recv().await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
}
|
||||
|
||||
pub async fn get_properties(
|
||||
&mut self,
|
||||
object_id: String,
|
||||
) -> Option<cdp::GetPropertiesResponse> {
|
||||
let _ = self
|
||||
.tx
|
||||
.send(JupyterReplRequest::JsGetProperties { object_id });
|
||||
let Some(JupyterReplResponse::JsGetProperties(resp)) = self.rx.recv().await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
}
|
||||
|
||||
pub async fn evaluate(
|
||||
&mut self,
|
||||
expr: String,
|
||||
) -> Option<cdp::EvaluateResponse> {
|
||||
let _ = self.tx.send(JupyterReplRequest::JsEvaluate { expr });
|
||||
let Some(JupyterReplResponse::JsEvaluate(resp)) = self.rx.recv().await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
}
|
||||
|
||||
pub async fn global_lexical_scope_names(
|
||||
&mut self,
|
||||
) -> cdp::GlobalLexicalScopeNamesResponse {
|
||||
let _ = self.tx.send(JupyterReplRequest::JsGlobalLexicalScopeNames);
|
||||
let Some(JupyterReplResponse::JsGlobalLexicalScopeNames(resp)) =
|
||||
self.rx.recv().await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
}
|
||||
|
||||
pub async fn evaluate_line_with_object_wrapping(
|
||||
&mut self,
|
||||
line: String,
|
||||
) -> Result<repl::TsEvaluateResponse, AnyError> {
|
||||
let _ = self
|
||||
.tx
|
||||
.send(JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line });
|
||||
let Some(JupyterReplResponse::JsEvaluateLineWithObjectWrapping(resp)) =
|
||||
self.rx.recv().await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
}
|
||||
|
||||
pub async fn call_function_on_args(
|
||||
&mut self,
|
||||
function_declaration: String,
|
||||
args: Vec<cdp::RemoteObject>,
|
||||
) -> Result<cdp::CallFunctionOnResponse, AnyError> {
|
||||
let _ = self.tx.send(JupyterReplRequest::JsCallFunctionOnArgs {
|
||||
function_declaration,
|
||||
args,
|
||||
});
|
||||
let Some(JupyterReplResponse::JsCallFunctionOnArgs(resp)) =
|
||||
self.rx.recv().await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): rename to "broadcast_result"?
|
||||
pub async fn call_function_on(
|
||||
&mut self,
|
||||
arg0: cdp::CallArgument,
|
||||
arg1: cdp::CallArgument,
|
||||
) -> Option<cdp::CallFunctionOnResponse> {
|
||||
let _ = self
|
||||
.tx
|
||||
.send(JupyterReplRequest::JsCallFunctionOn { arg0, arg1 });
|
||||
let Some(JupyterReplResponse::JsCallFunctionOn(resp)) =
|
||||
self.rx.recv().await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
}
|
||||
}
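Each proxy method above sends exactly one request variant and awaits exactly one response variant, with `unreachable!()` encoding the invariant that the worker answers every request in order with the matching variant. A self-contained toy version of the same request/response pairing over two unbounded channels (assumes the tokio crate with the macros and rt features):

use tokio::sync::mpsc;

enum Request { Add(i32, i32) }
enum Response { Add(i32) }

struct Proxy {
    tx: mpsc::UnboundedSender<Request>,
    rx: mpsc::UnboundedReceiver<Response>,
}

impl Proxy {
    async fn add(&mut self, a: i32, b: i32) -> i32 {
        let _ = self.tx.send(Request::Add(a, b));
        // Safe only because the worker answers every request in order
        // with the matching variant (the same invariant as above).
        let Some(Response::Add(sum)) = self.rx.recv().await else {
            unreachable!()
        };
        sum
    }
}

#[tokio::main]
async fn main() {
    let (tx1, mut rx1) = mpsc::unbounded_channel::<Request>();
    let (tx2, rx2) = mpsc::unbounded_channel::<Response>();
    // The worker half: receive a request, send back the paired response.
    tokio::spawn(async move {
        while let Some(Request::Add(a, b)) = rx1.recv().await {
            let _ = tx2.send(Response::Add(a + b));
        }
    });
    let mut proxy = Proxy { tx: tx1, rx: rx2 };
    assert_eq!(proxy.add(2, 3).await, 5);
}
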
|
||||
|
||||
pub struct JupyterReplSession {
|
||||
repl_session: repl::ReplSession,
|
||||
rx: mpsc::UnboundedReceiver<JupyterReplRequest>,
|
||||
tx: mpsc::UnboundedSender<JupyterReplResponse>,
|
||||
}
|
||||
|
||||
impl JupyterReplSession {
|
||||
pub async fn start(&mut self) {
|
||||
loop {
|
||||
let Some(msg) = self.rx.recv().await else {
|
||||
break;
|
||||
};
|
||||
let resp = match msg {
|
||||
JupyterReplRequest::LspCompletions {
|
||||
line_text,
|
||||
position,
|
||||
} => JupyterReplResponse::LspCompletions(
|
||||
self.lsp_completions(&line_text, position).await,
|
||||
),
|
||||
JupyterReplRequest::JsGetProperties { object_id } => {
|
||||
JupyterReplResponse::JsGetProperties(
|
||||
self.get_properties(object_id).await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsEvaluate { expr } => {
|
||||
JupyterReplResponse::JsEvaluate(self.evaluate(expr).await)
|
||||
}
|
||||
JupyterReplRequest::JsGlobalLexicalScopeNames => {
|
||||
JupyterReplResponse::JsGlobalLexicalScopeNames(
|
||||
self.global_lexical_scope_names().await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line } => {
|
||||
JupyterReplResponse::JsEvaluateLineWithObjectWrapping(
|
||||
self.evaluate_line_with_object_wrapping(&line).await,
|
||||
)
|
||||
}
|
||||
JupyterReplRequest::JsCallFunctionOnArgs {
|
||||
function_declaration,
|
||||
args,
|
||||
} => JupyterReplResponse::JsCallFunctionOnArgs(
|
||||
self
|
||||
.call_function_on_args(function_declaration, &args)
|
||||
.await,
|
||||
),
|
||||
JupyterReplRequest::JsCallFunctionOn { arg0, arg1 } => {
|
||||
JupyterReplResponse::JsCallFunctionOn(
|
||||
self.call_function_on(arg0, arg1).await,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let Ok(()) = self.tx.send(resp) else {
|
||||
break;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn lsp_completions(
|
||||
&mut self,
|
||||
line_text: &str,
|
||||
position: usize,
|
||||
) -> Vec<ReplCompletionItem> {
|
||||
self
|
||||
.repl_session
|
||||
.language_server
|
||||
.completions(line_text, position)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_properties(
|
||||
&mut self,
|
||||
object_id: String,
|
||||
) -> Option<cdp::GetPropertiesResponse> {
|
||||
let get_properties_response = self
|
||||
.repl_session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.getProperties",
|
||||
Some(cdp::GetPropertiesArgs {
|
||||
object_id,
|
||||
own_properties: None,
|
||||
accessor_properties_only: None,
|
||||
generate_preview: None,
|
||||
non_indexed_properties_only: Some(true),
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
serde_json::from_value(get_properties_response).ok()
|
||||
}
|
||||
|
||||
pub async fn evaluate(
|
||||
&mut self,
|
||||
expr: String,
|
||||
) -> Option<cdp::EvaluateResponse> {
|
||||
let evaluate_response: serde_json::Value = self
|
||||
.repl_session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.evaluate",
|
||||
Some(cdp::EvaluateArgs {
|
||||
expression: expr,
|
||||
object_group: None,
|
||||
include_command_line_api: None,
|
||||
silent: None,
|
||||
context_id: Some(self.repl_session.context_id),
|
||||
return_by_value: None,
|
||||
generate_preview: None,
|
||||
user_gesture: None,
|
||||
await_promise: None,
|
||||
throw_on_side_effect: Some(true),
|
||||
timeout: Some(200),
|
||||
disable_breaks: None,
|
||||
repl_mode: None,
|
||||
allow_unsafe_eval_blocked_by_csp: None,
|
||||
unique_context_id: None,
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
serde_json::from_value(evaluate_response).ok()
|
||||
}
|
||||
|
||||
pub async fn global_lexical_scope_names(
|
||||
&mut self,
|
||||
) -> cdp::GlobalLexicalScopeNamesResponse {
|
||||
let evaluate_response = self
|
||||
.repl_session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.globalLexicalScopeNames",
|
||||
Some(cdp::GlobalLexicalScopeNamesArgs {
|
||||
execution_context_id: Some(self.repl_session.context_id),
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
serde_json::from_value(evaluate_response).unwrap()
|
||||
}
|
||||
|
||||
pub async fn evaluate_line_with_object_wrapping(
|
||||
&mut self,
|
||||
line: &str,
|
||||
) -> Result<repl::TsEvaluateResponse, AnyError> {
|
||||
self
|
||||
.repl_session
|
||||
.evaluate_line_with_object_wrapping(line)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn call_function_on_args(
|
||||
&mut self,
|
||||
function_declaration: String,
|
||||
args: &[cdp::RemoteObject],
|
||||
) -> Result<cdp::CallFunctionOnResponse, AnyError> {
|
||||
self
|
||||
.repl_session
|
||||
.call_function_on_args(function_declaration, args)
|
||||
.await
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): rename to "broadcast_result"?
|
||||
pub async fn call_function_on(
|
||||
&mut self,
|
||||
arg0: cdp::CallArgument,
|
||||
arg1: cdp::CallArgument,
|
||||
) -> Option<cdp::CallFunctionOnResponse> {
|
||||
let response = self.repl_session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.callFunctionOn",
|
||||
Some(json!({
|
||||
"functionDeclaration": r#"async function (execution_count, result) {
|
||||
await Deno[Deno.internal].jupyter.broadcastResult(execution_count, result);
|
||||
}"#,
|
||||
"arguments": [arg0, arg1],
|
||||
"executionContextId": self.repl_session.context_id,
|
||||
"awaitPromise": true,
|
||||
})),
|
||||
)
|
||||
.await.ok()?;
|
||||
serde_json::from_value(response).ok()
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -3,92 +3,143 @@
|
|||
// This file is forked/ported from <https://github.com/evcxr/evcxr>
|
||||
// Copyright 2020 The Evcxr Authors. MIT license.
|
||||
|
||||
use std::cell::RefCell;
|
||||
// NOTE(bartlomieju): unfortunately it appears that clippy is broken
|
||||
// and can't allow a single line ignore for `await_holding_lock`.
|
||||
#![allow(clippy::await_holding_lock)]
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::cdp;
|
||||
use crate::tools::repl;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::CancelFuture;
|
||||
use deno_core::CancelHandle;
|
||||
use jupyter_runtime::ExecutionCount;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::sync::oneshot;
|
||||
|
||||
use jupyter_runtime::messaging;
|
||||
use jupyter_runtime::AsChildOf;
|
||||
use jupyter_runtime::ConnectionInfo;
|
||||
use jupyter_runtime::JupyterMessage;
|
||||
use jupyter_runtime::JupyterMessageContent;
|
||||
use jupyter_runtime::KernelControlConnection;
|
||||
use jupyter_runtime::KernelHeartbeatConnection;
|
||||
use jupyter_runtime::KernelIoPubConnection;
|
||||
use jupyter_runtime::KernelShellConnection;
|
||||
use jupyter_runtime::ReplyError;
|
||||
use jupyter_runtime::ReplyStatus;
|
||||
use jupyter_runtime::StreamContent;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::JupyterReplProxy;
|
||||
|
||||
pub struct JupyterServer {
|
||||
execution_count: usize,
|
||||
last_execution_request: Rc<RefCell<Option<JupyterMessage>>>,
|
||||
// This is Arc<Mutex<>>, so we don't hold RefCell borrows across await
|
||||
// points.
|
||||
execution_count: ExecutionCount,
|
||||
last_execution_request: Arc<Mutex<Option<JupyterMessage>>>,
|
||||
iopub_connection: Arc<Mutex<KernelIoPubConnection>>,
|
||||
repl_session: repl::ReplSession,
|
||||
repl_session_proxy: JupyterReplProxy,
|
||||
}
|
||||
|
||||
pub struct StdinConnectionProxy {
|
||||
pub tx: mpsc::UnboundedSender<JupyterMessage>,
|
||||
pub rx: mpsc::UnboundedReceiver<JupyterMessage>,
|
||||
}
|
||||
|
||||
pub struct StartupData {
|
||||
pub iopub_connection: Arc<Mutex<KernelIoPubConnection>>,
|
||||
pub stdin_connection_proxy: Arc<Mutex<StdinConnectionProxy>>,
|
||||
pub last_execution_request: Arc<Mutex<Option<JupyterMessage>>>,
|
||||
}
|
||||
|
||||
impl JupyterServer {
|
||||
pub async fn start(
|
||||
connection_info: ConnectionInfo,
|
||||
mut stdio_rx: mpsc::UnboundedReceiver<StreamContent>,
|
||||
mut repl_session: repl::ReplSession,
|
||||
repl_session_proxy: JupyterReplProxy,
|
||||
setup_tx: oneshot::Sender<StartupData>,
|
||||
) -> Result<(), AnyError> {
|
||||
let session_id = Uuid::new_v4().to_string();
|
||||
|
||||
let mut heartbeat =
|
||||
connection_info.create_kernel_heartbeat_connection().await?;
|
||||
let shell_connection =
|
||||
connection_info.create_kernel_shell_connection().await?;
|
||||
let control_connection =
|
||||
connection_info.create_kernel_control_connection().await?;
|
||||
let _stdin_connection =
|
||||
connection_info.create_kernel_stdin_connection().await?;
|
||||
let iopub_connection =
|
||||
connection_info.create_kernel_iopub_connection().await?;
|
||||
let shell_connection = connection_info
|
||||
.create_kernel_shell_connection(&session_id)
|
||||
.await?;
|
||||
let control_connection = connection_info
|
||||
.create_kernel_control_connection(&session_id)
|
||||
.await?;
|
||||
let mut stdin_connection = connection_info
|
||||
.create_kernel_stdin_connection(&session_id)
|
||||
.await?;
|
||||
let iopub_connection = connection_info
|
||||
.create_kernel_iopub_connection(&session_id)
|
||||
.await?;
|
||||
|
||||
let iopub_connection = Arc::new(Mutex::new(iopub_connection));
|
||||
let last_execution_request = Rc::new(RefCell::new(None));
|
||||
let last_execution_request = Arc::new(Mutex::new(None));
|
||||
|
||||
// Store `iopub_connection` in the op state so it's accessible to the runtime API.
|
||||
{
|
||||
let op_state_rc = repl_session.worker.js_runtime.op_state();
|
||||
let mut op_state = op_state_rc.borrow_mut();
|
||||
op_state.put(iopub_connection.clone());
|
||||
op_state.put(last_execution_request.clone());
|
||||
}
|
||||
let (stdin_tx1, mut stdin_rx1) =
|
||||
mpsc::unbounded_channel::<JupyterMessage>();
|
||||
let (stdin_tx2, stdin_rx2) = mpsc::unbounded_channel::<JupyterMessage>();
|
||||
|
||||
let stdin_connection_proxy = Arc::new(Mutex::new(StdinConnectionProxy {
|
||||
tx: stdin_tx1,
|
||||
rx: stdin_rx2,
|
||||
}));
|
||||
|
||||
let Ok(()) = setup_tx.send(StartupData {
|
||||
iopub_connection: iopub_connection.clone(),
|
||||
last_execution_request: last_execution_request.clone(),
|
||||
stdin_connection_proxy,
|
||||
}) else {
|
||||
bail!("Failed to send startup data");
|
||||
};
|
||||
|
||||
let cancel_handle = CancelHandle::new_rc();
|
||||
|
||||
let mut server = Self {
|
||||
execution_count: 0,
|
||||
execution_count: ExecutionCount::new(0),
|
||||
iopub_connection: iopub_connection.clone(),
|
||||
last_execution_request: last_execution_request.clone(),
|
||||
repl_session,
|
||||
repl_session_proxy,
|
||||
};
|
||||
|
||||
let handle1 = deno_core::unsync::spawn(async move {
|
||||
if let Err(err) = Self::handle_heartbeat(&mut heartbeat).await {
|
||||
log::error!(
|
||||
"Heartbeat error: {}\nBacktrace:\n{}",
|
||||
err,
|
||||
err.backtrace()
|
||||
);
|
||||
let stdin_fut = deno_core::unsync::spawn(async move {
|
||||
loop {
|
||||
let Some(msg) = stdin_rx1.recv().await else {
|
||||
return;
|
||||
};
|
||||
let Ok(()) = stdin_connection.send(msg).await else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Ok(msg) = stdin_connection.read().await else {
|
||||
return;
|
||||
};
|
||||
let Ok(()) = stdin_tx2.send(msg) else {
|
||||
return;
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
let handle2 = deno_core::unsync::spawn({
|
||||
let heartbeat_fut = deno_core::unsync::spawn(async move {
|
||||
loop {
|
||||
if let Err(err) = heartbeat.single_heartbeat().await {
|
||||
log::error!(
|
||||
"Heartbeat error: {}\nBacktrace:\n{}",
|
||||
err,
|
||||
err.backtrace()
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let control_fut = deno_core::unsync::spawn({
|
||||
let cancel_handle = cancel_handle.clone();
|
||||
async move {
|
||||
if let Err(err) =
|
||||
|
@@ -103,13 +154,13 @@ impl JupyterServer {
|
|||
}
|
||||
});
|
||||
|
||||
let handle3 = deno_core::unsync::spawn(async move {
|
||||
let shell_fut = deno_core::unsync::spawn(async move {
|
||||
if let Err(err) = server.handle_shell(shell_connection).await {
|
||||
log::error!("Shell error: {}\nBacktrace:\n{}", err, err.backtrace());
|
||||
}
|
||||
});
|
||||
|
||||
let handle4 = deno_core::unsync::spawn(async move {
|
||||
let stdio_fut = deno_core::unsync::spawn(async move {
|
||||
while let Some(stdio_msg) = stdio_rx.recv().await {
|
||||
Self::handle_stdio_msg(
|
||||
iopub_connection.clone(),
|
||||
|
@@ -120,8 +171,16 @@ impl JupyterServer {
|
|||
}
|
||||
});
|
||||
|
||||
let join_fut =
|
||||
futures::future::try_join_all(vec![handle1, handle2, handle3, handle4]);
|
||||
let repl_session_fut = deno_core::unsync::spawn(async move {});
|
||||
|
||||
let join_fut = futures::future::try_join_all(vec![
|
||||
heartbeat_fut,
|
||||
control_fut,
|
||||
shell_fut,
|
||||
stdio_fut,
|
||||
repl_session_fut,
|
||||
stdin_fut,
|
||||
]);
|
||||
|
||||
if let Ok(result) = join_fut.or_cancel(cancel_handle).await {
|
||||
result?;
|
||||
|
@@ -132,26 +191,21 @@ impl JupyterServer {
|
|||
|
||||
async fn handle_stdio_msg(
|
||||
iopub_connection: Arc<Mutex<KernelIoPubConnection>>,
|
||||
last_execution_request: Rc<RefCell<Option<JupyterMessage>>>,
|
||||
last_execution_request: Arc<Mutex<Option<JupyterMessage>>>,
|
||||
stdio_msg: StreamContent,
|
||||
) {
|
||||
let maybe_exec_result = last_execution_request.borrow().clone();
|
||||
if let Some(exec_request) = maybe_exec_result {
|
||||
let result = (iopub_connection.lock().await)
|
||||
.send(stdio_msg.as_child_of(&exec_request))
|
||||
.await;
|
||||
let maybe_exec_result = last_execution_request.lock().clone();
|
||||
let Some(exec_request) = maybe_exec_result else {
|
||||
return;
|
||||
};
|
||||
|
||||
if let Err(err) = result {
|
||||
log::error!("Output error: {}", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
let result = iopub_connection
|
||||
.lock()
|
||||
.send(stdio_msg.as_child_of(&exec_request))
|
||||
.await;
|
||||
|
||||
async fn handle_heartbeat(
|
||||
connection: &mut KernelHeartbeatConnection,
|
||||
) -> Result<(), AnyError> {
|
||||
loop {
|
||||
connection.single_heartbeat().await?;
|
||||
if let Err(err) = result {
|
||||
log::error!("Output error: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -222,9 +276,8 @@ impl JupyterServer {
|
|||
let cursor_pos = req.cursor_pos;
|
||||
|
||||
let lsp_completions = self
|
||||
.repl_session
|
||||
.language_server
|
||||
.completions(&user_code, cursor_pos)
|
||||
.repl_session_proxy
|
||||
.lsp_completions(user_code.clone(), cursor_pos)
|
||||
.await;
|
||||
|
||||
if !lsp_completions.is_empty() {
|
||||
|
@@ -263,27 +316,32 @@ impl JupyterServer {
|
|||
{
|
||||
let sub_expr = &expr[..index];
|
||||
let prop_name = &expr[index + 1..];
|
||||
let candidates =
|
||||
get_expression_property_names(&mut self.repl_session, sub_expr)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter(|n| {
|
||||
!n.starts_with("Symbol(")
|
||||
&& n.starts_with(prop_name)
|
||||
&& n != &*repl::REPL_INTERNALS_NAME
|
||||
})
|
||||
.collect();
|
||||
let candidates = get_expression_property_names(
|
||||
&mut self.repl_session_proxy,
|
||||
sub_expr,
|
||||
)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter(|n| {
|
||||
!n.starts_with("Symbol(")
|
||||
&& n.starts_with(prop_name)
|
||||
&& n != &*repl::REPL_INTERNALS_NAME
|
||||
})
|
||||
.collect();
|
||||
|
||||
(candidates, cursor_pos - prop_name.len())
|
||||
} else {
|
||||
// combine results of declarations and globalThis properties
|
||||
let mut candidates = get_expression_property_names(
|
||||
&mut self.repl_session,
|
||||
&mut self.repl_session_proxy,
|
||||
"globalThis",
|
||||
)
|
||||
.await
|
||||
.into_iter()
|
||||
.chain(get_global_lexical_scope_names(&mut self.repl_session).await)
|
||||
.chain(
|
||||
get_global_lexical_scope_names(&mut self.repl_session_proxy)
|
||||
.await,
|
||||
)
|
||||
.filter(|n| n.starts_with(expr) && n != &*repl::REPL_INTERNALS_NAME)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
|
@@ -417,9 +475,9 @@ impl JupyterServer {
|
|||
connection: &mut KernelShellConnection,
|
||||
) -> Result<(), AnyError> {
|
||||
if !execute_request.silent && execute_request.store_history {
|
||||
self.execution_count += 1;
|
||||
self.execution_count.increment();
|
||||
}
|
||||
*self.last_execution_request.borrow_mut() = Some(parent_message.clone());
|
||||
*self.last_execution_request.lock() = Some(parent_message.clone());
|
||||
|
||||
self
|
||||
.send_iopub(
|
||||
|
@@ -432,8 +490,8 @@ impl JupyterServer {
|
|||
.await?;
|
||||
|
||||
let result = self
|
||||
.repl_session
|
||||
.evaluate_line_with_object_wrapping(&execute_request.code)
|
||||
.repl_session_proxy
|
||||
.evaluate_line_with_object_wrapping(execute_request.code)
|
||||
.await;
|
||||
|
||||
let evaluate_response = match result {
|
||||
|
@@ -471,8 +529,12 @@ impl JupyterServer {
|
|||
} = evaluate_response.value;
|
||||
|
||||
if exception_details.is_none() {
|
||||
publish_result(&mut self.repl_session, &result, self.execution_count)
|
||||
.await?;
|
||||
publish_result(
|
||||
&mut self.repl_session_proxy,
|
||||
&result,
|
||||
self.execution_count,
|
||||
)
|
||||
.await?;
|
||||
|
||||
connection
|
||||
.send(
|
||||
|
@@ -497,7 +559,7 @@ impl JupyterServer {
|
|||
exception_details.exception
|
||||
{
|
||||
let result = self
|
||||
.repl_session
|
||||
.repl_session_proxy
|
||||
.call_function_on_args(
|
||||
r#"
|
||||
function(object) {
|
||||
|
@@ -513,7 +575,7 @@ impl JupyterServer {
|
|||
}
|
||||
"#
|
||||
.into(),
|
||||
&[exception],
|
||||
vec![exception],
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
@@ -579,11 +641,11 @@ impl JupyterServer {
|
|||
messaging::ExecuteReply {
|
||||
execution_count: self.execution_count,
|
||||
status: ReplyStatus::Error,
|
||||
error: Some(ReplyError {
|
||||
error: Some(Box::new(ReplyError {
|
||||
ename,
|
||||
evalue,
|
||||
traceback,
|
||||
}),
|
||||
})),
|
||||
user_expressions: None,
|
||||
payload: Default::default(),
|
||||
}
|
||||
|
@@ -599,7 +661,7 @@ impl JupyterServer {
|
|||
&mut self,
|
||||
message: JupyterMessage,
|
||||
) -> Result<(), AnyError> {
|
||||
self.iopub_connection.lock().await.send(message).await
|
||||
self.iopub_connection.lock().send(message.clone()).await
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -629,33 +691,22 @@ fn kernel_info() -> messaging::KernelInfoReply {
|
|||
}
|
||||
|
||||
async fn publish_result(
|
||||
session: &mut repl::ReplSession,
|
||||
repl_session_proxy: &mut JupyterReplProxy,
|
||||
evaluate_result: &cdp::RemoteObject,
|
||||
execution_count: usize,
|
||||
execution_count: ExecutionCount,
|
||||
) -> Result<Option<HashMap<String, serde_json::Value>>, AnyError> {
|
||||
let arg0 = cdp::CallArgument {
|
||||
value: Some(serde_json::Value::Number(execution_count.into())),
|
||||
value: Some(execution_count.into()),
|
||||
unserializable_value: None,
|
||||
object_id: None,
|
||||
};
|
||||
|
||||
let arg1 = cdp::CallArgument::from(evaluate_result);
|
||||
|
||||
let response = session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.callFunctionOn",
|
||||
Some(json!({
|
||||
"functionDeclaration": r#"async function (execution_count, result) {
|
||||
await Deno[Deno.internal].jupyter.broadcastResult(execution_count, result);
|
||||
}"#,
|
||||
"arguments": [arg0, arg1],
|
||||
"executionContextId": session.context_id,
|
||||
"awaitPromise": true,
|
||||
})),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let response: cdp::CallFunctionOnResponse = serde_json::from_value(response)?;
|
||||
let Some(response) = repl_session_proxy.call_function_on(arg0, arg1).await
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
if let Some(exception_details) = &response.exception_details {
|
||||
// If the object doesn't have a Jupyter.display method or it throws an
|
||||
|
@@ -693,34 +744,25 @@ fn is_word_boundary(c: char) -> bool {
|
|||
|
||||
// TODO(bartlomieju): dedup with repl::editor
|
||||
async fn get_global_lexical_scope_names(
|
||||
session: &mut repl::ReplSession,
|
||||
repl_session_proxy: &mut JupyterReplProxy,
|
||||
) -> Vec<String> {
|
||||
let evaluate_response = session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.globalLexicalScopeNames",
|
||||
Some(cdp::GlobalLexicalScopeNamesArgs {
|
||||
execution_context_id: Some(session.context_id),
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let evaluate_response: cdp::GlobalLexicalScopeNamesResponse =
|
||||
serde_json::from_value(evaluate_response).unwrap();
|
||||
evaluate_response.names
|
||||
repl_session_proxy.global_lexical_scope_names().await.names
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): dedup with repl::editor
|
||||
async fn get_expression_property_names(
|
||||
session: &mut repl::ReplSession,
|
||||
repl_session_proxy: &mut JupyterReplProxy,
|
||||
expr: &str,
|
||||
) -> Vec<String> {
|
||||
// try to get the properties from the expression
|
||||
if let Some(properties) = get_object_expr_properties(session, expr).await {
|
||||
if let Some(properties) =
|
||||
get_object_expr_properties(repl_session_proxy, expr).await
|
||||
{
|
||||
return properties;
|
||||
}
|
||||
|
||||
// otherwise fall back to the prototype
|
||||
let expr_type = get_expression_type(session, expr).await;
|
||||
let expr_type = get_expression_type(repl_session_proxy, expr).await;
|
||||
let object_expr = match expr_type.as_deref() {
|
||||
// possibilities: https://chromedevtools.github.io/devtools-protocol/v8/Runtime/#type-RemoteObject
|
||||
Some("object") => "Object.prototype",
|
||||
|
@@ -732,44 +774,32 @@ async fn get_expression_property_names(
|
|||
_ => return Vec::new(), // undefined, symbol, and unhandled
|
||||
};
|
||||
|
||||
get_object_expr_properties(session, object_expr)
|
||||
get_object_expr_properties(repl_session_proxy, object_expr)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): dedup with repl::editor
|
||||
async fn get_expression_type(
|
||||
session: &mut repl::ReplSession,
|
||||
repl_session_proxy: &mut JupyterReplProxy,
|
||||
expr: &str,
|
||||
) -> Option<String> {
|
||||
evaluate_expression(session, expr)
|
||||
evaluate_expression(repl_session_proxy, expr)
|
||||
.await
|
||||
.map(|res| res.result.kind)
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): dedup with repl::editor
|
||||
async fn get_object_expr_properties(
|
||||
session: &mut repl::ReplSession,
|
||||
repl_session_proxy: &mut JupyterReplProxy,
|
||||
object_expr: &str,
|
||||
) -> Option<Vec<String>> {
|
||||
let evaluate_result = evaluate_expression(session, object_expr).await?;
|
||||
let evaluate_result =
|
||||
evaluate_expression(repl_session_proxy, object_expr).await?;
|
||||
let object_id = evaluate_result.result.object_id?;
|
||||
|
||||
let get_properties_response = session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.getProperties",
|
||||
Some(cdp::GetPropertiesArgs {
|
||||
object_id,
|
||||
own_properties: None,
|
||||
accessor_properties_only: None,
|
||||
generate_preview: None,
|
||||
non_indexed_properties_only: Some(true),
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
let get_properties_response: cdp::GetPropertiesResponse =
|
||||
serde_json::from_value(get_properties_response).ok()?;
|
||||
let get_properties_response =
|
||||
repl_session_proxy.get_properties(object_id.clone()).await?;
|
||||
Some(
|
||||
get_properties_response
|
||||
.result
|
||||
|
@@ -781,35 +811,10 @@ async fn get_object_expr_properties(
|
|||
|
||||
// TODO(bartlomieju): dedup with repl::editor
|
||||
async fn evaluate_expression(
|
||||
session: &mut repl::ReplSession,
|
||||
repl_session_proxy: &mut JupyterReplProxy,
|
||||
expr: &str,
|
||||
) -> Option<cdp::EvaluateResponse> {
|
||||
let evaluate_response = session
|
||||
.post_message_with_event_loop(
|
||||
"Runtime.evaluate",
|
||||
Some(cdp::EvaluateArgs {
|
||||
expression: expr.to_string(),
|
||||
object_group: None,
|
||||
include_command_line_api: None,
|
||||
silent: None,
|
||||
context_id: Some(session.context_id),
|
||||
return_by_value: None,
|
||||
generate_preview: None,
|
||||
user_gesture: None,
|
||||
await_promise: None,
|
||||
throw_on_side_effect: Some(true),
|
||||
timeout: Some(200),
|
||||
disable_breaks: None,
|
||||
repl_mode: None,
|
||||
allow_unsafe_eval_blocked_by_csp: None,
|
||||
unique_context_id: None,
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
let evaluate_response: cdp::EvaluateResponse =
|
||||
serde_json::from_value(evaluate_response).ok()?;
|
||||
|
||||
let evaluate_response = repl_session_proxy.evaluate(expr.to_string()).await?;
|
||||
if evaluate_response.exception_details.is_some() {
|
||||
None
|
||||
} else {
|
||||
|
|
cli/tools/lint/linter.rs (new file, 242 lines)

@@ -0,0 +1,242 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_ast::ParsedSource;
|
||||
use deno_ast::SourceTextInfo;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_lint::diagnostic::LintDiagnostic;
|
||||
use deno_lint::linter::LintConfig as DenoLintConfig;
|
||||
use deno_lint::linter::LintFileOptions;
|
||||
use deno_lint::linter::Linter as DenoLintLinter;
|
||||
use deno_lint::linter::LinterOptions;
|
||||
|
||||
use crate::util::fs::atomic_write_file_with_retries;
|
||||
use crate::util::fs::specifier_from_file_path;
|
||||
|
||||
use super::rules::FileOrPackageLintRule;
|
||||
use super::rules::PackageLintRule;
|
||||
use super::ConfiguredRules;
|
||||
|
||||
pub struct CliLinterOptions {
|
||||
pub configured_rules: ConfiguredRules,
|
||||
pub fix: bool,
|
||||
pub deno_lint_config: DenoLintConfig,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CliLinter {
|
||||
fix: bool,
|
||||
package_rules: Vec<Box<dyn PackageLintRule>>,
|
||||
linter: DenoLintLinter,
|
||||
deno_lint_config: DenoLintConfig,
|
||||
}
|
||||
|
||||
impl CliLinter {
|
||||
pub fn new(options: CliLinterOptions) -> Self {
|
||||
let rules = options.configured_rules.rules;
|
||||
let mut deno_lint_rules = Vec::with_capacity(rules.len());
|
||||
let mut package_rules = Vec::with_capacity(rules.len());
|
||||
for rule in rules {
|
||||
match rule.into_file_or_pkg_rule() {
|
||||
FileOrPackageLintRule::File(rule) => {
|
||||
deno_lint_rules.push(rule);
|
||||
}
|
||||
FileOrPackageLintRule::Package(rule) => {
|
||||
package_rules.push(rule);
|
||||
}
|
||||
}
|
||||
}
|
||||
Self {
|
||||
fix: options.fix,
|
||||
package_rules,
|
||||
linter: DenoLintLinter::new(LinterOptions {
|
||||
rules: deno_lint_rules,
|
||||
all_rule_codes: options.configured_rules.all_rule_codes,
|
||||
custom_ignore_file_directive: None,
|
||||
custom_ignore_diagnostic_directive: None,
|
||||
}),
|
||||
deno_lint_config: options.deno_lint_config,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_package_rules(&self) -> bool {
|
||||
!self.package_rules.is_empty()
|
||||
}
|
||||
|
||||
pub fn lint_package(
|
||||
&self,
|
||||
graph: &ModuleGraph,
|
||||
entrypoints: &[ModuleSpecifier],
|
||||
) -> Vec<LintDiagnostic> {
|
||||
let mut diagnostics = Vec::new();
|
||||
for rule in &self.package_rules {
|
||||
diagnostics.extend(rule.lint_package(graph, entrypoints));
|
||||
}
|
||||
diagnostics
|
||||
}
|
||||
|
||||
pub fn lint_with_ast(
|
||||
&self,
|
||||
parsed_source: &ParsedSource,
|
||||
) -> Vec<LintDiagnostic> {
|
||||
self
|
||||
.linter
|
||||
.lint_with_ast(parsed_source, self.deno_lint_config.clone())
|
||||
}
|
||||
|
||||
pub fn lint_file(
|
||||
&self,
|
||||
file_path: &Path,
|
||||
source_code: String,
|
||||
) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
|
||||
let specifier = specifier_from_file_path(file_path)?;
|
||||
let media_type = MediaType::from_specifier(&specifier);
|
||||
|
||||
if self.fix {
|
||||
self.lint_file_and_fix(&specifier, media_type, source_code, file_path)
|
||||
} else {
|
||||
self
|
||||
.linter
|
||||
.lint_file(LintFileOptions {
|
||||
specifier,
|
||||
media_type,
|
||||
source_code,
|
||||
config: self.deno_lint_config.clone(),
|
||||
})
|
||||
.map_err(AnyError::from)
|
||||
}
|
||||
}
|
||||
|
||||
fn lint_file_and_fix(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
source_code: String,
|
||||
file_path: &Path,
|
||||
) -> Result<(ParsedSource, Vec<LintDiagnostic>), deno_core::anyhow::Error> {
|
||||
// initial lint
|
||||
let (source, diagnostics) = self.linter.lint_file(LintFileOptions {
|
||||
specifier: specifier.clone(),
|
||||
media_type,
|
||||
source_code,
|
||||
config: self.deno_lint_config.clone(),
|
||||
})?;
|
||||
|
||||
// Try applying fixes repeatedly until the file has none left or
|
||||
// a maximum number of iterations is reached. This is necessary
|
||||
// because lint fixes may overlap and so we can't always apply
|
||||
// them in one pass.
|
||||
let mut source = source;
|
||||
let mut diagnostics = diagnostics;
|
||||
let mut fix_iterations = 0;
|
||||
loop {
|
||||
let change = apply_lint_fixes_and_relint(
|
||||
specifier,
|
||||
media_type,
|
||||
&self.linter,
|
||||
self.deno_lint_config.clone(),
|
||||
source.text_info_lazy(),
|
||||
&diagnostics,
|
||||
)?;
|
||||
match change {
|
||||
Some(change) => {
|
||||
source = change.0;
|
||||
diagnostics = change.1;
|
||||
}
|
||||
None => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
fix_iterations += 1;
|
||||
if fix_iterations > 5 {
|
||||
log::warn!(
|
||||
concat!(
|
||||
"Reached maximum number of fix iterations for '{}'. There's ",
|
||||
"probably a bug in Deno. Please fix this file manually.",
|
||||
),
|
||||
specifier,
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if fix_iterations > 0 {
|
||||
// everything looks good and the file still parses, so write it out
|
||||
atomic_write_file_with_retries(
|
||||
file_path,
|
||||
source.text().as_ref(),
|
||||
crate::cache::CACHE_PERM,
|
||||
)
|
||||
.context("Failed writing fix to file.")?;
|
||||
}
|
||||
|
||||
Ok((source, diagnostics))
|
||||
}
|
||||
}
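The loop above is a bounded fixed-point iteration: apply whatever fixes exist, re-lint, and stop when a pass changes nothing or after five rounds. The same shape, extracted as a self-contained sketch where `step` returns `Some(next_state)` only while it can still improve the input:

// Bounded fixed-point loop, same shape as lint_file_and_fix above.
fn run_to_fixed_point<T>(
    mut state: T,
    max_iterations: u32,
    step: impl Fn(&T) -> Option<T>,
) -> T {
    let mut iterations = 0;
    loop {
        match step(&state) {
            Some(next) => state = next,
            None => break, // nothing left to fix
        }
        iterations += 1;
        if iterations > max_iterations {
            eprintln!("reached maximum number of fix iterations");
            break;
        }
    }
    state
}

fn main() {
    // Toy "fix": collapse runs of double spaces one pass at a time.
    let fixed = run_to_fixed_point("a    b".to_string(), 5, |s| {
        s.contains("  ").then(|| s.replace("  ", " "))
    });
    assert_eq!(fixed, "a b");
}
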
|
||||
|
||||
fn apply_lint_fixes_and_relint(
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
linter: &DenoLintLinter,
|
||||
config: DenoLintConfig,
|
||||
text_info: &SourceTextInfo,
|
||||
diagnostics: &[LintDiagnostic],
|
||||
) -> Result<Option<(ParsedSource, Vec<LintDiagnostic>)>, AnyError> {
|
||||
let Some(new_text) = apply_lint_fixes(text_info, diagnostics) else {
|
||||
return Ok(None);
|
||||
};
|
||||
linter
|
||||
.lint_file(LintFileOptions {
|
||||
specifier: specifier.clone(),
|
||||
source_code: new_text,
|
||||
media_type,
|
||||
config,
|
||||
})
|
||||
.map(Some)
|
||||
.context(
|
||||
"An applied lint fix caused a syntax error. Please report this bug.",
|
||||
)
|
||||
}
|
||||
|
||||
fn apply_lint_fixes(
|
||||
text_info: &SourceTextInfo,
|
||||
diagnostics: &[LintDiagnostic],
|
||||
) -> Option<String> {
|
||||
if diagnostics.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let file_start = text_info.range().start;
|
||||
let mut quick_fixes = diagnostics
|
||||
.iter()
|
||||
// use the first quick fix
|
||||
.filter_map(|d| d.details.fixes.first())
|
||||
.flat_map(|fix| fix.changes.iter())
|
||||
.map(|change| deno_ast::TextChange {
|
||||
range: change.range.as_byte_range(file_start),
|
||||
new_text: change.new_text.to_string(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
if quick_fixes.is_empty() {
|
||||
return None;
|
||||
}
|
||||
// remove any overlapping text changes, we'll circle
|
||||
// back for another pass to fix the remaining
|
||||
quick_fixes.sort_by_key(|change| change.range.start);
|
||||
for i in (1..quick_fixes.len()).rev() {
|
||||
let cur = &quick_fixes[i];
|
||||
let previous = &quick_fixes[i - 1];
|
||||
let is_overlapping = cur.range.start < previous.range.end;
|
||||
if is_overlapping {
|
||||
quick_fixes.remove(i);
|
||||
}
|
||||
}
|
||||
let new_text =
|
||||
deno_ast::apply_text_changes(text_info.text_str(), quick_fixes);
|
||||
Some(new_text)
|
||||
}
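The overlap pruning above, sorting changes by start position and then scanning backwards to drop anything that begins before its predecessor ends, applies to any set of ranged edits; the dropped fixes simply get another chance on the next lint pass. A standalone sketch:

// Same overlap-pruning pass as apply_lint_fixes above.
#[derive(Debug)]
struct TextChange {
    range: std::ops::Range<usize>,
    new_text: String,
}

fn prune_overlaps(mut changes: Vec<TextChange>) -> Vec<TextChange> {
    changes.sort_by_key(|c| c.range.start);
    // Walk backwards so removal doesn't disturb earlier indices.
    for i in (1..changes.len()).rev() {
        let is_overlapping = changes[i].range.start < changes[i - 1].range.end;
        if is_overlapping {
            changes.remove(i);
        }
    }
    changes
}

fn main() {
    let kept = prune_overlaps(vec![
        TextChange { range: 0..4, new_text: "x".into() },
        TextChange { range: 2..6, new_text: "y".into() }, // overlaps, dropped
        TextChange { range: 6..8, new_text: "z".into() },
    ]);
    assert_eq!(kept.len(), 2);
}
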
|
(File diff suppressed because it is too large.)

@@ -1,38 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_ast::diagnostics::Diagnostic;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_graph::FastCheckDiagnostic;
|
||||
use deno_graph::ModuleGraph;
|
||||
|
||||
/// Collects diagnostics from the module graph for the
|
||||
/// given package's export URLs.
|
||||
pub fn collect_no_slow_type_diagnostics(
|
||||
package_export_urls: &[ModuleSpecifier],
|
||||
graph: &ModuleGraph,
|
||||
) -> Vec<FastCheckDiagnostic> {
|
||||
let mut js_exports = package_export_urls
|
||||
.iter()
|
||||
.filter_map(|url| graph.get(url).and_then(|m| m.js()));
|
||||
// fast check puts the same diagnostics in each entrypoint for the
|
||||
// package (since it's all or nothing), so we only need to check
|
||||
// the first JS entrypoint
|
||||
let Some(module) = js_exports.next() else {
|
||||
// could happen if all the exports are JSON
|
||||
return vec![];
|
||||
};
|
||||
|
||||
if let Some(diagnostics) = module.fast_check_diagnostics() {
|
||||
let mut diagnostics = diagnostics.clone();
|
||||
diagnostics.sort_by_cached_key(|d| {
|
||||
(
|
||||
d.specifier().clone(),
|
||||
d.range().map(|r| r.range),
|
||||
d.code().to_string(),
|
||||
)
|
||||
});
|
||||
diagnostics
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
cli/tools/lint/reporters.rs (new file, 252 lines)

@@ -0,0 +1,252 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_ast::diagnostics::Diagnostic;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_lint::diagnostic::LintDiagnostic;
|
||||
use deno_runtime::colors;
|
||||
use log::info;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::args::LintReporterKind;
|
||||
|
||||
use super::LintError;
|
||||
|
||||
pub fn create_reporter(kind: LintReporterKind) -> Box<dyn LintReporter + Send> {
|
||||
match kind {
|
||||
LintReporterKind::Pretty => Box::new(PrettyLintReporter::new()),
|
||||
LintReporterKind::Json => Box::new(JsonLintReporter::new()),
|
||||
LintReporterKind::Compact => Box::new(CompactLintReporter::new()),
|
||||
}
|
||||
}
|
||||
|
||||
pub trait LintReporter {
|
||||
fn visit_diagnostic(&mut self, d: &LintDiagnostic);
|
||||
fn visit_error(&mut self, file_path: &str, err: &AnyError);
|
||||
fn close(&mut self, check_count: usize);
|
||||
}
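Because output goes through the `LintReporter` trait above, a new format is one more impl plus a `create_reporter` arm. A hypothetical minimal reporter against that trait (not a reporter that exists in Deno; assumes the file's imports):

// Hypothetical reporter: counts diagnostics, prints one summary on close.
struct CountingReporter {
    lint_count: u32,
}

impl LintReporter for CountingReporter {
    fn visit_diagnostic(&mut self, _d: &LintDiagnostic) {
        self.lint_count += 1;
    }

    fn visit_error(&mut self, file_path: &str, err: &AnyError) {
        log::error!("Error linting: {file_path}: {err}");
    }

    fn close(&mut self, check_count: usize) {
        info!(
            "{} problem(s) across {} file(s)",
            self.lint_count,
            check_count
        );
    }
}
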
|
||||
|
||||
struct PrettyLintReporter {
|
||||
lint_count: u32,
|
||||
fixable_diagnostics: u32,
|
||||
}
|
||||
|
||||
impl PrettyLintReporter {
|
||||
fn new() -> PrettyLintReporter {
|
||||
PrettyLintReporter {
|
||||
lint_count: 0,
|
||||
fixable_diagnostics: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LintReporter for PrettyLintReporter {
|
||||
fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
|
||||
self.lint_count += 1;
|
||||
if !d.details.fixes.is_empty() {
|
||||
self.fixable_diagnostics += 1;
|
||||
}
|
||||
|
||||
log::error!("{}\n", d.display());
|
||||
}
|
||||
|
||||
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
|
||||
log::error!("Error linting: {file_path}");
|
||||
log::error!(" {err}");
|
||||
}
|
||||
|
||||
fn close(&mut self, check_count: usize) {
|
||||
let fixable_suffix = if self.fixable_diagnostics > 0 {
|
||||
colors::gray(format!(" ({} fixable via --fix)", self.fixable_diagnostics))
|
||||
.to_string()
|
||||
} else {
|
||||
"".to_string()
|
||||
};
|
||||
match self.lint_count {
|
||||
1 => info!("Found 1 problem{}", fixable_suffix),
|
||||
n if n > 1 => {
|
||||
info!("Found {} problems{}", self.lint_count, fixable_suffix)
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
match check_count {
|
||||
1 => info!("Checked 1 file"),
|
||||
n => info!("Checked {} files", n),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct CompactLintReporter {
|
||||
lint_count: u32,
|
||||
}
|
||||
|
||||
impl CompactLintReporter {
|
||||
fn new() -> CompactLintReporter {
|
||||
CompactLintReporter { lint_count: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
impl LintReporter for CompactLintReporter {
|
||||
fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
|
||||
self.lint_count += 1;
|
||||
|
||||
match &d.range {
|
||||
Some(range) => {
|
||||
let text_info = &range.text_info;
|
||||
let range = &range.range;
|
||||
let line_and_column = text_info.line_and_column_display(range.start);
|
||||
log::error!(
|
||||
"{}: line {}, col {} - {} ({})",
|
||||
d.specifier,
|
||||
line_and_column.line_number,
|
||||
line_and_column.column_number,
|
||||
d.message(),
|
||||
d.code(),
|
||||
)
|
||||
}
|
||||
None => {
|
||||
log::error!("{}: {} ({})", d.specifier, d.message(), d.code())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
|
||||
log::error!("Error linting: {file_path}");
|
||||
log::error!(" {err}");
|
||||
}
|
||||
|
||||
fn close(&mut self, check_count: usize) {
|
||||
match self.lint_count {
|
||||
1 => info!("Found 1 problem"),
|
||||
n if n > 1 => info!("Found {} problems", self.lint_count),
|
||||
_ => (),
|
||||
}
|
||||
|
||||
match check_count {
|
||||
1 => info!("Checked 1 file"),
|
||||
n => info!("Checked {} files", n),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WARNING: Ensure doesn't change because it's used in the JSON output
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct JsonDiagnosticLintPosition {
|
||||
/// The 1-indexed line number.
|
||||
pub line: usize,
|
||||
/// The 0-indexed column index.
|
||||
pub col: usize,
|
||||
pub byte_pos: usize,
|
||||
}
|
||||
|
||||
impl JsonDiagnosticLintPosition {
|
||||
pub fn new(byte_index: usize, loc: deno_ast::LineAndColumnIndex) -> Self {
|
||||
JsonDiagnosticLintPosition {
|
||||
line: loc.line_index + 1,
|
||||
col: loc.column_index,
|
||||
byte_pos: byte_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WARNING: Ensure doesn't change because it's used in the JSON output
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
|
||||
struct JsonLintDiagnosticRange {
|
||||
pub start: JsonDiagnosticLintPosition,
|
||||
pub end: JsonDiagnosticLintPosition,
|
||||
}
|
||||
|
||||
// WARNING: Ensure doesn't change because it's used in the JSON output
|
||||
#[derive(Clone, Serialize)]
|
||||
struct JsonLintDiagnostic {
|
||||
pub filename: String,
|
||||
pub range: Option<JsonLintDiagnosticRange>,
|
||||
pub message: String,
|
||||
pub code: String,
|
||||
pub hint: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct JsonLintReporter {
|
||||
diagnostics: Vec<JsonLintDiagnostic>,
|
||||
errors: Vec<LintError>,
|
||||
}
|
||||
|
||||
impl JsonLintReporter {
|
||||
fn new() -> JsonLintReporter {
|
||||
JsonLintReporter {
|
||||
diagnostics: Vec::new(),
|
||||
errors: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LintReporter for JsonLintReporter {
|
||||
fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
|
||||
self.diagnostics.push(JsonLintDiagnostic {
|
||||
filename: d.specifier.to_string(),
|
||||
range: d.range.as_ref().map(|range| {
|
||||
let text_info = &range.text_info;
|
||||
let range = range.range;
|
||||
JsonLintDiagnosticRange {
|
||||
start: JsonDiagnosticLintPosition::new(
|
||||
range.start.as_byte_index(text_info.range().start),
|
||||
text_info.line_and_column_index(range.start),
|
||||
),
|
||||
end: JsonDiagnosticLintPosition::new(
|
||||
range.end.as_byte_index(text_info.range().start),
|
||||
text_info.line_and_column_index(range.end),
|
||||
),
|
||||
}
|
||||
}),
|
||||
message: d.message().to_string(),
|
||||
code: d.code().to_string(),
|
||||
hint: d.hint().map(|h| h.to_string()),
|
||||
});
|
||||
}
|
||||
|
||||
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
|
||||
self.errors.push(LintError {
|
||||
file_path: file_path.to_string(),
|
||||
message: err.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
fn close(&mut self, _check_count: usize) {
|
||||
sort_diagnostics(&mut self.diagnostics);
|
||||
let json = serde_json::to_string_pretty(&self);
|
||||
#[allow(clippy::print_stdout)]
|
||||
{
|
||||
println!("{}", json.unwrap());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn sort_diagnostics(diagnostics: &mut [JsonLintDiagnostic]) {
|
||||
// Sort so that we guarantee a deterministic output which is useful for tests
|
||||
diagnostics.sort_by(|a, b| {
|
||||
use std::cmp::Ordering;
|
||||
let file_order = a.filename.cmp(&b.filename);
|
||||
match file_order {
|
||||
Ordering::Equal => match &a.range {
|
||||
Some(a_range) => match &b.range {
|
||||
Some(b_range) => {
|
||||
let line_order = a_range.start.line.cmp(&b_range.start.line);
|
||||
match line_order {
|
||||
Ordering::Equal => a_range.start.col.cmp(&b_range.start.col),
|
||||
_ => line_order,
|
||||
}
|
||||
}
|
||||
None => Ordering::Less,
|
||||
},
|
||||
None => match &b.range {
|
||||
Some(_) => Ordering::Greater,
|
||||
None => Ordering::Equal,
|
||||
},
|
||||
},
|
||||
_ => file_order,
|
||||
}
|
||||
});
|
||||
}
|
cli/tools/lint/rules/mod.rs (new file, 296 lines)
@@ -0,0 +1,296 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::collections::HashSet;
use std::sync::Arc;

use deno_ast::ModuleSpecifier;
use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::LintRulesConfig;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::rules::LintRule;

use crate::resolver::SloppyImportsResolver;

mod no_sloppy_imports;
mod no_slow_types;

// used for publishing
pub use no_slow_types::collect_no_slow_type_diagnostics;

pub trait PackageLintRule: std::fmt::Debug + Send + Sync {
  fn code(&self) -> &'static str;

  fn tags(&self) -> &'static [&'static str] {
    &[]
  }

  fn docs(&self) -> &'static str;

  fn help_docs_url(&self) -> Cow<'static, str>;

  fn lint_package(
    &self,
    graph: &ModuleGraph,
    entrypoints: &[ModuleSpecifier],
  ) -> Vec<LintDiagnostic>;
}

pub(super) trait ExtendedLintRule: LintRule {
  /// If the rule supports the incremental cache.
  fn supports_incremental_cache(&self) -> bool;

  fn help_docs_url(&self) -> Cow<'static, str>;

  fn into_base(self: Box<Self>) -> Box<dyn LintRule>;
}

pub enum FileOrPackageLintRule {
  File(Box<dyn LintRule>),
  Package(Box<dyn PackageLintRule>),
}

#[derive(Debug)]
enum CliLintRuleKind {
  DenoLint(Box<dyn LintRule>),
  Extended(Box<dyn ExtendedLintRule>),
  Package(Box<dyn PackageLintRule>),
}

#[derive(Debug)]
pub struct CliLintRule(CliLintRuleKind);

impl CliLintRule {
  pub fn code(&self) -> &'static str {
    use CliLintRuleKind::*;
    match &self.0 {
      DenoLint(rule) => rule.code(),
      Extended(rule) => rule.code(),
      Package(rule) => rule.code(),
    }
  }

  pub fn tags(&self) -> &'static [&'static str] {
    use CliLintRuleKind::*;
    match &self.0 {
      DenoLint(rule) => rule.tags(),
      Extended(rule) => rule.tags(),
      Package(rule) => rule.tags(),
    }
  }

  pub fn docs(&self) -> &'static str {
    use CliLintRuleKind::*;
    match &self.0 {
      DenoLint(rule) => rule.docs(),
      Extended(rule) => rule.docs(),
      Package(rule) => rule.docs(),
    }
  }

  pub fn help_docs_url(&self) -> Cow<'static, str> {
    use CliLintRuleKind::*;
    match &self.0 {
      DenoLint(rule) => {
        Cow::Owned(format!("https://lint.deno.land/rules/{}", rule.code()))
      }
      Extended(rule) => rule.help_docs_url(),
      Package(rule) => rule.help_docs_url(),
    }
  }

  pub fn supports_incremental_cache(&self) -> bool {
    use CliLintRuleKind::*;
    match &self.0 {
      DenoLint(_) => true,
      Extended(rule) => rule.supports_incremental_cache(),
      // graph rules don't go through the incremental cache, so allow it
      Package(_) => true,
    }
  }

  pub fn into_file_or_pkg_rule(self) -> FileOrPackageLintRule {
    use CliLintRuleKind::*;
    match self.0 {
      DenoLint(rule) => FileOrPackageLintRule::File(rule),
      Extended(rule) => FileOrPackageLintRule::File(rule.into_base()),
      Package(rule) => FileOrPackageLintRule::Package(rule),
    }
  }
}

#[derive(Debug)]
pub struct ConfiguredRules {
  pub all_rule_codes: HashSet<&'static str>,
  pub rules: Vec<CliLintRule>,
}

impl ConfiguredRules {
  pub fn incremental_cache_state(&self) -> Option<impl std::hash::Hash> {
    if self.rules.iter().any(|r| !r.supports_incremental_cache()) {
      return None;
    }

    // use a hash of the rule names in order to bust the cache
    let mut codes = self.rules.iter().map(|r| r.code()).collect::<Vec<_>>();
    // ensure this is stable by sorting it
    codes.sort_unstable();
    Some(codes)
  }
}

pub struct LintRuleProvider {
  sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
  workspace_resolver: Option<Arc<WorkspaceResolver>>,
}

impl LintRuleProvider {
  pub fn new(
    sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
    workspace_resolver: Option<Arc<WorkspaceResolver>>,
  ) -> Self {
    Self {
      sloppy_imports_resolver,
      workspace_resolver,
    }
  }

  pub fn resolve_lint_rules_err_empty(
    &self,
    rules: LintRulesConfig,
    maybe_config_file: Option<&ConfigFile>,
  ) -> Result<ConfiguredRules, AnyError> {
    let lint_rules = self.resolve_lint_rules(rules, maybe_config_file);
    if lint_rules.rules.is_empty() {
      bail!("No rules have been configured")
    }
    Ok(lint_rules)
  }

  pub fn resolve_lint_rules(
    &self,
    rules: LintRulesConfig,
    maybe_config_file: Option<&ConfigFile>,
  ) -> ConfiguredRules {
    let deno_lint_rules = deno_lint::rules::get_all_rules();
    let cli_lint_rules = vec![CliLintRule(CliLintRuleKind::Extended(
      Box::new(no_sloppy_imports::NoSloppyImportsRule::new(
        self.sloppy_imports_resolver.clone(),
        self.workspace_resolver.clone(),
      )),
    ))];
    let cli_graph_rules = vec![CliLintRule(CliLintRuleKind::Package(
      Box::new(no_slow_types::NoSlowTypesRule),
    ))];
    let mut all_rule_names = HashSet::with_capacity(
      deno_lint_rules.len() + cli_lint_rules.len() + cli_graph_rules.len(),
    );
    let all_rules = deno_lint_rules
      .into_iter()
      .map(|rule| CliLintRule(CliLintRuleKind::DenoLint(rule)))
      .chain(cli_lint_rules)
      .chain(cli_graph_rules)
      .inspect(|rule| {
        all_rule_names.insert(rule.code());
      });
    let rules = filtered_rules(
      all_rules,
      rules
        .tags
        .or_else(|| Some(get_default_tags(maybe_config_file))),
      rules.exclude,
      rules.include,
    );
    ConfiguredRules {
      rules,
      all_rule_codes: all_rule_names,
    }
  }
}

fn get_default_tags(maybe_config_file: Option<&ConfigFile>) -> Vec<String> {
  let mut tags = Vec::with_capacity(2);
  tags.push("recommended".to_string());
  if maybe_config_file.map(|c| c.is_package()).unwrap_or(false) {
    tags.push("jsr".to_string());
  }
  tags
}

fn filtered_rules(
  all_rules: impl Iterator<Item = CliLintRule>,
  maybe_tags: Option<Vec<String>>,
  maybe_exclude: Option<Vec<String>>,
  maybe_include: Option<Vec<String>>,
) -> Vec<CliLintRule> {
  let tags_set =
    maybe_tags.map(|tags| tags.into_iter().collect::<HashSet<_>>());

  let mut rules = all_rules
    .filter(|rule| {
      let mut passes = if let Some(tags_set) = &tags_set {
        rule
          .tags()
          .iter()
          .any(|t| tags_set.contains(&t.to_string()))
      } else {
        true
      };

      if let Some(includes) = &maybe_include {
        if includes.contains(&rule.code().to_owned()) {
          passes |= true;
        }
      }

      if let Some(excludes) = &maybe_exclude {
        if excludes.contains(&rule.code().to_owned()) {
          passes &= false;
        }
      }

      passes
    })
    .collect::<Vec<_>>();

  rules.sort_by_key(|r| r.code());

  rules
}
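The precedence in `filtered_rules` is worth spelling out: tags act as the initial gate, an explicit `include` re-adds a rule regardless of tags, and an explicit `exclude` runs last and always wins. A standalone sketch of that boolean logic (the helper and rule names below are illustrative, not part of this module):

```rust
fn rule_passes(
  code: &str,
  rule_tags: &[&str],
  configured_tags: Option<&[&str]>,
  include: Option<&[&str]>,
  exclude: Option<&[&str]>,
) -> bool {
  // tags gate first (no configured tags means everything passes)
  let mut passes = configured_tags
    .map_or(true, |set| rule_tags.iter().any(|t| set.contains(t)));
  // an explicit include re-adds the rule
  if include.is_some_and(|i| i.contains(&code)) {
    passes = true;
  }
  // an explicit exclude is applied last, so it always wins
  if exclude.is_some_and(|e| e.contains(&code)) {
    passes = false;
  }
  passes
}

fn main() {
  // excluded even though it's recommended
  assert!(!rule_passes(
    "no-debugger",
    &["recommended"],
    Some(&["recommended"]),
    None,
    Some(&["no-debugger"]),
  ));
  // re-added via include even though its tags don't match
  assert!(rule_passes(
    "eqeqeq",
    &[],
    Some(&["recommended"]),
    Some(&["eqeqeq"]),
    None,
  ));
}
```

The test below exercises the same behavior through `resolve_lint_rules`.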

#[cfg(test)]
mod test {
  use super::*;
  use crate::args::LintRulesConfig;

  #[test]
  fn recommended_rules_when_no_tags_in_config() {
    let rules_config = LintRulesConfig {
      exclude: Some(vec!["no-debugger".to_string()]),
      include: None,
      tags: None,
    };
    let rules_provider = LintRuleProvider::new(None, None);
    let rules = rules_provider.resolve_lint_rules(rules_config, None);
    let mut rule_names = rules
      .rules
      .into_iter()
      .map(|r| r.code().to_string())
      .collect::<Vec<_>>();
    rule_names.sort();
    let mut recommended_rule_names = rules_provider
      .resolve_lint_rules(Default::default(), None)
      .rules
      .into_iter()
      .filter(|r| r.tags().iter().any(|t| *t == "recommended"))
      .map(|r| r.code().to_string())
      .filter(|n| n != "no-debugger")
      .collect::<Vec<_>>();
    recommended_rule_names.sort();
    assert_eq!(rule_names, recommended_rule_names);
  }
}
cli/tools/lint/rules/no_sloppy_imports.md (new file, 20 lines)
@@ -0,0 +1,20 @@
Enforces specifying explicit references to paths in module specifiers.

Non-explicit specifiers are ambiguous and require probing for the correct file
path on every run, which has a performance overhead.

Note: This lint rule is only active when using `--unstable-sloppy-imports`.

### Invalid:

```typescript
import { add } from "./math/add";
import { ConsoleLogger } from "./loggers";
```

### Valid:

```typescript
import { add } from "./math/add.ts";
import { ConsoleLogger } from "./loggers/index.ts";
```
cli/tools/lint/rules/no_sloppy_imports.rs (new file, 214 lines)
@@ -0,0 +1,214 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;

use deno_ast::SourceRange;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::Range;
use deno_lint::diagnostic::LintDiagnosticDetails;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_lint::diagnostic::LintFix;
use deno_lint::diagnostic::LintFixChange;
use deno_lint::rules::LintRule;
use text_lines::LineAndColumnIndex;

use crate::graph_util::CliJsrUrlProvider;
use crate::resolver::SloppyImportsResolution;
use crate::resolver::SloppyImportsResolver;

use super::ExtendedLintRule;

#[derive(Debug)]
pub struct NoSloppyImportsRule {
  sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
  // None for making printing out the lint rules easy
  workspace_resolver: Option<Arc<WorkspaceResolver>>,
}

impl NoSloppyImportsRule {
  pub fn new(
    sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
    workspace_resolver: Option<Arc<WorkspaceResolver>>,
  ) -> Self {
    NoSloppyImportsRule {
      sloppy_imports_resolver,
      workspace_resolver,
    }
  }
}

const CODE: &str = "no-sloppy-imports";
const DOCS_URL: &str = "https://docs.deno.com/runtime/manual/tools/unstable_flags/#--unstable-sloppy-imports";

impl ExtendedLintRule for NoSloppyImportsRule {
  fn supports_incremental_cache(&self) -> bool {
    // only allow the incremental cache when we don't
    // do sloppy import resolution because sloppy import
    // resolution requires knowing about the surrounding files
    // in addition to the current one
    self.sloppy_imports_resolver.is_none() || self.workspace_resolver.is_none()
  }

  fn help_docs_url(&self) -> Cow<'static, str> {
    Cow::Borrowed(DOCS_URL)
  }

  fn into_base(self: Box<Self>) -> Box<dyn LintRule> {
    self
  }
}

impl LintRule for NoSloppyImportsRule {
  fn lint_program_with_ast_view<'view>(
    &self,
    context: &mut deno_lint::context::Context<'view>,
    _program: deno_lint::Program<'view>,
  ) {
    let Some(workspace_resolver) = &self.workspace_resolver else {
      return;
    };
    let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver else {
      return;
    };
    if context.specifier().scheme() != "file" {
      return;
    }

    let resolver = SloppyImportCaptureResolver {
      workspace_resolver,
      sloppy_imports_resolver,
      captures: Default::default(),
    };

    deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions {
      graph_kind: deno_graph::GraphKind::All,
      specifier: context.specifier().clone(),
      maybe_headers: None,
      parsed_source: context.parsed_source(),
      // ignore resolving dynamic imports like import(`./dir/${something}`)
      file_system: &deno_graph::source::NullFileSystem,
      jsr_url_provider: &CliJsrUrlProvider,
      maybe_resolver: Some(&resolver),
      // don't bother resolving npm specifiers
      maybe_npm_resolver: None,
    });

    for (range, sloppy_import) in resolver.captures.borrow_mut().drain() {
      let start_range =
        context.text_info().loc_to_source_pos(LineAndColumnIndex {
          line_index: range.start.line,
          column_index: range.start.character,
        });
      let end_range =
        context.text_info().loc_to_source_pos(LineAndColumnIndex {
          line_index: range.end.line,
          column_index: range.end.character,
        });
      let source_range = SourceRange::new(start_range, end_range);
      context.add_diagnostic_details(
        Some(LintDiagnosticRange {
          range: source_range,
          description: None,
          text_info: context.text_info().clone(),
        }),
        LintDiagnosticDetails {
          message: "Sloppy imports are not allowed.".to_string(),
          code: CODE.to_string(),
          custom_docs_url: Some(DOCS_URL.to_string()),
          fixes: context
            .specifier()
            .make_relative(sloppy_import.as_specifier())
            .map(|relative| {
              vec![LintFix {
                description: Cow::Owned(sloppy_import.as_quick_fix_message()),
                changes: vec![LintFixChange {
                  new_text: Cow::Owned({
                    let relative = if relative.starts_with("../") {
                      relative
                    } else {
                      format!("./{}", relative)
                    };
                    let current_text =
                      context.text_info().range_text(&source_range);
                    if current_text.starts_with('"') {
                      format!("\"{}\"", relative)
                    } else if current_text.starts_with('\'') {
                      format!("'{}'", relative)
                    } else {
                      relative
                    }
                  }),
                  range: source_range,
                }],
              }]
            })
            .unwrap_or_default(),
          hint: None,
          info: vec![],
        },
      );
    }
  }

  fn code(&self) -> &'static str {
    CODE
  }

  fn docs(&self) -> &'static str {
    include_str!("no_sloppy_imports.md")
  }

  fn tags(&self) -> &'static [&'static str] {
    &["recommended"]
  }
}

#[derive(Debug)]
struct SloppyImportCaptureResolver<'a> {
  workspace_resolver: &'a WorkspaceResolver,
  sloppy_imports_resolver: &'a SloppyImportsResolver,
  captures: RefCell<HashMap<Range, SloppyImportsResolution>>,
}

impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
  fn resolve(
    &self,
    specifier_text: &str,
    referrer_range: &Range,
    mode: ResolutionMode,
  ) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> {
    let resolution = self
      .workspace_resolver
      .resolve(specifier_text, &referrer_range.specifier)
      .map_err(|err| ResolveError::Other(err.into()))?;

    match resolution {
      deno_config::workspace::MappedResolution::Normal(specifier)
      | deno_config::workspace::MappedResolution::ImportMap(specifier) => {
        match self.sloppy_imports_resolver.resolve(&specifier, mode) {
          Some(res) => {
            self
              .captures
              .borrow_mut()
              .entry(referrer_range.clone())
              .or_insert_with(|| res.clone());
            Ok(res.into_specifier())
          }
          None => Ok(specifier),
        }
      }
      deno_config::workspace::MappedResolution::WorkspaceNpmPackage {
        ..
      }
      | deno_config::workspace::MappedResolution::PackageJson { .. } => {
        Err(ResolveError::Other(anyhow!("")))
      }
    }
  }
}
cli/tools/lint/rules/no_slow_types.md (new file, 3 lines)
@@ -0,0 +1,3 @@
Enforces using types that are explicit or can be simply inferred.

Read more: https://jsr.io/docs/about-slow-types
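For a concrete picture of what this rule flags, here is an illustrative pair in the style of the other rule docs (an editorial sketch, assuming the common slow-type case of a missing explicit return type; the canonical list lives at the link above):

### Invalid:

```typescript
// the return type must be inferred from the function body
export function add(a: number, b: number) {
  return a + b;
}
```

### Valid:

```typescript
export function add(a: number, b: number): number {
  return a + b;
}
```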
cli/tools/lint/rules/no_slow_types.rs (new file, 98 lines)
@@ -0,0 +1,98 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;

use deno_ast::diagnostics::Diagnostic;
use deno_ast::ModuleSpecifier;
use deno_graph::FastCheckDiagnostic;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::diagnostic::LintDiagnosticDetails;
use deno_lint::diagnostic::LintDiagnosticRange;

use super::PackageLintRule;

const CODE: &str = "no-slow-types";

#[derive(Debug)]
pub struct NoSlowTypesRule;

impl PackageLintRule for NoSlowTypesRule {
  fn code(&self) -> &'static str {
    CODE
  }

  fn tags(&self) -> &'static [&'static str] {
    &["jsr"]
  }

  fn docs(&self) -> &'static str {
    include_str!("no_slow_types.md")
  }

  fn help_docs_url(&self) -> Cow<'static, str> {
    Cow::Borrowed("https://jsr.io/docs/about-slow-types")
  }

  fn lint_package(
    &self,
    graph: &ModuleGraph,
    entrypoints: &[ModuleSpecifier],
  ) -> Vec<LintDiagnostic> {
    collect_no_slow_type_diagnostics(graph, entrypoints)
      .into_iter()
      .map(|d| LintDiagnostic {
        specifier: d.specifier().clone(),
        range: d.range().map(|range| LintDiagnosticRange {
          text_info: range.text_info.clone(),
          range: range.range,
          description: d.range_description().map(|r| r.to_string()),
        }),
        details: LintDiagnosticDetails {
          message: d.message().to_string(),
          code: CODE.to_string(),
          hint: d.hint().map(|h| h.to_string()),
          info: d
            .info()
            .iter()
            .map(|info| Cow::Owned(info.to_string()))
            .collect(),
          fixes: vec![],
          custom_docs_url: d.docs_url().map(|u| u.into_owned()),
        },
      })
      .collect()
  }
}

/// Collects diagnostics from the module graph for the
/// given package's export URLs.
pub fn collect_no_slow_type_diagnostics(
  graph: &ModuleGraph,
  package_export_urls: &[ModuleSpecifier],
) -> Vec<FastCheckDiagnostic> {
  let mut js_exports = package_export_urls
    .iter()
    .filter_map(|url| graph.get(url).and_then(|m| m.js()));
  // fast check puts the same diagnostics in each entrypoint for the
  // package (since it's all or nothing), so we only need to check
  // the first JS entrypoint
  let Some(module) = js_exports.next() else {
    // could happen if all the exports are JSON
    return vec![];
  };

  if let Some(diagnostics) = module.fast_check_diagnostics() {
    let mut diagnostics = diagnostics.clone();
    diagnostics.sort_by_cached_key(|d| {
      (
        d.specifier().clone(),
        d.range().map(|r| r.range),
        d.code().to_string(),
      )
    });
    diagnostics
  } else {
    Vec::new()
  }
}

@@ -1,8 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::http_util;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fetch;
use lsp_types::Url;
use serde::de::DeserializeOwned;

@@ -82,7 +83,7 @@ impl std::fmt::Debug for ApiError {
impl std::error::Error for ApiError {}

pub async fn parse_response<T: DeserializeOwned>(
  response: reqwest::Response,
  response: http::Response<deno_fetch::ResBody>,
) -> Result<T, ApiError> {
  let status = response.status();
  let x_deno_ray = response

@@ -90,7 +91,7 @@ pub async fn parse_response<T: DeserializeOwned>(
    .get("x-deno-ray")
    .and_then(|value| value.to_str().ok())
    .map(|s| s.to_string());
  let text = response.text().await.unwrap();
  let text = http_util::body_to_string(response).await.unwrap();

  if !status.is_success() {
    match serde_json::from_str::<ApiError>(&text) {

@@ -122,9 +123,9 @@ pub async fn get_scope(
  client: &HttpClient,
  registry_api_url: &Url,
  scope: &str,
) -> Result<reqwest::Response, AnyError> {
) -> Result<http::Response<deno_fetch::ResBody>, AnyError> {
  let scope_url = format!("{}scopes/{}", registry_api_url, scope);
  let response = client.get(&scope_url).send().await?;
  let response = client.get(scope_url.parse()?)?.send().await?;
  Ok(response)
}

@@ -141,9 +142,9 @@ pub async fn get_package(
  registry_api_url: &Url,
  scope: &str,
  package: &str,
) -> Result<reqwest::Response, AnyError> {
) -> Result<http::Response<deno_fetch::ResBody>, AnyError> {
  let package_url = get_package_api_url(registry_api_url, scope, package);
  let response = client.get(&package_url).send().await?;
  let response = client.get(package_url.parse()?)?.send().await?;
  Ok(response)
}

@@ -14,6 +14,7 @@ use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
use deno_ast::diagnostics::DiagnosticSourcePos;
use deno_ast::diagnostics::DiagnosticSourceRange;
use deno_ast::swc::common::util::take::Take;
use deno_ast::ParseDiagnostic;
use deno_ast::SourcePos;
use deno_ast::SourceRange;
use deno_ast::SourceRanged;

@@ -117,6 +118,11 @@ pub enum PublishDiagnostic {
    text_info: SourceTextInfo,
    range: SourceRange,
  },
  SyntaxError(ParseDiagnostic),
  MissingLicense {
    /// This only exists because diagnostics require a location.
    expected_path: PathBuf,
  },
}

impl PublishDiagnostic {

@@ -165,6 +171,9 @@ impl Diagnostic for PublishDiagnostic {
      ExcludedModule { .. } => DiagnosticLevel::Error,
      MissingConstraint { .. } => DiagnosticLevel::Error,
      BannedTripleSlashDirectives { .. } => DiagnosticLevel::Error,
      SyntaxError { .. } => DiagnosticLevel::Error,
      // todo(#24676): make this an error in Deno 1.46
      MissingLicense { .. } => DiagnosticLevel::Warning,
    }
  }

@@ -183,6 +192,8 @@ impl Diagnostic for PublishDiagnostic {
      BannedTripleSlashDirectives { .. } => {
        Cow::Borrowed("banned-triple-slash-directives")
      }
      SyntaxError { .. } => Cow::Borrowed("syntax-error"),
      MissingLicense { .. } => Cow::Borrowed("missing-license"),
    }
  }

@@ -203,6 +214,8 @@ impl Diagnostic for PublishDiagnostic {
      ExcludedModule { .. } => Cow::Borrowed("module in package's module graph was excluded from publishing"),
      MissingConstraint { specifier, .. } => Cow::Owned(format!("specifier '{}' is missing a version constraint", specifier)),
      BannedTripleSlashDirectives { .. } => Cow::Borrowed("triple slash directives that modify globals are not allowed"),
      SyntaxError(diagnostic) => diagnostic.message(),
      MissingLicense { .. } => Cow::Borrowed("missing license file"),
    }
  }

@@ -269,6 +282,10 @@ impl Diagnostic for PublishDiagnostic {
        source_pos: DiagnosticSourcePos::SourcePos(range.start),
        text_info: Cow::Borrowed(text_info),
      },
      SyntaxError(diagnostic) => diagnostic.location(),
      MissingLicense { expected_path } => DiagnosticLocation::Path {
        path: expected_path.clone(),
      },
    }
  }

@@ -348,6 +365,8 @@ impl Diagnostic for PublishDiagnostic {
          description: Some("the triple slash directive".into()),
        }],
      }),
      SyntaxError(diagnostic) => diagnostic.snippet(),
      MissingLicense { .. } => None,
    }
  }

@@ -380,6 +399,10 @@ impl Diagnostic for PublishDiagnostic {
      BannedTripleSlashDirectives { .. } => Some(
        Cow::Borrowed("remove the triple slash directive"),
      ),
      SyntaxError(diagnostic) => diagnostic.hint(),
      MissingLicense { .. } => Some(
        Cow::Borrowed("add a LICENSE file to the package and ensure it is not ignored from being published"),
      ),
    }
  }

@@ -407,7 +430,17 @@ impl Diagnostic for PublishDiagnostic {
          None => None,
        }
      }
      _ => None,
      SyntaxError(diagnostic) => diagnostic.snippet_fixed(),
      FastCheck(_)
      | SpecifierUnfurl(_)
      | InvalidPath { .. }
      | DuplicatePath { .. }
      | UnsupportedFileType { .. }
      | UnsupportedJsxTsx { .. }
      | ExcludedModule { .. }
      | MissingConstraint { .. }
      | BannedTripleSlashDirectives { .. }
      | MissingLicense { .. } => None,
    }
  }

@@ -456,6 +489,8 @@ impl Diagnostic for PublishDiagnostic {
        Cow::Borrowed("instead instruct the user of your package to specify these directives"),
        Cow::Borrowed("or set their 'lib' compiler option appropriately"),
      ]),
      SyntaxError(diagnostic) => diagnostic.info(),
      MissingLicense { .. } => Cow::Borrowed(&[]),
    }
  }

@@ -488,6 +523,10 @@ impl Diagnostic for PublishDiagnostic {
      BannedTripleSlashDirectives { .. } => Some(Cow::Borrowed(
        "https://jsr.io/go/banned-triple-slash-directives",
      )),
      SyntaxError(diagnostic) => diagnostic.docs_url(),
      MissingLicense { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/missing-license"))
      }
    }
  }
}

@@ -147,6 +147,13 @@ impl GraphDiagnosticsCollector {
    let parsed_source = self
      .parsed_source_cache
      .get_parsed_source_from_js_module(module)?;

    // surface syntax errors
    for diagnostic in parsed_source.diagnostics() {
      diagnostics_collector
        .push(PublishDiagnostic::SyntaxError(diagnostic.clone()));
    }

    check_for_banned_triple_slash_directives(
      &parsed_source,
      diagnostics_collector,

@@ -4,6 +4,7 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::io::IsTerminal;
use std::path::Path;
use std::path::PathBuf;
use std::process::Stdio;
use std::rc::Rc;
use std::sync::Arc;

@@ -11,9 +12,9 @@ use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_ast::ModuleSpecifier;
use deno_config::glob::FilePatterns;
use deno_config::ConfigFile;
use deno_config::WorkspaceMemberConfig;
use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::Workspace;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;

@@ -24,10 +25,8 @@ use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fs::FileSystem;
use deno_terminal::colors;
use import_map::ImportMap;
use http_body_util::BodyExt;
use lsp_types::Url;
use serde::Deserialize;
use serde::Serialize;

@@ -44,10 +43,9 @@ use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
use crate::resolver::MappedSpecifierResolver;
use crate::resolver::SloppyImportsResolver;
use crate::tools::check::CheckOptions;
use crate::tools::lint::no_slow_types;
use crate::tools::lint::collect_no_slow_type_diagnostics;
use crate::tools::registry::diagnostics::PublishDiagnostic;
use crate::tools::registry::diagnostics::PublishDiagnosticsCollector;
use crate::util::display::human_size;

@@ -76,35 +74,48 @@ use self::paths::CollectedPublishPath;
use self::tar::PublishableTarball;

pub async fn publish(
  flags: Flags,
  flags: Arc<Flags>,
  publish_flags: PublishFlags,
) -> Result<(), AnyError> {
  let cli_factory = CliFactory::from_flags(flags)?;
  let cli_factory = CliFactory::from_flags(flags);

  let auth_method =
    get_auth_method(publish_flags.token, publish_flags.dry_run)?;

  let import_map = cli_factory
    .maybe_import_map()
    .await?
    .clone()
    .unwrap_or_else(|| {
      Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap()))
    });

  let directory_path = cli_factory.cli_options().initial_cwd();

  let mapped_resolver = Arc::new(MappedSpecifierResolver::new(
    Some(import_map),
    cli_factory.package_json_deps_provider().clone(),
  let cli_options = cli_factory.cli_options()?;
  let directory_path = cli_options.initial_cwd();
  let publish_configs = cli_options.start_dir.jsr_packages_for_publish();
  if publish_configs.is_empty() {
    match cli_options.start_dir.maybe_deno_json() {
      Some(deno_json) => {
        debug_assert!(!deno_json.is_package());
        bail!(
          "Missing 'name', 'version' and 'exports' field in '{}'.",
          deno_json.specifier
        );
      }
      None => {
        bail!(
          "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
          directory_path.display()
        );
      }
    }
  }
  let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
    if cli_options.unstable_sloppy_imports() {
      Some(SloppyImportsResolver::new(cli_factory.fs().clone()))
    } else {
      None
    },
    cli_options
      .create_workspace_resolver(
        cli_factory.file_fetcher()?,
        PackageJsonDepResolution::Enabled,
      )
      .await?,
    cli_options.unstable_bare_node_builtins(),
  ));
  let cli_options = cli_factory.cli_options();
  let Some(config_file) = cli_options.maybe_config_file() else {
    bail!(
      "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
      directory_path.display()
    );
  };

  let diagnostics_collector = PublishDiagnosticsCollector::default();
  let publish_preparer = PublishPreparer::new(

@@ -112,16 +123,15 @@ pub async fn publish(
    cli_factory.module_graph_creator().await?.clone(),
    cli_factory.parsed_source_cache().clone(),
    cli_factory.type_checker().await?.clone(),
    cli_factory.fs().clone(),
    cli_factory.cli_options().clone(),
    mapped_resolver,
    cli_options.clone(),
    specifier_unfurler,
  );

  let prepared_data = publish_preparer
    .prepare_packages_for_publishing(
      publish_flags.allow_slow_types,
      &diagnostics_collector,
      config_file.clone(),
      publish_configs,
    )
    .await?;

@@ -135,9 +145,13 @@ pub async fn publish(
      .ok()
      .is_none()
    && !publish_flags.allow_dirty
    && check_if_git_repo_dirty(cli_options.initial_cwd()).await
  {
    bail!("Aborting due to uncommitted changes. Check in source code or run with --allow-dirty");
    if let Some(dirty_text) =
      check_if_git_repo_dirty(cli_options.initial_cwd()).await
    {
      log::error!("\nUncommitted changes:\n\n{}\n", dirty_text);
      bail!("Aborting due to uncommitted changes. Check in source code or run with --allow-dirty");
    }
  }

  if publish_flags.dry_run {

@@ -193,8 +207,7 @@ struct PublishPreparer {
  source_cache: Arc<ParsedSourceCache>,
  type_checker: Arc<TypeChecker>,
  cli_options: Arc<CliOptions>,
  mapped_resolver: Arc<MappedSpecifierResolver>,
  sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
  specifier_unfurler: Arc<SpecifierUnfurler>,
}

impl PublishPreparer {

@@ -203,23 +216,16 @@ impl PublishPreparer {
    module_graph_creator: Arc<ModuleGraphCreator>,
    source_cache: Arc<ParsedSourceCache>,
    type_checker: Arc<TypeChecker>,
    fs: Arc<dyn FileSystem>,
    cli_options: Arc<CliOptions>,
    mapped_resolver: Arc<MappedSpecifierResolver>,
    specifier_unfurler: Arc<SpecifierUnfurler>,
  ) -> Self {
    let sloppy_imports_resolver = if cli_options.unstable_sloppy_imports() {
      Some(Arc::new(SloppyImportsResolver::new(fs.clone())))
    } else {
      None
    };
    Self {
      graph_diagnostics_collector,
      module_graph_creator,
      source_cache,
      type_checker,
      cli_options,
      mapped_resolver,
      sloppy_imports_resolver,
      specifier_unfurler,
    }
  }

@@ -227,11 +233,9 @@ impl PublishPreparer {
    &self,
    allow_slow_types: bool,
    diagnostics_collector: &PublishDiagnosticsCollector,
    deno_json: ConfigFile,
    publish_configs: Vec<JsrPackageConfig>,
  ) -> Result<PreparePackagesData, AnyError> {
    let members = deno_json.to_workspace_members()?;

    if members.len() > 1 {
    if publish_configs.len() > 1 {
      log::info!("Publishing a workspace...");
    }

@@ -240,31 +244,24 @@ impl PublishPreparer {
      .build_and_check_graph_for_publish(
        allow_slow_types,
        diagnostics_collector,
        &members,
        &publish_configs,
      )
      .await?;

    let mut package_by_name = HashMap::with_capacity(members.len());
    let mut package_by_name = HashMap::with_capacity(publish_configs.len());
    let publish_order_graph =
      publish_order::build_publish_order_graph(&graph, &members)?;
      publish_order::build_publish_order_graph(&graph, &publish_configs)?;

    let results = members
    let results = publish_configs
      .into_iter()
      .map(|member| {
        let graph = graph.clone();
        async move {
          let package = self
            .prepare_publish(
              &member.package_name,
              &member.config_file,
              graph,
              diagnostics_collector,
            )
            .prepare_publish(&member, graph, diagnostics_collector)
            .await
            .with_context(|| {
              format!("Failed preparing '{}'.", member.package_name)
            })?;
          Ok::<_, AnyError>((member.package_name, package))
            .with_context(|| format!("Failed preparing '{}'.", member.name))?;
          Ok::<_, AnyError>((member.name, package))
        }
        .boxed()
      })

@@ -284,12 +281,15 @@ impl PublishPreparer {
    &self,
    allow_slow_types: bool,
    diagnostics_collector: &PublishDiagnosticsCollector,
    packages: &[WorkspaceMemberConfig],
    package_configs: &[JsrPackageConfig],
  ) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
    let build_fast_check_graph = !allow_slow_types;
    let graph = self
      .module_graph_creator
      .create_and_validate_publish_graph(packages, build_fast_check_graph)
      .create_and_validate_publish_graph(
        package_configs,
        build_fast_check_graph,
      )
      .await?;

    // todo(dsherret): move to lint rule

@@ -312,7 +312,10 @@ impl PublishPreparer {
    } else if std::env::var("DENO_INTERNAL_FAST_CHECK_OVERWRITE").as_deref()
      == Ok("1")
    {
      if check_if_git_repo_dirty(self.cli_options.initial_cwd()).await {
      if check_if_git_repo_dirty(self.cli_options.initial_cwd())
        .await
        .is_some()
      {
        bail!("When using DENO_INTERNAL_FAST_CHECK_OVERWRITE, the git repo must be in a clean state.");
      }

@@ -335,10 +338,10 @@ impl PublishPreparer {
    } else {
      log::info!("Checking for slow types in the public API...");
      let mut any_pkg_had_diagnostics = false;
      for package in packages {
      for package in package_configs {
        let export_urls = package.config_file.resolve_export_value_urls()?;
        let diagnostics =
          no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph);
          collect_no_slow_type_diagnostics(&graph, &export_urls);
        if !diagnostics.is_empty() {
          any_pkg_had_diagnostics = true;
          for diagnostic in diagnostics {

@@ -389,14 +392,14 @@ impl PublishPreparer {
  #[allow(clippy::too_many_arguments)]
  async fn prepare_publish(
    &self,
    package_name: &str,
    deno_json: &ConfigFile,
    package: &JsrPackageConfig,
    graph: Arc<deno_graph::ModuleGraph>,
    diagnostics_collector: &PublishDiagnosticsCollector,
  ) -> Result<Rc<PreparedPublishPackage>, AnyError> {
    static SUGGESTED_ENTRYPOINTS: [&str; 4] =
      ["mod.ts", "mod.js", "index.ts", "index.js"];

    let deno_json = &package.config_file;
    let config_path = deno_json.specifier.to_file_path().unwrap();
    let root_dir = config_path.parent().unwrap().to_path_buf();
    let Some(version) = deno_json.json.version.clone() else {

@@ -418,46 +421,36 @@ impl PublishPreparer {
      "version": "{}",
      "exports": "{}"
    }}"#,
      package_name,
      package.name,
      version,
      suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
    );

    bail!(
      "You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
      package_name,
      package.name,
      deno_json.specifier,
      exports_content
    );
  }
  let Some(name_no_at) = package_name.strip_prefix('@') else {
  let Some(name_no_at) = package.name.strip_prefix('@') else {
    bail!("Invalid package name, use '@<scope_name>/<package_name> format");
  };
  let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
    bail!("Invalid package name, use '@<scope_name>/<package_name> format");
  };
  let file_patterns = deno_json
    .to_publish_config()?
    .map(|c| c.files)
    .unwrap_or_else(|| FilePatterns::new_with_base(root_dir.to_path_buf()));
  let file_patterns = package.member_dir.to_publish_config()?.files;

  let tarball = deno_core::unsync::spawn_blocking({
    let diagnostics_collector = diagnostics_collector.clone();
    let mapped_resolver = self.mapped_resolver.clone();
    let sloppy_imports_resolver = self.sloppy_imports_resolver.clone();
    let unfurler = self.specifier_unfurler.clone();
    let cli_options = self.cli_options.clone();
    let source_cache = self.source_cache.clone();
    let config_path = config_path.clone();
    move || {
      let bare_node_builtins = cli_options.unstable_bare_node_builtins();
      let unfurler = SpecifierUnfurler::new(
        &mapped_resolver,
        sloppy_imports_resolver.as_deref(),
        bare_node_builtins,
      );
      let root_specifier =
        ModuleSpecifier::from_directory_path(&root_dir).unwrap();
      let publish_paths =
      let mut publish_paths =
        paths::collect_publish_paths(paths::CollectPublishPathsOptions {
          root_dir: &root_dir,
          cli_options: &cli_options,

@@ -471,8 +464,30 @@ impl PublishPreparer {
        &publish_paths,
        &diagnostics_collector,
      );

      if !has_license_file(publish_paths.iter().map(|p| &p.specifier)) {
        if let Some(license_path) =
          resolve_license_file(&root_dir, cli_options.workspace())
        {
          // force including the license file from the package or workspace root
          publish_paths.push(CollectedPublishPath {
            specifier: ModuleSpecifier::from_file_path(&license_path)
              .unwrap(),
            relative_path: "/LICENSE".to_string(),
            maybe_content: Some(std::fs::read(&license_path).with_context(
              || format!("failed reading '{}'.", license_path.display()),
            )?),
            path: license_path,
          });
        } else {
          diagnostics_collector.push(PublishDiagnostic::MissingLicense {
            expected_path: root_dir.join("LICENSE"),
          });
        }
      }

      tar::create_gzipped_tarball(
        &publish_paths,
        publish_paths,
        LazyGraphSourceParser::new(&source_cache, &graph),
        &diagnostics_collector,
        &unfurler,

@@ -482,7 +497,7 @@ impl PublishPreparer {
    })
    .await??;

    log::debug!("Tarball size ({}): {}", package_name, tarball.bytes.len());
    log::debug!("Tarball size ({}): {}", package.name, tarball.bytes.len());

    Ok(Rc::new(PreparedPublishPackage {
      scope: scope.to_string(),

@@ -548,11 +563,13 @@ async fn get_auth_headers(
  let challenge = BASE64_STANDARD.encode(sha2::Sha256::digest(&verifier));

  let response = client
    .post(format!("{}authorizations", registry_url))
    .json(&serde_json::json!({
      "challenge": challenge,
      "permissions": permissions,
    }))
    .post_json(
      format!("{}authorizations", registry_url).parse()?,
      &serde_json::json!({
        "challenge": challenge,
        "permissions": permissions,
      }),
    )?
    .send()
    .await
    .context("Failed to create interactive authorization")?;

@@ -582,11 +599,13 @@ async fn get_auth_headers(
  loop {
    tokio::time::sleep(interval).await;
    let response = client
      .post(format!("{}authorizations/exchange", registry_url))
      .json(&serde_json::json!({
        "exchangeToken": auth.exchange_token,
        "verifier": verifier,
      }))
      .post_json(
        format!("{}authorizations/exchange", registry_url).parse()?,
        &serde_json::json!({
          "exchangeToken": auth.exchange_token,
          "verifier": verifier,
        }),
      )?
      .send()
      .await
      .context("Failed to exchange authorization")?;

@@ -643,15 +662,20 @@ async fn get_auth_headers(
    );

    let response = client
      .get(url)
      .bearer_auth(&oidc_config.token)
      .get(url.parse()?)?
      .header(
        http::header::AUTHORIZATION,
        format!("Bearer {}", oidc_config.token).parse()?,
      )
      .send()
      .await
      .context("Failed to get OIDC token")?;
    let status = response.status();
    let text = response.text().await.with_context(|| {
      format!("Failed to get OIDC token: status {}", status)
    })?;
    let text = crate::http_util::body_to_string(response)
      .await
      .with_context(|| {
        format!("Failed to get OIDC token: status {}", status)
      })?;
    if !status.is_success() {
      bail!(
        "Failed to get OIDC token: status {}, response: '{}'",

@@ -779,7 +803,7 @@ async fn ensure_scopes_and_packages_exist(

  loop {
    tokio::time::sleep(std::time::Duration::from_secs(3)).await;
    let response = client.get(&package_api_url).send().await?;
    let response = client.get(package_api_url.parse()?)?.send().await?;
    if response.status() == 200 {
      let name = format!("@{}/{}", package.scope, package.package);
      log::info!("Package {} created", colors::green(name));

@@ -903,11 +927,19 @@ async fn publish_package(
    package.config
  );

  let body = http_body_util::Full::new(package.tarball.bytes.clone())
    .map_err(|never| match never {})
    .boxed();
  let response = http_client
    .post(url)
    .header(reqwest::header::AUTHORIZATION, authorization)
    .header(reqwest::header::CONTENT_ENCODING, "gzip")
    .body(package.tarball.bytes.clone())
    .post(url.parse()?, body)?
    .header(
      http::header::AUTHORIZATION,
      authorization.parse().map_err(http::Error::from)?,
    )
    .header(
      http::header::CONTENT_ENCODING,
      "gzip".parse().map_err(http::Error::from)?,
    )
    .send()
    .await?;

@@ -952,7 +984,7 @@ async fn publish_package(
  while task.status != "success" && task.status != "failure" {
    tokio::time::sleep(interval).await;
    let resp = http_client
      .get(format!("{}publish_status/{}", registry_api_url, task.id))
      .get(format!("{}publish_status/{}", registry_api_url, task.id).parse()?)?
      .send()
      .await
      .with_context(|| {

@@ -982,14 +1014,6 @@ async fn publish_package(
    );
  }

  log::info!(
    "{} @{}/{}@{}",
    colors::green("Successfully published"),
    package.scope,
    package.package,
    package.version
  );

  let enable_provenance = std::env::var("DISABLE_JSR_PROVENANCE").is_err()
    && (auth::is_gha() && auth::gha_oidc_token().is_some() && provenance);

@@ -1001,7 +1025,8 @@ async fn publish_package(
      package.scope, package.package, package.version
    ))?;

    let meta_bytes = http_client.get(meta_url).send().await?.bytes().await?;
    let resp = http_client.get(meta_url)?.send().await?;
    let meta_bytes = resp.collect().await?.to_bytes();

    if std::env::var("DISABLE_JSR_MANIFEST_VERIFICATION_FOR_TESTING").is_err() {
      verify_version_manifest(&meta_bytes, &package)?;

@@ -1032,13 +1057,20 @@ async fn publish_package(
      registry_api_url, package.scope, package.package, package.version
    );
    http_client
      .post(provenance_url)
      .header(reqwest::header::AUTHORIZATION, authorization)
      .json(&json!({ "bundle": bundle }))
      .post_json(provenance_url.parse()?, &json!({ "bundle": bundle }))?
      .header(http::header::AUTHORIZATION, authorization.parse()?)
      .send()
      .await?;
  }

  log::info!(
    "{} @{}/{}@{}",
    colors::green("Successfully published"),
    package.scope,
    package.package,
    package.version
  );

  log::info!(
    "{}",
    colors::gray(format!(

@@ -1146,10 +1178,10 @@ fn verify_version_manifest(
  Ok(())
}

async fn check_if_git_repo_dirty(cwd: &Path) -> bool {
async fn check_if_git_repo_dirty(cwd: &Path) -> Option<String> {
  let bin_name = if cfg!(windows) { "git.exe" } else { "git" };

  // Check if git exists
  let git_exists = Command::new(bin_name)
    .arg("--version")
    .stderr(Stdio::null())

@@ -1159,7 +1191,7 @@ async fn check_if_git_repo_dirty(cwd: &Path) -> bool {
    .map_or(false, |status| status.success());

  if !git_exists {
    return false; // Git is not installed
    return None; // Git is not installed
  }

  // Check if there are uncommitted changes

@@ -1171,7 +1203,60 @@ async fn check_if_git_repo_dirty(cwd: &Path) -> bool {
    .expect("Failed to execute command");

  let output_str = String::from_utf8_lossy(&output.stdout);
  !output_str.trim().is_empty()
  let text = output_str.trim();
  if text.is_empty() {
    None
  } else {
    Some(text.to_string())
  }
}

static SUPPORTED_LICENSE_FILE_NAMES: [&str; 6] = [
  "LICENSE",
  "LICENSE.md",
  "LICENSE.txt",
  "LICENCE",
  "LICENCE.md",
  "LICENCE.txt",
];

fn resolve_license_file(
  pkg_root_dir: &Path,
  workspace: &Workspace,
) -> Option<PathBuf> {
  let workspace_root_dir = workspace.root_dir_path();
  let mut dirs = Vec::with_capacity(2);
  dirs.push(pkg_root_dir);
  if workspace_root_dir != pkg_root_dir {
    dirs.push(&workspace_root_dir);
  }
  for dir in dirs {
    for file_name in &SUPPORTED_LICENSE_FILE_NAMES {
      let file_path = dir.join(file_name);
      if file_path.exists() {
        return Some(file_path);
      }
    }
  }
  None
}

fn has_license_file<'a>(
  mut specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
) -> bool {
  let supported_license_files = SUPPORTED_LICENSE_FILE_NAMES
    .iter()
    .map(|s| s.to_lowercase())
    .collect::<HashSet<_>>();
  specifiers.any(|specifier| {
    specifier
      .path()
      .rsplit_once('/')
      .map(|(_, file)| {
        supported_license_files.contains(file.to_lowercase().as_str())
      })
      .unwrap_or(false)
  })
}

#[allow(clippy::print_stderr)]

@@ -1182,6 +1267,10 @@ fn ring_bell() {

#[cfg(test)]
mod tests {
  use deno_ast::ModuleSpecifier;

  use crate::tools::registry::has_license_file;

  use super::tar::PublishableTarball;
  use super::tar::PublishableTarballFile;
  use super::verify_version_manifest;

@@ -1283,4 +1372,31 @@ mod tests {

    assert!(verify_version_manifest(meta_bytes, &package).is_err());
  }

  #[test]
  fn test_has_license_files() {
    fn has_license_file_str(expected: &[&str]) -> bool {
      let specifiers = expected
        .iter()
        .map(|s| ModuleSpecifier::parse(s).unwrap())
        .collect::<Vec<_>>();
      has_license_file(specifiers.iter())
    }

    assert!(has_license_file_str(&["file:///LICENSE"]));
    assert!(has_license_file_str(&["file:///license"]));
    assert!(has_license_file_str(&["file:///LICENSE.txt"]));
    assert!(has_license_file_str(&["file:///LICENSE.md"]));
    assert!(has_license_file_str(&["file:///LICENCE"]));
    assert!(has_license_file_str(&["file:///LICENCE.txt"]));
    assert!(has_license_file_str(&["file:///LICENCE.md"]));
    assert!(has_license_file_str(&[
      "file:///other",
      "file:///test/LICENCE.md"
    ]),);
    assert!(!has_license_file_str(&[
      "file:///other",
      "file:///test/tLICENSE"
    ]),);
  }
}

@@ -8,12 +8,12 @@ use std::path::PathBuf;

 use deno_ast::MediaType;
 use deno_ast::ModuleSpecifier;
+use deno_config::glob::FileCollector;
 use deno_config::glob::FilePatterns;
 use deno_core::error::AnyError;
 use thiserror::Error;

 use crate::args::CliOptions;
-use crate::util::fs::FileCollector;

 use super::diagnostics::PublishDiagnostic;
 use super::diagnostics::PublishDiagnosticsCollector;
@@ -214,7 +217,10 @@ pub enum PackagePathValidationError {
 pub struct CollectedPublishPath {
   pub specifier: ModuleSpecifier,
   pub path: PathBuf,
+  /// Relative path to use in the tarball. This should be prefixed with a `/`.
   pub relative_path: String,
+  /// Specify the contents for any injected paths.
+  pub maybe_content: Option<Vec<u8>>,
 }

 pub struct CollectPublishPathsOptions<'a> {
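Note: the new maybe_content field is what lets publish inject a file (for example a workspace-level license) into the tarball even when it does not live under the package root. A hedged construction sketch, with illustrative values not taken from this diff:

let injected = CollectedPublishPath {
  specifier: ModuleSpecifier::parse("file:///pkg/LICENSE")?,
  path: PathBuf::from("/pkg/LICENSE"),
  relative_path: "/LICENSE".to_string(), // '/'-prefixed per the doc comment
  maybe_content: Some(b"MIT License ...".to_vec()), // bytes used instead of reading `path`
};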
@@ -307,6 +310,7 @@ pub fn collect_publish_paths(
       specifier,
       path,
       relative_path,
+      maybe_content: None,
     });
   }

@@ -319,14 +323,14 @@ fn collect_paths(
   file_patterns: FilePatterns,
 ) -> Result<Vec<PathBuf>, AnyError> {
   FileCollector::new(|e| {
-    if !e.file_type.is_file() {
+    if !e.metadata.is_file {
       if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {
         diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
           specifier,
-          kind: if e.file_type.is_symlink() {
-            "symlink".to_owned()
+          kind: if e.metadata.is_symlink {
+            "symlink".to_string()
           } else {
-            format!("{:?}", e.file_type)
+            "Unknown".to_string()
           },
         });
       }
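Note: a standalone restatement of the classification above, assuming the new collector metadata exposes plain is_file/is_symlink bools as the diff suggests:

struct EntryMetadata {
  is_file: bool,
  is_symlink: bool,
}

// None for regular files (supported); otherwise the label used in the
// UnsupportedFileType diagnostic.
fn unsupported_kind(m: &EntryMetadata) -> Option<String> {
  if m.is_file {
    None
  } else if m.is_symlink {
    Some("symlink".to_string())
  } else {
    Some("Unknown".to_string())
  }
}

fn main() {
  let dir = EntryMetadata { is_file: false, is_symlink: false };
  assert_eq!(unsupported_kind(&dir), Some("Unknown".to_string()));
}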
@@ -341,5 +345,5 @@ fn collect_paths(
   .ignore_node_modules()
   .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
   .use_gitignore()
-  .collect_file_patterns(file_patterns)
+  .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, file_patterns)
 }
@@ -5,7 +5,7 @@ use std::path::PathBuf;
 use std::sync::Arc;

 use deno_ast::TextChange;
-use deno_config::FmtOptionsConfig;
+use deno_config::deno_json::FmtOptionsConfig;
 use deno_core::anyhow::anyhow;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
@@ -49,8 +49,8 @@ impl DenoConfigFormat {
 }

 enum DenoOrPackageJson {
-  Deno(deno_config::ConfigFile, DenoConfigFormat),
-  Npm(deno_node::PackageJson, Option<FmtOptionsConfig>),
+  Deno(Arc<deno_config::deno_json::ConfigFile>, DenoConfigFormat),
+  Npm(Arc<deno_node::PackageJson>, Option<FmtOptionsConfig>),
 }

 impl DenoOrPackageJson {
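Note: wrapping both variants in Arc makes the enum cheap to clone and share with the CLI factory. A minimal illustration of the sharing behavior (a String stands in for the parsed config):

use std::sync::Arc;

fn main() {
  let config = Arc::new(String::from("{}"));
  let shared = config.clone(); // refcount bump; the config is not re-parsed or deep-copied
  assert_eq!(Arc::strong_count(&config), 2);
  assert!(Arc::ptr_eq(&config, &shared));
}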
@@ -87,7 +87,6 @@ impl DenoOrPackageJson {
       DenoOrPackageJson::Deno(deno, ..) => deno
         .to_fmt_config()
         .ok()
-        .flatten()
         .map(|f| f.options)
         .unwrap_or_default(),
       DenoOrPackageJson::Npm(_, config) => config.clone().unwrap_or_default(),
@@ -120,11 +119,12 @@ impl DenoOrPackageJson {
   /// creates a `deno.json` file - in this case
   /// we also return a new `CliFactory` that knows about
   /// the new config
-  fn from_flags(flags: Flags) -> Result<(Self, CliFactory), AnyError> {
-    let factory = CliFactory::from_flags(flags.clone())?;
-    let options = factory.cli_options().clone();
+  fn from_flags(flags: Arc<Flags>) -> Result<(Self, CliFactory), AnyError> {
+    let factory = CliFactory::from_flags(flags.clone());
+    let options = factory.cli_options()?;
+    let start_dir = &options.start_dir;

-    match (options.maybe_config_file(), options.maybe_package_json()) {
+    match (start_dir.maybe_deno_json(), start_dir.maybe_pkg_json()) {
       // when both are present, for now,
       // default to deno.json
       (Some(deno), Some(_) | None) => Ok((
@@ -140,21 +140,19 @@ impl DenoOrPackageJson {
       (None, Some(_) | None) => {
         std::fs::write(options.initial_cwd().join("deno.json"), "{}\n")
           .context("Failed to create deno.json file")?;
+        drop(factory); // drop to prevent use
         log::info!("Created deno.json configuration file.");
-        let new_factory = CliFactory::from_flags(flags.clone())?;
-        let new_options = new_factory.cli_options().clone();
+        let factory = CliFactory::from_flags(flags.clone());
+        let options = factory.cli_options()?.clone();
+        let start_dir = &options.start_dir;
         Ok((
           DenoOrPackageJson::Deno(
-            new_options
-              .maybe_config_file()
-              .as_ref()
-              .ok_or_else(|| {
-                anyhow!("config not found, but it was just created")
-              })?
-              .clone(),
+            start_dir.maybe_deno_json().cloned().ok_or_else(|| {
+              anyhow!("config not found, but it was just created")
+            })?,
             DenoConfigFormat::Json,
           ),
-          new_factory,
+          factory,
         ))
       }
     }
@@ -177,7 +175,10 @@ fn package_json_dependency_entry(
   }
 }

-pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
+pub async fn add(
+  flags: Arc<Flags>,
+  add_flags: AddFlags,
+) -> Result<(), AnyError> {
   let (config_file, cli_factory) =
     DenoOrPackageJson::from_flags(flags.clone())?;

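Note: callers now pass Arc<Flags>, so the repeated CliFactory constructions inside add() can each take a cheap clone of the same flags. A hypothetical invocation sketch (the surrounding setup is assumed, not shown in this diff):

let flags = Arc::new(flags); // flags: Flags parsed from the command line
add(flags, add_flags).await?;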
@@ -306,12 +307,12 @@ pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
     .await
     .context("Failed to update configuration file")?;

-  // TODO(bartlomieju): we should now cache the imports from the deno.json.
-
+  // clear the previously cached package.json from memory before reloading it
+  node_resolver::PackageJsonThreadLocalCache::clear();
   // make a new CliFactory to pick up the updated config file
-  let cli_factory = CliFactory::from_flags(flags)?;
+  let cli_factory = CliFactory::from_flags(flags);
   // cache deps
-  if cli_factory.cli_options().enable_future_features() {
+  if cli_factory.cli_options()?.enable_future_features() {
     crate::module_loader::load_top_level_deps(&cli_factory).await?;
   }

@@ -1,5 +1,6 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

+use crate::http_util;
 use crate::http_util::HttpClient;

 use super::api::OidcTokenResponse;
@@ -12,6 +13,8 @@ use deno_core::anyhow;
 use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
+use deno_core::serde_json;
 use deno_core::url::Url;
+use http_body_util::BodyExt;
 use once_cell::sync::Lazy;
 use p256::elliptic_curve;
 use p256::pkcs8::AssociatedOid;
@@ -504,12 +507,12 @@ impl<'a> FulcioSigner<'a> {

     let response = self
       .http_client
-      .post(url)
-      .json(&request_body)
+      .post_json(url.parse()?, &request_body)?
       .send()
       .await?;

-    let body: SigningCertificateResponse = response.json().await?;
+    let body: SigningCertificateResponse =
+      http_util::body_to_json(response).await?;

     let key = body
       .signed_certificate_embedded_sct
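Note: this is the recurring migration pattern in this diff. reqwest's .post(url).json(&body) builder and Response::json are replaced by the in-tree HttpClient, whose post_json takes a pre-parsed Url plus a serializable body (and can fail before send, hence the extra ?), while http_util::body_to_json decodes the response. Consolidated shape of the new call, as used here:

let response = http_client
  .post_json(url.parse()?, &request_body)?
  .send()
  .await?;
let body: SigningCertificateResponse =
  http_util::body_to_json(response).await?;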
@@ -527,15 +530,23 @@ impl<'a> FulcioSigner<'a> {
       bail!("No OIDC token available");
     };

-    let res = self
+    let mut url = req_url.parse::<Url>()?;
+    url.query_pairs_mut().append_pair("audience", aud);
+    let res_bytes = self
       .http_client
-      .get(&req_url)
-      .bearer_auth(token)
-      .query(&[("audience", aud)])
+      .get(url)?
+      .header(
+        http::header::AUTHORIZATION,
+        format!("Bearer {}", token)
+          .parse()
+          .map_err(http::Error::from)?,
+      )
       .send()
       .await?
-      .json::<OidcTokenResponse>()
-      .await?;
+      .collect()
+      .await?
+      .to_bytes();
+    let res: OidcTokenResponse = serde_json::from_slice(&res_bytes)?;
     Ok(res.value)
   }
 }
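Note: without reqwest's bearer_auth() and query() helpers, the query string and the Authorization header are now built by hand. A self-contained sketch of those two pieces using the url and http crates (endpoint and token values are made up):

use url::Url;

fn main() -> Result<(), Box<dyn std::error::Error>> {
  let mut url = Url::parse("https://oidc.example/token")?;
  url.query_pairs_mut().append_pair("audience", "sigstore");
  assert_eq!(url.as_str(), "https://oidc.example/token?audience=sigstore");

  let value: http::HeaderValue = format!("Bearer {}", "abc123").parse()?;
  assert_eq!(value.to_str()?, "Bearer abc123");
  Ok(())
}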
@@ -685,11 +696,10 @@ async fn testify(

   let url = format!("{}/api/v1/log/entries", *DEFAULT_REKOR_URL);
   let res = http_client
-    .post(&url)
-    .json(&proposed_intoto_entry)
+    .post_json(url.parse()?, &proposed_intoto_entry)?
     .send()
     .await?;
-  let body: RekorEntry = res.json().await?;
+  let body: RekorEntry = http_util::body_to_json(res).await?;

   Ok(body)
 }
@@ -5,7 +5,7 @@ use std::collections::HashSet;
 use std::collections::VecDeque;

 use deno_ast::ModuleSpecifier;
-use deno_config::WorkspaceMemberConfig;
+use deno_config::workspace::JsrPackageConfig;
 use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
 use deno_graph::ModuleGraph;
@@ -114,7 +114,7 @@ impl PublishOrderGraph {

 pub fn build_publish_order_graph(
   graph: &ModuleGraph,
-  roots: &[WorkspaceMemberConfig],
+  roots: &[JsrPackageConfig],
 ) -> Result<PublishOrderGraph, AnyError> {
   let packages = build_pkg_deps(graph, roots)?;
   Ok(build_publish_order_graph_from_pkgs_deps(packages))
@@ -122,18 +122,23 @@ pub fn build_publish_order_graph(

 fn build_pkg_deps(
   graph: &deno_graph::ModuleGraph,
-  roots: &[WorkspaceMemberConfig],
+  roots: &[JsrPackageConfig],
 ) -> Result<HashMap<String, HashSet<String>>, AnyError> {
   let mut members = HashMap::with_capacity(roots.len());
   let mut seen_modules = HashSet::with_capacity(graph.modules().count());
   let roots = roots
     .iter()
-    .map(|r| (ModuleSpecifier::from_file_path(&r.dir_path).unwrap(), r))
+    .map(|r| {
+      (
+        ModuleSpecifier::from_directory_path(r.config_file.dir_path()).unwrap(),
+        r,
+      )
+    })
     .collect::<Vec<_>>();
-  for (root_dir_url, root) in &roots {
+  for (root_dir_url, pkg_config) in &roots {
     let mut deps = HashSet::new();
     let mut pending = VecDeque::new();
-    pending.extend(root.config_file.resolve_export_value_urls()?);
+    pending.extend(pkg_config.config_file.resolve_export_value_urls()?);
     while let Some(specifier) = pending.pop_front() {
       let Some(module) = graph.get(&specifier).and_then(|m| m.js()) else {
         continue;
@@ -168,12 +173,12 @@ fn build_pkg_deps(
             specifier.as_str().starts_with(dir_url.as_str())
           });
           if let Some(root) = found_root {
-            deps.insert(root.1.package_name.clone());
+            deps.insert(root.1.name.clone());
           }
         }
       }
     }
-    members.insert(root.package_name.clone(), deps);
+    members.insert(pkg_config.name.clone(), deps);
   }
   Ok(members)
 }
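Note: for orientation, build_pkg_deps returns a map from JSR package name to the set of sibling workspace packages it imports; build_publish_order_graph_from_pkgs_deps then orders that map topologically. A tiny illustration with made-up names:

use std::collections::{HashMap, HashSet};

fn main() {
  let mut members: HashMap<String, HashSet<String>> = HashMap::new();
  members.insert("@scope/a".to_string(), HashSet::from(["@scope/b".to_string()]));
  members.insert("@scope/b".to_string(), HashSet::new());
  // @scope/b has no workspace deps, so it publishes first, then @scope/a.
}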
@@ -34,7 +34,7 @@ pub struct PublishableTarball {
 }

 pub fn create_gzipped_tarball(
-  publish_paths: &[CollectedPublishPath],
+  publish_paths: Vec<CollectedPublishPath>,
   source_parser: LazyGraphSourceParser,
   diagnostics_collector: &PublishDiagnosticsCollector,
   unfurler: &SpecifierUnfurler,
@@ -45,15 +45,17 @@ pub fn create_gzipped_tarball(
   for path in publish_paths {
     let path_str = &path.relative_path;
     let specifier = &path.specifier;
-    let path = &path.path;

-    let content = resolve_content_maybe_unfurling(
-      path,
-      specifier,
-      unfurler,
-      source_parser,
-      diagnostics_collector,
-    )?;
+    let content = match path.maybe_content {
+      Some(content) => content.clone(),
+      None => resolve_content_maybe_unfurling(
+        &path.path,
+        specifier,
+        unfurler,
+        source_parser,
+        diagnostics_collector,
+      )?,
+    };

     files.push(PublishableTarballFile {
       path_str: path_str.clone(),
@@ -62,10 +64,11 @@ pub fn create_gzipped_tarball(
       hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
       size: content.len(),
     });
+    assert!(path_str.starts_with('/'));
     tar
       .add_file(format!(".{}", path_str), &content)
       .with_context(|| {
-        format!("Unable to add file to tarball '{}'", path.display())
+        format!("Unable to add file to tarball '{}'", path.path.display())
       })?;
   }

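Note: the new assert guards the invariant documented on CollectedPublishPath: relative_path is '/'-prefixed, so prepending '.' yields the entry name used inside the archive. A tiny check of that normalization:

fn main() {
  let path_str = "/LICENSE";
  assert!(path_str.starts_with('/'));
  assert_eq!(format!(".{}", path_str), "./LICENSE");
}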