0
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2025-02-07 23:06:50 -05:00

Merge branch 'main' into jupyter_comms

This commit is contained in:
Bartek Iwańczuk 2024-08-13 02:59:44 +02:00
commit b6476d461f
No known key found for this signature in database
GPG key ID: 0C6BCDDC3B3AD750
1174 changed files with 32546 additions and 13079 deletions

View file

@ -8,6 +8,9 @@
"json": {
"deno": true
},
"yaml": {
"quotes": "preferSingle"
},
"exec": {
"commands": [{
"command": "rustfmt --config imports_granularity=item",
@ -38,6 +41,8 @@
"tests/registry/",
"tests/specs/fmt",
"tests/specs/lint/bom",
"tests/specs/lint/syntax_error_reporting",
"tests/specs/publish/no_check_surfaces_syntax_error",
"tests/testdata/byte_order_mark.ts",
"tests/testdata/encoding",
"tests/testdata/file_extensions/ts_with_js_extension.js",
@ -60,10 +65,11 @@
"third_party"
],
"plugins": [
"https://plugins.dprint.dev/typescript-0.91.1.wasm",
"https://plugins.dprint.dev/typescript-0.91.6.wasm",
"https://plugins.dprint.dev/json-0.19.3.wasm",
"https://plugins.dprint.dev/markdown-0.17.1.wasm",
"https://plugins.dprint.dev/markdown-0.17.2.wasm",
"https://plugins.dprint.dev/toml-0.6.2.wasm",
"https://plugins.dprint.dev/exec-0.4.4.json@c207bf9b9a4ee1f0ecb75c594f774924baf62e8e53a2ce9d873816a408cecbf7"
"https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
"https://plugins.dprint.dev/g-plane/pretty_yaml-v0.4.0.wasm"
]
}

View file

@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 2;
const cacheVersion = 10;
const ubuntuX86Runner = "ubuntu-22.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl";
@ -629,6 +629,7 @@ const ci = {
path: [
"./target",
"!./target/*/gn_out",
"!./target/*/gn_root",
"!./target/*/*.zip",
"!./target/*/*.tar.gz",
].join("\n"),
@ -825,7 +826,7 @@ const ci = {
"!startsWith(github.ref, 'refs/tags/')",
].join("\n"),
run:
"target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js",
"target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js",
},
{
name: "Test (full, debug)",
@ -878,9 +879,9 @@ const ci = {
DENO_BIN: "./target/debug/deno",
},
run: [
"deno run -A --unstable --lock=tools/deno.lock.json \\",
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
" ./tests/wpt/wpt.ts setup",
"deno run -A --unstable --lock=tools/deno.lock.json \\",
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
' ./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"',
].join("\n"),
},
@ -891,9 +892,9 @@ const ci = {
DENO_BIN: "./target/release/deno",
},
run: [
"deno run -A --unstable --lock=tools/deno.lock.json \\",
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
" ./tests/wpt/wpt.ts setup",
"deno run -A --unstable --lock=tools/deno.lock.json \\",
"deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\\",
" ./tests/wpt/wpt.ts run --quiet --release \\",
' --binary="$DENO_BIN" \\',
" --json=wpt.json \\",

View file

@ -367,8 +367,8 @@ jobs:
path: |-
~/.cargo/registry/index
~/.cargo/registry/cache
key: '2-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '2-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
key: '10-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '10-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)'
- name: Restore cache build output (PR)
uses: actions/cache/restore@v4
@ -377,10 +377,11 @@ jobs:
path: |-
./target
!./target/*/gn_out
!./target/*/gn_root
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '2-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '10-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache
@ -493,7 +494,7 @@ jobs:
matrix.job == 'test' &&
matrix.profile == 'release' &&
!startsWith(github.ref, 'refs/tags/'))
run: target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js
run: target/release/deno run -A --unstable --config tests/config/deno.json ext/websocket/autobahn/fuzzingclient.js
- name: 'Test (full, debug)'
if: |-
!(matrix.skip) && (matrix.job == 'test' &&
@ -530,18 +531,18 @@ jobs:
env:
DENO_BIN: ./target/debug/deno
run: |-
deno run -A --unstable --lock=tools/deno.lock.json \
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts setup
deno run -A --unstable --lock=tools/deno.lock.json \
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts run --quiet --binary="$DENO_BIN"
- name: Run web platform tests (release)
if: '!(matrix.skip) && (matrix.wpt && matrix.profile == ''release'')'
env:
DENO_BIN: ./target/release/deno
run: |-
deno run -A --unstable --lock=tools/deno.lock.json \
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts setup
deno run -A --unstable --lock=tools/deno.lock.json \
deno run -A --unstable --lock=tools/deno.lock.json --config tests/config/deno.json\
./tests/wpt/wpt.ts run --quiet --release \
--binary="$DENO_BIN" \
--json=wpt.json \
@ -669,7 +670,7 @@ jobs:
!./target/*/gn_out
!./target/*/*.zip
!./target/*/*.tar.gz
key: '2-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '10-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-22.04

View file

@ -8,9 +8,9 @@ on:
default: 'patch'
type: choice
options:
- patch
- minor
- major
- patch
- minor
- major
required: true
jobs:

View file

@ -8,9 +8,9 @@ on:
default: 'patch'
type: choice
options:
- patch
- minor
- major
- patch
- minor
- major
required: true
jobs:

View file

@ -37,7 +37,7 @@ jobs:
- name: Install Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
python-version: '3.11'
- name: Log versions
run: |
@ -66,9 +66,9 @@ jobs:
- name: Run web platform tests
shell: bash
run: |
deno run --unstable -A --lock=tools/deno.lock.json \
deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
./tests/wpt/wpt.ts setup
deno run --unstable -A --lock=tools/deno.lock.json \
deno run --unstable -A --lock=tools/deno.lock.json --config=tests/config/deno.json \
./tests/wpt/wpt.ts run \ \
--binary=$(which deno) --quiet --release --no-ignore --json=wpt.json --wptreport=wptreport.json --exit-zero

1156
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -21,6 +21,7 @@ members = [
"ext/napi",
"ext/net",
"ext/node",
"ext/node_resolver",
"ext/url",
"ext/web",
"ext/webgpu",
@ -43,16 +44,16 @@ license = "MIT"
repository = "https://github.com/denoland/deno"
[workspace.dependencies]
deno_ast = { version = "=0.39.2", features = ["transpiling"] }
deno_core = { version = "0.292.0" }
deno_ast = { version = "=0.41.2", features = ["transpiling"] }
deno_core = { version = "0.303.0" }
deno_bench_util = { version = "0.152.0", path = "./bench_util" }
deno_bench_util = { version = "0.158.0", path = "./bench_util" }
deno_lockfile = "0.20.0"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
deno_permissions = { version = "0.18.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.166.0", path = "./runtime" }
deno_terminal = "0.1.1"
napi_sym = { version = "0.88.0", path = "./cli/napi/sym" }
deno_permissions = { version = "0.24.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.173.0", path = "./runtime" }
deno_terminal = "0.2.0"
napi_sym = { version = "0.94.0", path = "./cli/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.1"
@ -61,34 +62,35 @@ denokv_remote = "0.8.1"
denokv_sqlite = { default-features = false, version = "0.8.1" }
# exts
deno_broadcast_channel = { version = "0.152.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.90.0", path = "./ext/cache" }
deno_canvas = { version = "0.27.0", path = "./ext/canvas" }
deno_console = { version = "0.158.0", path = "./ext/console" }
deno_cron = { version = "0.38.0", path = "./ext/cron" }
deno_crypto = { version = "0.172.0", path = "./ext/crypto" }
deno_fetch = { version = "0.182.0", path = "./ext/fetch" }
deno_ffi = { version = "0.145.0", path = "./ext/ffi" }
deno_fs = { version = "0.68.0", path = "./ext/fs" }
deno_http = { version = "0.156.0", path = "./ext/http" }
deno_io = { version = "0.68.0", path = "./ext/io" }
deno_kv = { version = "0.66.0", path = "./ext/kv" }
deno_napi = { version = "0.88.0", path = "./ext/napi" }
deno_net = { version = "0.150.0", path = "./ext/net" }
deno_node = { version = "0.95.0", path = "./ext/node" }
deno_tls = { version = "0.145.0", path = "./ext/tls" }
deno_url = { version = "0.158.0", path = "./ext/url" }
deno_web = { version = "0.189.0", path = "./ext/web" }
deno_webgpu = { version = "0.125.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.158.0", path = "./ext/webidl" }
deno_websocket = { version = "0.163.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.153.0", path = "./ext/webstorage" }
deno_broadcast_channel = { version = "0.158.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.96.0", path = "./ext/cache" }
deno_canvas = { version = "0.33.0", path = "./ext/canvas" }
deno_console = { version = "0.164.0", path = "./ext/console" }
deno_cron = { version = "0.44.0", path = "./ext/cron" }
deno_crypto = { version = "0.178.0", path = "./ext/crypto" }
deno_fetch = { version = "0.188.0", path = "./ext/fetch" }
deno_ffi = { version = "0.151.0", path = "./ext/ffi" }
deno_fs = { version = "0.74.0", path = "./ext/fs" }
deno_http = { version = "0.162.0", path = "./ext/http" }
deno_io = { version = "0.74.0", path = "./ext/io" }
deno_kv = { version = "0.72.0", path = "./ext/kv" }
deno_napi = { version = "0.95.0", path = "./ext/napi" }
deno_net = { version = "0.156.0", path = "./ext/net" }
deno_node = { version = "0.101.0", path = "./ext/node" }
deno_tls = { version = "0.151.0", path = "./ext/tls" }
deno_url = { version = "0.164.0", path = "./ext/url" }
deno_web = { version = "0.195.0", path = "./ext/web" }
deno_webgpu = { version = "0.131.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.164.0", path = "./ext/webidl" }
deno_websocket = { version = "0.169.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.159.0", path = "./ext/webstorage" }
node_resolver = { version = "0.3.0", path = "./ext/node_resolver" }
aes = "=0.8.3"
anyhow = "1.0.57"
async-trait = "0.1.73"
base32 = "=0.4.0"
base64 = "0.21.4"
base32 = "=0.5.1"
base64 = "0.21.7"
bencher = "0.1"
brotli = "6.0.0"
bytes = "1.4.0"
@ -100,28 +102,32 @@ chrono = { version = "0.4", default-features = false, features = ["std", "serde"
console_static_text = "=0.8.1"
data-encoding = "2.3.3"
data-url = "=0.3.0"
deno_cache_dir = "=0.10.0"
deno_config = { version = "=0.20.0", default-features = false }
deno_cache_dir = "=0.10.2"
deno_package_json = { version = "=0.1.1", default-features = false }
dlopen2 = "0.6.1"
ecb = "=0.1.2"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
encoding_rs = "=0.8.33"
fast-socks5 = "0.9.6"
faster-hex = "0.9"
fastwebsockets = { version = "0.6", features = ["upgrade", "unstable-split"] }
fastwebsockets = { version = "0.8", features = ["upgrade", "unstable-split"] }
filetime = "0.2.16"
flate2 = { version = "1.0.26", default-features = false }
flate2 = { version = "1.0.30", default-features = false }
fs3 = "0.5.0"
futures = "0.3.21"
glob = "0.3.1"
h2 = "0.4.4"
http = "1.0"
http-body-util = "0.1"
http-body = "1.0"
http-body-util = "0.1.2"
http_v02 = { package = "http", version = "0.2.9" }
httparse = "1.8.0"
hyper = { version = "=1.4.0", features = ["full"] }
hyper-util = { version = "=0.1.6", features = ["tokio", "server", "server-auto"] }
hyper = { version = "1.4.1", features = ["full"] }
hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3"
jsonc-parser = { version = "=0.23.0", features = ["serde"] }
lazy-regex = "3"
libc = "0.2.126"
@ -130,7 +136,7 @@ log = "0.4.20"
lsp-types = "=0.94.1" # used by tower-lsp and "proposed" feature is unstable in patch releases
memmem = "0.1.1"
monch = "=0.5.0"
notify = "=5.0.0"
notify = "=6.1.1"
num-bigint = { version = "0.4", features = ["rand"] }
once_cell = "1.17.1"
os_pipe = { version = "=1.1.5", features = ["io_safety"] }
@ -146,12 +152,12 @@ prost = "0.11"
prost-build = "0.11"
rand = "=0.8.5"
regex = "^1.7.0"
reqwest = { version = "=0.12.4", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
ring = "^0.17.0"
rusqlite = { version = "=0.29.0", features = ["unlock_notify", "bundled"] }
rustls = "0.22.4"
rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] }
rustls-pemfile = "2"
rustls-tokio-stream = "=0.2.23"
rustls-tokio-stream = "=0.3.0"
rustls-webpki = "0.102"
rustyline = "=13.0.0"
saffron = "=0.1.0"
@ -171,16 +177,22 @@ tar = "=0.4.40"
tempfile = "3.4.0"
termcolor = "1.1.3"
thiserror = "1.0.61"
tokio = { version = "1.36.0", features = ["full"] }
tokio = { version = "=1.36.0", features = ["full"] }
tokio-metrics = { version = "0.3.0", features = ["rt"] }
tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
tokio-socks = "0.5.1"
tokio-util = "0.7.4"
tower = { version = "0.4.13", default-features = false, features = ["util"] }
tower-http = { version = "0.5.2", features = ["decompression-br", "decompression-gzip"] }
tower-lsp = { version = "=0.20.0", features = ["proposed"] }
tower-service = "0.3.2"
twox-hash = "=1.6.3"
# Upgrading past 2.4.1 may cause WPT failures
url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] }
webpki-roots = "0.26"
zeromq = { version = "=0.3.4", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
which = "4.2.5"
zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4"
# crypto
@ -189,7 +201,7 @@ rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "
# webgpu
raw-window-handle = "0.6.0"
wgpu-core = "0.20"
wgpu-core = "0.21.1"
wgpu-types = "0.20"
# macros
@ -202,7 +214,7 @@ nix = "=0.26.2"
# windows deps
junction = "=0.2.0"
winapi = "=0.3.9"
windows-sys = { version = "0.48.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem"] }
windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry"] }
winres = "=0.1.12"
# NB: the `bench` and `release` profiles must remain EXACTLY the same.

View file

@ -6,6 +6,196 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install
### 1.45.5 / 2024.07.31
- fix(cli): Unhide publish subcommand help string (#24787)
- fix(compile/windows): handle cjs re-export of relative path with parent
component (#24795)
- fix(ext/node): handle node child_process with --v8-options flag (#24804)
- fix(ext/node): prevent panic in http2.connect with uppercase header names
(#24780)
- fix(ext/webgpu): don't crash while constructing GPUOutOfMemoryError (#24807)
- fix(http): Adjust hostname display for Windows when using 0.0.0.0 (#24698)
- fix(node): Rework node:child_process IPC (#24763)
- fix(node): support wildcards in package.json imports (#24794)
- fix(node/fs/promises): watch should be async iterable (#24805)
- fix(node/timers/promises): add scheduler APIs (#24802)
- fix(npmrc): skip loading .npmrc in home dir on permission error (#24758)
- fix(types): fix streams types (#24770)
- fix(unstable/compile): handle byonm import in sub dir (#24755)
- fix: actually add missing `node:readline/promises` module (#24772)
- fix: adapt to new jupyter runtime API and include session IDs (#24762)
- perf(ext/fetch): speed up `resp.clone()` (#24812)
- perf(ext/node): improve `Buffer` from string performance (#24567)
### 1.45.4 / 2024.07.26
- Reland "fix: CFunctionInfo and CTypeInfo leaks (#24634)" (#24692)
- fix(ext/fetch): respect authority from URL (#24705)
- fix(ext/fetch): use correct ALPN to proxies (#24696)
- fix(ext/node): read correct CPU usage stats on Linux (#24732)
- fix(ext/node/net): emit `error` before `close` when connection is refused
(#24656)
- fix(future): Emit `deno install` warning less often, suggest `deno install` in
error message (#24706)
- fix(lsp): rewrite import for 'infer return type' action (#24685)
- fix(node): better detection for when to surface node resolution errors
(#24653)
- fix(node): cjs pkg dynamically importing esm-only pkg fails (#24730)
- fix(node/worker_threads): support `port.once()` (#24725)
- fix(publish): workspace included license file had incorrect path (#24747)
- fix(unstable): move sloppy-import warnings to lint rule (#24710)
- fix(upgrade): do not error if config in cwd invalid (#24689)
- fix(workspaces/publish): include the license file from the workspace root if
not in pkg (#24714)
- fix: enable the reporting of parsing related problems when running deno lint
(#24332)
- fix: support `npm:bindings` and `npm:callsites` packages (#24727)
- fix: update lsp error message of 'relative import path' to 'use deno add' for
npm/jsr packages (#24524)
- fix: decode percent-encoding source string in `Error.stack` (#24709)
- perf: update deno_doc (#24700)
### 1.45.3 / 2024.07.22
- Reland "refactor(fetch): reimplement fetch with hyper instead of reqwest"
(#24593)
- fix(cli): Create child node_modules for conflicting dependency versions,
respect aliases in package.json (#24609)
- fix(cli): Respect implied BYONM from DENO_FUTURE in `deno task` (#24652)
- fix(cli): add NAPI support in standalone mode (#24642)
- fix(cron): improve error message for invalid cron names (#24644)
- fix(docs): fix some deno.land/manual broken urls (#24557)
- fix(ext/console): Error Cause Not Inspect-Formatted when printed (#24526)
- fix(ext/node): do not expose `self` global in node (#24637)
- fix(ext/node): http request uploads of subarray of buffer should work (#24603)
- fix(ext/node): stat.mode on windows (#24434)
- fix(fmt): was sometimes putting comments in front of commas in parameter lists
(#24650)
- fix(init): use bare specifier for `jsr:@std/assert` (#24581)
- fix(lsp): hang when caching failed (#24651)
- fix(lsp): scope attribution for asset documents (#24663)
- fix(lsp): support npm workspaces and fix some resolution issues (#24627)
- fix(node): Fix `--allow-scripts` with no `deno.json` (#24533)
- fix(node): support `tty.hasColors()` and `tty.getColorDepth()` (#24619)
- fix(npm): handle packages with only pre-released 0.0.0 versions (#24563)
- fix(publish): surface syntax errors when using --no-check (#24620)
- fix(publish): warn about missing license file (#24677)
- fix(std/http2): release window capacity back to remote stream (#24576)
- fix(types): Conform lib.deno_web.d.ts to lib.dom.d.ts and lib.webworker.d.ts
(#24599)
- fix(workspace): do not resolve to self for npm pkg depending on matching req
(#24591)
- fix(workspace): support resolving bare specifiers to npm pkgs within a
workspace (#24611)
- fix: make vendor cache manifest more deterministic (#24658)
- fix: missing `emitWarning` import (#24587)
- perf(ext/node): optimize fs.exists[Sync] (#24613)
### 1.45.2 / 2024.07.12
- fix(cli/init): broken link in deno init sample template (#24545)
- fix(config): regression - should not discover npm workspace for nested
deno.json not in workspace (#24559)
- fix(ext/node): handle prefix mapping for IPv4-mapped IPv6 addresses (#24546)
- fix(ext/webgpu): GPUDevice.createRenderPipelineAsync should return a Promise
(#24349)
- fix(node): revert invalid package target change (#24539)
- fix(publish): show dirty files on dirty check failure (#24541)
- fix: include already seen deps in lockfile dep tracking (#24556)
- fix: unblock fsevents native module (#24542)
- perf(ext/crypto): make randomUUID() 5x faster (#24510)
### 1.45.1 / 2024.07.11
- fix(node): Ignore broken default install scripts (#24534)
- fix(npm): only warn about lifecycle scripts not being run when setting up
directory (#24530)
- fix(workspace): allow using --import-map flag with workspace (#24527)
### 1.45.0 / 2024.07.10
- BREAKING(unstable/ffi): remove callback reentrant flag (#24367)
- feat(cli): Add `--frozen` flag to error out if lockfile is out of date
(#24355)
- feat(cli): `deno init --lib` (#22499)
- feat(compile): support `--env` (#24166)
- feat(ext/crypto): make `deriveBits` `length` parameter optional and nullable
(#24426)
- feat(ext/web): add `Blob.prototype.bytes()` (#24148)
- feat(jsr): support publishing jsr packages in npm workspaces (#24507)
- feat(jupyter): support `confirm` and `prompt` in notebooks (#23592)
- feat(lsp): ts language service scopes (#24345)
- feat(node): Support executing npm package lifecycle scripts
(preinstall/install/postinstall) (#24487)
- feat(workspace): support object config (#24483)
- feat: Deprecate `--lock-write` flag (#24436)
- feat: Upgrade to TypeScript 5.5.2 (#24326)
- feat: add `__tests__` to test file detection defaults (#24443)
- feat: deprecate `deno vendor` (#22183)
- feat: npm workspace and better Deno workspace support (#24334)
- feat: support wildcards in npm workspaces (#24471)
- feat: upgrade deno_core (#24364)
- feat: upgrade deno_core to 0.293.0 (#24482)
- fix(check): CJS types importing dual ESM/CJS package should prefer CJS types
(#24492)
- fix(compile): atomically write compile output (#24378)
- fix(compile): prevent setting unstable feature twice (#24381)
- fix(ext/node): Add `fs.lutimes` / `fs.lutimesSync` (#23172)
- fix(ext/node): add `Module.parent` (#24333)
- fix(ext/node): add ServerResponse#appendHeader (#24216)
- fix(ext/node): add Symbol.toStringTag to KeyObject instances (#24377)
- fix(ext/node): discover .npmrc in user's homedir (#24021)
- fix(ext/node): don't panic on invalid utf-8 in pem (#24303)
- fix(ext/node): don't wait for end() call to send http client request (#24390)
- fix(ext/node): http chunked writes hangs (#24428)
- fix(ext/node): ignore stream error during enqueue (#24243)
- fix(ext/node): make next tick queue resilient to `Array.prototype` tampering
(#24361)
- fix(ext/node): rewrite `crypto.Hash` (#24302)
- fix(ext/node): rewrite digest handling (#24392)
- fix(ext/node): use cppgc for node:zlib (#24267)
- fix(ext/webgpu): fix `GPUUncapturedErrorEvent` parent type (#24369)
- fix(ext/websocket): drop connection when close frame not ack (#24301)
- fix(lsp): correct scope attribution for injected @types/node (#24404)
- fix(lsp): do sloppy resolution for node-to-node imports in byonm (#24481)
- fix(lsp): don't use global cache paths for scope allocation (#24353)
- fix(lsp): inherit workspace-root-only fields in members (#24440)
- fix(lsp): respect lockfile redirects entries for resolution (#24365)
- fix(lsp): use CliLockfile (#24387)
- fix(net): handle panic on Windows for Unix socket usage in Deno.serve()
(#24423)
- fix(net): set correct max size for Datagram (#21611)
- fix(node): Implement `fs.lchown` (and `process.getegid`) (#24418)
- fix(node): add missing readline/promises module (#24336)
- fix(node/assert): throws not checking error instance (#24466)
- fix(node/http): don't error if request destroyed before send (#24497)
- fix(node/http): don't send destroyed requests (#24498)
- fix(node/http): don't throw on .address() before .listen() (#24432)
- fix(node/http): support all `.writeHead()` signatures (#24469)
- fix(node/perf_hooks): stub eventLoopUtilization (#24501)
- fix(node/v8): stub serializer methods (#24502)
- fix(permissions): handle ipv6 addresses correctly (#24397)
- fix(publish): unfurling should always be done with the package json (#24435)
- fix(task): do not propagate env changes outside subshells (#24279)
- fix(windows): check USERPROFILE env var for finding home directory (#24384)
- fix(workspace): better cli file argument handling (#24447)
- fix: Add sys permission kinds for node compat (#24242)
- fix: add warning for invalid unstable feature use in deno.json/jsonc (#24120)
- fix: do not download compilerOptions -> types when not type checking (#24473)
- fix: do not return undefined for missing global properties (#24474)
- fix: make .setup-cache.bin in node_modules more reproducible (#24480)
- fix: memory leak when transpiling (#24490)
- fix: node-api get_value_string_utf8 should use utf8_length (#24193)
- fix: panic when piping "deno help" or "deno --version" (#22917)
- fix: test in presence of `.npmrc` (#24486)
- fix: upgrade deno_core to 0.291.0 (#24297)
- perf(ext/node): improve `Buffer.from(buffer)` (#24352)
- perf(ext/websocket): avoid global interceptor overhead (#24284)
- perf(ws): optimize fastwebsockets in release profile (#24277)
- perf: optimize Buffer.from("base64") for forgiving-base64 conforming input
(#24346)
### 1.44.4 / 2024.06.19
- Revert "chore: upgrade to reqwest 0.12.4 and rustls 0.22 (#24056)" (#24262)

View file

@ -2,7 +2,7 @@
[package]
name = "deno_bench_util"
version = "0.152.0"
version = "0.158.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno"
version = "1.44.4"
version = "1.46.0-rc.0"
authors.workspace = true
default-run = "deno"
edition.workspace = true
@ -65,20 +65,23 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
deno_config = { workspace = true, features = ["workspace"] }
deno_config = { version = "=0.28.0", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.141.0", features = ["html", "syntect"] }
deno_emit = "=0.43.0"
deno_graph = { version = "=0.79.0", features = ["tokio_executor"] }
deno_lint = { version = "=0.60.0", features = ["docs"] }
deno_doc = { version = "0.146.0", features = ["html", "syntect"] }
deno_emit = "=0.44.0"
deno_graph = { version = "=0.81.2" }
deno_lint = { version = "=0.62.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm = "=0.21.4"
deno_package_json.workspace = true
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver = "=0.5.6"
deno_semver = "=0.5.10"
deno_task_shell = "=0.17.0"
deno_terminal.workspace = true
eszip = "=0.72.1"
eszip = "=0.73.0"
libsui = "0.1.0"
napi_sym.workspace = true
node_resolver.workspace = true
async-trait.workspace = true
base32.workspace = true
@ -87,9 +90,9 @@ bincode = "=1.3.3"
bytes.workspace = true
cache_control.workspace = true
chrono = { workspace = true, features = ["now"] }
clap = { version = "=4.4.17", features = ["env", "string"] }
clap_complete = "=4.4.7"
clap_complete_fig = "=4.4.2"
clap = { version = "=4.5.13", features = ["env", "string", "wrap_help"] }
clap_complete = "=4.5.12"
clap_complete_fig = "=4.5.2"
color-print = "0.3.5"
console_static_text.workspace = true
dashmap = "5.5.3"
@ -98,8 +101,8 @@ dissimilar = "=1.0.4"
dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.3"
dprint-plugin-jupyter = "=0.1.3"
dprint-plugin-markdown = "=0.17.1"
dprint-plugin-typescript = "=0.91.1"
dprint-plugin-markdown = "=0.17.2"
dprint-plugin-typescript = "=0.91.6"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0"
faster-hex.workspace = true
@ -107,15 +110,20 @@ faster-hex.workspace = true
flate2.workspace = true
fs3.workspace = true
glob = "0.3.1"
http.workspace = true
http-body.workspace = true
http-body-util.workspace = true
hyper-util.workspace = true
import_map = { version = "=0.20.0", features = ["ext"] }
indexmap.workspace = true
jsonc-parser.workspace = true
jupyter_runtime = { package = "runtimelib", version = "0.13.0" }
jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
lazy-regex.workspace = true
libc.workspace = true
libz-sys.workspace = true
log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true
malva = "=0.8.0"
memmem.workspace = true
monch.workspace = true
notify.workspace = true
@ -125,10 +133,10 @@ p256.workspace = true
pathdiff = "0.2.1"
percent-encoding.workspace = true
phf.workspace = true
pretty_yaml = "=0.4.0"
quick-junit = "^0.3.5"
rand = { workspace = true, features = ["small_rng"] }
regex.workspace = true
reqwest.workspace = true
ring.workspace = true
rustyline.workspace = true
rustyline-derive = "=0.7.0"
@ -137,6 +145,7 @@ serde_repr.workspace = true
sha2.workspace = true
shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] }
strsim = "0.11.1"
tar.workspace = true
tempfile.workspace = true
text-size = "=1.1.0"
@ -146,9 +155,11 @@ tokio.workspace = true
tokio-util.workspace = true
tower-lsp.workspace = true
twox-hash.workspace = true
typed-arena = "=2.0.1"
typed-arena = "=2.0.2"
uuid = { workspace = true, features = ["serde"] }
which.workspace = true
zeromq.workspace = true
zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] }
zstd.workspace = true
[target.'cfg(windows)'.dependencies]

View file

@ -7,8 +7,64 @@ use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
#[cfg(test)] // happens to only be used by the tests at the moment
pub struct DenoConfigFsAdapter<'a>(
pub &'a dyn deno_runtime::deno_fs::FileSystem,
);
#[cfg(test)]
impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
fn read_to_string_lossy(
&self,
path: &std::path::Path,
) -> Result<String, std::io::Error> {
self
.0
.read_text_file_lossy_sync(path, None)
.map_err(|err| err.into_io_error())
}
fn stat_sync(
&self,
path: &std::path::Path,
) -> Result<deno_config::fs::FsMetadata, std::io::Error> {
self
.0
.stat_sync(path)
.map(|stat| deno_config::fs::FsMetadata {
is_file: stat.is_file,
is_directory: stat.is_directory,
is_symlink: stat.is_symlink,
})
.map_err(|err| err.into_io_error())
}
fn read_dir(
&self,
path: &std::path::Path,
) -> Result<Vec<deno_config::fs::FsDirEntry>, std::io::Error> {
self
.0
.read_dir_sync(path)
.map_err(|err| err.into_io_error())
.map(|entries| {
entries
.into_iter()
.map(|e| deno_config::fs::FsDirEntry {
path: path.join(e.name),
metadata: deno_config::fs::FsMetadata {
is_file: e.is_file,
is_directory: e.is_directory,
is_symlink: e.is_symlink,
},
})
.collect()
})
}
}
pub fn deno_json_deps(
config: &deno_config::ConfigFile,
config: &deno_config::deno_json::ConfigFile,
) -> HashSet<JsrDepPackageReq> {
let values = imports_values(config.json.imports.as_ref())
.into_iter()

File diff suppressed because it is too large Load diff

View file

@ -1,14 +1,18 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::BTreeSet;
use std::path::PathBuf;
use deno_config::deno_json::ConfigFile;
use deno_config::workspace::Workspace;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_node::PackageJson;
use crate::args::ConfigFile;
use crate::cache;
use crate::util::fs::atomic_write_file_with_retries;
use crate::Flags;
@ -92,9 +96,37 @@ impl CliLockfile {
pub fn discover(
flags: &Flags,
maybe_config_file: Option<&ConfigFile>,
maybe_package_json: Option<&PackageJson>,
workspace: &Workspace,
) -> Result<Option<CliLockfile>, AnyError> {
fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> {
let Some(pkg_json) = maybe_pkg_json else {
return Default::default();
};
pkg_json
.resolve_local_package_json_deps()
.values()
.filter_map(|dep| dep.as_ref().ok())
.filter_map(|dep| match dep {
PackageJsonDepValue::Req(req) => Some(req),
PackageJsonDepValue::Workspace(_) => None,
})
.map(|r| format!("npm:{}", r))
.collect()
}
fn deno_json_deps(
maybe_deno_json: Option<&ConfigFile>,
) -> BTreeSet<String> {
maybe_deno_json
.map(|c| {
crate::args::deno_json::deno_json_deps(c)
.into_iter()
.map(|req| req.to_string())
.collect()
})
.unwrap_or_default()
}
if flags.no_lock
|| matches!(
flags.subcommand,
@ -109,23 +141,9 @@ impl CliLockfile {
let filename = match flags.lock {
Some(ref lock) => PathBuf::from(lock),
None => match maybe_config_file {
Some(config_file) => {
if config_file.specifier.scheme() == "file" {
match config_file.resolve_lockfile_path()? {
Some(path) => path,
None => return Ok(None),
}
} else {
return Ok(None);
}
}
None => match maybe_package_json {
Some(package_json) => {
package_json.path.parent().unwrap().join("deno.lock")
}
None => return Ok(None),
},
None => match workspace.resolve_lockfile_path()? {
Some(path) => path,
None => return Ok(None),
},
};
@ -141,6 +159,54 @@ impl CliLockfile {
} else {
Self::read_from_path(filename, flags.frozen_lockfile)?
};
// initialize the lockfile with the workspace's configuration
let root_url = workspace.root_dir();
let root_folder = workspace.root_folder_configs();
let config = deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
},
members: workspace
.config_folders()
.iter()
.filter(|(folder_url, _)| *folder_url != root_url)
.filter_map(|(folder_url, folder)| {
Some((
{
// should never be None here, but just ignore members that
// do fail for this
let mut relative_path = root_url.make_relative(folder_url)?;
if relative_path.ends_with('/') {
// make it slightly cleaner by removing the trailing slash
relative_path.pop();
}
relative_path
},
{
let config = WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(folder.pkg_json.as_deref()),
dependencies: deno_json_deps(folder.deno_json.as_deref()),
};
if config.package_json_deps.is_empty()
&& config.dependencies.is_empty()
{
// exclude empty workspace members
return None;
}
config
},
))
})
.collect(),
};
lockfile.set_workspace_config(deno_lockfile::SetWorkspaceConfigOptions {
no_npm: flags.no_npm,
no_config: flags.config_flag == super::ConfigFlag::Disabled,
config,
});
Ok(Some(lockfile))
}
pub fn read_from_path(

File diff suppressed because it is too large Load diff

View file

@ -3,22 +3,31 @@
use std::path::PathBuf;
use std::sync::Arc;
use deno_config::package_json::PackageJsonDepValue;
use deno_config::workspace::Workspace;
use deno_package_json::PackageJsonDepValue;
use deno_semver::package::PackageReq;
#[derive(Debug)]
pub struct InstallNpmRemotePkg {
pub alias: String,
// todo(24419): use this when setting up the node_modules dir
#[allow(dead_code)]
pub base_dir: PathBuf,
pub req: PackageReq,
}
#[derive(Debug)]
pub struct InstallNpmWorkspacePkg {
pub alias: String,
pub pkg_dir: PathBuf,
// todo(24419): use this when setting up the node_modules dir
#[allow(dead_code)]
pub base_dir: PathBuf,
pub target_dir: PathBuf,
}
// todo(#24419): this is not correct, but it's good enough for now.
// We need deno_npm to be able to understand workspace packages and
// then have a way to properly lay them out on the file system
#[derive(Debug, Default)]
pub struct PackageJsonInstallDepsProvider {
remote_pkg_reqs: Vec<PackageReq>,
remote_pkgs: Vec<InstallNpmRemotePkg>,
workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
}
@ -29,27 +38,35 @@ impl PackageJsonInstallDepsProvider {
pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
let mut workspace_pkgs = Vec::new();
let mut remote_pkg_reqs = Vec::new();
let mut remote_pkgs = Vec::new();
let workspace_npm_pkgs = workspace.npm_packages();
for pkg_json in workspace.package_jsons() {
let deps = pkg_json.resolve_local_package_json_deps();
let mut pkg_reqs = Vec::with_capacity(deps.len());
let mut pkg_pkgs = Vec::with_capacity(deps.len());
for (alias, dep) in deps {
let Ok(dep) = dep else {
continue;
};
match dep {
PackageJsonDepValue::Req(pkg_req) => {
if let Some(pkg) = workspace_npm_pkgs
.iter()
.find(|pkg| pkg.matches_req(&pkg_req))
{
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_req(&pkg_req)
// do not resolve to the current package
&& pkg.pkg_json.path != pkg_json.path
});
if let Some(pkg) = workspace_pkg {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias,
pkg_dir: pkg.pkg_json.dir_path().to_path_buf(),
base_dir: pkg_json.dir_path().to_path_buf(),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
} else {
pkg_reqs.push(pkg_req)
pkg_pkgs.push(InstallNpmRemotePkg {
alias,
base_dir: pkg_json.dir_path().to_path_buf(),
req: pkg_req,
});
}
}
PackageJsonDepValue::Workspace(version_req) => {
@ -58,27 +75,28 @@ impl PackageJsonInstallDepsProvider {
}) {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias,
pkg_dir: pkg.pkg_json.dir_path().to_path_buf(),
base_dir: pkg_json.dir_path().to_path_buf(),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
}
}
}
}
// sort within each package
pkg_reqs.sort();
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
remote_pkg_reqs.extend(pkg_reqs);
remote_pkgs.extend(pkg_pkgs);
}
remote_pkg_reqs.shrink_to_fit();
remote_pkgs.shrink_to_fit();
workspace_pkgs.shrink_to_fit();
Self {
remote_pkg_reqs,
remote_pkgs,
workspace_pkgs,
}
}
pub fn remote_pkg_reqs(&self) -> &Vec<PackageReq> {
&self.remote_pkg_reqs
pub fn remote_pkgs(&self) -> &Vec<InstallNpmRemotePkg> {
&self.remote_pkgs
}
pub fn workspace_pkgs(&self) -> &Vec<InstallNpmWorkspacePkg> {

View file

@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
let total = 5;
let current = "";

View file

@ -1,4 +1,4 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
/** @jsx h */
import results from "./deno.json" assert { type: "json" };

View file

@ -1,3 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { renderToReadableStream } from "https://esm.run/react-dom/server";
import * as React from "https://esm.run/react";
const { serve } = Deno;

View file

@ -124,6 +124,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
"check",
"--reload",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
@ -135,6 +137,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
"--reload",
"--no-check",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
@ -144,6 +148,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
&[
"bundle",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
@ -154,6 +160,8 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
"bundle",
"--no-check",
"--unstable",
"--config",
"tests/config/deno.json",
"tests/util/std/http/file_server_test.ts",
],
None,
@ -320,6 +328,8 @@ fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
deno_exe.to_str().unwrap(),
"bundle",
"--unstable",
"--config",
"tests/config/deno.json",
url,
&path,
],

View file

@ -1,3 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { bench, run } from "mitata";
import { createRequire } from "module";

View file

@ -1,5 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// From https://github.com/just-js/benchmarks/tree/main/01-stdio
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
#include <stdlib.h>
#include <stdio.h>
@ -26,4 +26,4 @@ int main(int argc, char *argv[]) {
exit(1);
}
fprintf(stdout, "size %lu reads %u blocksize %u\n", size, reads, blocksize);
}
}

View file

@ -470,7 +470,7 @@ mod tests {
};
static TEST_DB_BLACKHOLE: CacheDBConfiguration = CacheDBConfiguration {
table_initializer: "syntax error", // intentially cause an error
table_initializer: "syntax error", // intentionally cause an error
on_version_change: "",
preheat_queries: &[],
on_failure: CacheFailure::Blackhole,

22
cli/cache/emit.rs vendored
View file

@ -6,6 +6,7 @@ use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::unsync::sync::AtomicFlag;
use serde::Deserialize;
use serde::Serialize;
@ -19,10 +20,10 @@ struct EmitMetadata {
}
/// The cache that stores previously emitted files.
#[derive(Clone)]
pub struct EmitCache {
disk_cache: DiskCache,
cli_version: &'static str,
emit_failed_flag: AtomicFlag,
}
impl EmitCache {
@ -30,6 +31,7 @@ impl EmitCache {
Self {
disk_cache,
cli_version: crate::version::deno(),
emit_failed_flag: Default::default(),
}
}
@ -87,12 +89,10 @@ impl EmitCache {
code: &[u8],
) {
if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) {
// should never error here, but if it ever does don't fail
if cfg!(debug_assertions) {
panic!("Error saving emit data ({specifier}): {err}");
} else {
log::debug!("Error saving emit data({}): {}", specifier, err);
}
// might error in cases such as a readonly file system
log::debug!("Error saving emit data ({}): {}", specifier, err);
// assume the cache can't be written to and disable caching to it
self.emit_failed_flag.raise();
}
}
@ -102,6 +102,11 @@ impl EmitCache {
source_hash: u64,
code: &[u8],
) -> Result<(), AnyError> {
if self.emit_failed_flag.is_raised() {
log::debug!("Skipped emit cache save of {}", specifier);
return Ok(());
}
let meta_filename = self
.get_meta_filename(specifier)
.ok_or_else(|| anyhow!("Could not get meta filename."))?;
@ -161,6 +166,7 @@ mod test {
let cache = EmitCache {
disk_cache: disk_cache.clone(),
cli_version: "1.0.0",
emit_failed_flag: Default::default(),
};
let to_string =
|bytes: Vec<u8>| -> String { String::from_utf8(bytes).unwrap() };
@ -192,6 +198,7 @@ mod test {
let cache = EmitCache {
disk_cache: disk_cache.clone(),
cli_version: "2.0.0",
emit_failed_flag: Default::default(),
};
assert_eq!(cache.get_emit_code(&specifier1, 10), None);
cache.set_emit_code(&specifier1, 5, emit_code1.as_bytes());
@ -200,6 +207,7 @@ mod test {
let cache = EmitCache {
disk_cache,
cli_version: "2.0.0",
emit_failed_flag: Default::default(),
};
assert_eq!(
cache.get_emit_code(&specifier1, 5).map(to_string),

4
cli/cache/mod.rs vendored
View file

@ -106,7 +106,7 @@ pub use deno_cache_dir::HttpCache;
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher {
emit_cache: EmitCache,
emit_cache: Arc<EmitCache>,
file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>,
@ -118,7 +118,7 @@ pub struct FetchCacher {
impl FetchCacher {
pub fn new(
emit_cache: EmitCache,
emit_cache: Arc<EmitCache>,
file_fetcher: Arc<FileFetcher>,
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>,

View file

@ -18,7 +18,7 @@ use deno_graph::ModuleGraph;
use std::sync::Arc;
pub struct Emitter {
emit_cache: EmitCache,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
transpile_and_emit_options:
Arc<(deno_ast::TranspileOptions, deno_ast::EmitOptions)>,
@ -28,7 +28,7 @@ pub struct Emitter {
impl Emitter {
pub fn new(
emit_cache: EmitCache,
emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions,
emit_options: deno_ast::EmitOptions,

View file

@ -17,7 +17,6 @@ use deno_graph::ModuleGraphError;
use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError;
use import_map::ImportMapError;
use std::fmt::Write;
fn get_import_map_error_class(_: &ImportMapError) -> &'static str {
"URIError"
@ -30,7 +29,6 @@ fn get_diagnostic_class(_: &ParseDiagnostic) -> &'static str {
fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
use deno_graph::JsrLoadError;
use deno_graph::NpmLoadError;
use deno_graph::WorkspaceLoadError;
match err {
ModuleGraphError::ResolutionError(err)
@ -72,10 +70,6 @@ fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
| JsrLoadError::PackageVersionNotFound(_)
| JsrLoadError::UnknownExport { .. } => "NotFound",
},
ModuleLoadError::Workspace(err) => match err {
WorkspaceLoadError::MemberInvalidExportPath { .. } => "TypeError",
WorkspaceLoadError::MissingMemberExports { .. } => "NotFound",
},
},
},
}
@ -112,17 +106,5 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class)
})
.unwrap_or_else(|| {
if cfg!(debug) {
log::warn!(
"Error '{}' contains boxed error of unknown type:{}",
e,
e.chain().fold(String::new(), |mut output, e| {
let _ = write!(output, "\n {e:?}");
output
})
);
}
"Error"
})
.unwrap_or("Error")
}

View file

@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::CliLockfile;
use crate::args::get_root_cert_store;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::Flags;
@ -44,6 +45,7 @@ use crate::resolver::SloppyImportsResolver;
use crate::standalone::DenoCompileBinaryWriter;
use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector;
use crate::tools::lint::LintRuleProvider;
use crate::tools::run::hmr::HmrRunner;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path_maybe_not_exists;
@ -51,59 +53,61 @@ use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions;
use std::collections::BTreeSet;
use std::path::PathBuf;
use deno_config::package_json::PackageJsonDepValue;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_config::ConfigFile;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::FeatureChecker;
use deno_lockfile::WorkspaceMemberConfig;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer;
use log::warn;
use node_resolver::analyze::NodeCodeTranslator;
use once_cell::sync::OnceCell;
use std::future::Future;
use std::sync::Arc;
pub struct CliFactoryBuilder {
watcher_communicator: Option<Arc<WatcherCommunicator>>,
struct CliRootCertStoreProvider {
cell: OnceCell<RootCertStore>,
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
}
impl CliFactoryBuilder {
pub fn new() -> Self {
impl CliRootCertStoreProvider {
pub fn new(
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
) -> Self {
Self {
watcher_communicator: None,
cell: Default::default(),
maybe_root_path,
maybe_ca_stores,
maybe_ca_data,
}
}
}
pub fn build_from_flags(self, flags: Flags) -> Result<CliFactory, AnyError> {
Ok(self.build_from_cli_options(Arc::new(CliOptions::from_flags(flags)?)))
}
pub fn build_from_flags_for_watcher(
mut self,
flags: Flags,
watcher_communicator: Arc<WatcherCommunicator>,
) -> Result<CliFactory, AnyError> {
self.watcher_communicator = Some(watcher_communicator);
self.build_from_flags(flags)
}
pub fn build_from_cli_options(self, options: Arc<CliOptions>) -> CliFactory {
CliFactory {
watcher_communicator: self.watcher_communicator,
options,
services: Default::default(),
}
impl RootCertStoreProvider for CliRootCertStoreProvider {
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
self
.cell
.get_or_try_init(|| {
get_root_cert_store(
self.maybe_root_path.clone(),
self.maybe_ca_stores.clone(),
self.maybe_ca_data.clone(),
)
})
.map_err(|e| e.into())
}
}
@ -116,6 +120,10 @@ impl<T> Default for Deferred<T> {
}
impl<T> Deferred<T> {
pub fn from_value(value: T) -> Self {
Self(once_cell::unsync::OnceCell::from(value))
}
#[inline(always)]
pub fn get_or_try_init(
&self,
@ -149,17 +157,17 @@ impl<T> Deferred<T> {
#[derive(Default)]
struct CliFactoryServices {
cli_options: Deferred<Arc<CliOptions>>,
deno_dir_provider: Deferred<Arc<DenoDirProvider>>,
caches: Deferred<Arc<Caches>>,
file_fetcher: Deferred<Arc<FileFetcher>>,
global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>,
http_client_provider: Deferred<Arc<HttpClientProvider>>,
emit_cache: Deferred<EmitCache>,
emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
lockfile: Deferred<Option<Arc<CliLockfile>>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,
@ -173,6 +181,7 @@ struct CliFactoryServices {
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
sloppy_imports_resolver: Deferred<Option<Arc<SloppyImportsResolver>>>,
text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
@ -184,40 +193,66 @@ struct CliFactoryServices {
pub struct CliFactory {
watcher_communicator: Option<Arc<WatcherCommunicator>>,
options: Arc<CliOptions>,
flags: Arc<Flags>,
services: CliFactoryServices,
}
impl CliFactory {
pub fn from_flags(flags: Flags) -> Result<Self, AnyError> {
CliFactoryBuilder::new().build_from_flags(flags)
pub fn from_flags(flags: Arc<Flags>) -> Self {
Self {
flags,
watcher_communicator: None,
services: Default::default(),
}
}
pub fn from_cli_options(options: Arc<CliOptions>) -> Self {
CliFactoryBuilder::new().build_from_cli_options(options)
pub fn from_cli_options(cli_options: Arc<CliOptions>) -> Self {
let (cli_options, flags) = cli_options.into_self_and_flags();
CliFactory {
watcher_communicator: None,
flags,
services: CliFactoryServices {
cli_options: Deferred::from_value(cli_options),
..Default::default()
},
}
}
pub fn cli_options(&self) -> &Arc<CliOptions> {
&self.options
pub fn from_flags_for_watcher(
flags: Arc<Flags>,
watcher_communicator: Arc<WatcherCommunicator>,
) -> Self {
CliFactory {
watcher_communicator: Some(watcher_communicator),
flags,
services: Default::default(),
}
}
pub fn deno_dir_provider(&self) -> &Arc<DenoDirProvider> {
self.services.deno_dir_provider.get_or_init(|| {
Arc::new(DenoDirProvider::new(
self.options.maybe_custom_root().clone(),
))
pub fn cli_options(&self) -> Result<&Arc<CliOptions>, AnyError> {
self.services.cli_options.get_or_try_init(|| {
CliOptions::from_flags(self.flags.clone()).map(Arc::new)
})
}
pub fn deno_dir_provider(&self) -> Result<&Arc<DenoDirProvider>, AnyError> {
self.services.deno_dir_provider.get_or_try_init(|| {
Ok(Arc::new(DenoDirProvider::new(
self.cli_options()?.maybe_custom_root().clone(),
)))
})
}
pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> {
Ok(self.deno_dir_provider().get_or_create()?)
Ok(self.deno_dir_provider()?.get_or_create()?)
}
pub fn caches(&self) -> Result<&Arc<Caches>, AnyError> {
self.services.caches.get_or_try_init(|| {
let caches = Arc::new(Caches::new(self.deno_dir_provider().clone()));
let cli_options = self.cli_options()?;
let caches = Arc::new(Caches::new(self.deno_dir_provider()?.clone()));
// Warm up the caches we know we'll likely need based on the CLI mode
match self.options.sub_command() {
match cli_options.sub_command() {
DenoSubcommand::Run(_)
| DenoSubcommand::Serve(_)
| DenoSubcommand::Bench(_)
@ -225,11 +260,11 @@ impl CliFactory {
| DenoSubcommand::Check(_) => {
_ = caches.dep_analysis_db();
_ = caches.node_analysis_db();
if self.options.type_check_mode().is_true() {
if cli_options.type_check_mode().is_true() {
_ = caches.fast_check_db();
_ = caches.type_checking_cache_db();
}
if self.options.code_cache_enabled() {
if cli_options.code_cache_enabled() {
_ = caches.code_cache_db();
}
}
@ -244,10 +279,13 @@ impl CliFactory {
}
pub fn root_cert_store_provider(&self) -> &Arc<dyn RootCertStoreProvider> {
self
.services
.root_cert_store_provider
.get_or_init(|| self.options.resolve_root_cert_store_provider())
self.services.root_cert_store_provider.get_or_init(|| {
Arc::new(CliRootCertStoreProvider::new(
None,
self.flags.ca_stores.clone(),
self.flags.ca_data.clone(),
))
})
}
pub fn text_only_progress_bar(&self) -> &ProgressBar {
@ -269,7 +307,7 @@ impl CliFactory {
pub fn http_cache(&self) -> Result<&Arc<dyn HttpCache>, AnyError> {
self.services.http_cache.get_or_try_init(|| {
let global_cache = self.global_http_cache()?.clone();
match self.options.vendor_dir_path() {
match self.cli_options()?.vendor_dir_path() {
Some(local_path) => {
let local_cache =
LocalHttpCache::new(local_path.clone(), global_cache);
@ -284,17 +322,18 @@ impl CliFactory {
self.services.http_client_provider.get_or_init(|| {
Arc::new(HttpClientProvider::new(
Some(self.root_cert_store_provider().clone()),
self.options.unsafely_ignore_certificate_errors().clone(),
self.flags.unsafely_ignore_certificate_errors.clone(),
))
})
}
pub fn file_fetcher(&self) -> Result<&Arc<FileFetcher>, AnyError> {
self.services.file_fetcher.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(FileFetcher::new(
self.http_cache()?.clone(),
self.options.cache_setting(),
!self.options.no_remote(),
cli_options.cache_setting(),
!cli_options.no_remote(),
self.http_client_provider().clone(),
self.blob_store().clone(),
Some(self.text_only_progress_bar().clone()),
@ -306,97 +345,6 @@ impl CliFactory {
self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs))
}
pub fn maybe_lockfile(&self) -> &Option<Arc<CliLockfile>> {
fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> {
let Some(pkg_json) = maybe_pkg_json else {
return Default::default();
};
pkg_json
.resolve_local_package_json_deps()
.values()
.filter_map(|dep| dep.as_ref().ok())
.filter_map(|dep| match dep {
PackageJsonDepValue::Req(req) => Some(req),
PackageJsonDepValue::Workspace(_) => None,
})
.map(|r| format!("npm:{}", r))
.collect()
}
fn deno_json_deps(
maybe_deno_json: Option<&ConfigFile>,
) -> BTreeSet<String> {
maybe_deno_json
.map(|c| {
crate::args::deno_json::deno_json_deps(c)
.into_iter()
.map(|req| req.to_string())
.collect()
})
.unwrap_or_default()
}
self.services.lockfile.get_or_init(|| {
let maybe_lockfile = self.options.maybe_lockfile();
// initialize the lockfile with the workspace's configuration
if let Some(lockfile) = &maybe_lockfile {
let (root_url, root_folder) = self.options.workspace.root_folder();
let config = deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
},
members: self
.options
.workspace
.config_folders()
.iter()
.filter(|(folder_url, _)| *folder_url != root_url)
.filter_map(|(folder_url, folder)| {
Some((
{
// should never be None here, but just ignore members that
// do fail for this
let mut relative_path = root_url.make_relative(folder_url)?;
if relative_path.ends_with('/') {
// make it slightly cleaner by removing the trailing slash
relative_path.pop();
}
relative_path
},
{
let config = WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(
folder.pkg_json.as_deref(),
),
dependencies: deno_json_deps(folder.deno_json.as_deref()),
};
if config.package_json_deps.is_empty()
&& config.dependencies.is_empty()
{
// exclude empty workspace members
return None;
}
config
},
))
})
.collect(),
};
lockfile.set_workspace_config(
deno_lockfile::SetWorkspaceConfigOptions {
no_npm: self.options.no_npm(),
no_config: self.options.no_config(),
config,
},
);
}
maybe_lockfile
})
}
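The `pkg_json_deps` helper removed above flattened a map of possibly-unparseable package.json dependency entries into a sorted set of `npm:` requirement strings. A self-contained sketch of that shape (the `Dep` enum is a simplified stand-in for `PackageJsonDepValue`):

```rust
use std::collections::{BTreeMap, BTreeSet};

enum Dep {
    Req(String),       // e.g. "chalk@^5"
    Workspace(String), // workspace members are skipped
}

fn npm_deps(deps: &BTreeMap<String, Result<Dep, String>>) -> BTreeSet<String> {
    deps.values()
        // ignore entries that failed to parse
        .filter_map(|dep| dep.as_ref().ok())
        // keep only real npm requirements, not workspace links
        .filter_map(|dep| match dep {
            Dep::Req(req) => Some(req),
            Dep::Workspace(_) => None,
        })
        .map(|req| format!("npm:{}", req))
        .collect()
}
```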
pub async fn npm_resolver(
&self,
) -> Result<&Arc<dyn CliNpmResolver>, AnyError> {
@ -405,25 +353,26 @@ impl CliFactory {
.npm_resolver
.get_or_try_init_async(async {
let fs = self.fs();
let cli_options = self.cli_options()?;
// For `deno install` we want to force the managed resolver so it can set up the `node_modules/` directory.
create_cli_npm_resolver(if self.options.use_byonm() && !matches!(self.options.sub_command(), DenoSubcommand::Install(_)) {
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: fs.clone(),
root_node_modules_dir: match self.options.node_modules_dir_path() {
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() {
Some(node_modules_path) => node_modules_path.to_path_buf(),
// path needs to be canonicalized for node resolution
// (node_modules_dir_path above is already canonicalized)
None => canonicalize_path_maybe_not_exists(self.options.initial_cwd())?
None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())?
.join("node_modules"),
},
}),
})
} else {
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions {
snapshot: match self.options.resolve_npm_resolution_snapshot()? {
snapshot: match cli_options.resolve_npm_resolution_snapshot()? {
Some(snapshot) => {
CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot))
}
None => match self.maybe_lockfile().as_ref() {
None => match cli_options.maybe_lockfile() {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
@ -432,24 +381,42 @@ impl CliFactory {
None => CliNpmResolverManagedSnapshotOption::Specified(None),
},
},
maybe_lockfile: self.maybe_lockfile().as_ref().cloned(),
maybe_lockfile: cli_options.maybe_lockfile().cloned(),
fs: fs.clone(),
http_client_provider: self.http_client_provider().clone(),
npm_global_cache_dir: self.deno_dir()?.npm_folder_path(),
cache_setting: self.options.cache_setting(),
cache_setting: cli_options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: self.options.node_modules_dir_path().cloned(),
maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(),
package_json_deps_provider: Arc::new(PackageJsonInstallDepsProvider::from_workspace(
&self.options.workspace,
cli_options.workspace(),
)),
npm_system_info: self.options.npm_system_info(),
npmrc: self.options.npmrc().clone()
npm_system_info: cli_options.npm_system_info(),
npmrc: cli_options.npmrc().clone(),
lifecycle_scripts: cli_options.lifecycle_scripts_config(),
})
}).await
}.boxed_local())
.await
}
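`get_or_try_init_async` above is the fallible async variant of the same memoization: the first caller awaits construction, later callers get the cached `Arc`. A rough equivalent using `tokio::sync::OnceCell` (not the actual deno_core primitive; the resolver type is a placeholder):

```rust
use std::sync::Arc;
use tokio::sync::OnceCell;

struct NpmResolver; // placeholder

struct Factory {
    npm_resolver: OnceCell<Arc<NpmResolver>>,
}

impl Factory {
    async fn npm_resolver(&self) -> Result<&Arc<NpmResolver>, std::io::Error> {
        self.npm_resolver
            .get_or_try_init(|| async {
                // fallible async setup happens exactly once;
                // an Err here leaves the cell empty for a retry
                Ok(Arc::new(NpmResolver))
            })
            .await
    }
}
```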
pub fn sloppy_imports_resolver(
&self,
) -> Result<Option<&Arc<SloppyImportsResolver>>, AnyError> {
self
.services
.sloppy_imports_resolver
.get_or_try_init(|| {
Ok(
self
.cli_options()?
.unstable_sloppy_imports()
.then(|| Arc::new(SloppyImportsResolver::new(self.fs().clone()))),
)
})
.map(|maybe| maybe.as_ref())
}
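The `.then(|| ...)` above is `bool::then`: it turns the `unstable_sloppy_imports()` flag into `Some(resolver)` or `None` in a single expression. A tiny illustration:

```rust
fn maybe_resolver(enabled: bool) -> Option<String> {
    // Some(resolver) when the flag is set, None otherwise
    enabled.then(|| String::from("resolver"))
}

fn main() {
    assert_eq!(maybe_resolver(true).as_deref(), Some("resolver"));
    assert_eq!(maybe_resolver(false), None);
}
```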
pub async fn workspace_resolver(
&self,
) -> Result<&Arc<WorkspaceResolver>, AnyError> {
@ -457,11 +424,11 @@ impl CliFactory {
.services
.workspace_resolver
.get_or_try_init_async(async {
let resolver = self
.options
let cli_options = self.cli_options()?;
let resolver = cli_options
.create_workspace_resolver(
self.file_fetcher()?,
if self.options.use_byonm() {
if cli_options.use_byonm() {
PackageJsonDepResolution::Disabled
} else {
// todo(dsherret): this should be false for nodeModulesDir: true
@ -491,27 +458,22 @@ impl CliFactory {
.resolver
.get_or_try_init_async(
async {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
sloppy_imports_resolver: if self.options.unstable_sloppy_imports() {
Some(SloppyImportsResolver::new(self.fs().clone()))
} else {
None
},
sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
node_resolver: Some(self.cli_node_resolver().await?.clone()),
npm_resolver: if self.options.no_npm() {
npm_resolver: if cli_options.no_npm() {
None
} else {
Some(self.npm_resolver().await?.clone())
},
workspace_resolver: self.workspace_resolver().await?.clone(),
bare_node_builtins_enabled: self
.options
bare_node_builtins_enabled: cli_options
.unstable_bare_node_builtins(),
maybe_jsx_import_source_config: self
.options
.workspace
maybe_jsx_import_source_config: cli_options
.workspace()
.to_maybe_jsx_import_source_config()?,
maybe_vendor_dir: self.options.vendor_dir_path(),
maybe_vendor_dir: cli_options.vendor_dir_path(),
})))
}
.boxed_local(),
@ -530,9 +492,9 @@ impl CliFactory {
.get_or_init(|| maybe_file_watcher_reporter)
}
pub fn emit_cache(&self) -> Result<&EmitCache, AnyError> {
pub fn emit_cache(&self) -> Result<&Arc<EmitCache>, AnyError> {
self.services.emit_cache.get_or_try_init(|| {
Ok(EmitCache::new(self.deno_dir()?.gen_cache.clone()))
Ok(Arc::new(EmitCache::new(self.deno_dir()?.gen_cache.clone())))
})
}
@ -559,9 +521,9 @@ impl CliFactory {
pub fn emitter(&self) -> Result<&Arc<Emitter>, AnyError> {
self.services.emitter.get_or_try_init(|| {
let ts_config_result = self
.options
.resolve_ts_config_for_emit(TsConfigType::Emit)?;
let cli_options = self.cli_options()?;
let ts_config_result =
cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
warn!("{}", ignored_options);
}
@ -578,6 +540,13 @@ impl CliFactory {
})
}
pub async fn lint_rule_provider(&self) -> Result<LintRuleProvider, AnyError> {
Ok(LintRuleProvider::new(
self.sloppy_imports_resolver()?.cloned(),
Some(self.workspace_resolver().await?.clone()),
))
}
pub async fn node_resolver(&self) -> Result<&Arc<NodeResolver>, AnyError> {
self
.services
@ -585,7 +554,7 @@ impl CliFactory {
.get_or_try_init_async(
async {
Ok(Arc::new(NodeResolver::new(
self.fs().clone(),
DenoFsNodeResolverEnv::new(self.fs().clone()),
self.npm_resolver().await?.clone().into_npm_resolver(),
)))
}
@ -609,7 +578,7 @@ impl CliFactory {
Ok(Arc::new(NodeCodeTranslator::new(
cjs_esm_analyzer,
self.fs().clone(),
DenoFsNodeResolverEnv::new(self.fs().clone()),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone().into_npm_resolver(),
)))
@ -622,9 +591,10 @@ impl CliFactory {
.services
.type_checker
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(TypeChecker::new(
self.caches()?.clone(),
self.options.clone(),
cli_options.clone(),
self.module_graph_builder().await?.clone(),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),
@ -640,15 +610,16 @@ impl CliFactory {
.services
.module_graph_builder
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphBuilder::new(
self.options.clone(),
cli_options.clone(),
self.caches()?.clone(),
self.fs().clone(),
self.resolver().await?.clone(),
self.npm_resolver().await?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
self.maybe_lockfile().clone(),
cli_options.maybe_lockfile().cloned(),
self.maybe_file_watcher_reporter().clone(),
self.emit_cache()?.clone(),
self.file_fetcher()?.clone(),
@ -665,8 +636,9 @@ impl CliFactory {
.services
.module_graph_creator
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphCreator::new(
self.options.clone(),
cli_options.clone(),
self.npm_resolver().await?.clone(),
self.module_graph_builder().await?.clone(),
self.type_checker().await?.clone(),
@ -683,7 +655,7 @@ impl CliFactory {
.main_graph_container
.get_or_try_init_async(async {
Ok(Arc::new(MainModuleGraphContainer::new(
self.cli_options().clone(),
self.cli_options()?.clone(),
self.module_load_preparer().await?.clone(),
)))
})
@ -694,7 +666,8 @@ impl CliFactory {
&self,
) -> Result<&Option<Arc<InspectorServer>>, AnyError> {
self.services.maybe_inspector_server.get_or_try_init(|| {
match self.options.resolve_inspector_server() {
let cli_options = self.cli_options()?;
match cli_options.resolve_inspector_server() {
Ok(server) => Ok(server.map(Arc::new)),
Err(err) => Err(err),
}
@ -708,9 +681,10 @@ impl CliFactory {
.services
.module_load_preparer
.get_or_try_init_async(async {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleLoadPreparer::new(
self.options.clone(),
self.maybe_lockfile().clone(),
cli_options.clone(),
cli_options.maybe_lockfile().cloned(),
self.module_graph_builder().await?.clone(),
self.text_only_progress_bar().clone(),
self.type_checker().await?.clone(),
@ -731,7 +705,7 @@ impl CliFactory {
.cli_node_resolver
.get_or_try_init_async(async {
Ok(Arc::new(CliNodeResolver::new(
Some(self.cjs_resolutions().clone()),
self.cjs_resolutions().clone(),
self.fs().clone(),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),
@ -740,61 +714,64 @@ impl CliFactory {
.await
}
pub fn feature_checker(&self) -> &Arc<FeatureChecker> {
self.services.feature_checker.get_or_init(|| {
pub fn feature_checker(&self) -> Result<&Arc<FeatureChecker>, AnyError> {
self.services.feature_checker.get_or_try_init(|| {
let cli_options = self.cli_options()?;
let mut checker = FeatureChecker::default();
checker.set_exit_cb(Box::new(crate::unstable_exit_cb));
checker.set_warn_cb(Box::new(crate::unstable_warn_cb));
if self.options.legacy_unstable_flag() {
if cli_options.legacy_unstable_flag() {
checker.enable_legacy_unstable();
checker.warn_on_legacy_unstable();
}
let unstable_features = self.options.unstable_features();
let unstable_features = cli_options.unstable_features();
for (flag_name, _, _) in crate::UNSTABLE_GRANULAR_FLAGS {
if unstable_features.contains(&flag_name.to_string()) {
checker.enable_feature(flag_name);
}
}
Arc::new(checker)
Ok(Arc::new(checker))
})
}
pub async fn create_compile_binary_writer(
&self,
) -> Result<DenoCompileBinaryWriter, AnyError> {
let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new(
self.deno_dir()?,
self.file_fetcher()?,
self.http_client_provider(),
self.npm_resolver().await?.as_ref(),
self.workspace_resolver().await?.as_ref(),
self.options.npm_system_info(),
cli_options.npm_system_info(),
))
}
pub async fn create_cli_main_worker_factory(
&self,
) -> Result<CliMainWorkerFactory, AnyError> {
let cli_options = self.cli_options()?;
let node_resolver = self.node_resolver().await?;
let npm_resolver = self.npm_resolver().await?;
let fs = self.fs();
let cli_node_resolver = self.cli_node_resolver().await?;
let maybe_file_watcher_communicator = if self.options.has_hmr() {
let maybe_file_watcher_communicator = if cli_options.has_hmr() {
Some(self.watcher_communicator.clone().unwrap())
} else {
None
};
Ok(CliMainWorkerFactory::new(
StorageKeyResolver::from_options(&self.options),
self.options.sub_command().clone(),
StorageKeyResolver::from_options(cli_options),
cli_options.sub_command().clone(),
npm_resolver.clone(),
node_resolver.clone(),
self.blob_store().clone(),
Box::new(CliModuleLoaderFactory::new(
&self.options,
if self.options.code_cache_enabled() {
cli_options,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
@ -816,18 +793,18 @@ impl CliFactory {
self.fs().clone(),
maybe_file_watcher_communicator,
self.maybe_inspector_server()?.clone(),
self.maybe_lockfile().clone(),
self.feature_checker().clone(),
cli_options.maybe_lockfile().cloned(),
self.feature_checker()?.clone(),
self.create_cli_main_worker_options()?,
self.options.node_ipc_fd(),
self.options.serve_port(),
self.options.serve_host(),
self.options.enable_future_features(),
cli_options.node_ipc_fd(),
cli_options.serve_port(),
cli_options.serve_host(),
cli_options.enable_future_features(),
// TODO(bartlomieju): temporarily disabled
// self.options.disable_deprecated_api_warning,
// cli_options.disable_deprecated_api_warning,
true,
self.options.verbose_deprecated_api_warning,
if self.options.code_cache_enabled() {
cli_options.verbose_deprecated_api_warning,
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
None
@ -838,7 +815,8 @@ impl CliFactory {
fn create_cli_main_worker_options(
&self,
) -> Result<CliMainWorkerOptions, AnyError> {
let create_hmr_runner = if self.options.has_hmr() {
let cli_options = self.cli_options()?;
let create_hmr_runner = if cli_options.has_hmr() {
let watcher_communicator = self.watcher_communicator.clone().unwrap();
let emitter = self.emitter()?.clone();
let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| {
@ -853,7 +831,7 @@ impl CliFactory {
None
};
let create_coverage_collector =
if let Some(coverage_dir) = self.options.coverage_dir() {
if let Some(coverage_dir) = cli_options.coverage_dir() {
let coverage_dir = PathBuf::from(coverage_dir);
let fn_: crate::worker::CreateCoverageCollectorCb =
Box::new(move |session| {
@ -865,36 +843,34 @@ impl CliFactory {
};
Ok(CliMainWorkerOptions {
argv: self.options.argv().clone(),
argv: cli_options.argv().clone(),
// This optimization is only available for the "run" subcommand
// because we need to register new ops for testing and jupyter
// integration.
skip_op_registration: self.options.sub_command().is_run(),
log_level: self.options.log_level().unwrap_or(log::Level::Info).into(),
enable_op_summary_metrics: self.options.enable_op_summary_metrics(),
enable_testing_features: self.options.enable_testing_features(),
has_node_modules_dir: self.options.has_node_modules_dir(),
hmr: self.options.has_hmr(),
inspect_brk: self.options.inspect_brk().is_some(),
inspect_wait: self.options.inspect_wait().is_some(),
strace_ops: self.options.strace_ops().clone(),
is_inspecting: self.options.is_inspecting(),
is_npm_main: self.options.is_npm_main(),
location: self.options.location_flag().clone(),
skip_op_registration: cli_options.sub_command().is_run(),
log_level: cli_options.log_level().unwrap_or(log::Level::Info).into(),
enable_op_summary_metrics: cli_options.enable_op_summary_metrics(),
enable_testing_features: cli_options.enable_testing_features(),
has_node_modules_dir: cli_options.has_node_modules_dir(),
hmr: cli_options.has_hmr(),
inspect_brk: cli_options.inspect_brk().is_some(),
inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(),
is_inspecting: cli_options.is_inspecting(),
is_npm_main: cli_options.is_npm_main(),
location: cli_options.location_flag().clone(),
// if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno
argv0: self
.options
argv0: cli_options
.take_binary_npm_command_name()
.or(std::env::args().next()),
node_debug: std::env::var("NODE_DEBUG").ok(),
origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()),
seed: self.options.seed(),
unsafely_ignore_certificate_errors: self
.options
seed: cli_options.seed(),
unsafely_ignore_certificate_errors: cli_options
.unsafely_ignore_certificate_errors()
.clone(),
unstable: self.options.legacy_unstable_flag(),
unstable: cli_options.legacy_unstable_flag(),
create_hmr_runner,
create_coverage_collector,
})

View file

@ -98,7 +98,7 @@ impl MainModuleGraphContainer {
&self,
files: &[String],
) -> Result<Vec<ModuleSpecifier>, AnyError> {
let excludes = self.cli_options.workspace.resolve_config_excludes()?;
let excludes = self.cli_options.workspace().resolve_config_excludes()?;
Ok(
files
.iter()

View file

@ -364,7 +364,7 @@ pub struct ModuleGraphBuilder {
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache,
emit_cache: Arc<cache::EmitCache>,
file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
}
@ -381,7 +381,7 @@ impl ModuleGraphBuilder {
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache,
emit_cache: Arc<cache::EmitCache>,
file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
) -> Self {
@ -481,7 +481,11 @@ impl ModuleGraphBuilder {
}
}
let maybe_imports = self.options.to_maybe_imports()?;
let maybe_imports = if options.graph_kind.include_types() {
self.options.to_compiler_option_types()?
} else {
Vec::new()
};
let analyzer = self
.module_info_cache
.as_module_analyzer(&self.parsed_source_cache);
@ -496,8 +500,6 @@ impl ModuleGraphBuilder {
.maybe_file_watcher_reporter
.as_ref()
.map(|r| r.as_reporter());
let workspace_members =
self.options.resolve_deno_graph_workspace_members()?;
let mut locker = self
.lockfile
.as_ref()
@ -511,7 +513,6 @@ impl ModuleGraphBuilder {
imports: maybe_imports,
is_dynamic: options.is_dynamic,
passthrough_jsr_specifiers: false,
workspace_members: &workspace_members,
executor: Default::default(),
file_system: &DenoGraphFsAdapter(self.fs.as_ref()),
jsr_url_provider: &CliJsrUrlProvider,
@ -746,8 +747,8 @@ fn enhanced_sloppy_imports_error_message(
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => {
let additional_message = SloppyImportsResolver::new(fs.clone())
.resolve(specifier, ResolutionMode::Execution)
.as_suggestion_message()?;
.resolve(specifier, ResolutionMode::Execution)?
.as_suggestion_message();
Some(format!(
"{} {} or run with --unstable-sloppy-imports",
error,

View file

@ -12,18 +12,23 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::parking_lot::Mutex;
use deno_core::serde;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fetch::reqwest::header::HeaderName;
use deno_runtime::deno_fetch::reqwest::header::HeaderValue;
use deno_runtime::deno_fetch::reqwest::header::ACCEPT;
use deno_runtime::deno_fetch::reqwest::header::AUTHORIZATION;
use deno_runtime::deno_fetch::reqwest::header::IF_NONE_MATCH;
use deno_runtime::deno_fetch::reqwest::header::LOCATION;
use deno_runtime::deno_fetch::reqwest::StatusCode;
use deno_runtime::deno_fetch::CreateHttpClientOptions;
use deno_runtime::deno_tls::RootCertStoreProvider;
use http::header::HeaderName;
use http::header::HeaderValue;
use http::header::ACCEPT;
use http::header::AUTHORIZATION;
use http::header::CONTENT_LENGTH;
use http::header::IF_NONE_MATCH;
use http::header::LOCATION;
use http::StatusCode;
use http_body_util::BodyExt;
use std::collections::HashMap;
use std::sync::Arc;
use std::thread::ThreadId;
@ -208,8 +213,7 @@ pub struct HttpClientProvider {
// it's not safe to share a reqwest::Client across tokio runtimes,
// so we store these Clients keyed by thread id
// https://github.com/seanmonstar/reqwest/issues/1148#issuecomment-910868788
#[allow(clippy::disallowed_types)] // reqwest::Client allowed here
clients_by_thread_id: Mutex<HashMap<ThreadId, reqwest::Client>>,
clients_by_thread_id: Mutex<HashMap<ThreadId, deno_fetch::Client>>,
}
impl std::fmt::Debug for HttpClientProvider {
@ -270,9 +274,15 @@ pub struct BadResponseError {
#[derive(Debug, Error)]
pub enum DownloadError {
#[error(transparent)]
Reqwest(#[from] reqwest::Error),
Fetch(AnyError),
#[error(transparent)]
ToStr(#[from] reqwest::header::ToStrError),
UrlParse(#[from] deno_core::url::ParseError),
#[error(transparent)]
HttpParse(#[from] http::Error),
#[error(transparent)]
Json(#[from] serde_json::Error),
#[error(transparent)]
ToStr(#[from] http::header::ToStrError),
#[error("Redirection from '{}' did not provide location header", .request_url)]
NoRedirectHeader { request_url: Url },
#[error("Too many redirects.")]
@ -283,8 +293,7 @@ pub enum DownloadError {
#[derive(Debug)]
pub struct HttpClient {
#[allow(clippy::disallowed_types)] // reqwest::Client allowed here
client: reqwest::Client,
client: deno_fetch::Client,
// don't allow sending this across threads because then
// it might be shared accidentally across tokio runtimes
// which will cause issues
@ -295,22 +304,56 @@ pub struct HttpClient {
impl HttpClient {
// DO NOT make this public. You should always be creating one of these from
// the HttpClientProvider
#[allow(clippy::disallowed_types)] // reqwest::Client allowed here
fn new(client: reqwest::Client) -> Self {
fn new(client: deno_fetch::Client) -> Self {
Self {
client,
_unsend_marker: deno_core::unsync::UnsendMarker::default(),
}
}
// todo(dsherret): don't expose `reqwest::RequestBuilder` because it
// is `Sync` and could accidentally be shared with multiple tokio runtimes
pub fn get(&self, url: impl reqwest::IntoUrl) -> reqwest::RequestBuilder {
self.client.get(url)
pub fn get(&self, url: Url) -> Result<RequestBuilder, http::Error> {
let body = http_body_util::Empty::new()
.map_err(|never| match never {})
.boxed();
let mut req = http::Request::new(body);
*req.uri_mut() = url.as_str().parse()?;
Ok(RequestBuilder {
client: self.client.clone(),
req,
})
}
pub fn post(&self, url: impl reqwest::IntoUrl) -> reqwest::RequestBuilder {
self.client.post(url)
pub fn post(
&self,
url: Url,
body: deno_fetch::ReqBody,
) -> Result<RequestBuilder, http::Error> {
let mut req = http::Request::new(body);
*req.method_mut() = http::Method::POST;
*req.uri_mut() = url.as_str().parse()?;
Ok(RequestBuilder {
client: self.client.clone(),
req,
})
}
pub fn post_json<S>(
&self,
url: Url,
ser: &S,
) -> Result<RequestBuilder, DownloadError>
where
S: serde::Serialize,
{
let json = deno_core::serde_json::to_vec(ser)?;
let body = http_body_util::Full::new(json.into())
.map_err(|never| match never {})
.boxed();
let builder = self.post(url, body)?;
Ok(builder.header(
http::header::CONTENT_TYPE,
"application/json".parse().map_err(http::Error::from)?,
))
}
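The `get`/`post`/`post_json` builders above replace reqwest's builder with hand-rolled `http::Request`s over boxed bodies. A condensed, compilable sketch of that construction, assuming the `http`, `bytes`, `serde`/`serde_json`, and `http-body-util` crates (the `ReqBody` alias is illustrative, standing in for `deno_fetch::ReqBody`):

```rust
use bytes::Bytes;
use http_body_util::combinators::BoxBody;
use http_body_util::{BodyExt, Empty, Full};

type ReqBody = BoxBody<Bytes, Box<dyn std::error::Error + Send + Sync>>;

// GET: empty body, URI parsed from a URL string.
fn get_request(url: &str) -> Result<http::Request<ReqBody>, http::Error> {
    let body: ReqBody = Empty::new()
        .map_err(|never| match never {}) // Infallible -> any error type
        .boxed();
    let mut req = http::Request::new(body);
    *req.uri_mut() = url.parse()?;
    Ok(req)
}

// POST with a JSON payload and the matching content-type header.
fn post_json<S: serde::Serialize>(
    url: &str,
    payload: &S,
) -> Result<http::Request<ReqBody>, Box<dyn std::error::Error + Send + Sync>> {
    let json = serde_json::to_vec(payload)?;
    let body: ReqBody = Full::new(Bytes::from(json))
        .map_err(|never| match never {})
        .boxed();
    let mut req = http::Request::new(body);
    *req.method_mut() = http::Method::POST;
    *req.uri_mut() = url.parse()?;
    req.headers_mut().insert(
        http::header::CONTENT_TYPE,
        http::HeaderValue::from_static("application/json"),
    );
    Ok(req)
}
```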
/// Asynchronously fetches the given HTTP URL one pass only.
@ -322,24 +365,32 @@ impl HttpClient {
&self,
args: FetchOnceArgs<'a>,
) -> Result<FetchOnceResult, AnyError> {
let mut request = self.client.get(args.url.clone());
let body = http_body_util::Empty::new()
.map_err(|never| match never {})
.boxed();
let mut request = http::Request::new(body);
*request.uri_mut() = args.url.as_str().parse()?;
if let Some(etag) = args.maybe_etag {
let if_none_match_val = HeaderValue::from_str(&etag)?;
request = request.header(IF_NONE_MATCH, if_none_match_val);
request
.headers_mut()
.insert(IF_NONE_MATCH, if_none_match_val);
}
if let Some(auth_token) = args.maybe_auth_token {
let authorization_val = HeaderValue::from_str(&auth_token.to_string())?;
request = request.header(AUTHORIZATION, authorization_val);
request
.headers_mut()
.insert(AUTHORIZATION, authorization_val);
}
if let Some(accept) = args.maybe_accept {
let accepts_val = HeaderValue::from_str(&accept)?;
request = request.header(ACCEPT, accepts_val);
request.headers_mut().insert(ACCEPT, accepts_val);
}
let response = match request.send().await {
let response = match self.client.clone().send(request).await {
Ok(resp) => resp,
Err(err) => {
if err.is_connect() || err.is_timeout() {
if err.is_connect_error() {
return Ok(FetchOnceResult::RequestError(err.to_string()));
}
return Err(err.into());
@ -406,18 +457,12 @@ impl HttpClient {
Ok(FetchOnceResult::Code(body, result_headers))
}
pub async fn download_text(
&self,
url: impl reqwest::IntoUrl,
) -> Result<String, AnyError> {
pub async fn download_text(&self, url: Url) -> Result<String, AnyError> {
let bytes = self.download(url).await?;
Ok(String::from_utf8(bytes)?)
}
pub async fn download(
&self,
url: impl reqwest::IntoUrl,
) -> Result<Vec<u8>, AnyError> {
pub async fn download(&self, url: Url) -> Result<Vec<u8>, AnyError> {
let maybe_bytes = self.download_inner(url, None, None).await?;
match maybe_bytes {
Some(bytes) => Ok(bytes),
@ -427,7 +472,7 @@ impl HttpClient {
pub async fn download_with_progress(
&self,
url: impl reqwest::IntoUrl,
url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>,
progress_guard: &UpdateGuard,
) -> Result<Option<Vec<u8>>, DownloadError> {
@ -438,26 +483,26 @@ impl HttpClient {
pub async fn get_redirected_url(
&self,
url: impl reqwest::IntoUrl,
url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>,
) -> Result<Url, AnyError> {
let response = self.get_redirected_response(url, maybe_header).await?;
Ok(response.url().clone())
let (_, url) = self.get_redirected_response(url, maybe_header).await?;
Ok(url)
}
async fn download_inner(
&self,
url: impl reqwest::IntoUrl,
url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>,
progress_guard: Option<&UpdateGuard>,
) -> Result<Option<Vec<u8>>, DownloadError> {
let response = self.get_redirected_response(url, maybe_header).await?;
let (response, _) = self.get_redirected_response(url, maybe_header).await?;
if response.status() == 404 {
return Ok(None);
} else if !response.status().is_success() {
let status = response.status();
let maybe_response_text = response.text().await.ok();
let maybe_response_text = body_to_string(response).await.ok();
return Err(DownloadError::BadResponse(BadResponseError {
status_code: status,
response_text: maybe_response_text
@ -469,60 +514,78 @@ impl HttpClient {
get_response_body_with_progress(response, progress_guard)
.await
.map(Some)
.map_err(Into::into)
.map_err(DownloadError::Fetch)
}
async fn get_redirected_response(
&self,
url: impl reqwest::IntoUrl,
mut url: Url,
mut maybe_header: Option<(HeaderName, HeaderValue)>,
) -> Result<reqwest::Response, DownloadError> {
let mut url = url.into_url()?;
let mut builder = self.get(url.clone());
) -> Result<(http::Response<deno_fetch::ResBody>, Url), DownloadError> {
let mut req = self.get(url.clone())?.build();
if let Some((header_name, header_value)) = maybe_header.as_ref() {
builder = builder.header(header_name, header_value);
req.headers_mut().append(header_name, header_value.clone());
}
let mut response = builder.send().await?;
let mut response = self
.client
.clone()
.send(req)
.await
.map_err(|e| DownloadError::Fetch(e.into()))?;
let status = response.status();
if status.is_redirection() {
for _ in 0..5 {
let new_url = resolve_redirect_from_response(&url, &response)?;
let mut builder = self.get(new_url.clone());
let mut req = self.get(new_url.clone())?.build();
if new_url.origin() == url.origin() {
if let Some((header_name, header_value)) = maybe_header.as_ref() {
builder = builder.header(header_name, header_value);
req.headers_mut().append(header_name, header_value.clone());
}
} else {
maybe_header = None;
}
let new_response = builder.send().await?;
let new_response = self
.client
.clone()
.send(req)
.await
.map_err(|e| DownloadError::Fetch(e.into()))?;
let status = new_response.status();
if status.is_redirection() {
response = new_response;
url = new_url;
} else {
return Ok(new_response);
return Ok((new_response, new_url));
}
}
Err(DownloadError::TooManyRedirects)
} else {
Ok(response)
Ok((response, url))
}
}
}
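The loop above follows at most five redirects and, notably, drops the caller-supplied header whenever a redirect crosses origins, so credentials are not forwarded to a third party. The control flow, reduced to a sketch over plain `url::Url`s (the `fetch` closure stands in for the real client send and returns the redirect target, or `None` for a final response):

```rust
use url::Url;

const MAX_REDIRECTS: usize = 5;

fn follow_redirects(
    mut url: Url,
    mut auth_header: Option<String>,
    fetch: impl Fn(&Url, Option<&String>) -> Option<Url>,
) -> Result<Url, String> {
    for _ in 0..=MAX_REDIRECTS {
        match fetch(&url, auth_header.as_ref()) {
            None => return Ok(url), // not a redirection: done
            Some(new_url) => {
                // never forward credentials across origins
                if new_url.origin() != url.origin() {
                    auth_header = None;
                }
                url = new_url;
            }
        }
    }
    Err("Too many redirects.".into())
}
```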
async fn get_response_body_with_progress(
response: reqwest::Response,
response: http::Response<deno_fetch::ResBody>,
progress_guard: Option<&UpdateGuard>,
) -> Result<Vec<u8>, reqwest::Error> {
) -> Result<Vec<u8>, AnyError> {
use http_body::Body as _;
if let Some(progress_guard) = progress_guard {
if let Some(total_size) = response.content_length() {
let mut total_size = response.body().size_hint().exact();
if total_size.is_none() {
total_size = response
.headers()
.get(CONTENT_LENGTH)
.and_then(|val| val.to_str().ok())
.and_then(|s| s.parse::<u64>().ok());
}
if let Some(total_size) = total_size {
progress_guard.set_total_size(total_size);
let mut current_size = 0;
let mut data = Vec::with_capacity(total_size as usize);
let mut stream = response.bytes_stream();
let mut stream = response.into_body().into_data_stream();
while let Some(item) = stream.next().await {
let bytes = item?;
current_size += bytes.len() as u64;
@ -532,7 +595,7 @@ async fn get_response_body_with_progress(
return Ok(data);
}
}
let bytes = response.bytes().await?;
let bytes = response.collect().await?.to_bytes();
Ok(bytes.into())
}
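`get_response_body_with_progress` now takes the total length from the body's `size_hint`, falling back to the `Content-Length` header, then drains the body as a data stream. A stripped-down sketch of both steps, with progress reporting reduced to a callback (assumes the `http`, `http-body`, `http-body-util`, `bytes`, and `futures` crates):

```rust
use futures::StreamExt;
use http_body_util::BodyExt;

// Length from the body's size hint, falling back to Content-Length.
fn total_size<B: http_body::Body>(res: &http::Response<B>) -> Option<u64> {
    use http_body::Body as _;
    res.body().size_hint().exact().or_else(|| {
        res.headers()
            .get(http::header::CONTENT_LENGTH)
            .and_then(|v| v.to_str().ok())
            .and_then(|s| s.parse().ok())
    })
}

// Drain the body chunk by chunk, reporting cumulative bytes read.
async fn collect_with_progress<B>(
    body: B,
    mut on_progress: impl FnMut(u64),
) -> Result<Vec<u8>, B::Error>
where
    B: http_body::Body<Data = bytes::Bytes> + Unpin,
{
    let mut data = Vec::new();
    let mut read = 0u64;
    // into_data_stream yields Result<Bytes, B::Error> frames
    let mut stream = body.into_data_stream();
    while let Some(chunk) = stream.next().await {
        let bytes = chunk?;
        read += bytes.len() as u64;
        on_progress(read);
        data.extend_from_slice(&bytes);
    }
    Ok(data)
}
```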
@ -563,9 +626,9 @@ fn resolve_url_from_location(base_url: &Url, location: &str) -> Url {
}
}
fn resolve_redirect_from_response(
fn resolve_redirect_from_response<B>(
request_url: &Url,
response: &reqwest::Response,
response: &http::Response<B>,
) -> Result<Url, DownloadError> {
debug_assert!(response.status().is_redirection());
if let Some(location) = response.headers().get(LOCATION) {
@ -580,6 +643,49 @@ fn resolve_redirect_from_response(
}
}
pub async fn body_to_string<B>(body: B) -> Result<String, AnyError>
where
B: http_body::Body,
AnyError: From<B::Error>,
{
let bytes = body.collect().await?.to_bytes();
let s = std::str::from_utf8(&bytes)?;
Ok(s.into())
}
pub async fn body_to_json<B, D>(body: B) -> Result<D, AnyError>
where
B: http_body::Body,
AnyError: From<B::Error>,
D: serde::de::DeserializeOwned,
{
let bytes = body.collect().await?.to_bytes();
let val = deno_core::serde_json::from_slice(&bytes)?;
Ok(val)
}
pub struct RequestBuilder {
client: deno_fetch::Client,
req: http::Request<deno_fetch::ReqBody>,
}
impl RequestBuilder {
pub fn header(mut self, name: HeaderName, value: HeaderValue) -> Self {
self.req.headers_mut().append(name, value);
self
}
pub async fn send(
self,
) -> Result<http::Response<deno_fetch::ResBody>, AnyError> {
self.client.send(self.req).await.map_err(Into::into)
}
pub fn build(self) -> http::Request<deno_fetch::ReqBody> {
self.req
}
}
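Putting the pieces together, a typical call site for the new `RequestBuilder` might look like the following. This is a hypothetical usage sketch built from the types defined in this file (`HttpClient`, `body_to_string`, `AnyError`), not code from the commit:

```rust
// Hypothetical call site for the builder defined above.
async fn fetch_json_text(
    client: &HttpClient,
    url: Url,
) -> Result<String, AnyError> {
    let response = client
        .get(url)?
        .header(ACCEPT, HeaderValue::from_static("application/json"))
        .send()
        .await?;
    // reuse the body_to_string helper defined above to drain the body
    body_to_string(response.into_body()).await
}
```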
#[allow(clippy::print_stdout)]
#[allow(clippy::print_stderr)]
#[cfg(test)]
@ -600,14 +706,20 @@ mod test {
// make a request to the redirect server
let text = client
.download_text("http://localhost:4546/subdir/redirects/redirect1.js")
.download_text(
Url::parse("http://localhost:4546/subdir/redirects/redirect1.js")
.unwrap(),
)
.await
.unwrap();
assert_eq!(text, "export const redirect = 1;\n");
// now make one to the infinite redirects server
let err = client
.download_text("http://localhost:4549/subdir/redirects/redirect1.js")
.download_text(
Url::parse("http://localhost:4549/subdir/redirects/redirect1.js")
.unwrap(),
)
.await
.err()
.unwrap();

View file

@ -8,8 +8,8 @@ use super::resolver::LspResolver;
use super::tsc;
use crate::args::jsr_url;
use crate::tools::lint::create_linter;
use deno_lint::linter::LintConfig;
use crate::tools::lint::CliLinter;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_ast::SourceRange;
@ -23,9 +23,6 @@ use deno_core::serde::Serialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::rules::LintRule;
use deno_runtime::deno_node::NpmResolver;
use deno_runtime::deno_node::PathClean;
use deno_semver::jsr::JsrPackageNvReference;
use deno_semver::jsr::JsrPackageReqReference;
@ -36,6 +33,7 @@ use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference;
use deno_semver::Version;
use import_map::ImportMap;
use node_resolver::NpmResolver;
use once_cell::sync::Lazy;
use regex::Regex;
use std::cmp::Ordering;
@ -73,8 +71,9 @@ static PREFERRED_FIXES: Lazy<HashMap<&'static str, (u32, bool)>> =
.collect()
});
static IMPORT_SPECIFIER_RE: Lazy<Regex> =
lazy_regex::lazy_regex!(r#"\sfrom\s+["']([^"']*)["']"#);
static IMPORT_SPECIFIER_RE: Lazy<Regex> = lazy_regex::lazy_regex!(
r#"\sfrom\s+["']([^"']*)["']|import\s*\(\s*["']([^"']*)["']\s*\)"#
);
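The widened `IMPORT_SPECIFIER_RE` now has two capture groups, one for static `from "..."` clauses and one for dynamic `import("...")` calls, which is why the fix-up code later picks whichever group matched. A standalone check of that behavior with the `regex` crate:

```rust
use regex::Regex;

fn main() {
    let re = Regex::new(
        r#"\sfrom\s+["']([^"']*)["']|import\s*\(\s*["']([^"']*)["']\s*\)"#,
    )
    .unwrap();

    for line in [
        r#"import { join } from "path";"#,
        r#"const mod = await import("chalk");"#,
    ] {
        let caps = re.captures(line).unwrap();
        // exactly one of the two groups matches; take whichever did
        let specifier = caps.iter().skip(1).find_map(|m| m).unwrap().as_str();
        println!("{specifier}"); // "path", then "chalk"
    }
}
```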
const SUPPORTED_EXTENSIONS: &[&str] = &[
".ts", ".tsx", ".js", ".jsx", ".mjs", ".mts", ".cjs", ".cts", ".d.ts",
@ -148,8 +147,10 @@ impl Reference {
}
}
fn as_lsp_range_from_diagnostic(diagnostic: &LintDiagnostic) -> Range {
as_lsp_range(diagnostic.range, &diagnostic.text_info)
fn as_lsp_range_from_lint_diagnostic(
diagnostic_range: &LintDiagnosticRange,
) -> Range {
as_lsp_range(diagnostic_range.range, &diagnostic_range.text_info)
}
fn as_lsp_range(
@ -172,37 +173,39 @@ fn as_lsp_range(
pub fn get_lint_references(
parsed_source: &deno_ast::ParsedSource,
lint_rules: Vec<&'static dyn LintRule>,
lint_config: LintConfig,
linter: &CliLinter,
) -> Result<Vec<Reference>, AnyError> {
let linter = create_linter(lint_rules);
let lint_diagnostics = linter.lint_with_ast(parsed_source, lint_config);
let lint_diagnostics = linter.lint_with_ast(parsed_source);
Ok(
lint_diagnostics
.into_iter()
.map(|d| Reference {
range: as_lsp_range_from_diagnostic(&d),
category: Category::Lint {
message: d.message,
code: d.code,
hint: d.hint,
quick_fixes: d
.fixes
.into_iter()
.map(|f| DataQuickFix {
description: f.description.to_string(),
changes: f
.changes
.into_iter()
.map(|change| DataQuickFixChange {
range: as_lsp_range(change.range, &d.text_info),
new_text: change.new_text.to_string(),
})
.collect(),
})
.collect(),
},
.filter_map(|d| {
let range = d.range.as_ref()?;
Some(Reference {
range: as_lsp_range_from_lint_diagnostic(range),
category: Category::Lint {
message: d.details.message,
code: d.details.code.to_string(),
hint: d.details.hint,
quick_fixes: d
.details
.fixes
.into_iter()
.map(|f| DataQuickFix {
description: f.description.to_string(),
changes: f
.changes
.into_iter()
.map(|change| DataQuickFixChange {
range: as_lsp_range(change.range, &range.text_info),
new_text: change.new_text.to_string(),
})
.collect(),
})
.collect(),
},
})
})
.collect(),
)
@ -528,7 +531,8 @@ pub fn fix_ts_import_changes(
.map(|line| {
// This assumes that there's only one import per line.
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) {
let specifier = captures.get(1).unwrap().as_str();
let specifier =
captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer)
{

View file

@ -107,7 +107,7 @@ impl DenoTestCollector {
for prop in &obj_lit.props {
if let ast::PropOrSpread::Prop(prop) = prop {
if let ast::Prop::KeyValue(key_value_prop) = prop.as_ref() {
if let ast::PropName::Ident(ast::Ident { sym, .. }) =
if let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&key_value_prop.key
{
if sym == "name" {

View file

@ -18,6 +18,7 @@ use crate::util::path::is_importable_ext;
use crate::util::path::relative_specifier;
use deno_graph::source::ResolutionMode;
use deno_graph::Range;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_ast::LineAndColumnIndex;
@ -192,6 +193,8 @@ pub async fn get_import_completions(
get_npm_completions(specifier, &text, &range, npm_search_api).await
{
Some(lsp::CompletionResponse::List(completion_list))
} else if let Some(completion_list) = get_node_completions(&text, &range) {
Some(lsp::CompletionResponse::List(completion_list))
} else if let Some(completion_list) =
get_import_map_completions(specifier, &text, &range, maybe_import_map)
{
@ -215,16 +218,13 @@ pub async fn get_import_completions(
module_registries,
)
.await;
let offset = if position.character > range.start.character {
(position.character - range.start.character) as usize
} else {
0
};
let maybe_list = module_registries
.get_completions(&text, offset, &range, |s| {
.get_completions(&text, &range, resolved.as_ref(), |s| {
documents.exists(s, file_referrer)
})
.await;
let maybe_list = maybe_list
.or_else(|| module_registries.get_origin_completions(&text, &range));
let list = maybe_list.unwrap_or_else(|| CompletionList {
items: get_workspace_completions(specifier, &text, &range, documents),
is_incomplete: false,
@ -735,6 +735,40 @@ async fn get_npm_completions(
})
}
/// Get completions for `node:` specifiers.
fn get_node_completions(
specifier: &str,
range: &lsp::Range,
) -> Option<CompletionList> {
if !specifier.starts_with("node:") {
return None;
}
let items = SUPPORTED_BUILTIN_NODE_MODULES
.iter()
.map(|name| {
let specifier = format!("node:{}", name);
let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: specifier.clone(),
}));
lsp::CompletionItem {
label: specifier,
kind: Some(lsp::CompletionItemKind::FILE),
detail: Some("(node)".to_string()),
text_edit,
commit_characters: Some(
IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
),
..Default::default()
}
})
.collect();
Some(CompletionList {
is_incomplete: false,
items,
})
}
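The completion items above are generated mechanically from the builtin-module list once the user has typed a `node:` prefix. The core mapping, reduced to plain strings instead of LSP types (the module list here is truncated for illustration):

```rust
const BUILTIN_NODE_MODULES: &[&str] = &["assert", "fs", "path", "url"]; // truncated

fn node_completions(partial: &str) -> Option<Vec<String>> {
    // only kick in once the user has started a `node:` specifier
    if !partial.starts_with("node:") {
        return None;
    }
    Some(
        BUILTIN_NODE_MODULES
            .iter()
            .map(|name| format!("node:{name}"))
            .collect(),
    )
}

fn main() {
    assert_eq!(node_completions("./local.ts"), None);
    assert_eq!(node_completions("node:f").unwrap()[1], "node:fs".to_string());
}
```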
/// Get workspace completions that include modules in the Deno cache which match
/// the current specifier string.
fn get_workspace_completions(
@ -804,7 +838,7 @@ mod tests {
fs_sources: &[(&str, &str)],
) -> Documents {
let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri()));
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap()));
let mut documents = Documents::default();
documents.update_config(
&Default::default(),
@ -824,8 +858,8 @@ mod tests {
.global()
.set(&specifier, HashMap::default(), source.as_bytes())
.expect("could not cache file");
let document =
documents.get_or_load(&specifier, &temp_dir.uri().join("$").unwrap());
let document = documents
.get_or_load(&specifier, Some(&temp_dir.uri().join("$").unwrap()));
assert!(document.is_some(), "source could not be setup");
}
documents

File diff suppressed because it is too large

View file

@ -15,15 +15,18 @@ use super::tsc::TsServer;
use super::urls::LspClientUrl;
use super::urls::LspUrlMap;
use crate::args::LintOptions;
use crate::graph_util;
use crate::graph_util::enhanced_resolution_error_message;
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
use crate::resolver::SloppyImportsResolution;
use crate::resolver::SloppyImportsResolver;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider;
use crate::util::path::to_percent_decoded_str;
use deno_ast::MediaType;
use deno_config::deno_json::LintConfig;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock;
@ -36,11 +39,10 @@ use deno_core::unsync::spawn_blocking;
use deno_core::unsync::JoinHandle;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_lint::linter::LintConfig;
use deno_lint::rules::LintRule;
use deno_runtime::deno_fs;
use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime;
@ -48,9 +50,11 @@ use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use import_map::ImportMap;
use import_map::ImportMapError;
use log::error;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::atomic::AtomicUsize;
use std::sync::Arc;
use std::thread;
@ -814,25 +818,25 @@ fn generate_lint_diagnostics(
continue;
}
let version = document.maybe_lsp_version();
let (lint_options, lint_config, lint_rules) = config
let (lint_config, linter) = config
.tree
.scope_for_specifier(specifier)
.and_then(|s| config_data_by_scope.get(s))
.map(|d| {
(
d.lint_options.clone(),
d.lint_config.clone(),
d.lint_rules.clone(),
)
})
.map(|d| (d.lint_config.clone(), d.linter.clone()))
.unwrap_or_else(|| {
(
Arc::default(),
LintConfig {
default_jsx_factory: None,
default_jsx_fragment_factory: None,
},
Arc::default(),
Arc::new(LintConfig::new_with_base(PathBuf::from("/"))),
Arc::new(CliLinter::new(CliLinterOptions {
configured_rules: {
let lint_rule_provider = LintRuleProvider::new(None, None);
lint_rule_provider.resolve_lint_rules(Default::default(), None)
},
fix: false,
deno_lint_config: deno_lint::linter::LintConfig {
default_jsx_factory: None,
default_jsx_fragment_factory: None,
},
})),
)
});
diagnostics_vec.push(DiagnosticRecord {
@ -841,9 +845,8 @@ fn generate_lint_diagnostics(
version,
diagnostics: generate_document_lint_diagnostics(
&document,
&lint_options,
lint_config,
lint_rules.rules.clone(),
&lint_config,
&linter,
),
},
});
@ -853,17 +856,16 @@ fn generate_lint_diagnostics(
fn generate_document_lint_diagnostics(
document: &Document,
lint_options: &LintOptions,
lint_config: LintConfig,
lint_rules: Vec<&'static dyn LintRule>,
lint_config: &LintConfig,
linter: &CliLinter,
) -> Vec<lsp::Diagnostic> {
if !lint_options.files.matches_specifier(document.specifier()) {
if !lint_config.files.matches_specifier(document.specifier()) {
return Vec::new();
}
match document.maybe_parsed_source() {
Some(Ok(parsed_source)) => {
if let Ok(references) =
analysis::get_lint_references(parsed_source, lint_rules, lint_config)
analysis::get_lint_references(parsed_source, linter)
{
references
.into_iter()
@ -1232,16 +1234,14 @@ impl DenoDiagnostic {
pub fn to_lsp_diagnostic(&self, range: &lsp::Range) -> lsp::Diagnostic {
fn no_local_message(
specifier: &ModuleSpecifier,
sloppy_resolution: SloppyImportsResolution,
maybe_sloppy_resolution: Option<&SloppyImportsResolution>,
) -> String {
let mut message = format!(
"Unable to load a local module: {}\n",
to_percent_decoded_str(specifier.as_ref())
);
if let Some(additional_message) =
sloppy_resolution.as_suggestion_message()
{
message.push_str(&additional_message);
if let Some(res) = maybe_sloppy_resolution {
message.push_str(&res.as_suggestion_message());
message.push('.');
} else {
message.push_str("Please check the file path.");
@ -1258,23 +1258,36 @@ impl DenoDiagnostic {
Self::NoCacheJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing jsr package: {}", pkg_req), Some(json!({ "specifier": specifier }))),
Self::NoCacheNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: {}", pkg_req), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => {
let sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution);
let data = sloppy_resolution.as_lsp_quick_fix_message().map(|message| {
let maybe_sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution);
let data = maybe_sloppy_resolution.as_ref().map(|res| {
json!({
"specifier": specifier,
"to": sloppy_resolution.as_specifier(),
"message": message,
"to": res.as_specifier(),
"message": res.as_quick_fix_message(),
})
});
(lsp::DiagnosticSeverity::ERROR, no_local_message(specifier, sloppy_resolution), data)
(lsp::DiagnosticSeverity::ERROR, no_local_message(specifier, maybe_sloppy_resolution.as_ref()), data)
},
Self::Redirect { from, to} => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{from}\" was redirected to \"{to}\"."), Some(json!({ "specifier": from, "redirect": to }))),
Self::ResolutionError(err) => (
Self::ResolutionError(err) => {
let mut message = enhanced_resolution_error_message(err);
if let deno_graph::ResolutionError::ResolverError { error, .. } = err {
if let ResolveError::Other(resolve_error, ..) = (*error).as_ref() {
if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) = resolve_error.downcast_ref::<ImportMapError>() {
if specifier.chars().next().unwrap_or('\0') == '@' {
let hint = format!("\nHint: Use [deno add {}] to add the dependency.", specifier);
message.push_str(hint.as_str());
}
}
}
}
(
lsp::DiagnosticSeverity::ERROR,
enhanced_resolution_error_message(err),
message,
graph_util::get_resolution_error_bare_node_specifier(err)
.map(|specifier| json!({ "specifier": specifier }))
),
)},
Self::InvalidNodeSpecifier(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Unknown Node built-in module: {}", specifier.path()), None),
Self::BareNodeSpecifier(specifier) => (lsp::DiagnosticSeverity::WARNING, format!("\"{}\" is resolved to \"node:{}\". If you want to use a built-in Node module, add a \"node:\" prefix.", specifier, specifier), Some(json!({ "specifier": specifier }))),
};
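The new `ResolutionError` arm above appends a `deno add` hint when the failure is an unmapped bare specifier that looks like a scoped package name (starts with `@`). The downcast-and-match shape, reduced to a self-contained sketch with a simplified error type and plain `anyhow` in place of `AnyError` (the real code goes through `ResolveError::Other` first):

```rust
use std::fmt;

#[derive(Debug)]
enum ImportMapError {
    UnmappedBareSpecifier(String),
}

impl fmt::Display for ImportMapError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::UnmappedBareSpecifier(s) => {
                write!(f, "unmapped bare specifier {s:?}")
            }
        }
    }
}

impl std::error::Error for ImportMapError {}

fn diagnostic_message(err: &anyhow::Error) -> String {
    let mut message = format!("{err}");
    // only bare specifiers that look like scoped package names get the hint
    if let Some(ImportMapError::UnmappedBareSpecifier(specifier)) =
        err.downcast_ref::<ImportMapError>()
    {
        if specifier.starts_with('@') {
            message.push_str(&format!(
                "\nHint: Use [deno add {}] to add the dependency.",
                specifier
            ));
        }
    }
    message
}

fn main() {
    let err = anyhow::Error::new(ImportMapError::UnmappedBareSpecifier(
        "@std/assert".into(),
    ));
    println!("{}", diagnostic_message(&err));
}
```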
@ -1354,21 +1367,20 @@ fn diagnose_resolution(
let mut diagnostics = vec![];
match resolution {
Resolution::Ok(resolved) => {
let file_referrer = referrer_doc.file_referrer();
let specifier = &resolved.specifier;
let managed_npm_resolver = snapshot
.resolver
.maybe_managed_npm_resolver(referrer_doc.file_referrer());
let managed_npm_resolver =
snapshot.resolver.maybe_managed_npm_resolver(file_referrer);
for (_, headers) in snapshot
.resolver
.redirect_chain_headers(specifier, referrer_doc.file_referrer())
.redirect_chain_headers(specifier, file_referrer)
{
if let Some(message) = headers.get("x-deno-warning") {
diagnostics.push(DenoDiagnostic::DenoWarn(message.clone()));
}
}
if let Some(doc) = snapshot
.documents
.get_or_load(specifier, referrer_doc.specifier())
if let Some(doc) =
snapshot.documents.get_or_load(specifier, file_referrer)
{
if let Some(headers) = doc.maybe_headers() {
if let Some(message) = headers.get("x-deno-warning") {
@ -1479,7 +1491,7 @@ fn diagnose_dependency(
.config
.tree
.data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer))
.and_then(|d| d.import_map.as_ref());
.and_then(|d| d.resolver.maybe_import_map());
if let Some(import_map) = import_map {
if let Resolution::Ok(resolved) = &dependency.maybe_code {
if let Some(to) = import_map.lookup(&resolved.specifier, referrer) {
@ -1519,7 +1531,7 @@ fn diagnose_dependency(
// If not @deno-types, diagnose the types if the code errored because
// it's likely resolving into the node_modules folder, which might be
// erroring correctly due to resolution only being for bundlers. Let this
// fail at runtime if necesarry, but don't bother erroring in the editor
// fail at runtime if necessary, but don't bother erroring in the editor
|| !is_types_deno_types && matches!(dependency.maybe_type, Resolution::Ok(_))
&& matches!(dependency.maybe_code, Resolution::Err(_))
{
@ -1530,7 +1542,7 @@ fn diagnose_dependency(
dependency.is_dynamic,
dependency.maybe_attribute_type.as_deref(),
referrer_doc,
import_map.map(|i| i.as_ref()),
import_map,
)
.iter()
.flat_map(|diag| {
@ -1554,7 +1566,7 @@ fn diagnose_dependency(
dependency.is_dynamic,
dependency.maybe_attribute_type.as_deref(),
referrer_doc,
import_map.map(|i| i.as_ref()),
import_map,
)
.iter()
.map(|diag| diag.to_lsp_diagnostic(&range)),
@ -1606,7 +1618,6 @@ fn generate_deno_diagnostics(
#[cfg(test)]
mod tests {
use super::*;
use crate::lsp::cache::LspCache;
use crate::lsp::config::Config;
@ -1616,7 +1627,8 @@ mod tests {
use crate::lsp::documents::LanguageId;
use crate::lsp::language_server::StateSnapshot;
use crate::lsp::resolver::LspResolver;
use deno_config::ConfigFile;
use deno_config::deno_json::ConfigFile;
use pretty_assertions::assert_eq;
use std::sync::Arc;
use test_util::TempDir;
@ -1646,16 +1658,17 @@ mod tests {
async fn setup(
sources: &[(&str, &str, i32, LanguageId)],
maybe_import_map: Option<(&str, &str)>,
) -> StateSnapshot {
) -> (TempDir, StateSnapshot) {
let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri()));
let mut config = Config::new_with_roots([resolve_url("file:///").unwrap()]);
if let Some((base_url, json_string)) = maybe_import_map {
let base_url = resolve_url(base_url).unwrap();
let root_uri = temp_dir.uri();
let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap()));
let mut config = Config::new_with_roots([root_uri.clone()]);
if let Some((relative_path, json_string)) = maybe_import_map {
let base_url = root_uri.join(relative_path).unwrap();
let config_file = ConfigFile::new(
json_string,
base_url,
&deno_config::ConfigParseOptions::default(),
&deno_config::deno_json::ConfigParseOptions::default(),
)
.unwrap();
config.tree.inject_config_file(config_file).await;
@ -1664,9 +1677,8 @@ mod tests {
Arc::new(LspResolver::from_config(&config, &cache, None).await);
let mut documents = Documents::default();
documents.update_config(&config, &resolver, &cache, &Default::default());
for (specifier, source, version, language_id) in sources {
let specifier =
resolve_url(specifier).expect("failed to create specifier");
for (relative_path, source, version, language_id) in sources {
let specifier = root_uri.join(relative_path).unwrap();
documents.open(
specifier.clone(),
*version,
@ -1675,20 +1687,23 @@ mod tests {
None,
);
}
StateSnapshot {
project_version: 0,
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
resolver,
}
(
temp_dir,
StateSnapshot {
project_version: 0,
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
resolver,
},
)
}
#[tokio::test]
async fn test_enabled_then_disabled_specifier() {
let snapshot = setup(
let (_, snapshot) = setup(
&[(
"file:///a.ts",
"a.ts",
r#"import * as b from "./b.ts";
let a: any = "a";
let c: number = "a";
@ -1781,23 +1796,23 @@ let c: number = "a";
#[tokio::test]
async fn test_deno_diagnostics_with_import_map() {
let snapshot = setup(
let (temp_dir, snapshot) = setup(
&[
(
"file:///std/assert/mod.ts",
"std/assert/mod.ts",
"export function assert() {}",
1,
LanguageId::TypeScript,
),
(
"file:///a/file.ts",
"a/file.ts",
"import { assert } from \"../std/assert/mod.ts\";\n\nassert();\n",
1,
LanguageId::TypeScript,
),
],
Some((
"file:///a/import-map.json",
"a/deno.json",
r#"{
"imports": {
"/~/std/": "../std/"
@ -1811,11 +1826,13 @@ let c: number = "a";
let actual = generate_deno_diagnostics(&snapshot, &config, token);
assert_eq!(actual.len(), 2);
for record in actual {
match record.specifier.as_str() {
"file:///std/assert/mod.ts" => {
let relative_specifier =
temp_dir.uri().make_relative(&record.specifier).unwrap();
match relative_specifier.as_str() {
"std/assert/mod.ts" => {
assert_eq!(json!(record.versioned.diagnostics), json!([]))
}
"file:///a/file.ts" => assert_eq!(
"a/file.ts" => assert_eq!(
json!(record.versioned.diagnostics),
json!([
{
@ -1917,9 +1934,9 @@ let c: number = "a";
#[tokio::test]
async fn duplicate_diagnostics_for_duplicate_imports() {
let snapshot = setup(
let (_, snapshot) = setup(
&[(
"file:///a.ts",
"a.ts",
r#"
// @deno-types="bad.d.ts"
import "bad.js";
@ -1993,9 +2010,9 @@ let c: number = "a";
#[tokio::test]
async fn unable_to_load_a_local_module() {
let snapshot = setup(
let (temp_dir, snapshot) = setup(
&[(
"file:///a.ts",
"a.ts",
r#"
import { } from "./🦕.ts";
"#,
@ -2027,7 +2044,10 @@ let c: number = "a";
"severity": 1,
"code": "no-local",
"source": "deno",
"message": "Unable to load a local module: file:///🦕.ts\nPlease check the file path.",
"message": format!(
"Unable to load a local module: {}🦕.ts\nPlease check the file path.",
temp_dir.uri(),
),
}
])
);

View file

@ -153,7 +153,7 @@ impl AssetOrDocument {
pub fn scope(&self) -> Option<&ModuleSpecifier> {
match self {
AssetOrDocument::Asset(_) => None,
AssetOrDocument::Asset(asset_doc) => Some(asset_doc.specifier()),
AssetOrDocument::Document(doc) => doc.scope(),
}
}
@ -1180,11 +1180,10 @@ impl Documents {
pub fn get_or_load(
&self,
specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<Arc<Document>> {
let file_referrer = self.get_file_referrer(referrer);
let specifier =
self.resolve_document_specifier(specifier, file_referrer.as_deref())?;
self.resolve_document_specifier(specifier, file_referrer)?;
if let Some(document) = self.open_docs.get(&specifier) {
Some(document.clone())
} else {
@ -1193,7 +1192,7 @@ impl Documents {
&self.resolver,
&self.config,
&self.cache,
file_referrer.as_deref(),
file_referrer,
)
}
}
@ -1464,7 +1463,7 @@ impl Documents {
specifier = s;
media_type = Some(mt);
}
let Some(doc) = self.get_or_load(&specifier, referrer) else {
let Some(doc) = self.get_or_load(&specifier, file_referrer) else {
let media_type =
media_type.unwrap_or_else(|| MediaType::from_specifier(&specifier));
return Some((specifier, media_type));
@ -1597,7 +1596,9 @@ fn analyze_module(
mod tests {
use super::*;
use crate::lsp::cache::LspCache;
use deno_config::ConfigFile;
use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::ConfigParseOptions;
use deno_core::serde_json;
use deno_core::serde_json::json;
use pretty_assertions::assert_eq;
@ -1751,7 +1752,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&deno_config::ConfigParseOptions::default(),
&ConfigParseOptions::default(),
)
.unwrap(),
)
@ -1795,7 +1796,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&deno_config::ConfigParseOptions::default(),
&ConfigParseOptions::default(),
)
.unwrap(),
)

View file

@ -20,11 +20,9 @@ use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use serde::Deserialize;
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
use super::config::Config;
use super::config::ConfigData;
use super::search::PackageSearchApi;
@ -44,26 +42,31 @@ impl JsrCacheResolver {
pub fn new(
cache: Arc<dyn HttpCache>,
config_data: Option<&ConfigData>,
config: &Config,
) -> Self {
let nv_by_req = DashMap::new();
let info_by_nv = DashMap::new();
let info_by_name = DashMap::new();
let mut workspace_scope_by_name = HashMap::new();
if let Some(config_data) = config_data {
let config_data_by_scope = config.tree.data_by_scope();
for member_scope in config_data.workspace_members.as_ref() {
let Some(member_data) = config_data_by_scope.get(member_scope) else {
for jsr_pkg_config in config_data.member_dir.workspace.jsr_packages() {
let Some(exports) = &jsr_pkg_config.config_file.json.exports else {
continue;
};
let Some(package_config) = member_data.package_config.as_ref() else {
let Some(version) = &jsr_pkg_config.config_file.json.version else {
continue;
};
let Ok(version) = Version::parse_standard(version) else {
continue;
};
let nv = PackageNv {
name: jsr_pkg_config.name.clone(),
version: version.clone(),
};
info_by_name.insert(
package_config.nv.name.clone(),
nv.name.clone(),
Some(Arc::new(JsrPackageInfo {
versions: [(
package_config.nv.version.clone(),
nv.version.clone(),
JsrPackageInfoVersion { yanked: false },
)]
.into_iter()
@ -71,16 +74,21 @@ impl JsrCacheResolver {
})),
);
info_by_nv.insert(
package_config.nv.clone(),
nv.clone(),
Some(Arc::new(JsrPackageVersionInfo {
exports: package_config.exports.clone(),
exports: exports.clone(),
module_graph_1: None,
module_graph_2: None,
manifest: Default::default(),
})),
);
workspace_scope_by_name
.insert(package_config.nv.name.clone(), member_scope.clone());
workspace_scope_by_name.insert(
nv.name.clone(),
ModuleSpecifier::from_directory_path(
jsr_pkg_config.config_file.dir_path(),
)
.unwrap(),
);
}
}
if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) {
@ -148,7 +156,7 @@ impl JsrCacheResolver {
let maybe_nv = self.req_to_nv(&req);
let nv = maybe_nv.as_ref()?;
let info = self.package_version_info(nv)?;
let path = info.export(&normalize_export_name(req_ref.sub_path()))?;
let path = info.export(&req_ref.export_name())?;
if let Some(workspace_scope) = self.workspace_scope_by_name.get(&nv.name) {
workspace_scope.join(path).ok()
} else {
@ -258,30 +266,6 @@ fn read_cached_url(
.ok()?
}
// TODO(nayeemrmn): This is duplicated from a private function in deno_graph
// 0.65.1. Make it public or cleanup otherwise.
fn normalize_export_name(sub_path: Option<&str>) -> Cow<str> {
let Some(sub_path) = sub_path else {
return Cow::Borrowed(".");
};
if sub_path.is_empty() || matches!(sub_path, "/" | ".") {
Cow::Borrowed(".")
} else {
let sub_path = if sub_path.starts_with('/') {
Cow::Owned(format!(".{}", sub_path))
} else if !sub_path.starts_with("./") {
Cow::Owned(format!("./{}", sub_path))
} else {
Cow::Borrowed(sub_path)
};
if let Some(prefix) = sub_path.strip_suffix('/') {
Cow::Owned(prefix.to_string())
} else {
sub_path
}
}
}
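The removed `normalize_export_name` helper (its job is now handled by `req_ref.export_name()` upstream) canonicalized requested sub-paths to the export keys used in deno.json. Its behavior as a quick self-check, assuming the function above is in scope:

```rust
fn main() {
    // expected canonicalization, per the removed helper above
    for (input, expected) in [
        (None, "."),
        (Some(""), "."),
        (Some("/"), "."),
        (Some("mod.ts"), "./mod.ts"),
        (Some("/mod.ts"), "./mod.ts"),
        (Some("./mod.ts"), "./mod.ts"),
        (Some("./sub/"), "./sub"), // trailing slash is trimmed
    ] {
        assert_eq!(normalize_export_name(input), expected);
    }
}
```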
#[derive(Debug)]
pub struct CliJsrSearchApi {
file_fetcher: Arc<FileFetcher>,

View file

@ -2,7 +2,7 @@
use base64::Engine;
use deno_ast::MediaType;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
@ -15,7 +15,6 @@ use deno_core::url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
use deno_runtime::deno_fs::DenoConfigFsAdapter;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_semver::jsr::JsrPackageReqReference;
@ -74,7 +73,6 @@ use super::lsp_custom::TaskDefinition;
use super::npm::CliNpmSearchApi;
use super::parent_process_checker;
use super::performance::Performance;
use super::performance::PerformanceMark;
use super::refactor;
use super::registries::ModuleRegistry;
use super::resolver::LspResolver;
@ -89,10 +87,12 @@ use super::tsc::TsServer;
use super::urls;
use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::has_flag_env_var;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::UnstableFmtOptions;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::graph_util;
@ -121,6 +121,7 @@ impl RootCertStoreProvider for LspRootCertStoreProvider {
#[derive(Debug, Clone)]
pub struct LanguageServer {
client: Client,
pub inner: Arc<tokio::sync::RwLock<Inner>>,
/// This is used to block out standard request handling until the complete
/// user configuration has been fetched. This is done in the `initialized`
@ -129,6 +130,7 @@ pub struct LanguageServer {
/// `workspace/configuration` requests in the `initialize` handler. See:
/// https://github.com/Microsoft/language-server-protocol/issues/567#issuecomment-2085131917
init_flag: AsyncFlag,
performance: Arc<Performance>,
shutdown_flag: AsyncFlag,
}
@ -222,9 +224,15 @@ pub struct Inner {
impl LanguageServer {
pub fn new(client: Client, shutdown_flag: AsyncFlag) -> Self {
let performance = Arc::new(Performance::default());
Self {
inner: Arc::new(tokio::sync::RwLock::new(Inner::new(client))),
client: client.clone(),
inner: Arc::new(tokio::sync::RwLock::new(Inner::new(
client,
performance.clone(),
))),
init_flag: Default::default(),
performance,
shutdown_flag,
}
}
@ -237,9 +245,6 @@ impl LanguageServer {
referrer: ModuleSpecifier,
force_global_cache: bool,
) -> LspResult<Option<Value>> {
if !self.init_flag.is_raised() {
self.init_flag.wait_raised().await;
}
async fn create_graph_for_caching(
cli_options: CliOptions,
roots: Vec<ModuleSpecifier>,
@ -284,55 +289,46 @@ impl LanguageServer {
Ok(())
}
// prepare the cache inside the lock
let maybe_prepare_cache_result = {
let mut inner = self.inner.write().await;
match inner.prepare_cache(specifiers, referrer, force_global_cache) {
Ok(maybe_cache_result) => maybe_cache_result,
Err(err) => {
lsp_warn!("Error preparing caching: {:#}", err);
self
.inner
.read()
.await
.client
.show_message(MessageType::WARNING, err);
return Err(LspError::internal_error());
}
}
};
if let Some(result) = maybe_prepare_cache_result {
// cache outside the lock
let cli_options = result.cli_options;
let roots = result.roots;
let open_docs = result.open_docs;
let handle = spawn(async move {
create_graph_for_caching(cli_options, roots, open_docs).await
});
if let Err(err) = handle.await.unwrap() {
lsp_warn!("Error caching: {:#}", err);
self
.inner
.read()
.await
.client
.show_message(MessageType::WARNING, err);
}
// now get the lock back to update with the new information
let mut inner = self.inner.write().await;
inner.resolver.did_cache();
inner.refresh_npm_specifiers().await;
inner.diagnostics_server.invalidate_all();
inner.project_changed([], true);
inner
.ts_server
.cleanup_semantic_cache(inner.snapshot())
.await;
inner.send_diagnostics_update();
inner.send_testing_update();
inner.performance.measure(result.mark);
if !self.init_flag.is_raised() {
self.init_flag.wait_raised().await;
}
// prepare the cache inside the lock
let mark = self
.performance
.mark_with_args("lsp.cache", (&specifiers, &referrer));
let prepare_cache_result = self.inner.write().await.prepare_cache(
specifiers,
referrer,
force_global_cache,
);
match prepare_cache_result {
Ok(result) => {
// cache outside the lock
let cli_options = result.cli_options;
let roots = result.roots;
let open_docs = result.open_docs;
let handle = spawn(async move {
create_graph_for_caching(cli_options, roots, open_docs).await
});
if let Err(err) = handle.await.unwrap() {
lsp_warn!("Error caching: {:#}", err);
self.client.show_message(MessageType::WARNING, err);
}
// now get the lock back to update with the new information
self.inner.write().await.post_cache().await;
self.performance.measure(mark);
}
Err(err) => {
lsp_warn!("Error preparing caching: {:#}", err);
self.client.show_message(MessageType::WARNING, err);
return Err(LspError::internal_error());
}
}
Ok(Some(json!(true)))
}
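
The reshaped flow above keeps the shape of the old code but makes it explicit: take the write lock only to prepare, run the graph build unlocked, then re-lock briefly in post_cache(). A minimal sketch of that lock-scoping pattern, assuming the tokio crate (with the macros feature) and stand-in state rather than the server's real types:

use std::sync::Arc;
use tokio::sync::RwLock;

#[derive(Default)]
struct State {
  cached: Vec<String>,
}

async fn cache(state: Arc<RwLock<State>>, roots: Vec<String>) {
  // prepare inside the lock (cheap)
  let prepared = {
    let _inner = state.write().await;
    roots // stands in for prepare_cache(..)
  };
  // expensive work outside the lock, so other requests keep flowing
  let handle = tokio::spawn(async move { prepared });
  let results = handle.await.unwrap();
  // take the lock back only to record the new information
  state.write().await.cached.extend(results);
}

#[tokio::main]
async fn main() {
  let state = Arc::new(RwLock::new(State::default()));
  cache(state.clone(), vec!["file:///a.ts".into()]).await;
  assert_eq!(state.read().await.cached.len(), 1);
}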
@ -376,20 +372,7 @@ impl LanguageServer {
if !self.init_flag.is_raised() {
self.init_flag.wait_raised().await;
}
let inner = self.inner.read().await;
if let Some(testing_server) = &inner.maybe_testing_server {
match params.map(serde_json::from_value) {
Some(Ok(params)) => {
testing_server
.run_request(params, inner.config.workspace_settings().clone())
.await
}
Some(Err(err)) => Err(LspError::invalid_params(err.to_string())),
None => Err(LspError::invalid_params("Missing parameters")),
}
} else {
Err(LspError::invalid_request())
}
self.inner.read().await.test_run_request(params).await
}
pub async fn test_run_cancel_request(
@ -399,16 +382,7 @@ impl LanguageServer {
if !self.init_flag.is_raised() {
self.init_flag.wait_raised().await;
}
if let Some(testing_server) = &self.inner.read().await.maybe_testing_server
{
match params.map(serde_json::from_value) {
Some(Ok(params)) => testing_server.run_cancel_request(params),
Some(Err(err)) => Err(LspError::invalid_params(err.to_string())),
None => Err(LspError::invalid_params("Missing parameters")),
}
} else {
Err(LspError::invalid_request())
}
self.inner.read().await.test_run_cancel_request(params)
}
pub async fn virtual_text_document(
@ -437,10 +411,9 @@ impl LanguageServer {
}
pub async fn refresh_configuration(&self) {
let (client, folders, capable) = {
let (folders, capable) = {
let inner = self.inner.read().await;
(
inner.client.clone(),
inner.config.workspace_folders.clone(),
inner.config.workspace_configuration_capable(),
)
@ -451,7 +424,8 @@ impl LanguageServer {
for (_, folder) in folders.as_ref() {
scopes.push(Some(folder.uri.clone()));
}
let configs = client
let configs = self
.client
.when_outside_lsp_lock()
.workspace_configuration(scopes)
.await;
@ -466,8 +440,10 @@ impl LanguageServer {
for (folder_uri, _) in folders.as_ref() {
folder_settings.push((folder_uri.clone(), configs.next().unwrap()));
}
let mut inner = self.inner.write().await;
inner
self
.inner
.write()
.await
.config
.set_workspace_settings(unscoped, folder_settings);
}
@ -476,7 +452,7 @@ impl LanguageServer {
}
impl Inner {
fn new(client: Client) -> Self {
fn new(client: Client, performance: Arc<Performance>) -> Self {
let cache = LspCache::default();
let http_client_provider = Arc::new(HttpClientProvider::new(None, None));
let module_registry = ModuleRegistry::new(
@ -488,7 +464,6 @@ impl Inner {
let npm_search_api =
CliNpmSearchApi::new(module_registry.file_fetcher.clone());
let documents = Documents::default();
let performance = Arc::new(Performance::default());
let config = Config::default();
let ts_server = Arc::new(TsServer::new(performance.clone()));
let diagnostics_state = Arc::new(DiagnosticsState::default());
@ -1180,6 +1155,36 @@ impl Inner {
self.performance.measure(mark);
}
async fn did_change_configuration(
&mut self,
params: DidChangeConfigurationParams,
) {
if !self.config.workspace_configuration_capable() {
let config = params.settings.as_object().map(|settings| {
let deno =
serde_json::to_value(settings.get(SETTINGS_SECTION)).unwrap();
let javascript =
serde_json::to_value(settings.get("javascript")).unwrap();
let typescript =
serde_json::to_value(settings.get("typescript")).unwrap();
WorkspaceSettings::from_raw_settings(deno, javascript, typescript)
});
if let Some(settings) = config {
self.config.set_workspace_settings(settings, vec![]);
}
};
self.update_debug_flag();
self.update_global_cache().await;
self.refresh_workspace_files();
self.refresh_config_tree().await;
self.update_cache();
self.refresh_resolver().await;
self.refresh_documents_config().await;
self.diagnostics_server.invalidate_all();
self.send_diagnostics_update();
self.send_testing_update();
}
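
The fallback at the top of did_change_configuration applies when the client cannot answer workspace/configuration requests: the settings arrive inline with the notification and are split into the deno, javascript, and typescript sections. A sketch of the expected payload shape (the option names inside each section are illustrative):

use serde_json::json;

fn main() {
  let settings = json!({
    "deno": { "enable": true },
    "javascript": { "suggest": { "autoImports": true } },
    "typescript": { "suggest": { "autoImports": true } }
  });
  // mirrors the section extraction in did_change_configuration
  let deno = settings.get("deno");
  let javascript = settings.get("javascript");
  let typescript = settings.get("typescript");
  assert!(deno.is_some() && javascript.is_some() && typescript.is_some());
}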
async fn did_change_watched_files(
&mut self,
params: DidChangeWatchedFilesParams,
@ -1322,14 +1327,15 @@ impl Inner {
if !self
.config
.tree
.fmt_options_for_specifier(&specifier)
.fmt_config_for_specifier(&specifier)
.files
.matches_specifier(&specifier)
{
return Ok(None);
}
let document =
file_referrer.and_then(|r| self.documents.get_or_load(&specifier, &r));
let document = self
.documents
.get_or_load(&specifier, file_referrer.as_ref());
let Some(document) = document else {
return Ok(None);
};
@ -1352,11 +1358,24 @@ impl Inner {
let mut fmt_options = self
.config
.tree
.fmt_options_for_specifier(&specifier)
.fmt_config_for_specifier(&specifier)
.options
.clone();
fmt_options.use_tabs = Some(!params.options.insert_spaces);
fmt_options.indent_width = Some(params.options.tab_size as u8);
let maybe_workspace = self
.config
.tree
.data_for_specifier(&specifier)
.map(|d| &d.member_dir.workspace);
let unstable_options = UnstableFmtOptions {
css: maybe_workspace
.map(|w| w.has_unstable("fmt-css"))
.unwrap_or(false),
yaml: maybe_workspace
.map(|w| w.has_unstable("fmt-yaml"))
.unwrap_or(false),
};
let document = document.clone();
move || {
let format_result = match document.maybe_parsed_source() {
@ -1374,7 +1393,12 @@ impl Inner {
.map(|ext| file_path.with_extension(ext))
.unwrap_or(file_path);
// it's not a js/ts file, so attempt to format its contents
format_file(&file_path, document.content(), &fmt_options)
format_file(
&file_path,
document.content(),
&fmt_options,
&unstable_options,
)
}
};
match format_result {
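
The UnstableFmtOptions built earlier in this hunk are driven by the workspace's unstable flags, which presumably come from an "unstable" array in the project's deno.json such as { "unstable": ["fmt-css", "fmt-yaml"] }. A stand-in for the has_unstable lookup:

fn has_unstable(unstable_flags: &[&str], flag: &str) -> bool {
  unstable_flags.iter().any(|f| *f == flag)
}

fn main() {
  let flags = ["fmt-css", "fmt-yaml"];
  assert!(has_unstable(&flags, "fmt-css"));
  assert!(!has_unstable(&flags, "fmt-sql"));
}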
@ -1425,7 +1449,7 @@ impl Inner {
{
let dep_doc = dep
.get_code()
.and_then(|s| self.documents.get_or_load(s, &specifier));
.and_then(|s| self.documents.get_or_load(s, file_referrer));
let dep_maybe_types_dependency =
dep_doc.as_ref().map(|d| d.maybe_types_dependency());
let value = match (dep.maybe_code.is_none(), dep.maybe_type.is_none(), &dep_maybe_types_dependency) {
@ -1606,7 +1630,7 @@ impl Inner {
(&self
.config
.tree
.fmt_options_for_specifier(&specifier)
.fmt_config_for_specifier(&specifier)
.options)
.into(),
tsc::UserPreferences::from_config_for_specifier(
@ -1771,7 +1795,7 @@ impl Inner {
(&self
.config
.tree
.fmt_options_for_specifier(&code_action_data.specifier)
.fmt_config_for_specifier(&code_action_data.specifier)
.options)
.into(),
tsc::UserPreferences::from_config_for_specifier(
@ -1805,7 +1829,10 @@ impl Inner {
LspError::internal_error()
})?;
code_action
} else if kind.as_str().starts_with(CodeActionKind::REFACTOR.as_str()) {
} else if let Some(kind_suffix) = kind
.as_str()
.strip_prefix(CodeActionKind::REFACTOR.as_str())
{
let mut code_action = params;
let action_data: refactor::RefactorCodeActionData = from_value(data)
.map_err(|err| {
@ -1814,7 +1841,7 @@ impl Inner {
})?;
let asset_or_doc = self.get_asset_or_document(&action_data.specifier)?;
let line_index = asset_or_doc.line_index();
let refactor_edit_info = self
let mut refactor_edit_info = self
.ts_server
.get_edits_for_refactor(
self.snapshot(),
@ -1822,7 +1849,7 @@ impl Inner {
(&self
.config
.tree
.fmt_options_for_specifier(&action_data.specifier)
.fmt_config_for_specifier(&action_data.specifier)
.options)
.into(),
line_index.offset_tsc(action_data.range.start)?
@ -1836,6 +1863,17 @@ impl Inner {
asset_or_doc.scope().cloned(),
)
.await?;
if kind_suffix == ".rewrite.function.returnType" {
refactor_edit_info.edits = fix_ts_import_changes(
&action_data.specifier,
&refactor_edit_info.edits,
&self.get_ts_response_import_mapper(&action_data.specifier),
)
.map_err(|err| {
error!("Unable to remap changes: {:#}", err);
LspError::internal_error()
})?
}
code_action.edit = refactor_edit_info.to_workspace_edit(self)?;
code_action
} else {
@ -1857,7 +1895,9 @@ impl Inner {
.config
.tree
.data_for_specifier(file_referrer)
.and_then(|d| d.import_map.as_ref().map(|i| i.as_ref())),
// todo(dsherret): this should probably just take the resolver itself
// as the import map is an implementation detail
.and_then(|d| d.resolver.maybe_import_map()),
self.resolver.as_ref(),
)
}
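
The strip_prefix change above lets the handler keep matching every refactor action while also special-casing one concrete kind. LSP code action kinds are dot-separated paths, so the suffix comparison looks like this sketch:

fn main() {
  let kind = "refactor.rewrite.function.returnType";
  if let Some(kind_suffix) = kind.strip_prefix("refactor") {
    assert_eq!(kind_suffix, ".rewrite.function.returnType");
    if kind_suffix == ".rewrite.function.returnType" {
      // the real handler remaps import edits here via fix_ts_import_changes
    }
  }
}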
@ -2178,7 +2218,9 @@ impl Inner {
.config
.tree
.data_for_specifier(file_referrer)
.and_then(|d| d.import_map.as_ref().map(|i| i.as_ref())),
// todo(dsherret): this should probably just take the resolver itself
// as the import map is an implementation detail
.and_then(|d| d.resolver.maybe_import_map()),
)
.await;
}
@ -2213,7 +2255,7 @@ impl Inner {
(&self
.config
.tree
.fmt_options_for_specifier(&specifier)
.fmt_config_for_specifier(&specifier)
.options)
.into(),
scope.cloned(),
@ -2268,11 +2310,7 @@ impl Inner {
self.snapshot(),
GetCompletionDetailsArgs {
format_code_settings: Some(
(&self
.config
.tree
.fmt_options_for_specifier(specifier)
.options)
(&self.config.tree.fmt_config_for_specifier(specifier).options)
.into(),
),
preferences: Some(
@ -2846,7 +2884,7 @@ impl Inner {
let format_code_settings = (&self
.config
.tree
.fmt_options_for_specifier(&old_specifier)
.fmt_config_for_specifier(&old_specifier)
.options)
.into();
changes.extend(
@ -2996,98 +3034,17 @@ impl tower_lsp::LanguageServer for LanguageServer {
async fn initialized(&self, _: InitializedParams) {
self.refresh_configuration().await;
let mut registrations = Vec::with_capacity(2);
let (client, http_client) = {
let (registrations, http_client) = {
let mut inner = self.inner.write().await;
init_log_file(inner.config.log_file());
inner.update_debug_flag();
inner.update_global_cache().await;
inner.refresh_workspace_files();
inner.refresh_config_tree().await;
inner.update_cache();
inner.refresh_resolver().await;
inner.refresh_documents_config().await;
let registrations = inner.initialized().await;
inner.task_queue.start(self.clone());
self.init_flag.raise();
if inner.config.did_change_watched_files_capable() {
// we are going to watch all the JSON files in the workspace, and the
// notification handler will pick up any changes to the files we are
// interested in.
let options = DidChangeWatchedFilesRegistrationOptions {
watchers: vec![FileSystemWatcher {
glob_pattern: GlobPattern::String(
"**/*.{json,jsonc,lock}".to_string(),
),
kind: None,
}],
};
registrations.push(Registration {
id: "workspace/didChangeWatchedFiles".to_string(),
method: "workspace/didChangeWatchedFiles".to_string(),
register_options: Some(serde_json::to_value(options).unwrap()),
});
}
if inner.config.will_rename_files_capable() {
let options = FileOperationRegistrationOptions {
filters: vec![FileOperationFilter {
scheme: Some("file".to_string()),
pattern: FileOperationPattern {
glob: "**/*".to_string(),
matches: None,
options: None,
},
}],
};
registrations.push(Registration {
id: "workspace/willRenameFiles".to_string(),
method: "workspace/willRenameFiles".to_string(),
register_options: Some(serde_json::to_value(options).unwrap()),
});
}
if inner.config.testing_api_capable() {
let test_server = testing::TestServer::new(
inner.client.clone(),
inner.performance.clone(),
inner.config.root_uri().cloned(),
);
inner.maybe_testing_server = Some(test_server);
}
let mut config_events = vec![];
for (scope_uri, config_data) in inner.config.tree.data_by_scope().iter() {
if let Some(config_file) = &config_data.config_file {
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: scope_uri.clone(),
file_uri: config_file.specifier.clone(),
typ: lsp_custom::DenoConfigurationChangeType::Added,
configuration_type: lsp_custom::DenoConfigurationType::DenoJson,
});
}
if let Some(package_json) = &config_data.package_json {
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: scope_uri.clone(),
file_uri: package_json.specifier(),
typ: lsp_custom::DenoConfigurationChangeType::Added,
configuration_type: lsp_custom::DenoConfigurationType::PackageJson,
});
}
}
if !config_events.is_empty() {
inner
.client
.send_did_change_deno_configuration_notification(
lsp_custom::DidChangeDenoConfigurationNotificationParams {
changes: config_events,
},
);
}
(inner.client.clone(), inner.http_client_provider.clone())
(registrations, inner.http_client_provider.clone())
};
self.init_flag.raise();
for registration in registrations {
if let Err(err) = client
if let Err(err) = self
.client
.when_outside_lsp_lock()
.register_capability(vec![registration])
.await
@ -3097,6 +3054,7 @@ impl tower_lsp::LanguageServer for LanguageServer {
}
if upgrade_check_enabled() {
let client = self.client.clone();
// spawn to avoid lsp send/sync requirement, but also just
// to ensure this initialized method returns quickly
spawn(async move {
@ -3161,39 +3119,17 @@ impl tower_lsp::LanguageServer for LanguageServer {
if !self.init_flag.is_raised() {
self.init_flag.wait_raised().await;
}
let mark = {
let inner = self.inner.read().await;
inner
.performance
.mark_with_args("lsp.did_change_configuration", &params)
};
let mark = self
.performance
.mark_with_args("lsp.did_change_configuration", &params);
self.refresh_configuration().await;
let mut inner = self.inner.write().await;
if !inner.config.workspace_configuration_capable() {
let config = params.settings.as_object().map(|settings| {
let deno =
serde_json::to_value(settings.get(SETTINGS_SECTION)).unwrap();
let javascript =
serde_json::to_value(settings.get("javascript")).unwrap();
let typescript =
serde_json::to_value(settings.get("typescript")).unwrap();
WorkspaceSettings::from_raw_settings(deno, javascript, typescript)
});
if let Some(settings) = config {
inner.config.set_workspace_settings(settings, vec![]);
}
};
inner.update_debug_flag();
inner.update_global_cache().await;
inner.refresh_workspace_files();
inner.refresh_config_tree().await;
inner.update_cache();
inner.refresh_resolver().await;
inner.refresh_documents_config().await;
inner.diagnostics_server.invalidate_all();
inner.send_diagnostics_update();
inner.send_testing_update();
inner.performance.measure(mark);
self
.inner
.write()
.await
.did_change_configuration(params)
.await;
self.performance.measure(mark);
}
async fn did_change_watched_files(
@ -3218,43 +3154,22 @@ impl tower_lsp::LanguageServer for LanguageServer {
if !self.init_flag.is_raised() {
self.init_flag.wait_raised().await;
}
let mark = {
let mut inner = self.inner.write().await;
let mark = inner
.performance
.mark_with_args("lsp.did_change_workspace_folders", &params);
let mut workspace_folders = params
.event
.added
.into_iter()
.map(|folder| {
(
inner.url_map.normalize_url(&folder.uri, LspUrlKind::Folder),
folder,
)
})
.collect::<Vec<(ModuleSpecifier, WorkspaceFolder)>>();
for (specifier, folder) in inner.config.workspace_folders.as_ref() {
if !params.event.removed.is_empty()
&& params.event.removed.iter().any(|f| f.uri == folder.uri)
{
continue;
}
workspace_folders.push((specifier.clone(), folder.clone()));
}
inner.config.set_workspace_folders(workspace_folders);
mark
};
let mark = self
.performance
.mark_with_args("lsp.did_change_workspace_folders", &params);
self
.inner
.write()
.await
.pre_did_change_workspace_folders(params);
self.refresh_configuration().await;
let mut inner = self.inner.write().await;
inner.refresh_workspace_files();
inner.refresh_config_tree().await;
inner.refresh_resolver().await;
inner.refresh_documents_config().await;
inner.diagnostics_server.invalidate_all();
inner.send_diagnostics_update();
inner.send_testing_update();
inner.performance.measure(mark);
self
.inner
.write()
.await
.post_did_change_workspace_folders()
.await;
self.performance.measure(mark);
}
async fn document_symbol(
@ -3516,20 +3431,101 @@ struct PrepareCacheResult {
cli_options: CliOptions,
roots: Vec<ModuleSpecifier>,
open_docs: Vec<Arc<Document>>,
mark: PerformanceMark,
}
// These are implementations of custom commands supported by the LSP
impl Inner {
async fn initialized(&mut self) -> Vec<Registration> {
let mut registrations = Vec::with_capacity(2);
init_log_file(self.config.log_file());
self.update_debug_flag();
self.update_global_cache().await;
self.refresh_workspace_files();
self.refresh_config_tree().await;
self.update_cache();
self.refresh_resolver().await;
self.refresh_documents_config().await;
if self.config.did_change_watched_files_capable() {
// we are going to watch all the JSON files in the workspace, and the
// notification handler will pick up any changes to the files we are
// interested in.
let options = DidChangeWatchedFilesRegistrationOptions {
watchers: vec![FileSystemWatcher {
glob_pattern: GlobPattern::String(
"**/*.{json,jsonc,lock}".to_string(),
),
kind: None,
}],
};
registrations.push(Registration {
id: "workspace/didChangeWatchedFiles".to_string(),
method: "workspace/didChangeWatchedFiles".to_string(),
register_options: Some(serde_json::to_value(options).unwrap()),
});
}
if self.config.will_rename_files_capable() {
let options = FileOperationRegistrationOptions {
filters: vec![FileOperationFilter {
scheme: Some("file".to_string()),
pattern: FileOperationPattern {
glob: "**/*".to_string(),
matches: None,
options: None,
},
}],
};
registrations.push(Registration {
id: "workspace/willRenameFiles".to_string(),
method: "workspace/willRenameFiles".to_string(),
register_options: Some(serde_json::to_value(options).unwrap()),
});
}
if self.config.testing_api_capable() {
let test_server = testing::TestServer::new(
self.client.clone(),
self.performance.clone(),
self.config.root_uri().cloned(),
);
self.maybe_testing_server = Some(test_server);
}
let mut config_events = vec![];
for (scope_uri, config_data) in self.config.tree.data_by_scope().iter() {
if let Some(config_file) = config_data.maybe_deno_json() {
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: scope_uri.clone(),
file_uri: config_file.specifier.clone(),
typ: lsp_custom::DenoConfigurationChangeType::Added,
configuration_type: lsp_custom::DenoConfigurationType::DenoJson,
});
}
if let Some(package_json) = config_data.maybe_pkg_json() {
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
scope_uri: scope_uri.clone(),
file_uri: package_json.specifier(),
typ: lsp_custom::DenoConfigurationChangeType::Added,
configuration_type: lsp_custom::DenoConfigurationType::PackageJson,
});
}
}
if !config_events.is_empty() {
self.client.send_did_change_deno_configuration_notification(
lsp_custom::DidChangeDenoConfigurationNotificationParams {
changes: config_events,
},
);
}
registrations
}
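
For reference, the watcher registration built above serializes to a standard LSP dynamic-registration payload; a sketch with plain serde_json in place of the lsp types:

use serde_json::json;

fn main() {
  let registration = json!({
    "id": "workspace/didChangeWatchedFiles",
    "method": "workspace/didChangeWatchedFiles",
    "registerOptions": {
      "watchers": [{ "globPattern": "**/*.{json,jsonc,lock}" }]
    }
  });
  println!("{registration}");
}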
fn prepare_cache(
&mut self,
specifiers: Vec<ModuleSpecifier>,
referrer: ModuleSpecifier,
force_global_cache: bool,
) -> Result<Option<PrepareCacheResult>, AnyError> {
let mark = self
.performance
.mark_with_args("lsp.cache", (&specifiers, &referrer));
) -> Result<PrepareCacheResult, AnyError> {
let config_data = self.config.tree.data_for_specifier(&referrer);
let mut roots = if !specifiers.is_empty() {
specifiers
@ -3542,7 +3538,7 @@ impl Inner {
if let Some(npm_reqs) = self
.documents
.npm_reqs_by_scope()
.get(&config_data.map(|d| d.scope.clone()))
.get(&config_data.map(|d| d.scope.as_ref().clone()))
{
roots.extend(
npm_reqs
@ -3555,28 +3551,32 @@ impl Inner {
let initial_cwd = config_data
.and_then(|d| d.scope.to_file_path().ok())
.unwrap_or_else(|| self.initial_cwd.clone());
// todo: we need a way to convert config data to a Workspace
let workspace = Arc::new(Workspace::discover(
deno_config::workspace::WorkspaceDiscoverStart::Dirs(&[
initial_cwd.clone()
]),
&WorkspaceDiscoverOptions {
fs: &DenoConfigFsAdapter::new(&deno_runtime::deno_fs::RealFs),
pkg_json_cache: None,
config_parse_options: deno_config::ConfigParseOptions {
include_task_comments: false,
let workspace = match config_data {
Some(d) => d.member_dir.clone(),
None => Arc::new(WorkspaceDirectory::discover(
deno_config::workspace::WorkspaceDiscoverStart::Paths(&[
initial_cwd.clone()
]),
&WorkspaceDiscoverOptions {
fs: Default::default(), // use real fs
deno_json_cache: None,
pkg_json_cache: None,
workspace_cache: None,
config_parse_options: deno_config::deno_json::ConfigParseOptions {
include_task_comments: false,
},
additional_config_file_names: &[],
discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"),
maybe_vendor_override: if force_global_cache {
Some(deno_config::workspace::VendorEnablement::Disable)
} else {
None
},
},
additional_config_file_names: &[],
discover_pkg_json: true,
maybe_vendor_override: if force_global_cache {
Some(deno_config::workspace::VendorEnablement::Disable)
} else {
None
},
},
)?);
)?),
};
let cli_options = CliOptions::new(
Flags {
Arc::new(Flags {
cache_path: Some(self.cache.deno_dir().root.clone()),
ca_stores: workspace_settings.certificate_stores.clone(),
ca_data: workspace_settings.tls_certificate.clone().map(CaData::File),
@ -3584,10 +3584,9 @@ impl Inner {
.unsafely_ignore_certificate_errors
.clone(),
import_map_path: config_data.and_then(|d| {
if d.import_map_from_settings {
return Some(d.import_map.as_ref()?.base_url().to_string());
}
None
d.import_map_from_settings
.as_ref()
.map(|url| url.to_string())
}),
node_modules_dir: Some(
config_data
@ -3597,7 +3596,7 @@ impl Inner {
// bit of a hack to force the lsp to cache the @types/node package
type_check_mode: crate::args::TypeCheckMode::Local,
..Default::default()
},
}),
initial_cwd,
config_data.and_then(|d| d.lockfile.clone()),
config_data
@ -3608,12 +3607,57 @@ impl Inner {
)?;
let open_docs = self.documents.documents(DocumentsFilter::OpenDiagnosable);
Ok(Some(PrepareCacheResult {
Ok(PrepareCacheResult {
cli_options,
open_docs,
roots,
mark,
}))
})
}
async fn post_cache(&mut self) {
self.resolver.did_cache();
self.refresh_npm_specifiers().await;
self.diagnostics_server.invalidate_all();
self.project_changed([], true);
self.ts_server.cleanup_semantic_cache(self.snapshot()).await;
self.send_diagnostics_update();
self.send_testing_update();
}
fn pre_did_change_workspace_folders(
&mut self,
params: DidChangeWorkspaceFoldersParams,
) {
let mut workspace_folders = params
.event
.added
.into_iter()
.map(|folder| {
(
self.url_map.normalize_url(&folder.uri, LspUrlKind::Folder),
folder,
)
})
.collect::<Vec<(ModuleSpecifier, WorkspaceFolder)>>();
for (specifier, folder) in self.config.workspace_folders.as_ref() {
if !params.event.removed.is_empty()
&& params.event.removed.iter().any(|f| f.uri == folder.uri)
{
continue;
}
workspace_folders.push((specifier.clone(), folder.clone()));
}
self.config.set_workspace_folders(workspace_folders);
}
async fn post_did_change_workspace_folders(&mut self) {
self.refresh_workspace_files();
self.refresh_config_tree().await;
self.refresh_resolver().await;
self.refresh_documents_config().await;
self.diagnostics_server.invalidate_all();
self.send_diagnostics_update();
self.send_testing_update();
}
fn get_performance(&self) -> Value {
@ -3621,6 +3665,40 @@ impl Inner {
json!({ "averages": averages })
}
async fn test_run_request(
&self,
params: Option<Value>,
) -> LspResult<Option<Value>> {
if let Some(testing_server) = &self.maybe_testing_server {
match params.map(serde_json::from_value) {
Some(Ok(params)) => {
testing_server
.run_request(params, self.config.workspace_settings().clone())
.await
}
Some(Err(err)) => Err(LspError::invalid_params(err.to_string())),
None => Err(LspError::invalid_params("Missing parameters")),
}
} else {
Err(LspError::invalid_request())
}
}
fn test_run_cancel_request(
&self,
params: Option<Value>,
) -> LspResult<Option<Value>> {
if let Some(testing_server) = &self.maybe_testing_server {
match params.map(serde_json::from_value) {
Some(Ok(params)) => testing_server.run_cancel_request(params),
Some(Err(err)) => Err(LspError::invalid_params(err.to_string())),
None => Err(LspError::invalid_params("Missing parameters")),
}
} else {
Err(LspError::invalid_request())
}
}
fn task_definitions(&self) -> LspResult<Vec<TaskDefinition>> {
let mut result = vec![];
for config_file in self.config.tree.config_files() {

View file

@ -33,6 +33,7 @@ use deno_graph::Dependency;
use deno_runtime::deno_permissions::PermissionsContainer;
use log::error;
use once_cell::sync::Lazy;
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
@ -86,23 +87,23 @@ enum CompletionType {
/// Determine if a completion at a given offset is a string literal or a key/
/// variable.
fn get_completion_type(
offset: usize,
char_offset: usize,
tokens: &[Token],
match_result: &MatchResult,
) -> Option<CompletionType> {
let mut len = 0_usize;
let mut char_count = 0_usize;
for (index, token) in tokens.iter().enumerate() {
match token {
Token::String(s) => {
len += s.chars().count();
if offset < len {
char_count += s.chars().count();
if char_offset < char_count {
return Some(CompletionType::Literal(s.clone()));
}
}
Token::Key(k) => {
if let Some(prefix) = &k.prefix {
len += prefix.chars().count();
if offset < len {
char_count += prefix.chars().count();
if char_offset < char_count {
return Some(CompletionType::Key {
key: k.clone(),
prefix: Some(prefix.clone()),
@ -110,7 +111,7 @@ fn get_completion_type(
});
}
}
if offset < len {
if char_offset < char_count {
return None;
}
if let StringOrNumber::String(name) = &k.name {
@ -118,8 +119,8 @@ fn get_completion_type(
.get(name)
.map(|s| s.to_string(Some(k), false))
.unwrap_or_default();
len += value.chars().count();
if offset <= len {
char_count += value.chars().count();
if char_offset <= char_count {
return Some(CompletionType::Key {
key: k.clone(),
prefix: None,
@ -128,8 +129,8 @@ fn get_completion_type(
}
}
if let Some(suffix) = &k.suffix {
len += suffix.chars().count();
if offset <= len {
char_count += suffix.chars().count();
if char_offset <= char_count {
return Some(CompletionType::Literal(suffix.clone()));
}
}
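
The offset → char_offset rename in this function is more than cosmetic: the token lengths are tallied with chars().count(), so callers must supply a character offset, which diverges from a byte offset on multi-byte UTF-8. A small demonstration:

fn main() {
  let s = "dëno";
  assert_eq!(s.len(), 5); // bytes: "ë" is two bytes
  assert_eq!(s.chars().count(), 4); // characters
  // A cursor after "dë" is at byte offset 3 but char offset 2.
  let byte_offset = s.char_indices().nth(2).unwrap().0;
  assert_eq!(byte_offset, 3);
}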
@ -449,49 +450,6 @@ impl ModuleRegistry {
}
}
fn complete_literal(
&self,
s: String,
completions: &mut HashMap<String, lsp::CompletionItem>,
current_specifier: &str,
offset: usize,
range: &lsp::Range,
) {
let label = if s.starts_with('/') {
s[0..].to_string()
} else {
s.to_string()
};
let full_text = format!(
"{}{}{}",
&current_specifier[..offset],
s,
&current_specifier[offset..]
);
let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: full_text.clone(),
}));
let filter_text = Some(full_text);
completions.insert(
s,
lsp::CompletionItem {
label,
kind: Some(lsp::CompletionItemKind::FOLDER),
filter_text,
sort_text: Some("1".to_string()),
text_edit,
commit_characters: Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
),
..Default::default()
},
);
}
/// Disable a registry, removing its configuration, if any, from memory.
pub fn disable(&mut self, origin: &str) {
let Ok(origin_url) = Url::parse(origin) else {
@ -654,339 +612,366 @@ impl ModuleRegistry {
/// any, for the specifier.
pub async fn get_completions(
&self,
current_specifier: &str,
offset: usize,
text: &str,
range: &lsp::Range,
resolved: Option<&ModuleSpecifier>,
specifier_exists: impl Fn(&ModuleSpecifier) -> bool,
) -> Option<lsp::CompletionList> {
if let Ok(specifier) = Url::parse(current_specifier) {
let origin = base_url(&specifier);
let origin_len = origin.chars().count();
if offset >= origin_len {
if let Some(registries) = self.origins.get(&origin) {
let path = &specifier[Position::BeforePath..];
let path_offset = offset - origin_len;
let mut completions = HashMap::<String, lsp::CompletionItem>::new();
let mut is_incomplete = false;
let mut did_match = false;
for registry in registries {
let tokens = parse(&registry.schema, None)
.map_err(|e| {
error!(
"Error parsing registry schema for origin \"{}\". {}",
origin, e
);
})
.ok()?;
let mut i = tokens.len();
let last_key_name = StringOrNumber::String(
tokens
.iter()
.last()
.map(|t| {
if let Token::Key(key) = t {
if let StringOrNumber::String(s) = &key.name {
return s.clone();
}
}
"".to_string()
})
.unwrap_or_default(),
);
loop {
let matcher = Matcher::new(&tokens[..i], None)
.map_err(|e| {
error!(
"Error creating matcher for schema for origin \"{}\". {}",
origin, e
);
})
.ok()?;
if let Some(match_result) = matcher.matches(path) {
did_match = true;
let completion_type =
get_completion_type(path_offset, &tokens, &match_result);
match completion_type {
Some(CompletionType::Literal(s)) => self.complete_literal(
s,
&mut completions,
current_specifier,
offset,
range,
),
Some(CompletionType::Key { key, prefix, index }) => {
let maybe_url = registry.get_url_for_key(&key);
if let Some(url) = maybe_url {
if let Some(items) = self
.get_variable_items(
&key,
url,
&specifier,
&tokens,
&match_result,
)
.await
{
let compiler = Compiler::new(&tokens[..=index], None);
let base = Url::parse(&origin).ok()?;
let (items, preselect, incomplete) = match items {
VariableItems::List(list) => {
(list.items, list.preselect, list.is_incomplete)
}
VariableItems::Simple(items) => (items, None, false),
};
if incomplete {
is_incomplete = true;
}
for (idx, item) in items.into_iter().enumerate() {
let mut label = if let Some(p) = &prefix {
format!("{p}{item}")
} else {
item.clone()
};
if label.ends_with('/') {
label.pop();
}
let kind = if key.name == last_key_name
&& !item.ends_with('/')
{
Some(lsp::CompletionItemKind::FILE)
} else {
Some(lsp::CompletionItemKind::FOLDER)
};
let mut params = match_result.params.clone();
params.insert(
key.name.clone(),
StringOrVec::from_str(&item, &key),
);
let mut path =
compiler.to_path(&params).unwrap_or_default();
if path.ends_with('/') {
path.pop();
}
let item_specifier = base.join(&path).ok()?;
let full_text = item_specifier.as_str();
let text_edit = Some(lsp::CompletionTextEdit::Edit(
lsp::TextEdit {
range: *range,
new_text: full_text.to_string(),
},
));
let command = if key.name == last_key_name
&& !item.ends_with('/')
&& !specifier_exists(&item_specifier)
{
Some(lsp::Command {
title: "".to_string(),
command: "deno.cache".to_string(),
arguments: Some(vec![
json!([item_specifier]),
json!(&specifier),
]),
})
} else {
None
};
let detail = Some(format!("({})", key.name));
let filter_text = Some(full_text.to_string());
let sort_text = Some(format!("{:0>10}", idx + 1));
let preselect =
get_preselect(item.clone(), preselect.clone());
let data = get_data_with_match(
registry,
&specifier,
&tokens,
&match_result,
&key,
&item,
);
let commit_characters = if is_incomplete {
Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
)
} else {
Some(
IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
)
};
completions.insert(
item,
lsp::CompletionItem {
label,
kind,
detail,
sort_text,
filter_text,
text_edit,
command,
preselect,
data,
commit_characters,
..Default::default()
},
);
}
}
}
}
None => (),
}
break;
let resolved = resolved
.map(Cow::Borrowed)
.or_else(|| ModuleSpecifier::parse(text).ok().map(Cow::Owned))?;
let resolved_str = resolved.as_str();
let origin = base_url(&resolved);
let origin_char_count = origin.chars().count();
let registries = self.origins.get(&origin)?;
let path = &resolved[Position::BeforePath..];
let path_char_offset = resolved_str.chars().count() - origin_char_count;
let mut completions = HashMap::<String, lsp::CompletionItem>::new();
let mut is_incomplete = false;
let mut did_match = false;
for registry in registries {
let tokens = parse(&registry.schema, None)
.map_err(|e| {
error!(
"Error parsing registry schema for origin \"{}\". {}",
origin, e
);
})
.ok()?;
let mut i = tokens.len();
let last_key_name = StringOrNumber::String(
tokens
.iter()
.last()
.map(|t| {
if let Token::Key(key) = t {
if let StringOrNumber::String(s) = &key.name {
return s.clone();
}
i -= 1;
// If we have fallen through to the first token, and we still
// didn't get a match
if i == 0 {
match &tokens[i] {
// so if the first token is a string literal, we will return
// that as a suggestion
Token::String(s) => {
if s.starts_with(path) {
let label = s.to_string();
}
"".to_string()
})
.unwrap_or_default(),
);
loop {
let matcher = Matcher::new(&tokens[..i], None)
.map_err(|e| {
error!(
"Error creating matcher for schema for origin \"{}\". {}",
origin, e
);
})
.ok()?;
if let Some(match_result) = matcher.matches(path) {
did_match = true;
let completion_type =
get_completion_type(path_char_offset, &tokens, &match_result);
match completion_type {
Some(CompletionType::Literal(s)) => {
let label = s;
let full_text = format!("{text}{label}");
let text_edit =
Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: full_text.clone(),
}));
let filter_text = Some(full_text);
completions.insert(
label.clone(),
lsp::CompletionItem {
label,
kind: Some(lsp::CompletionItemKind::FOLDER),
filter_text,
sort_text: Some("1".to_string()),
text_edit,
commit_characters: Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
),
..Default::default()
},
);
}
Some(CompletionType::Key { key, prefix, index }) => {
let maybe_url = registry.get_url_for_key(&key);
if let Some(url) = maybe_url {
if let Some(items) = self
.get_variable_items(
&key,
url,
&resolved,
&tokens,
&match_result,
)
.await
{
let compiler = Compiler::new(&tokens[..=index], None);
let base = Url::parse(&origin).ok()?;
let (items, preselect, incomplete) = match items {
VariableItems::List(list) => {
(list.items, list.preselect, list.is_incomplete)
}
VariableItems::Simple(items) => (items, None, false),
};
if incomplete {
is_incomplete = true;
}
for (idx, item) in items.into_iter().enumerate() {
let mut label = if let Some(p) = &prefix {
format!("{p}{item}")
} else {
item.clone()
};
if label.ends_with('/') {
label.pop();
}
let kind =
if key.name == last_key_name && !item.ends_with('/') {
Some(lsp::CompletionItemKind::FILE)
} else {
Some(lsp::CompletionItemKind::FOLDER)
};
let mut params = match_result.params.clone();
params.insert(
key.name.clone(),
StringOrVec::from_str(&item, &key),
);
let mut path =
compiler.to_path(&params).unwrap_or_default();
if path.ends_with('/') {
path.pop();
}
let item_specifier = base.join(&path).ok()?;
let full_text = if let Some(suffix) =
item_specifier.as_str().strip_prefix(resolved_str)
{
format!("{text}{suffix}")
} else {
item_specifier.to_string()
};
let text_edit =
Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: full_text.to_string(),
}));
let command = if key.name == last_key_name
&& !item.ends_with('/')
&& !specifier_exists(&item_specifier)
{
Some(lsp::Command {
title: "".to_string(),
command: "deno.cache".to_string(),
arguments: Some(vec![
json!([item_specifier]),
json!(&resolved),
]),
})
} else {
None
};
let detail = Some(format!("({})", key.name));
let filter_text = Some(full_text.to_string());
let sort_text = Some(format!("{:0>10}", idx + 1));
let preselect =
get_preselect(item.clone(), preselect.clone());
let data = get_data_with_match(
registry,
&resolved,
&tokens,
&match_result,
&key,
&item,
);
let commit_characters = if is_incomplete {
Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
)
} else {
Some(
IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
)
};
completions.insert(
item,
lsp::CompletionItem {
label,
kind,
detail,
sort_text,
filter_text,
text_edit,
command,
preselect,
data,
commit_characters,
..Default::default()
},
);
}
}
}
}
None => (),
}
break;
}
i -= 1;
// If we have fallen through to the first token, and we still
// didn't get a match
if i == 0 {
match &tokens[i] {
// so if the first token is a string literal, we will return
// that as a suggestion
Token::String(s) => {
if s.starts_with(path) {
let label = s.to_string();
let kind = Some(lsp::CompletionItemKind::FOLDER);
let mut url = resolved.as_ref().clone();
url.set_path(s);
let full_text = if let Some(suffix) =
url.as_str().strip_prefix(resolved_str)
{
format!("{text}{suffix}")
} else {
url.to_string()
};
let text_edit =
Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: full_text.to_string(),
}));
let filter_text = Some(full_text.to_string());
completions.insert(
s.to_string(),
lsp::CompletionItem {
label,
kind,
filter_text,
sort_text: Some("1".to_string()),
text_edit,
preselect: Some(true),
commit_characters: Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
),
..Default::default()
},
);
}
}
// if the token, though, is a key, and the key has a prefix, and
// the path matches the prefix, we will go and get the items
// for that first key and return them.
Token::Key(k) => {
if let Some(prefix) = &k.prefix {
let maybe_url = registry.get_url_for_key(k);
if let Some(url) = maybe_url {
if let Some(items) = self.get_items(url).await {
let base = Url::parse(&origin).ok()?;
let (items, preselect, incomplete) = match items {
VariableItems::List(list) => {
(list.items, list.preselect, list.is_incomplete)
}
VariableItems::Simple(items) => (items, None, false),
};
if incomplete {
is_incomplete = true;
}
for (idx, item) in items.into_iter().enumerate() {
let path = format!("{prefix}{item}");
let kind = Some(lsp::CompletionItemKind::FOLDER);
let mut url = specifier.clone();
url.set_path(s);
let full_text = url.as_str();
let item_specifier = base.join(&path).ok()?;
let full_text = if let Some(suffix) =
item_specifier.as_str().strip_prefix(resolved_str)
{
format!("{text}{suffix}")
} else {
item_specifier.to_string()
};
let text_edit =
Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: full_text.to_string(),
new_text: full_text.clone(),
}));
let command = if k.name == last_key_name
&& !specifier_exists(&item_specifier)
{
Some(lsp::Command {
title: "".to_string(),
command: "deno.cache".to_string(),
arguments: Some(vec![
json!([item_specifier]),
json!(&resolved),
]),
})
} else {
None
};
let detail = Some(format!("({})", k.name));
let filter_text = Some(full_text.to_string());
let sort_text = Some(format!("{:0>10}", idx + 1));
let preselect =
get_preselect(item.clone(), preselect.clone());
let data = get_data(registry, &resolved, k, &path);
let commit_characters = if is_incomplete {
Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
)
} else {
Some(
IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
)
};
completions.insert(
s.to_string(),
item.clone(),
lsp::CompletionItem {
label,
label: item,
kind,
detail,
sort_text,
filter_text,
sort_text: Some("1".to_string()),
text_edit,
preselect: Some(true),
commit_characters: Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
),
command,
preselect,
data,
commit_characters,
..Default::default()
},
);
}
}
// if the token, though, is a key, and the key has a prefix, and
// the path matches the prefix, we will go and get the items
// for that first key and return them.
Token::Key(k) => {
if let Some(prefix) = &k.prefix {
let maybe_url = registry.get_url_for_key(k);
if let Some(url) = maybe_url {
if let Some(items) = self.get_items(url).await {
let base = Url::parse(&origin).ok()?;
let (items, preselect, incomplete) = match items {
VariableItems::List(list) => {
(list.items, list.preselect, list.is_incomplete)
}
VariableItems::Simple(items) => {
(items, None, false)
}
};
if incomplete {
is_incomplete = true;
}
for (idx, item) in items.into_iter().enumerate() {
let path = format!("{prefix}{item}");
let kind = Some(lsp::CompletionItemKind::FOLDER);
let item_specifier = base.join(&path).ok()?;
let full_text = item_specifier.as_str();
let text_edit = Some(
lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: full_text.to_string(),
}),
);
let command = if k.name == last_key_name
&& !specifier_exists(&item_specifier)
{
Some(lsp::Command {
title: "".to_string(),
command: "deno.cache".to_string(),
arguments: Some(vec![
json!([item_specifier]),
json!(&specifier),
]),
})
} else {
None
};
let detail = Some(format!("({})", k.name));
let filter_text = Some(full_text.to_string());
let sort_text = Some(format!("{:0>10}", idx + 1));
let preselect =
get_preselect(item.clone(), preselect.clone());
let data = get_data(registry, &specifier, k, &path);
let commit_characters = if is_incomplete {
Some(
REGISTRY_IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
)
} else {
Some(
IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
)
};
completions.insert(
item.clone(),
lsp::CompletionItem {
label: item,
kind,
detail,
sort_text,
filter_text,
text_edit,
command,
preselect,
data,
commit_characters,
..Default::default()
},
);
}
}
}
}
}
}
break;
}
}
}
// If we return None, other sources of completions will be looked for
// but if we did at least match part of a registry, we should send an
// empty vector so that no completions are suggested to the client
return if completions.is_empty() && !did_match {
None
} else {
Some(lsp::CompletionList {
items: completions.into_values().collect(),
is_incomplete,
})
};
break;
}
}
}
self.get_origin_completions(current_specifier, range)
// If we return None, other sources of completions will be looked for
// but if we did at least match part of a registry, we should send an
// empty vector so that no completions are suggested to the client
if completions.is_empty() && !did_match {
None
} else {
Some(lsp::CompletionList {
items: completions.into_values().collect(),
is_incomplete,
})
}
}
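
The new `resolved` parameter exists for import-map completions: `text` is what the user actually typed while `resolved` is the mapped URL, and the suggestion keeps the typed prefix while appending only the registry path suffix. The `full_text` splice used repeatedly above, extracted as a runnable sketch (the import map entry is illustrative):

fn full_text(text: &str, resolved: &str, item_specifier: &str) -> String {
  match item_specifier.strip_prefix(resolved) {
    Some(suffix) => format!("{text}{suffix}"),
    None => item_specifier.to_string(),
  }
}

fn main() {
  // e.g. an import map entry mapping "localhost4545/" -> "http://localhost:4545/"
  assert_eq!(
    full_text(
      "localhost4545/",
      "http://localhost:4545/",
      "http://localhost:4545/x/"
    ),
    "localhost4545/x/"
  );
}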
pub async fn get_documentation(
@ -1316,9 +1301,7 @@ mod tests {
character: 21,
},
};
let completions = module_registry
.get_completions("h", 1, &range, |_| false)
.await;
let completions = module_registry.get_origin_completions("h", &range);
assert!(completions.is_some());
let completions = completions.unwrap().items;
assert_eq!(completions.len(), 1);
@ -1340,9 +1323,8 @@ mod tests {
character: 36,
},
};
let completions = module_registry
.get_completions("http://localhost", 16, &range, |_| false)
.await;
let completions =
module_registry.get_origin_completions("http://localhost", &range);
assert!(completions.is_some());
let completions = completions.unwrap().items;
assert_eq!(completions.len(), 1);
@ -1377,7 +1359,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545", 21, &range, |_| false)
.get_completions("http://localhost:4545", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1393,7 +1375,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/", 22, &range, |_| false)
.get_completions("http://localhost:4545/", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1409,7 +1391,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/x/", 24, &range, |_| false)
.get_completions("http://localhost:4545/x/", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap();
@ -1434,7 +1416,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/x/a", 25, &range, |_| false)
.get_completions("http://localhost:4545/x/a", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap();
@ -1470,7 +1452,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/x/a@", 26, &range, |_| false)
.get_completions("http://localhost:4545/x/a@", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1493,7 +1475,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/x/a@v1.", 29, &range, |_| false)
.get_completions("http://localhost:4545/x/a@v1.", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1516,9 +1498,12 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/x/a@v1.0.0/", 33, &range, |_| {
false
})
.get_completions(
"http://localhost:4545/x/a@v1.0.0/",
&range,
None,
|_| false,
)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1541,9 +1526,12 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/x/a@v1.0.0/b", 34, &range, |_| {
false
})
.get_completions(
"http://localhost:4545/x/a@v1.0.0/b",
&range,
None,
|_| false,
)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1565,8 +1553,8 @@ mod tests {
let completions = module_registry
.get_completions(
"http://localhost:4545/x/a@v1.0.0/b/",
35,
&range,
None,
|_| false,
)
.await;
@ -1602,7 +1590,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/", 22, &range, |_| false)
.get_completions("http://localhost:4545/", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1631,12 +1619,16 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/cde@", 26, &range, |_| false)
.get_completions("http://localhost:4545/cde@", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
assert_eq!(completions.len(), 2);
for completion in completions {
if let Some(filter_text) = completion.filter_text {
if !"http://localhost:4545/cde@".contains(&filter_text) {
continue;
}
}
assert!(completion.text_edit.is_some());
if let lsp::CompletionTextEdit::Edit(edit) = completion.text_edit.unwrap()
{
@ -1674,7 +1666,7 @@ mod tests {
},
};
let completions = module_registry
.get_completions("http://localhost:4545/", 22, &range, |_| false)
.get_completions("http://localhost:4545/", &range, None, |_| false)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
@ -1693,6 +1685,48 @@ mod tests {
}
}
#[tokio::test]
async fn test_registry_completions_import_map() {
let _g = test_util::http_server();
let temp_dir = TempDir::new();
let location = temp_dir.path().join("registries").to_path_buf();
let mut module_registry = ModuleRegistry::new(
location,
Arc::new(HttpClientProvider::new(None, None)),
);
module_registry.enable("http://localhost:4545/").await;
let range = lsp::Range {
start: lsp::Position {
line: 0,
character: 20,
},
end: lsp::Position {
line: 0,
character: 33,
},
};
let completions = module_registry
.get_completions(
"localhost4545/",
&range,
Some(&ModuleSpecifier::parse("http://localhost:4545/").unwrap()),
|_| false,
)
.await;
assert!(completions.is_some());
let completions = completions.unwrap().items;
assert_eq!(completions.len(), 3);
for completion in completions {
assert!(completion.text_edit.is_some());
if let lsp::CompletionTextEdit::Edit(edit) = completion.text_edit.unwrap()
{
assert_eq!(edit.new_text, format!("localhost4545{}", completion.label));
} else {
unreachable!("unexpected text edit");
}
}
}
#[test]
fn test_parse_replacement_variables() {
let actual = parse_replacement_variables(

View file

@ -16,10 +16,10 @@ use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::SloppyImportsResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@ -28,15 +28,12 @@ use deno_ast::MediaType;
use deno_cache_dir::HttpCache;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_graph::source::Resolver;
use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolution;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path;
@ -45,6 +42,10 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode;
use node_resolver::NpmResolver;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
@ -83,7 +84,6 @@ impl Default for LspScopeResolver {
impl LspScopeResolver {
async fn from_config_data(
config_data: Option<&Arc<ConfigData>>,
config: &Config,
cache: &LspCache,
http_client_provider: Option<&Arc<HttpClientProvider>>,
) -> Self {
@ -104,18 +104,16 @@ impl LspScopeResolver {
node_resolver.as_ref(),
);
let jsr_resolver = Some(Arc::new(JsrCacheResolver::new(
cache.for_specifier(config_data.map(|d| &d.scope)),
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.map(|d| d.as_ref()),
config,
)));
let redirect_resolver = Some(Arc::new(RedirectResolver::new(
cache.for_specifier(config_data.map(|d| &d.scope)),
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()),
)));
let npm_graph_resolver = graph_resolver.create_graph_npm_resolver();
let graph_imports = config_data
.and_then(|d| d.config_file.as_ref())
.and_then(|cf| cf.to_maybe_imports().ok())
.and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok())
.map(|imports| {
Arc::new(
imports
@ -185,7 +183,6 @@ impl LspResolver {
Arc::new(
LspScopeResolver::from_config_data(
Some(config_data),
config,
cache,
http_client_provider,
)
@ -195,13 +192,8 @@ impl LspResolver {
}
Self {
unscoped: Arc::new(
LspScopeResolver::from_config_data(
None,
config,
cache,
http_client_provider,
)
.await,
LspScopeResolver::from_config_data(None, cache, http_client_provider)
.await,
),
by_scope,
}
@ -343,11 +335,29 @@ impl LspResolver {
}
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
let resolver = self.get_scope_resolver(Some(specifier));
if let Some(npm_resolver) = &resolver.npm_resolver {
return npm_resolver.in_npm_package(specifier);
fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
// consider any /node_modules/ directory as being in the node_modules
// folder for the LSP because it's pretty complicated to deal with multiple scopes
specifier.scheme() == "file"
&& specifier
.path()
.to_ascii_lowercase()
.contains("/node_modules/")
}
false
let global_npm_resolver = self
.get_scope_resolver(Some(specifier))
.npm_resolver
.as_ref()
.and_then(|npm_resolver| npm_resolver.as_managed())
.filter(|r| r.root_node_modules_path().is_none());
if let Some(npm_resolver) = &global_npm_resolver {
if npm_resolver.in_npm_package(specifier) {
return true;
}
}
has_node_modules_dir(specifier)
}
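
The helper above trades scope-accurate bookkeeping for a simple path test; extracted as a runnable sketch (ModuleSpecifier is a re-export of url::Url, so the url crate suffices):

use url::Url;

fn has_node_modules_dir(specifier: &Url) -> bool {
  specifier.scheme() == "file"
    && specifier.path().to_ascii_lowercase().contains("/node_modules/")
}

fn main() {
  let inside =
    Url::parse("file:///proj/node_modules/chalk/index.js").unwrap();
  let outside = Url::parse("file:///proj/src/main.ts").unwrap();
  assert!(has_node_modules_dir(&inside));
  assert!(!has_node_modules_dir(&outside));
}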
pub fn node_media_type(
@ -365,7 +375,7 @@ impl LspResolver {
pub fn get_closest_package_json(
&self,
referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, AnyError> {
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else {
return Ok(None);
@ -410,9 +420,14 @@ impl LspResolver {
};
self
.by_scope
.iter()
.rfind(|(s, _)| file_referrer.as_str().starts_with(s.as_str()))
.map(|(_, r)| r.as_ref())
.values()
.rfind(|r| {
r.config_data
.as_ref()
.map(|d| d.scope_contains_specifier(file_referrer))
.unwrap_or(false)
})
.map(|r| r.as_ref())
.unwrap_or(self.unscoped.as_ref())
}
}
@ -422,20 +437,17 @@ async fn create_npm_resolver(
cache: &LspCache,
http_client_provider: &Arc<HttpClientProvider>,
) -> Option<Arc<dyn CliNpmResolver>> {
let mut byonm_dir = None;
if let Some(config_data) = config_data {
if config_data.byonm {
byonm_dir = Some(config_data.node_modules_dir.clone().or_else(|| {
specifier_to_file_path(&config_data.scope)
.ok()
.map(|p| p.join("node_modules/"))
})?)
}
}
let options = if let Some(byonm_dir) = byonm_dir {
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false);
let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: Arc::new(deno_fs::RealFs),
root_node_modules_dir: byonm_dir,
root_node_modules_dir: config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| {
specifier_to_file_path(&config_data.scope)
.ok()
.map(|p| p.join("node_modules/"))
})
}),
})
} else {
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions {
@ -469,6 +481,7 @@ async fn create_npm_resolver(
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc),
npm_system_info: NpmSystemInfo::default(),
lifecycle_scripts: Default::default(),
})
};
Some(create_cli_npm_resolver_for_lsp(options).await)
@ -477,14 +490,21 @@ async fn create_npm_resolver(
fn create_node_resolver(
npm_resolver: Option<&Arc<dyn CliNpmResolver>>,
) -> Option<Arc<CliNodeResolver>> {
use once_cell::sync::Lazy;
// it's not ideal to share this across all scopes and to
// never clear it, but it's fine for the time being
static CJS_RESOLUTIONS: Lazy<Arc<CjsResolutionStore>> =
Lazy::new(Default::default);
let npm_resolver = npm_resolver?;
let fs = Arc::new(deno_fs::RealFs);
let node_resolver_inner = Arc::new(NodeResolver::new(
fs.clone(),
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
npm_resolver.clone().into_npm_resolver(),
));
Some(Arc::new(CliNodeResolver::new(
None,
CJS_RESOLUTIONS.clone(),
fs,
node_resolver_inner,
npm_resolver.clone(),
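
The process-wide CJS_RESOLUTIONS store above relies on once_cell's Lazy: it is initialized on first use and every scope's resolver clones the same Arc, which is exactly the tradeoff the comment acknowledges. A stand-in sketch of that sharing pattern (the store's internals here are illustrative):

use once_cell::sync::Lazy;
use std::sync::Arc;
use std::sync::Mutex;

#[derive(Default)]
struct CjsResolutionStore(Mutex<Vec<String>>);

static STORE: Lazy<Arc<CjsResolutionStore>> = Lazy::new(Default::default);

fn main() {
  let a = STORE.clone();
  let b = STORE.clone();
  a.0.lock().unwrap().push("file:///a.cjs".to_string());
  // both handles see the same underlying store
  assert_eq!(b.0.lock().unwrap().len(), 1);
}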
@ -496,35 +516,30 @@ fn create_graph_resolver(
npm_resolver: Option<&Arc<dyn CliNpmResolver>>,
node_resolver: Option<&Arc<CliNodeResolver>>,
) -> Arc<CliGraphResolver> {
let config_file = config_data.and_then(|d| d.config_file.as_deref());
let unstable_sloppy_imports =
config_file.is_some_and(|cf| cf.has_unstable("sloppy-imports"));
let workspace = config_data.map(|d| &d.member_dir.workspace);
Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
node_resolver: node_resolver.cloned(),
npm_resolver: npm_resolver.cloned(),
workspace_resolver: Arc::new(WorkspaceResolver::new_raw(
config_data.and_then(|d| d.import_map.as_ref().map(|i| (**i).clone())),
config_data
.and_then(|d| d.package_json.clone())
.into_iter()
.collect(),
if config_data.map(|d| d.byonm).unwrap_or(false) {
PackageJsonDepResolution::Disabled
} else {
// todo(dsherret): this should also be disabled when using
// auto-install with a node_modules directory
PackageJsonDepResolution::Enabled
workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else(
|| {
Arc::new(WorkspaceResolver::new_raw(
// this is fine because this is only used before initialization
Arc::new(ModuleSpecifier::parse("file:///").unwrap()),
None,
Vec::new(),
Vec::new(),
PackageJsonDepResolution::Disabled,
))
},
)),
maybe_jsx_import_source_config: config_file
.and_then(|cf| cf.to_maybe_jsx_import_source_config().ok().flatten()),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: config_file
.map(|cf| cf.has_unstable("bare-node-builtins"))
.unwrap_or(false),
sloppy_imports_resolver: unstable_sloppy_imports.then(|| {
SloppyImportsResolver::new_without_stat_cache(Arc::new(deno_fs::RealFs))
),
maybe_jsx_import_source_config: workspace.and_then(|workspace| {
workspace.to_maybe_jsx_import_source_config().ok().flatten()
}),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: workspace
.is_some_and(|workspace| workspace.has_unstable("bare-node-builtins")),
sloppy_imports_resolver: config_data
.and_then(|d| d.sloppy_imports_resolver.clone()),
}))
}

View file

@ -147,7 +147,7 @@ fn visit_call_expr(
let ast::Prop::KeyValue(key_value_prop) = prop.as_ref() else {
continue;
};
let ast::PropName::Ident(ast::Ident { sym, .. }) =
let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&key_value_prop.key
else {
continue;
@ -187,7 +187,7 @@ fn visit_call_expr(
};
match prop.as_ref() {
ast::Prop::KeyValue(key_value_prop) => {
let ast::PropName::Ident(ast::Ident { sym, .. }) =
let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&key_value_prop.key
else {
continue;
@ -206,7 +206,7 @@ fn visit_call_expr(
}
}
ast::Prop::Method(method_prop) => {
let ast::PropName::Ident(ast::Ident { sym, .. }) =
let ast::PropName::Ident(ast::IdentName { sym, .. }) =
&method_prop.key
else {
continue;
@ -472,7 +472,7 @@ impl Visit for TestCollector {
collector: &mut TestCollector,
node: &ast::CallExpr,
range: &deno_ast::SourceRange,
ns_prop_ident: &ast::Ident,
ns_prop_ident: &ast::IdentName,
member_expr: &ast::MemberExpr,
) {
if ns_prop_ident.sym == "test" {

View file

@ -212,13 +212,15 @@ impl TestRun {
) -> Result<(), AnyError> {
let args = self.get_args();
lsp_log!("Executing test run with arguments: {}", args.join(" "));
let flags = flags_from_vec(args.into_iter().map(From::from).collect())?;
let factory = CliFactory::from_flags(flags)?;
let flags =
Arc::new(flags_from_vec(args.into_iter().map(From::from).collect())?);
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
Permissions::from_options(&factory.cli_options().permissions_options()?)?;
Permissions::from_options(&cli_options.permissions_options()?)?;
let main_graph_container = factory.main_module_graph_container().await?;
test::check_specifiers(
factory.file_fetcher()?,
@ -231,19 +233,18 @@ impl TestRun {
)
.await?;
let (concurrent_jobs, fail_fast) = if let DenoSubcommand::Test(test_flags) =
factory.cli_options().sub_command()
{
(
test_flags
.concurrent_jobs
.unwrap_or_else(|| NonZeroUsize::new(1).unwrap())
.into(),
test_flags.fail_fast,
)
} else {
unreachable!("Should always be Test subcommand.");
};
let (concurrent_jobs, fail_fast) =
if let DenoSubcommand::Test(test_flags) = cli_options.sub_command() {
(
test_flags
.concurrent_jobs
.unwrap_or_else(|| NonZeroUsize::new(1).unwrap())
.into(),
test_flags.fail_fast,
)
} else {
unreachable!("Should always be Test subcommand.");
};
// TODO(mmastrac): Temporarily limit concurrency in windows testing to avoid named pipe issue:
// *** Unexpected server pipe failure '"\\\\.\\pipe\\deno_pipe_e30f45c9df61b1e4.1198.222\\0"': 3

View file

@ -215,6 +215,8 @@ pub enum SemicolonPreference {
Remove,
}
// Allow due to false positive https://github.com/rust-lang/rust-clippy/issues/13170
#[allow(clippy::needless_borrows_for_generic_args)]
fn normalize_diagnostic(
diagnostic: &mut crate::tsc::Diagnostic,
specifier_map: &TscSpecifierMap,
@ -2041,12 +2043,10 @@ impl DocumentSpan {
let target_asset_or_doc =
language_server.get_maybe_asset_or_document(&target_specifier)?;
let target_line_index = target_asset_or_doc.line_index();
let file_referrer = language_server
.documents
.get_file_referrer(&target_specifier);
let file_referrer = target_asset_or_doc.file_referrer();
let target_uri = language_server
.url_map
.normalize_specifier(&target_specifier, file_referrer.as_deref())
.normalize_specifier(&target_specifier, file_referrer)
.ok()?;
let (target_range, target_selection_range) =
if let Some(context_span) = &self.context_span {
@ -2090,10 +2090,10 @@ impl DocumentSpan {
language_server.get_maybe_asset_or_document(&specifier)?;
let line_index = asset_or_doc.line_index();
let range = self.text_span.to_range(line_index);
let file_referrer = language_server.documents.get_file_referrer(&specifier);
let file_referrer = asset_or_doc.file_referrer();
let mut target = language_server
.url_map
.normalize_specifier(&specifier, file_referrer.as_deref())
.normalize_specifier(&specifier, file_referrer)
.ok()?
.into_url();
target.set_fragment(Some(&format!(
@ -2151,10 +2151,10 @@ impl NavigateToItem {
let asset_or_doc =
language_server.get_asset_or_document(&specifier).ok()?;
let line_index = asset_or_doc.line_index();
let file_referrer = language_server.documents.get_file_referrer(&specifier);
let file_referrer = asset_or_doc.file_referrer();
let uri = language_server
.url_map
.normalize_specifier(&specifier, file_referrer.as_deref())
.normalize_specifier(&specifier, file_referrer)
.ok()?;
let range = self.text_span.to_range(line_index);
let location = lsp::Location {
@ -2944,7 +2944,7 @@ pub fn file_text_changes_to_workspace_edit(
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RefactorEditInfo {
edits: Vec<FileTextChanges>,
pub edits: Vec<FileTextChanges>,
#[serde(skip_serializing_if = "Option::is_none")]
pub rename_location: Option<u32>,
}
@ -4230,14 +4230,10 @@ impl State {
}
fn get_document(&self, specifier: &ModuleSpecifier) -> Option<Arc<Document>> {
if let Some(scope) = &self.last_scope {
self.state_snapshot.documents.get_or_load(specifier, scope)
} else {
self
.state_snapshot
.documents
.get_or_load(specifier, &ModuleSpecifier::parse("file:///").unwrap())
}
self
.state_snapshot
.documents
.get_or_load(specifier, self.last_scope.as_ref())
}
fn get_asset_or_document(
@ -4347,9 +4343,17 @@ fn op_release(
fn op_resolve(
state: &mut OpState,
#[string] base: String,
is_base_cjs: bool,
#[serde] specifiers: Vec<String>,
) -> Result<Vec<Option<(String, String)>>, AnyError> {
op_resolve_inner(state, ResolveArgs { base, specifiers })
op_resolve_inner(
state,
ResolveArgs {
base,
is_base_cjs,
specifiers,
},
)
}
struct TscRequestArray {
@ -4551,7 +4555,7 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
specifier,
doc.file_referrer(),
)?;
let types_doc = documents.get_or_load(&types, specifier)?;
let types_doc = documents.get_or_load(&types, doc.file_referrer())?;
Some(types_doc.specifier().clone())
})();
// If there is a types dep, use that as the root instead. But if the doc
@ -4897,7 +4901,7 @@ impl UserPreferences {
config: &config::Config,
specifier: &ModuleSpecifier,
) -> Self {
let fmt_options = config.tree.fmt_options_for_specifier(specifier);
let fmt_options = config.tree.fmt_config_for_specifier(specifier);
let fmt_config = &fmt_options.options;
let base_preferences = Self {
allow_incomplete_completions: Some(true),
@ -5004,8 +5008,8 @@ impl UserPreferences {
// Only use workspace settings for quote style if there's no `deno.json`.
quote_preference: if config
.tree
.config_file_for_specifier(specifier)
.is_some()
.workspace_dir_for_specifier(specifier)
.is_some_and(|ctx| ctx.maybe_deno_json().is_some())
{
base_preferences.quote_preference
} else {
@ -5392,20 +5396,20 @@ mod tests {
async fn setup(
ts_config: Value,
sources: &[(&str, &str, i32, LanguageId)],
) -> (TsServer, Arc<StateSnapshot>, LspCache) {
) -> (TempDir, TsServer, Arc<StateSnapshot>, LspCache) {
let temp_dir = TempDir::new();
let cache = LspCache::new(Some(temp_dir.uri()));
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap()));
let mut config = Config::default();
config
.tree
.inject_config_file(
deno_config::ConfigFile::new(
deno_config::deno_json::ConfigFile::new(
&json!({
"compilerOptions": ts_config,
})
.to_string(),
resolve_url("file:///deno.json").unwrap(),
&deno_config::ConfigParseOptions::default(),
temp_dir.uri().join("deno.json").unwrap(),
&Default::default(),
)
.unwrap(),
)
@ -5414,16 +5418,9 @@ mod tests {
Arc::new(LspResolver::from_config(&config, &cache, None).await);
let mut documents = Documents::default();
documents.update_config(&config, &resolver, &cache, &Default::default());
for (specifier, source, version, language_id) in sources {
let specifier =
resolve_url(specifier).expect("failed to create specifier");
documents.open(
specifier.clone(),
*version,
*language_id,
(*source).into(),
None,
);
for (relative_specifier, source, version, language_id) in sources {
let specifier = temp_dir.uri().join(relative_specifier).unwrap();
documents.open(specifier, *version, *language_id, (*source).into(), None);
}
let snapshot = Arc::new(StateSnapshot {
project_version: 0,
@ -5448,7 +5445,7 @@ mod tests {
.collect(),
),
);
(ts_server, snapshot, cache)
(temp_dir, ts_server, snapshot, cache)
}
fn setup_op_state(state_snapshot: Arc<StateSnapshot>) -> OpState {
@ -5477,7 +5474,7 @@ mod tests {
#[tokio::test]
async fn test_get_diagnostics() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5485,22 +5482,22 @@ mod tests {
"lib": [],
}),
&[(
"file:///a.ts",
"a.ts",
r#"console.log("hello deno");"#,
1,
LanguageId::TypeScript,
)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier], Default::default())
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await
.unwrap();
assert_eq!(
json!(diagnostics),
json!({
"file:///a.ts": [
specifier.clone(): [
{
"start": {
"line": 0,
@ -5510,7 +5507,7 @@ mod tests {
"line": 0,
"character": 7
},
"fileName": "file:///a.ts",
"fileName": specifier,
"messageText": "Cannot find name 'console'. Do you need to change your target library? Try changing the \'lib\' compiler option to include 'dom'.",
"sourceLine": "console.log(\"hello deno\");",
"category": 1,
@ -5523,7 +5520,7 @@ mod tests {
#[tokio::test]
async fn test_get_diagnostics_lib() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5532,24 +5529,24 @@ mod tests {
"noEmit": true,
}),
&[(
"file:///a.ts",
"a.ts",
r#"console.log(document.location);"#,
1,
LanguageId::TypeScript,
)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier], Default::default())
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await
.unwrap();
assert_eq!(json!(diagnostics), json!({ "file:///a.ts": [] }));
assert_eq!(json!(diagnostics), json!({ specifier: [] }));
}
#[tokio::test]
async fn test_module_resolution() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5557,7 +5554,7 @@ mod tests {
"noEmit": true,
}),
&[(
"file:///a.ts",
"a.ts",
r#"
import { B } from "https://deno.land/x/b/mod.ts";
@ -5570,17 +5567,17 @@ mod tests {
)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier], Default::default())
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await
.unwrap();
assert_eq!(json!(diagnostics), json!({ "file:///a.ts": [] }));
assert_eq!(json!(diagnostics), json!({ specifier: [] }));
}
#[tokio::test]
async fn test_bad_module_specifiers() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5588,7 +5585,7 @@ mod tests {
"noEmit": true,
}),
&[(
"file:///a.ts",
"a.ts",
r#"
import { A } from ".";
"#,
@ -5597,15 +5594,15 @@ mod tests {
)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier], Default::default())
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await
.unwrap();
assert_eq!(
json!(diagnostics),
json!({
"file:///a.ts": [{
specifier.clone(): [{
"start": {
"line": 1,
"character": 8
@ -5614,7 +5611,7 @@ mod tests {
"line": 1,
"character": 30
},
"fileName": "file:///a.ts",
"fileName": specifier,
"messageText": "\'A\' is declared but its value is never read.",
"sourceLine": " import { A } from \".\";",
"category": 2,
@ -5626,7 +5623,7 @@ mod tests {
#[tokio::test]
async fn test_remote_modules() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5634,7 +5631,7 @@ mod tests {
"noEmit": true,
}),
&[(
"file:///a.ts",
"a.ts",
r#"
import { B } from "https://deno.land/x/b/mod.ts";
@ -5647,17 +5644,17 @@ mod tests {
)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier], Default::default())
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await
.unwrap();
assert_eq!(json!(diagnostics), json!({ "file:///a.ts": [] }));
assert_eq!(json!(diagnostics), json!({ specifier: [] }));
}
#[tokio::test]
async fn test_partial_modules() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5665,7 +5662,7 @@ mod tests {
"noEmit": true,
}),
&[(
"file:///a.ts",
"a.ts",
r#"
import {
Application,
@ -5681,15 +5678,15 @@ mod tests {
)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier], Default::default())
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await
.unwrap();
assert_eq!(
json!(diagnostics),
json!({
"file:///a.ts": [{
specifier.clone(): [{
"start": {
"line": 1,
"character": 8
@ -5698,7 +5695,7 @@ mod tests {
"line": 6,
"character": 55,
},
"fileName": "file:///a.ts",
"fileName": specifier.clone(),
"messageText": "All imports in import declaration are unused.",
"sourceLine": " import {",
"category": 2,
@ -5712,7 +5709,7 @@ mod tests {
"line": 8,
"character": 29
},
"fileName": "file:///a.ts",
"fileName": specifier,
"messageText": "Expression expected.",
"sourceLine": " import * as test from",
"category": 1,
@ -5724,7 +5721,7 @@ mod tests {
#[tokio::test]
async fn test_no_debug_failure() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5732,22 +5729,22 @@ mod tests {
"noEmit": true,
}),
&[(
"file:///a.ts",
"a.ts",
r#"const url = new URL("b.js", import."#,
1,
LanguageId::TypeScript,
)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot, vec![specifier], Default::default())
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
.await
.unwrap();
assert_eq!(
json!(diagnostics),
json!({
"file:///a.ts": [
specifier.clone(): [
{
"start": {
"line": 0,
@ -5757,7 +5754,7 @@ mod tests {
"line": 0,
"character": 35
},
"fileName": "file:///a.ts",
"fileName": specifier,
"messageText": "Identifier expected.",
"sourceLine": "const url = new URL(\"b.js\", import.",
"category": 1,
@ -5770,7 +5767,7 @@ mod tests {
#[tokio::test]
async fn test_request_assets() {
let (ts_server, snapshot, _) = setup(json!({}), &[]).await;
let (_, ts_server, snapshot, _) = setup(json!({}), &[]).await;
let assets = get_isolate_assets(&ts_server, snapshot).await;
let mut asset_names = assets
.iter()
@ -5802,7 +5799,7 @@ mod tests {
#[tokio::test]
async fn test_modify_sources() {
let (ts_server, snapshot, cache) = setup(
let (temp_dir, ts_server, snapshot, cache) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -5810,7 +5807,7 @@ mod tests {
"noEmit": true,
}),
&[(
"file:///a.ts",
"a.ts",
r#"
import * as a from "https://deno.land/x/example/a.ts";
if (a.a === "b") {
@ -5832,15 +5829,19 @@ mod tests {
b"export const b = \"b\";\n",
)
.unwrap();
let specifier = resolve_url("file:///a.ts").unwrap();
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot.clone(), vec![specifier], Default::default())
.get_diagnostics(
snapshot.clone(),
vec![specifier.clone()],
Default::default(),
)
.await
.unwrap();
assert_eq!(
json!(diagnostics),
json!({
"file:///a.ts": [
specifier.clone(): [
{
"start": {
"line": 2,
@ -5850,7 +5851,7 @@ mod tests {
"line": 2,
"character": 17
},
"fileName": "file:///a.ts",
"fileName": specifier,
"messageText": "Property \'a\' does not exist on type \'typeof import(\"https://deno.land/x/example/a\")\'.",
"sourceLine": " if (a.a === \"b\") {",
"code": 2339,
@ -5878,15 +5879,19 @@ mod tests {
[(&specifier_dep, ChangeKind::Opened)],
None,
);
let specifier = resolve_url("file:///a.ts").unwrap();
let specifier = temp_dir.uri().join("a.ts").unwrap();
let diagnostics = ts_server
.get_diagnostics(snapshot.clone(), vec![specifier], Default::default())
.get_diagnostics(
snapshot.clone(),
vec![specifier.clone()],
Default::default(),
)
.await
.unwrap();
assert_eq!(
json!(diagnostics),
json!({
"file:///a.ts": []
specifier: []
})
);
}
@ -5936,17 +5941,17 @@ mod tests {
character: 16,
})
.unwrap();
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"],
"noEmit": true,
}),
&[("file:///a.ts", fixture, 1, LanguageId::TypeScript)],
&[("a.ts", fixture, 1, LanguageId::TypeScript)],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let info = ts_server
.get_completions(
snapshot.clone(),
@ -5961,7 +5966,7 @@ mod tests {
trigger_kind: None,
},
Default::default(),
Some(ModuleSpecifier::parse("file:///").unwrap()),
Some(temp_dir.uri()),
)
.await
.unwrap();
@ -5978,7 +5983,7 @@ mod tests {
preferences: None,
data: None,
},
Some(ModuleSpecifier::parse("file:///").unwrap()),
Some(temp_dir.uri()),
)
.await
.unwrap()
@ -6087,7 +6092,7 @@ mod tests {
character: 33,
})
.unwrap();
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -6095,12 +6100,12 @@ mod tests {
"noEmit": true,
}),
&[
("file:///a.ts", fixture_a, 1, LanguageId::TypeScript),
("file:///b.ts", fixture_b, 1, LanguageId::TypeScript),
("a.ts", fixture_a, 1, LanguageId::TypeScript),
("b.ts", fixture_b, 1, LanguageId::TypeScript),
],
)
.await;
let specifier = resolve_url("file:///a.ts").expect("could not resolve url");
let specifier = temp_dir.uri().join("a.ts").unwrap();
let fmt_options_config = FmtOptionsConfig {
semi_colons: Some(false),
single_quote: Some(true),
@ -6121,7 +6126,7 @@ mod tests {
..Default::default()
},
FormatCodeSettings::from(&fmt_options_config),
Some(ModuleSpecifier::parse("file:///").unwrap()),
Some(temp_dir.uri()),
)
.await
.unwrap();
@ -6147,7 +6152,7 @@ mod tests {
}),
data: entry.data.clone(),
},
Some(ModuleSpecifier::parse("file:///").unwrap()),
Some(temp_dir.uri()),
)
.await
.unwrap()
@ -6196,7 +6201,7 @@ mod tests {
#[tokio::test]
async fn test_get_edits_for_file_rename() {
let (ts_server, snapshot, _) = setup(
let (temp_dir, ts_server, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
@ -6204,21 +6209,16 @@ mod tests {
"noEmit": true,
}),
&[
(
"file:///a.ts",
r#"import "./b.ts";"#,
1,
LanguageId::TypeScript,
),
("file:///b.ts", r#""#, 1, LanguageId::TypeScript),
("a.ts", r#"import "./b.ts";"#, 1, LanguageId::TypeScript),
("b.ts", r#""#, 1, LanguageId::TypeScript),
],
)
.await;
let changes = ts_server
.get_edits_for_file_rename(
snapshot,
resolve_url("file:///b.ts").unwrap(),
resolve_url("file:///🦕.ts").unwrap(),
temp_dir.uri().join("b.ts").unwrap(),
temp_dir.uri().join("🦕.ts").unwrap(),
FormatCodeSettings::default(),
UserPreferences::default(),
)
@ -6227,7 +6227,7 @@ mod tests {
assert_eq!(
changes,
vec![FileTextChanges {
file_name: "file:///a.ts".to_string(),
file_name: temp_dir.uri().join("a.ts").unwrap().to_string(),
text_changes: vec![TextChange {
span: TextSpan {
start: 8,
@ -6272,21 +6272,22 @@ mod tests {
#[tokio::test]
async fn resolve_unknown_dependency() {
let (_, snapshot, _) = setup(
let (temp_dir, _, snapshot, _) = setup(
json!({
"target": "esnext",
"module": "esnext",
"lib": ["deno.ns", "deno.window"],
"noEmit": true,
}),
&[("file:///a.ts", "", 1, LanguageId::TypeScript)],
&[("a.ts", "", 1, LanguageId::TypeScript)],
)
.await;
let mut state = setup_op_state(snapshot);
let resolved = op_resolve_inner(
&mut state,
ResolveArgs {
base: "file:///a.ts".to_string(),
base: temp_dir.uri().join("a.ts").unwrap().to_string(),
is_base_cjs: false,
specifiers: vec!["./b.ts".to_string()],
},
)
@ -6294,7 +6295,7 @@ mod tests {
assert_eq!(
resolved,
vec![Some((
"file:///b.ts".to_string(),
temp_dir.uri().join("b.ts").unwrap().to_string(),
MediaType::TypeScript.as_ts_extension().to_string()
))]
);

View file

@ -21,6 +21,7 @@ mod npm;
mod ops;
mod resolver;
mod standalone;
mod task_runner;
mod tools;
mod tsc;
mod util;
@ -31,11 +32,13 @@ use crate::args::flags_from_vec;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::DENO_FUTURE;
use crate::cache::DenoDir;
use crate::graph_container::ModuleGraphContainer;
use crate::util::display;
use crate::util::v8::get_v8_flags_from_env;
use crate::util::v8::init_v8_flags;
use args::TaskFlags;
use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
@ -49,9 +52,12 @@ use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_terminal::colors;
use factory::CliFactory;
use standalone::MODULE_NOT_FOUND;
use std::env;
use std::future::Future;
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
/// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type.
trait SubcommandOutput {
@ -89,10 +95,13 @@ fn spawn_subcommand<F: Future<Output = T> + 'static, T: SubcommandOutput>(
)
}
async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
let handle = match flags.subcommand.clone() {
DenoSubcommand::Add(add_flags) => spawn_subcommand(async {
tools::registry::add(flags, add_flags).await
tools::registry::add(flags, add_flags, tools::registry::AddCommandName::Add).await
}),
DenoSubcommand::Remove(remove_flags) => spawn_subcommand(async {
tools::registry::remove(flags, remove_flags).await
}),
DenoSubcommand::Bench(bench_flags) => spawn_subcommand(async {
if bench_flags.watch.is_some() {
@ -111,7 +120,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
tools::run::eval_command(flags, eval_flags).await
}),
DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags)?;
let factory = CliFactory::from_flags(flags);
let emitter = factory.emitter()?;
let main_graph_container =
factory.main_module_graph_container().await?;
@ -121,13 +130,21 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
emitter.cache_module_emits(&main_graph_container.graph()).await
}),
DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags)?;
let factory = CliFactory::from_flags(flags);
let main_graph_container =
factory.main_module_graph_container().await?;
main_graph_container
.load_and_type_check_files(&check_flags.files)
.await
}),
DenoSubcommand::Clean => spawn_subcommand(async move {
let deno_dir = DenoDir::new(None)?;
if deno_dir.root.exists() {
std::fs::remove_dir_all(&deno_dir.root)?;
log::info!("{} {}", colors::green("Removed"), deno_dir.root.display());
}
Ok::<(), std::io::Error>(())
}),
DenoSubcommand::Compile(compile_flags) => spawn_subcommand(async {
tools::compile::compile(flags, compile_flags).await
}),
@ -152,6 +169,9 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
DenoSubcommand::Install(install_flags) => spawn_subcommand(async {
tools::installer::install_command(flags, install_flags).await
}),
DenoSubcommand::JSONReference(json_reference) => spawn_subcommand(async move {
display::write_to_stdout_ignore_sigpipe(&deno_core::serde_json::to_vec_pretty(&json_reference.json).unwrap())
}),
DenoSubcommand::Jupyter(jupyter_flags) => spawn_subcommand(async {
tools::jupyter::kernel(flags, jupyter_flags).await
}),
@ -175,16 +195,57 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
}
DenoSubcommand::Run(run_flags) => spawn_subcommand(async move {
if run_flags.is_stdin() {
tools::run::run_from_stdin(flags).await
tools::run::run_from_stdin(flags.clone()).await
} else {
tools::run::run_script(WorkerExecutionMode::Run, flags, run_flags.watch).await
let result = tools::run::run_script(WorkerExecutionMode::Run, flags.clone(), run_flags.watch).await;
match result {
Ok(v) => Ok(v),
Err(script_err) => {
if script_err.to_string().starts_with(MODULE_NOT_FOUND) {
if run_flags.bare {
let mut cmd = args::clap_root();
cmd.build();
let command_names = cmd.get_subcommands().map(|command| command.get_name()).collect::<Vec<_>>();
let suggestions = args::did_you_mean(&run_flags.script, command_names);
if !suggestions.is_empty() {
let mut error = clap::error::Error::<clap::error::DefaultFormatter>::new(clap::error::ErrorKind::InvalidSubcommand).with_cmd(&cmd);
error.insert(
clap::error::ContextKind::SuggestedSubcommand,
clap::error::ContextValue::Strings(suggestions),
);
Err(error.into())
} else {
Err(script_err)
}
} else {
let mut new_flags = flags.deref().clone();
let task_flags = TaskFlags {
cwd: None,
task: Some(run_flags.script.clone()),
};
new_flags.subcommand = DenoSubcommand::Task(task_flags.clone());
let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone(), true).await;
match result {
Ok(v) => Ok(v),
Err(_) => {
// Return script error for backwards compatibility.
Err(script_err)
}
}
}
} else {
Err(script_err)
}
}
}
}
}),
DenoSubcommand::Serve(serve_flags) => spawn_subcommand(async move {
tools::run::run_script(WorkerExecutionMode::Serve, flags, serve_flags.watch).await
}),
DenoSubcommand::Task(task_flags) => spawn_subcommand(async {
tools::task::execute_script(flags, task_flags).await
tools::task::execute_script(flags, task_flags, false).await
}),
DenoSubcommand::Test(test_flags) => {
spawn_subcommand(async {
@ -230,10 +291,12 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async {
tools::vendor::vendor(flags, vendor_flags).await
}),
// TODO:
DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
tools::registry::publish(flags, publish_flags).await
}),
DenoSubcommand::Help(help_flags) => spawn_subcommand(async move {
display::write_to_stdout_ignore_sigpipe(help_flags.help.ansi().to_string().as_bytes())
}),
};
handle.await?
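A control-flow sketch (hypothetical trimmed types) of the Run fallback in the hunk above: a module-not-found error from a bare `deno run x` first produces subcommand suggestions; a non-bare invocation retries `x` as a task; and the original script error is preserved whenever the fallback has nothing better to offer.

enum RunOutcome {
  SuggestSubcommands(Vec<String>),
  RanAsTask,
  ScriptError(String),
}

fn on_module_not_found(
  bare: bool,
  suggestions: Vec<String>,
  task_result: Result<(), ()>,
  script_err: String,
) -> RunOutcome {
  if bare {
    if suggestions.is_empty() {
      RunOutcome::ScriptError(script_err)
    } else {
      RunOutcome::SuggestSubcommands(suggestions)
    }
  } else if task_result.is_ok() {
    RunOutcome::RanAsTask
  } else {
    // Keep the script error for backwards compatibility.
    RunOutcome::ScriptError(script_err)
  }
}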
@ -329,7 +392,7 @@ pub fn main() {
// initialize the V8 platform on a parent thread of all threads that will spawn
// V8 isolates.
let flags = resolve_flags_and_init(args)?;
run_subcommand(flags).await
run_subcommand(Arc::new(flags)).await
};
match create_and_run_current_thread_with_maybe_metrics(future) {
@ -347,7 +410,8 @@ fn resolve_flags_and_init(
if err.kind() == clap::error::ErrorKind::DisplayHelp
|| err.kind() == clap::error::ErrorKind::DisplayVersion =>
{
err.print().unwrap();
// Ignore results to avoid BrokenPipe errors.
let _ = err.print();
std::process::exit(0);
}
Err(err) => exit_for_error(AnyError::from(err)),
@ -384,16 +448,20 @@ fn resolve_flags_and_init(
// TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast
vec!["--no-harmony-import-assertions".to_string()]
} else {
// If we're still in v1.X version we want to support import assertions.
// V8 12.6 unshipped the support by default, so force it by passing a
// flag.
vec!["--harmony-import-assertions".to_string()]
vec![
// If we're still in v1.X version we want to support import assertions.
// V8 12.6 unshipped the support by default, so force it by passing a
// flag.
"--harmony-import-assertions".to_string(),
// Verify with DENO_FUTURE for now.
"--no-maglev".to_string(),
]
}
}
};
init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env());
deno_core::JsRuntime::init_platform(None);
deno_core::JsRuntime::init_platform(None, !*DENO_FUTURE);
util::logger::init(flags.log_level);
Ok(flags)

View file

@ -18,6 +18,7 @@ mod js;
mod node;
mod npm;
mod resolver;
mod task_runner;
mod util;
mod version;
mod worker;
@ -31,6 +32,7 @@ pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use deno_terminal::colors;
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::env::current_exe;
@ -70,15 +72,23 @@ fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T {
}
}
fn load_env_vars(env_vars: &HashMap<String, String>) {
env_vars.iter().for_each(|env_var| {
if env::var(env_var.0).is_err() {
std::env::set_var(env_var.0, env_var.1);
}
})
}
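A usage sketch for load_env_vars above: variables already exported in the real environment win, and only missing keys are filled from the embedded .env data.

use std::collections::HashMap;
use std::env;

fn demo() {
  let mut embedded = HashMap::new();
  embedded.insert("HOST".to_string(), "0.0.0.0".to_string());
  embedded.insert("PORT".to_string(), "8080".to_string());
  // If the shell already exported PORT, this loop leaves it untouched.
  for (key, value) in &embedded {
    if env::var(key).is_err() {
      env::set_var(key, value);
    }
  }
}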
fn main() {
let args: Vec<_> = env::args_os().collect();
let current_exe_path = current_exe().unwrap();
let standalone =
standalone::extract_standalone(&current_exe_path, Cow::Owned(args));
let standalone = standalone::extract_standalone(Cow::Owned(args));
let future = async move {
match standalone {
Ok(Some(future)) => {
let (metadata, eszip) = future.await?;
util::logger::init(metadata.log_level);
load_env_vars(&metadata.env_vars_from_env_file);
let exit_code = standalone::run(eszip, metadata).await?;
std::process::exit(exit_code);
}

View file

@ -35,7 +35,6 @@ use crate::util::text_encoding::code_without_source_map;
use crate::util::text_encoding::source_map_from_code;
use crate::worker::ModuleLoaderAndSourceMapGetter;
use crate::worker::ModuleLoaderFactory;
use deno_ast::MediaType;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
@ -55,7 +54,6 @@ use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo;
use deno_core::SourceMapGetter;
use deno_graph::source::ResolutionMode;
use deno_graph::source::Resolver;
use deno_graph::GraphKind;
@ -65,15 +63,17 @@ use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_graph::Resolution;
use deno_runtime::code_cache;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeResolutionMode;
pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
let npm_resolver = factory.npm_resolver().await?;
let cli_options = factory.cli_options()?;
if let Some(npm_resolver) = npm_resolver.as_managed() {
if !npm_resolver.ensure_top_level_package_json_install().await? {
if let Some(lockfile) = factory.maybe_lockfile() {
if let Some(lockfile) = cli_options.maybe_lockfile() {
lockfile.error_if_changed()?;
}
@ -107,7 +107,7 @@ pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
graph,
&roots,
false,
factory.cli_options().ts_type_lib_window(),
factory.cli_options()?.ts_type_lib_window(),
deno_runtime::deno_permissions::PermissionsContainer::allow_all(),
)
.await?;
@ -293,8 +293,7 @@ impl CliModuleLoaderFactory {
shared: self.shared.clone(),
})));
ModuleLoaderAndSourceMapGetter {
module_loader: loader.clone(),
source_map_getter: Some(loader),
module_loader: loader,
}
}
}
@ -447,15 +446,14 @@ impl<TGraphContainer: ModuleGraphContainer>
specifier: &str,
referrer: &ModuleSpecifier,
) -> Result<ModuleSpecifier, AnyError> {
if let Some(result) = self.shared.node_resolver.resolve_if_in_npm_package(
specifier,
referrer,
NodeResolutionMode::Execution,
) {
return match result? {
Some(res) => Ok(res.into_url()),
None => Err(generic_error("not found")),
};
if self.shared.node_resolver.in_npm_package(referrer) {
return Ok(
self
.shared
.node_resolver
.resolve(specifier, referrer, NodeResolutionMode::Execution)?
.into_url(),
);
}
let graph = self.graph_container.graph();
@ -517,7 +515,7 @@ impl<TGraphContainer: ModuleGraphContainer>
.resolve_package_sub_path_from_deno_module(
&package_folder,
module.nv_reference.sub_path(),
referrer,
Some(referrer),
NodeResolutionMode::Execution,
)
.with_context(|| {
@ -614,7 +612,8 @@ impl<TGraphContainer: ModuleGraphContainer>
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<CodeOrDeferredEmit<'graph>, AnyError> {
if specifier.scheme() == "node" {
unreachable!(); // Node built-in modules should be handled internally.
// Node built-in modules should be handled internally.
unreachable!("Deno bug. {} was misconfigured internally.", specifier);
}
match graph.get(specifier) {
@ -726,6 +725,19 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
Ok(specifier)
}
fn get_host_defined_options<'s>(
&self,
scope: &mut deno_core::v8::HandleScope<'s>,
name: &str,
) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> {
let name = deno_core::ModuleSpecifier::parse(name).ok()?;
if self.0.shared.node_resolver.in_npm_package(&name) {
Some(create_host_defined_options(scope))
} else {
None
}
}
fn load(
&self,
specifier: &ModuleSpecifier,
@ -828,11 +840,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
}
std::future::ready(()).boxed_local()
}
}
impl<TGraphContainer: ModuleGraphContainer> SourceMapGetter
for CliModuleLoader<TGraphContainer>
{
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
let specifier = resolve_url(file_name).ok()?;
match specifier.scheme() {
@ -845,7 +853,7 @@ impl<TGraphContainer: ModuleGraphContainer> SourceMapGetter
source_map_from_code(source.code.as_bytes())
}
fn get_source_line(
fn get_source_mapped_source_line(
&self,
file_name: &str,
line_number: usize,

View file

@ -2,7 +2,7 @@
[package]
name = "napi_sym"
version = "0.88.0"
version = "0.94.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -6,10 +6,11 @@ use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
use deno_runtime::deno_node::analyze::CjsAnalysisExports;
use deno_runtime::deno_node::analyze::CjsCodeAnalyzer;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
use node_resolver::analyze::CjsAnalysisExports;
use node_resolver::analyze::CjsCodeAnalyzer;
use node_resolver::analyze::NodeCodeTranslator;
use serde::Deserialize;
use serde::Serialize;
@ -17,7 +18,8 @@ use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache;
use crate::util::fs::canonicalize_path_maybe_not_exists;
pub type CliNodeCodeTranslator = NodeCodeTranslator<CliCjsCodeAnalyzer>;
pub type CliNodeCodeTranslator =
NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>;
/// Resolves a specifier that is pointing into a node_modules folder.
///

View file

@ -6,16 +6,23 @@ use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_config::package_json::PackageJsonDepValue;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::load_pkg_json;
use deno_runtime::deno_node::DenoPkgJsonFsAdapter;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NpmResolver;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_runtime::deno_node::PackageJson;
use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageJsonLoadError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::load_pkg_json;
use node_resolver::NpmResolver;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
@ -27,7 +34,8 @@ use super::InnerCliNpmResolverRef;
pub struct CliNpmResolverByonmCreateOptions {
pub fs: Arc<dyn FileSystem>,
pub root_node_modules_dir: PathBuf,
// todo(dsherret): investigate removing this
pub root_node_modules_dir: Option<PathBuf>,
}
pub fn create_byonm_npm_resolver(
@ -42,7 +50,16 @@ pub fn create_byonm_npm_resolver(
#[derive(Debug)]
pub struct ByonmCliNpmResolver {
fs: Arc<dyn FileSystem>,
root_node_modules_dir: PathBuf,
root_node_modules_dir: Option<PathBuf>,
}
impl ByonmCliNpmResolver {
fn load_pkg_json(
&self,
path: &Path,
) -> Result<Option<Arc<PackageJson>>, PackageJsonLoadError> {
load_pkg_json(&DenoPkgJsonFsAdapter(self.fs.as_ref()), path)
}
}
impl ByonmCliNpmResolver {
@ -56,9 +73,7 @@ impl ByonmCliNpmResolver {
let mut current_folder = referrer_path.parent()?;
loop {
let pkg_json_path = current_folder.join("package.json");
if let Ok(Some(pkg_json)) =
load_pkg_json(self.fs.as_ref(), &pkg_json_path)
{
if let Ok(Some(pkg_json)) = self.load_pkg_json(&pkg_json_path) {
if let Some(deps) = &pkg_json.dependencies {
if deps.contains_key(dep_name) {
return Some(pkg_json);
@ -115,9 +130,7 @@ impl ByonmCliNpmResolver {
let mut current_path = file_path.as_path();
while let Some(dir_path) = current_path.parent() {
let package_json_path = dir_path.join("package.json");
if let Some(pkg_json) =
load_pkg_json(self.fs.as_ref(), &package_json_path)?
{
if let Some(pkg_json) = self.load_pkg_json(&package_json_path)? {
if let Some(alias) =
resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
@ -129,16 +142,14 @@ impl ByonmCliNpmResolver {
}
// otherwise, fall back to the project's package.json
let root_pkg_json_path = self
.root_node_modules_dir
.parent()
.unwrap()
.join("package.json");
if let Some(pkg_json) =
load_pkg_json(self.fs.as_ref(), &root_pkg_json_path)?
{
if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref()) {
return Ok((pkg_json, alias));
if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
let root_pkg_json_path =
root_node_modules_dir.parent().unwrap().join("package.json");
if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? {
if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
return Ok((pkg_json, alias));
}
}
}
@ -154,56 +165,54 @@ impl ByonmCliNpmResolver {
}
impl NpmResolver for ByonmCliNpmResolver {
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: Some(
self.root_node_modules_dir.to_string_lossy().to_string(),
),
})
.unwrap()
}
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
) -> Result<PathBuf, PackageFolderResolveError> {
fn inner(
fs: &dyn FileSystem,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
let referrer_file = specifier_to_file_path(referrer)?;
let mut current_folder = referrer_file.parent().unwrap();
loop {
let node_modules_folder = if current_folder.ends_with("node_modules") {
Cow::Borrowed(current_folder)
} else {
Cow::Owned(current_folder.join("node_modules"))
};
) -> Result<PathBuf, PackageFolderResolveError> {
let maybe_referrer_file = specifier_to_file_path(referrer).ok();
let maybe_start_folder =
maybe_referrer_file.as_ref().and_then(|f| f.parent());
if let Some(start_folder) = maybe_start_folder {
for current_folder in start_folder.ancestors() {
let node_modules_folder = if current_folder.ends_with("node_modules")
{
Cow::Borrowed(current_folder)
} else {
Cow::Owned(current_folder.join("node_modules"))
};
let sub_dir = join_package_name(&node_modules_folder, name);
if fs.is_dir_sync(&sub_dir) {
return Ok(sub_dir);
}
if let Some(parent) = current_folder.parent() {
current_folder = parent;
} else {
break;
let sub_dir = join_package_name(&node_modules_folder, name);
if fs.is_dir_sync(&sub_dir) {
return Ok(sub_dir);
}
}
}
bail!(
"could not find package '{}' from referrer '{}'.",
name,
referrer
);
Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
)
}
let path = inner(&*self.fs, name, referrer)?;
Ok(self.fs.realpath_sync(&path)?)
self.fs.realpath_sync(&path).map_err(|err| {
PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err.into_io_error(),
}
.into()
})
}
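The ancestors() walk above replaces the earlier manual parent loop; a simplified sketch with an assumed filesystem-free signature:

use std::path::{Path, PathBuf};

// Starting from the referrer's directory, probe <dir>/node_modules/<name>
// in every ancestor; a directory that is itself node_modules is probed
// directly, mirroring Node's resolution order.
fn find_package_dir(start: &Path, name: &str) -> Option<PathBuf> {
  for dir in start.ancestors() {
    let node_modules = if dir.ends_with("node_modules") {
      dir.to_path_buf()
    } else {
      dir.join("node_modules")
    };
    let candidate = node_modules.join(name);
    if candidate.is_dir() {
      return Some(candidate);
    }
  }
  None
}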
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
@ -213,7 +222,9 @@ impl NpmResolver for ByonmCliNpmResolver {
.to_ascii_lowercase()
.contains("/node_modules/")
}
}
impl NodeRequireResolver for ByonmCliNpmResolver {
fn ensure_read_permission(
&self,
permissions: &mut dyn NodePermissions,
@ -229,11 +240,34 @@ impl NpmResolver for ByonmCliNpmResolver {
}
}
impl NpmProcessStateProvider for ByonmCliNpmResolver {
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self
.root_node_modules_dir
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
}
impl CliNpmResolver for ByonmCliNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
self
}
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
self
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
self
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
Arc::new(Self {
fs: self.fs.clone(),
@ -246,7 +280,7 @@ impl CliNpmResolver for ByonmCliNpmResolver {
}
fn root_node_modules_path(&self) -> Option<&PathBuf> {
Some(&self.root_node_modules_dir)
self.root_node_modules_dir.as_ref()
}
fn resolve_pkg_folder_from_deno_module_req(
@ -273,9 +307,14 @@ impl CliNpmResolver for ByonmCliNpmResolver {
concat!(
"Could not find \"{}\" in a node_modules folder. ",
"Deno expects the node_modules/ directory to be up to date. ",
"Did you forget to run `npm install`?"
"Did you forget to run `{}`?"
),
alias,
if *crate::args::DENO_FUTURE {
"deno install"
} else {
"npm install"
}
);
}

View file

@ -197,14 +197,14 @@ pub fn mixed_case_package_name_encode(name: &str) -> String {
// use base32 encoding because it's reversible and the character set
// only includes the characters within 0-9 and A-Z so it can be lower cased
base32::encode(
base32::Alphabet::RFC4648 { padding: false },
base32::Alphabet::Rfc4648Lower { padding: false },
name.as_bytes(),
)
.to_lowercase()
}
pub fn mixed_case_package_name_decode(name: &str) -> Option<String> {
base32::decode(base32::Alphabet::RFC4648 { padding: false }, name)
base32::decode(base32::Alphabet::Rfc4648Lower { padding: false }, name)
.and_then(|b| String::from_utf8(b).ok())
}
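A roundtrip sketch for the pair above, assuming the base32 crate API shown in the diff: the Rfc4648Lower alphabet keeps the encoding reversible while emitting only characters that survive lower-casing on case-insensitive file systems.

fn roundtrip(name: &str) -> Option<String> {
  let encoded = base32::encode(
    base32::Alphabet::Rfc4648Lower { padding: false },
    name.as_bytes(),
  );
  // decode is the exact inverse, so roundtrip("LuCa") yields Some("LuCa").
  base32::decode(base32::Alphabet::Rfc4648Lower { padding: false }, &encoded)
    .and_then(|bytes| String::from_utf8(bytes).ok())
}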

View file

@ -1,7 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_npm::npm_rc::RegistryConfig;
use reqwest::header;
use http::header;
// TODO(bartlomieju): support more auth methods besides token and basic auth
pub fn maybe_auth_header_for_npm_registry(

View file

@ -11,12 +11,12 @@ use deno_core::error::AnyError;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageVersionDistInfo;
use deno_runtime::deno_fs::FileSystem;
use deno_semver::package::PackageNv;
use reqwest::StatusCode;
use reqwest::Url;
use http::StatusCode;
use crate::args::CacheSetting;
use crate::http_util::DownloadError;

View file

@ -21,12 +21,17 @@ use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NpmResolver;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::NpmResolver;
use resolution::AddPkgReqsResult;
use crate::args::CliLockfile;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::args::PackageJsonInstallDepsProvider;
@ -68,6 +73,7 @@ pub struct CliNpmResolverManagedCreateOptions {
pub npm_system_info: NpmSystemInfo,
pub package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
pub npmrc: Arc<ResolvedNpmRc>,
pub lifecycle_scripts: LifecycleScriptsConfig,
}
pub async fn create_managed_npm_resolver_for_lsp(
@ -96,6 +102,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
options.maybe_node_modules_path,
options.npm_system_info,
snapshot,
options.lifecycle_scripts,
)
})
.await
@ -120,6 +127,7 @@ pub async fn create_managed_npm_resolver(
options.maybe_node_modules_path,
options.npm_system_info,
snapshot,
options.lifecycle_scripts,
))
}
@ -136,6 +144,7 @@ fn create_inner(
node_modules_dir_path: Option<PathBuf>,
npm_system_info: NpmSystemInfo,
snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Arc<dyn CliNpmResolver> {
let resolution = Arc::new(NpmResolution::from_serialized(
npm_api.clone(),
@ -158,6 +167,7 @@ fn create_inner(
tarball_cache.clone(),
node_modules_dir_path,
npm_system_info.clone(),
lifecycle_scripts.clone(),
);
Arc::new(ManagedCliNpmResolver::new(
fs,
@ -170,6 +180,7 @@ fn create_inner(
tarball_cache,
text_only_progress_bar,
npm_system_info,
lifecycle_scripts,
))
}
@ -256,6 +267,7 @@ pub struct ManagedCliNpmResolver {
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
top_level_install_flag: AtomicFlag,
lifecycle_scripts: LifecycleScriptsConfig,
}
impl std::fmt::Debug for ManagedCliNpmResolver {
@ -279,6 +291,7 @@ impl ManagedCliNpmResolver {
tarball_cache: Arc<TarballCache>,
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self {
Self {
fs,
@ -292,6 +305,7 @@ impl ManagedCliNpmResolver {
tarball_cache,
npm_system_info,
top_level_install_flag: Default::default(),
lifecycle_scripts,
}
}
@ -463,24 +477,30 @@ impl ManagedCliNpmResolver {
if !self.top_level_install_flag.raise() {
return Ok(false); // already did this
}
let reqs = self.package_json_deps_provider.remote_pkg_reqs();
if reqs.is_empty() {
let pkg_json_remote_pkgs = self.package_json_deps_provider.remote_pkgs();
if pkg_json_remote_pkgs.is_empty() {
return Ok(false);
}
// check if something needs resolving before bothering to load all
// the package information (which is slow)
if reqs
.iter()
.all(|req| self.resolution.resolve_pkg_id_from_pkg_req(req).is_ok())
{
if pkg_json_remote_pkgs.iter().all(|pkg| {
self
.resolution
.resolve_pkg_id_from_pkg_req(&pkg.req)
.is_ok()
}) {
log::debug!(
"All package.json deps resolvable. Skipping top level install."
);
return Ok(false); // everything is already resolvable
}
self.add_package_reqs(reqs).await.map(|_| true)
let pkg_reqs = pkg_json_remote_pkgs
.iter()
.map(|pkg| pkg.req.clone())
.collect::<Vec<_>>();
self.add_package_reqs(&pkg_reqs).await.map(|_| true)
}
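The early return above skips loading package metadata when nothing changed; the shape of that check, as a minimal generic sketch:

// Install only when there are remote requirements and at least one of
// them fails to resolve against the current snapshot.
fn needs_top_level_install<T>(reqs: &[T], resolves: impl Fn(&T) -> bool) -> bool {
  !reqs.is_empty() && !reqs.iter().all(resolves)
}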
pub async fn cache_package_info(
@ -500,34 +520,34 @@ impl ManagedCliNpmResolver {
}
}
impl NpmResolver for ManagedCliNpmResolver {
/// Gets the state of npm for the process.
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(
self
.resolution
.serialized_valid_snapshot()
.into_serialized(),
),
local_node_modules_path: self
.fs_resolver
.node_modules_path()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
impl NpmResolver for ManagedCliNpmResolver {
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
) -> Result<PathBuf, PackageFolderResolveError> {
let path = self
.fs_resolver
.resolve_package_folder_from_package(name, referrer)?;
let path =
canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())?;
canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())
.map_err(|err| PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err,
})?;
log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
Ok(path)
}
@ -537,7 +557,9 @@ impl NpmResolver for ManagedCliNpmResolver {
debug_assert!(root_dir_url.as_str().ends_with('/'));
specifier.as_ref().starts_with(root_dir_url.as_str())
}
}
impl NodeRequireResolver for ManagedCliNpmResolver {
fn ensure_read_permission(
&self,
permissions: &mut dyn NodePermissions,
@ -547,11 +569,30 @@ impl NpmResolver for ManagedCliNpmResolver {
}
}
impl NpmProcessStateProvider for ManagedCliNpmResolver {
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.resolution.serialized_valid_snapshot(),
self.fs_resolver.node_modules_path().map(|p| p.as_path()),
)
}
}
impl CliNpmResolver for ManagedCliNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
self
}
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
self
}
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> {
self
}
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
// create a new snapshotted npm resolution and resolver
let npm_resolution = Arc::new(NpmResolution::new(
@ -571,6 +612,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.tarball_cache.clone(),
self.root_node_modules_path().map(ToOwned::to_owned),
self.npm_system_info.clone(),
self.lifecycle_scripts.clone(),
),
self.maybe_lockfile.clone(),
self.npm_api.clone(),
@ -580,6 +622,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.tarball_cache.clone(),
self.text_only_progress_bar.clone(),
self.npm_system_info.clone(),
self.lifecycle_scripts.clone(),
))
}

View file

@ -19,6 +19,7 @@ use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;
use crate::npm::managed::cache::TarballCache;
@ -31,16 +32,25 @@ pub trait NpmPackageFsResolver: Send + Sync {
/// The local node_modules folder if it is applicable to the implementation.
fn node_modules_path(&self) -> Option<&PathBuf>;
fn maybe_package_folder(&self, package_id: &NpmPackageId) -> Option<PathBuf>;
fn package_folder(
&self,
package_id: &NpmPackageId,
) -> Result<PathBuf, AnyError>;
) -> Result<PathBuf, AnyError> {
self.maybe_package_folder(package_id).ok_or_else(|| {
deno_core::anyhow::anyhow!(
"Package folder not found for '{}'",
package_id.as_serialized()
)
})
}
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError>;
) -> Result<PathBuf, PackageFolderResolveError>;
fn resolve_package_cache_folder_id_from_specifier(
&self,

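The hunk above turns package_folder into a provided trait method; a hedged sketch of that default-method pattern with stand-in types:

trait PackageFolders {
  fn maybe_package_folder(&self, id: &str) -> Option<std::path::PathBuf>;

  // Derived from the infallible accessor, so implementors only
  // write maybe_package_folder.
  fn package_folder(&self, id: &str) -> Result<std::path::PathBuf, String> {
    self
      .maybe_package_folder(id)
      .ok_or_else(|| format!("Package folder not found for '{id}'"))
  }
}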
View file

@ -8,7 +8,6 @@ use std::sync::Arc;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId;
@ -16,6 +15,9 @@ use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use super::super::cache::NpmCache;
use super::super::cache::TarballCache;
@ -65,29 +67,71 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
None
}
fn package_folder(&self, id: &NpmPackageId) -> Result<PathBuf, AnyError> {
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let folder_id = self
.resolution
.resolve_pkg_cache_folder_id_from_pkg_id(id)
.unwrap();
Ok(self.cache.package_folder_for_id(&folder_id))
.resolve_pkg_cache_folder_id_from_pkg_id(id)?;
Some(self.cache.package_folder_for_id(&folder_id))
}
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
let Some(referrer_pkg_id) = self
) -> Result<PathBuf, PackageFolderResolveError> {
use deno_npm::resolution::PackageNotFoundFromReferrerError;
let Some(referrer_cache_folder_id) = self
.cache
.resolve_package_folder_id_from_specifier(referrer)
else {
bail!("could not find npm package for '{}'", referrer);
return Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
);
};
let pkg = self
let resolve_result = self
.resolution
.resolve_package_from_package(name, &referrer_pkg_id)?;
self.package_folder(&pkg.id)
.resolve_package_from_package(name, &referrer_cache_folder_id);
match resolve_result {
Ok(pkg) => match self.maybe_package_folder(&pkg.id) {
Some(folder) => Ok(folder),
None => Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: Some(format!(
"{} -> {}",
referrer_cache_folder_id,
pkg.id.as_serialized()
)),
}
.into(),
),
},
Err(err) => match *err {
PackageNotFoundFromReferrerError::Referrer(cache_folder_id) => Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: Some(cache_folder_id.to_string()),
}
.into(),
),
PackageNotFoundFromReferrerError::Package {
name,
referrer: cache_folder_id_referrer,
} => Err(
PackageNotFoundError {
package_name: name,
referrer: referrer.clone(),
referrer_extra: Some(cache_folder_id_referrer.to_string()),
}
.into(),
),
},
}
}
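A compressed sketch of the error mapping above, with hypothetical trimmed enums: each lower-level failure becomes a distinct structured node_resolver error rather than a stringly-typed anyhow error.

enum LowerError {
  Referrer(String),
  Package { name: String, referrer_id: String },
}

enum StructuredError {
  ReferrerNotFound { extra: Option<String> },
  PackageNotFound { name: String, extra: Option<String> },
}

fn translate(err: LowerError) -> StructuredError {
  match err {
    LowerError::Referrer(id) => {
      StructuredError::ReferrerNotFound { extra: Some(id) }
    }
    LowerError::Package { name, referrer_id } => {
      StructuredError::PackageNotFound {
        name,
        extra: Some(referrer_id),
      }
    }
  }
}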
fn resolve_package_cache_folder_id_from_specifier(

View file

@ -7,27 +7,20 @@ mod bin_entries;
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use crate::args::PackageJsonInstallDepsProvider;
use crate::cache::CACHE_PERM;
use crate::npm::cache_dir::mixed_case_package_name_decode;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use crate::util::fs::clone_dir_recursive;
use crate::util::fs::symlink_dir;
use crate::util::fs::LaxSingleProcessFsFlag;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressMessagePrompt;
use crate::args::LifecycleScriptsConfig;
use crate::args::PackagesAllowedScripts;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
@ -41,10 +34,24 @@ use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodePermissions;
use deno_semver::package::PackageNv;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use serde::Deserialize;
use serde::Serialize;
use crate::args::PackageJsonInstallDepsProvider;
use crate::cache::CACHE_PERM;
use crate::npm::cache_dir::mixed_case_package_name_decode;
use crate::npm::cache_dir::mixed_case_package_name_encode;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use crate::util::fs::clone_dir_recursive;
use crate::util::fs::symlink_dir;
use crate::util::fs::LaxSingleProcessFsFlag;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressMessagePrompt;
use super::super::cache::NpmCache;
use super::super::cache::TarballCache;
@ -66,6 +73,7 @@ pub struct LocalNpmPackageResolver {
root_node_modules_url: Url,
system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker,
lifecycle_scripts: LifecycleScriptsConfig,
}
impl LocalNpmPackageResolver {
@ -79,6 +87,7 @@ impl LocalNpmPackageResolver {
tarball_cache: Arc<TarballCache>,
node_modules_folder: PathBuf,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self {
Self {
cache,
@ -95,6 +104,7 @@ impl LocalNpmPackageResolver {
.unwrap(),
root_node_modules_path: node_modules_folder,
system_info,
lifecycle_scripts,
}
}
@ -113,7 +123,7 @@ impl LocalNpmPackageResolver {
fn resolve_folder_for_specifier(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<PathBuf>, AnyError> {
) -> Result<Option<PathBuf>, std::io::Error> {
let Some(relative_url) =
self.root_node_modules_url.make_relative(specifier)
else {
@ -130,7 +140,6 @@ impl LocalNpmPackageResolver {
// in `node_modules` directory of the referrer.
canonicalize_path_maybe_not_exists_with_fs(&path, self.fs.as_ref())
.map(Some)
.map_err(|err| err.into())
}
fn resolve_package_folder_from_specifier(
@ -155,32 +164,42 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
Some(&self.root_node_modules_path)
}
fn package_folder(&self, id: &NpmPackageId) -> Result<PathBuf, AnyError> {
match self.resolution.resolve_pkg_cache_folder_id_from_pkg_id(id) {
// package is stored at:
// node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
Some(cache_folder_id) => Ok(
self
.root_node_modules_path
.join(".deno")
.join(get_package_folder_id_folder_name(&cache_folder_id))
.join("node_modules")
.join(&cache_folder_id.nv.name),
),
None => bail!(
"Could not find package information for '{}'",
id.as_serialized()
),
}
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let cache_folder_id = self
.resolution
.resolve_pkg_cache_folder_id_from_pkg_id(id)?;
// package is stored at:
// node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
Some(
self
.root_node_modules_path
.join(".deno")
.join(get_package_folder_id_folder_name(&cache_folder_id))
.join("node_modules")
.join(&cache_folder_id.nv.name),
)
}
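A minimal sketch of the path layout described in the comment above, assuming a cache folder id that renders as "chalk@5.3.0" (the folder and package names here are hypothetical):
use std::path::{Path, PathBuf};
// Sketch: packages live at
// node_modules/.deno/<folder_name>/node_modules/<package_name>
fn pnpm_style_path(
  root_node_modules: &Path,
  folder_name: &str,
  package_name: &str,
) -> PathBuf {
  root_node_modules
    .join(".deno")
    .join(folder_name)
    .join("node_modules")
    .join(package_name)
}
fn main() {
  let p = pnpm_style_path(Path::new("node_modules"), "chalk@5.3.0", "chalk");
  assert_eq!(p, Path::new("node_modules/.deno/chalk@5.3.0/node_modules/chalk"));
}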
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
let Some(local_path) = self.resolve_folder_for_specifier(referrer)? else {
bail!("could not find npm package for '{}'", referrer);
) -> Result<PathBuf, PackageFolderResolveError> {
let maybe_local_path = self
.resolve_folder_for_specifier(referrer)
.map_err(|err| PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.clone(),
source: err,
})?;
let Some(local_path) = maybe_local_path else {
return Err(
ReferrerNotFoundError {
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
);
};
let package_root_path = self.resolve_package_root(&local_path);
let mut current_folder = package_root_path.as_path();
@ -202,11 +221,14 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
}
}
bail!(
"could not find package '{}' from referrer '{}'.",
name,
referrer
);
Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.clone(),
referrer_extra: None,
}
.into(),
)
}
fn resolve_package_cache_folder_id_from_specifier(
@ -231,6 +253,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
&self.tarball_cache,
&self.root_node_modules_path,
&self.system_info,
&self.lifecycle_scripts,
)
.await
}
@ -246,7 +269,146 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
}
}
// take in all (non-copy) packages from the snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
fn resolve_baseline_custom_commands(
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
custom_commands
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
custom_commands
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
custom_commands
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
custom_commands.insert(
"node-gyp".to_string(),
Rc::new(crate::task_runner::NodeGypCommand),
);
// TODO: this recreates the bin entries which could be redoing some work, but the ones
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
// doing it for packages that are set up already).
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
resolve_custom_commands_from_packages(
custom_commands,
snapshot,
packages,
local_registry_dir,
)
}
// resolves the custom commands from an iterator of packages
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands
fn resolve_custom_commands_from_packages<
'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>,
>(
mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot,
packages: P,
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = bin_entries::BinEntries::new();
for package in packages {
let package_path =
local_node_modules_package_path(local_registry_dir, package);
if package.bin.is_some() {
bin_entries.add(package.clone(), package_path);
}
}
let bins = bin_entries.into_bin_files(snapshot);
for (bin_name, script_path) in bins {
commands.insert(
bin_name.clone(),
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
command_name: bin_name,
path: script_path,
}),
);
}
Ok(commands)
}
fn local_node_modules_package_path(
local_registry_dir: &Path,
package: &NpmResolutionPackage,
) -> PathBuf {
local_registry_dir
.join(get_package_folder_id_folder_name(
&package.get_package_cache_folder_id(),
))
.join("node_modules")
.join(&package.id.nv.name)
}
// resolves the custom commands from the dependencies of a package
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands.
fn resolve_custom_commands_from_deps(
baseline: crate::task_runner::TaskCustomCommands,
package: &NpmResolutionPackage,
snapshot: &NpmResolutionSnapshot,
local_registry_dir: &Path,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
resolve_custom_commands_from_packages(
baseline,
snapshot,
package
.dependencies
.values()
.map(|id| snapshot.package_from_id(id).unwrap()),
local_registry_dir,
)
}
fn can_run_scripts(
allow_scripts: &PackagesAllowedScripts,
package_nv: &PackageNv,
) -> bool {
match allow_scripts {
PackagesAllowedScripts::All => true,
// TODO: make this more correct
PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
let s = s.strip_prefix("npm:").unwrap_or(s);
s == package_nv.name || s == package_nv.to_string()
}),
PackagesAllowedScripts::None => false,
}
}
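The allow-list accepts bare package names or name@version entries, optionally prefixed with npm:. A minimal standalone sketch of that matching, using a stand-in struct for deno_semver's PackageNv (all names and versions below are made up):
struct Nv {
  name: String,
  version: String,
}
fn allowed(allow_list: &[&str], nv: &Nv) -> bool {
  allow_list.iter().copied().any(|s| {
    // strip an optional npm: prefix, then match on name or name@version
    let s = s.strip_prefix("npm:").unwrap_or(s);
    s == nv.name || s == format!("{}@{}", nv.name, nv.version)
  })
}
fn main() {
  let nv = Nv { name: "esbuild".into(), version: "0.20.0".into() };
  assert!(allowed(&["npm:esbuild"], &nv));
  assert!(allowed(&["esbuild@0.20.0"], &nv));
  assert!(!allowed(&["fsevents"], &nv));
}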
// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
// but this always fails if the package excluded the `binding.gyp` file when publishing.
// (for example, `fsevents` hits this)
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
}
fn has_lifecycle_scripts(
package: &NpmResolutionPackage,
package_path: &Path,
) -> bool {
if let Some(install) = package.scripts.get("install") {
// default script
if !is_broken_default_install_script(install, package_path) {
return true;
}
}
package.scripts.contains_key("preinstall")
|| package.scripts.contains_key("postinstall")
}
/// Creates a pnpm style folder structure.
#[allow(clippy::too_many_arguments)]
async fn sync_resolution_with_fs(
snapshot: &NpmResolutionSnapshot,
cache: &Arc<NpmCache>,
@ -255,6 +417,7 @@ async fn sync_resolution_with_fs(
tarball_cache: &Arc<TarballCache>,
root_node_modules_dir_path: &Path,
system_info: &NpmSystemInfo,
lifecycle_scripts: &LifecycleScriptsConfig,
) -> Result<(), AnyError> {
if snapshot.is_empty() && pkg_json_deps_provider.workspace_pkgs().is_empty() {
return Ok(()); // don't create the directory
@ -293,6 +456,8 @@ async fn sync_resolution_with_fs(
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
HashMap::with_capacity(package_partitions.packages.len());
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
let mut packages_with_scripts = Vec::with_capacity(2);
let mut packages_with_scripts_not_run = Vec::new();
for package in &package_partitions.packages {
if let Some(current_pkg) =
newest_packages_by_name.get_mut(&package.id.nv.name)
@ -317,6 +482,7 @@ async fn sync_resolution_with_fs(
// are forced to be recreated
setup_cache.remove_dep(&package_folder_name);
let folder_path = folder_path.clone();
let bin_entries_to_setup = bin_entries.clone();
cache_futures.push(async move {
tarball_cache
@ -354,6 +520,25 @@ async fn sync_resolution_with_fs(
Ok::<_, AnyError>(())
});
}
let sub_node_modules = folder_path.join("node_modules");
let package_path =
join_package_name(&sub_node_modules, &package.id.nv.name);
if has_lifecycle_scripts(package, &package_path) {
let scripts_run = folder_path.join(".scripts-run");
let has_warned = folder_path.join(".scripts-warned");
if can_run_scripts(&lifecycle_scripts.allowed, &package.id.nv) {
if !scripts_run.exists() {
packages_with_scripts.push((
package.clone(),
package_path,
scripts_run,
));
}
} else if !scripts_run.exists() && !has_warned.exists() {
packages_with_scripts_not_run.push((has_warned, package.id.nv.clone()));
}
}
}
while let Some(result) = cache_futures.next().await {
@ -422,16 +607,90 @@ async fn sync_resolution_with_fs(
}
}
// 4. Create all the top level packages in the node_modules folder, which are symlinks.
//
// Symlink node_modules/<package_name> to
// node_modules/.deno/<package_id>/node_modules/<package_name>
let mut found_names = HashSet::new();
let mut ids = snapshot.top_level_packages().collect::<Vec<_>>();
let mut found_names: HashMap<&String, &PackageNv> = HashMap::new();
// 4. Create symlinks for package json dependencies
{
for remote in pkg_json_deps_provider.remote_pkgs() {
let remote_pkg = if let Ok(remote_pkg) =
snapshot.resolve_pkg_from_pkg_req(&remote.req)
{
remote_pkg
} else if remote.req.version_req.tag().is_some() {
// couldn't find a match, and `resolve_best_package_id`
// panics if you give it a tag
continue;
} else if let Some(remote_id) = snapshot
.resolve_best_package_id(&remote.req.name, &remote.req.version_req)
{
snapshot.package_from_id(&remote_id).unwrap()
} else {
continue; // skip, package not found
};
let alias_clashes = remote.req.name != remote.alias
&& newest_packages_by_name.contains_key(&remote.alias);
let install_in_child = {
// we'll install in the child if the alias is taken by another package, or
// if there's already a package with the same name but different version
// linked into the root
match found_names.entry(&remote.alias) {
Entry::Occupied(nv) => {
alias_clashes
|| remote.req.name != nv.get().name // alias to a different package (in case of duplicate aliases)
|| !remote.req.version_req.matches(&nv.get().version) // incompatible version
}
Entry::Vacant(entry) => {
entry.insert(&remote_pkg.id.nv);
alias_clashes
}
}
};
let target_folder_name = get_package_folder_id_folder_name(
&remote_pkg.get_package_cache_folder_id(),
);
let local_registry_package_path = join_package_name(
&deno_local_registry_dir
.join(&target_folder_name)
.join("node_modules"),
&remote_pkg.id.nv.name,
);
if install_in_child {
// symlink the dep into the package's child node_modules folder
let dest_path =
remote.base_dir.join("node_modules").join(&remote.alias);
symlink_package_dir(&local_registry_package_path, &dest_path)?;
} else {
// symlink the package into `node_modules/<alias>`
if setup_cache
.insert_root_symlink(&remote_pkg.id.nv.name, &target_folder_name)
{
symlink_package_dir(
&local_registry_package_path,
&join_package_name(root_node_modules_dir_path, &remote.alias),
)?;
}
}
}
}
// 5. Create symlinks for the remaining top level packages in the node_modules folder.
// (These may be present if they are not in the package.json dependencies.)
// Symlink node_modules/.deno/<package_id>/node_modules/<package_name> to
// node_modules/<package_name>
let mut ids = snapshot
.top_level_packages()
.filter(|f| !found_names.contains_key(&f.nv.name))
.collect::<Vec<_>>();
ids.sort_by(|a, b| b.cmp(a)); // create determinism and only include the latest version
for id in ids {
if !found_names.insert(&id.nv.name) {
continue; // skip, already handled
match found_names.entry(&id.nv.name) {
Entry::Occupied(_) => {
continue; // skip, already handled
}
Entry::Vacant(entry) => {
entry.insert(&id.nv);
}
}
let package = snapshot.package_from_id(id).unwrap();
let target_folder_name =
@ -451,11 +710,16 @@ async fn sync_resolution_with_fs(
}
}
// 5. Create a node_modules/.deno/node_modules/<package-name> directory with
// 6. Create a node_modules/.deno/node_modules/<package-name> directory with
// the remaining packages
for package in newest_packages_by_name.values() {
if !found_names.insert(&package.id.nv.name) {
continue; // skip, already handled
match found_names.entry(&package.id.nv.name) {
Entry::Occupied(_) => {
continue; // skip, already handled
}
Entry::Vacant(entry) => {
entry.insert(&package.id.nv);
}
}
let target_folder_name =
@ -476,25 +740,109 @@ async fn sync_resolution_with_fs(
}
}
// 6. Set up `node_modules/.bin` entries for packages that need it.
// 7. Set up `node_modules/.bin` entries for packages that need it.
{
let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut());
bin_entries.finish(snapshot, &bin_node_modules_dir_path)?;
}
// 7. Create symlinks for the workspace packages
// 8. Create symlinks for the workspace packages
{
// todo(#24419): this is not exactly correct because it should
// install correctly for a workspace (potentially in sub directories),
// but this is good enough for a first pass
for workspace in pkg_json_deps_provider.workspace_pkgs() {
symlink_package_dir(
&workspace.pkg_dir,
&workspace.target_dir,
&root_node_modules_dir_path.join(&workspace.alias),
)?;
}
}
if !packages_with_scripts.is_empty() {
// get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`)
let base = resolve_baseline_custom_commands(
snapshot,
&package_partitions.packages,
&deno_local_registry_dir,
)?;
let init_cwd = lifecycle_scripts.initial_cwd.as_deref().unwrap();
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
Some(root_node_modules_dir_path),
);
let mut env_vars = crate::task_runner::real_env_vars();
env_vars.insert(
crate::args::NPM_RESOLUTION_STATE_ENV_VAR_NAME.to_string(),
process_state,
);
for (package, package_path, scripts_run_path) in packages_with_scripts {
// add custom commands for binaries from the package's dependencies. this will take precedence over the
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
// correct bin will be used.
let custom_commands = resolve_custom_commands_from_deps(
base.clone(),
&package,
snapshot,
&deno_local_registry_dir,
)?;
for script_name in ["preinstall", "install", "postinstall"] {
if let Some(script) = package.scripts.get(script_name) {
if script_name == "install"
&& is_broken_default_install_script(script, &package_path)
{
continue;
}
let exit_code =
crate::task_runner::run_task(crate::task_runner::RunTaskOptions {
task_name: script_name,
script,
cwd: &package_path,
env_vars: env_vars.clone(),
custom_commands: custom_commands.clone(),
init_cwd,
argv: &[],
root_node_modules_dir: Some(root_node_modules_dir_path),
})
.await?;
if exit_code != 0 {
anyhow::bail!(
"script '{}' in '{}' failed with exit code {}",
script_name,
package.id.nv,
exit_code,
);
}
}
}
fs::write(scripts_run_path, "")?;
}
}
if !packages_with_scripts_not_run.is_empty() {
let (maybe_install, maybe_install_example) = if *crate::args::DENO_FUTURE {
(
" or `deno install`",
" or `deno install --allow-scripts=pkg1,pkg2`",
)
} else {
("", "")
};
let packages = packages_with_scripts_not_run
.iter()
.map(|(_, p)| format!("npm:{p}"))
.collect::<Vec<_>>()
.join(", ");
log::warn!("{}: Packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed.
This may cause the packages to not work correctly. To run them, use the `--allow-scripts` flag with `deno cache`{maybe_install}
(e.g. `deno cache --allow-scripts=pkg1,pkg2 <entrypoint>`{maybe_install_example}):\n {packages}", crate::colors::yellow("warning"));
for (scripts_warned_path, _) in packages_with_scripts_not_run {
let _ignore_err = fs::write(scripts_warned_path, "");
}
}
setup_cache.save();
drop(single_process_lock);
drop(pb_clear_guard);
@ -502,10 +850,13 @@ async fn sync_resolution_with_fs(
Ok(())
}
// Uses BTreeMap to preserve the ordering of the elements in memory, to ensure
// the file generated from this datastructure is deterministic.
// See: https://github.com/denoland/deno/issues/24479
/// Represents a dependency at `node_modules/.deno/<package_id>/`
struct SetupCacheDep<'a> {
previous: Option<&'a HashMap<String, String>>,
current: &'a mut HashMap<String, String>,
previous: Option<&'a BTreeMap<String, String>>,
current: &'a mut BTreeMap<String, String>,
}
impl<'a> SetupCacheDep<'a> {
@ -521,11 +872,14 @@ impl<'a> SetupCacheDep<'a> {
}
}
// Uses BTreeMap to preserve the ordering of the elements in memory, to ensure
// the file generated from this datastructure is deterministic.
// See: https://github.com/denoland/deno/issues/24479
#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct SetupCacheData {
root_symlinks: HashMap<String, String>,
deno_symlinks: HashMap<String, String>,
dep_symlinks: HashMap<String, HashMap<String, String>>,
root_symlinks: BTreeMap<String, String>,
deno_symlinks: BTreeMap<String, String>,
dep_symlinks: BTreeMap<String, BTreeMap<String, String>>,
}
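A small sketch of why the switch to BTreeMap makes the cache file deterministic (serde_json assumed as a dependency; the keys are made up): iteration order is sorted by key, so serialization no longer depends on HashMap's per-process randomized ordering.
use std::collections::BTreeMap;
fn main() {
  let mut root_symlinks = BTreeMap::new();
  root_symlinks.insert("zod", "zod@3.23.8");
  root_symlinks.insert("chalk", "chalk@5.3.0");
  // Prints {"chalk":"chalk@5.3.0","zod":"zod@3.23.8"} on every run,
  // regardless of insertion order.
  println!("{}", serde_json::to_string(&root_symlinks).unwrap());
}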
/// It is very slow to try to re-setup the symlinks each time, so this will
@ -696,13 +1050,14 @@ fn junction_or_symlink_dir(
old_path: &Path,
new_path: &Path,
) -> Result<(), AnyError> {
use deno_core::anyhow::bail;
// Use junctions because they're supported on ntfs file systems without
// needing to elevate privileges on Windows
match junction::create(old_path, new_path) {
Ok(()) => Ok(()),
Err(junction_err) => {
if cfg!(debug) {
if cfg!(debug_assertions) {
// When running the tests, junctions should be created, but if not then
// surface this error.
log::warn!("Error creating junction. {:#}", junction_err);

View file

@ -71,19 +71,16 @@ impl BinEntries {
self.entries.push((package, package_path));
}
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub(super) fn finish(
mut self,
fn for_each_entry(
&mut self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
mut f: impl FnMut(
&NpmResolutionPackage,
&Path,
&str, // bin name
&str, // bin script
) -> Result<(), AnyError>,
) -> Result<(), AnyError> {
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
|| format!("Creating '{}'", bin_node_modules_dir_path.display()),
)?;
}
if !self.collisions.is_empty() {
// walking the dependency tree to find out the depth of each package
// is sort of expensive, so we only do it if there's a collision
@ -101,13 +98,7 @@ impl BinEntries {
// we already set up a bin entry with this name
continue;
}
set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)?;
f(package, package_path, name, script)?;
}
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
for (name, script) in entries {
@ -115,13 +106,7 @@ impl BinEntries {
// we already set up a bin entry with this name
continue;
}
set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)?;
f(package, package_path, name, script)?;
}
}
}
@ -130,6 +115,47 @@ impl BinEntries {
Ok(())
}
/// Collect the bin entries into a vec of (name, script path)
pub(super) fn into_bin_files(
mut self,
snapshot: &NpmResolutionSnapshot,
) -> Vec<(String, PathBuf)> {
let mut bins = Vec::new();
self
.for_each_entry(snapshot, |_, package_path, name, script| {
bins.push((name.to_string(), package_path.join(script)));
Ok(())
})
.unwrap();
bins
}
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub(super) fn finish(
mut self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
) -> Result<(), AnyError> {
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
|| format!("Creating '{}'", bin_node_modules_dir_path.display()),
)?;
}
self.for_each_entry(snapshot, |package, package_path, name, script| {
set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)
})?;
Ok(())
}
}
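The refactor above funnels both consumers, collecting bin files and writing them to disk, through one traversal, so the collision and depth handling lives in a single place. A simplified sketch of that callback pattern, with hypothetical types:
struct Entries(Vec<(String, String)>); // (bin name, script path)
impl Entries {
  // one traversal routine shared by every consumer
  fn for_each(&self, mut f: impl FnMut(&str, &str)) {
    for (name, script) in &self.0 {
      f(name, script);
    }
  }
  // one consumer: collect names instead of writing files
  fn collect_names(&self) -> Vec<String> {
    let mut out = Vec::new();
    self.for_each(|name, _| out.push(name.to_string()));
    out
  }
}
fn main() {
  let entries = Entries(vec![("tsc".into(), "bin/tsc".into())]);
  assert_eq!(entries.collect_names(), vec!["tsc".to_string()]);
}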
// walk the dependency tree to find out the depth of each package

View file

@ -10,6 +10,7 @@ use std::sync::Arc;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;
use crate::args::LifecycleScriptsConfig;
use crate::args::PackageJsonInstallDepsProvider;
use crate::util::progress_bar::ProgressBar;
@ -32,6 +33,7 @@ pub fn create_npm_fs_resolver(
tarball_cache: Arc<TarballCache>,
maybe_node_modules_path: Option<PathBuf>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Arc<dyn NpmPackageFsResolver> {
match maybe_node_modules_path {
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
@ -43,6 +45,7 @@ pub fn create_npm_fs_resolver(
tarball_cache,
node_modules_folder,
system_info,
lifecycle_scripts,
)),
None => Arc::new(GlobalNpmPackageResolver::new(
npm_cache,

View file

@ -13,10 +13,12 @@ use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_npm::registry::NpmPackageInfo;
use deno_runtime::deno_node::NpmResolver;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::NpmResolver;
use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher;
@ -63,6 +65,10 @@ pub enum InnerCliNpmResolverRef<'a> {
pub trait CliNpmResolver: NpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver>;
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver>;
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider>;
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>;
@ -118,6 +124,10 @@ impl NpmFetchResolver {
let maybe_get_nv = || async {
let name = req.name.clone();
let package_info = self.package_info(&name).await?;
if let Some(dist_tag) = req.version_req.tag() {
let version = package_info.dist_tags.get(dist_tag)?.clone();
return Some(PackageNv { name, version });
}
// Find the first matching version of the package.
let mut versions = package_info.versions.keys().collect::<Vec<_>>();
versions.sort();

View file

@ -167,13 +167,12 @@ pub fn op_jupyter_input(
return Ok(None);
}
let msg = JupyterMessage::new(
JupyterMessageContent::InputRequest(InputRequest {
prompt,
password: is_password,
}),
Some(&last_request),
);
let content = InputRequest {
prompt,
password: is_password,
};
let msg = JupyterMessage::new(content, Some(&last_request));
let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else {
return Ok(None);

File diff suppressed because it is too large.

View file

@ -605,11 +605,27 @@
]
},
"workspace": {
"type": "array",
"items": {
"type": "string"
},
"description": "The members of this workspace."
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
},
"description": "The members of this workspace."
},
{
"type": "object",
"properties": {
"members": {
"type": "array",
"items": {
"type": "string"
},
"description": "The members of this workspace."
}
}
}
]
}
}
}
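Both shapes the updated schema accepts, shown as a small shape-check sketch (serde_json assumed as a dependency; this only checks presence, it is not real schema validation, and the member paths are made up):
fn main() {
  for cfg in [
    r#"{ "workspace": ["./member-a", "./member-b"] }"#,
    r#"{ "workspace": { "members": ["./member-a", "./member-b"] } }"#,
  ] {
    let value: serde_json::Value = serde_json::from_str(cfg).unwrap();
    assert!(value.get("workspace").is_some());
  }
}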

View file

@ -2,10 +2,12 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::env::current_exe;
use std::ffi::OsString;
use std::fs;
use std::fs::File;
use std::future::Future;
use std::io::Read;
use std::io::Seek;
@ -17,6 +19,7 @@ use std::process::Command;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
@ -31,6 +34,7 @@ use deno_npm::NpmSystemInfo;
use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError;
use eszip::EszipRelativeFileBaseUrl;
use indexmap::IndexMap;
@ -50,6 +54,7 @@ use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::standalone::virtual_fs::VfsEntry;
use crate::util::archive;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@ -68,7 +73,7 @@ pub enum NodeModules {
node_modules_dir: Option<String>,
},
Byonm {
root_node_modules_dir: String,
root_node_modules_dir: Option<String>,
},
}
@ -78,9 +83,18 @@ pub struct SerializedWorkspaceResolverImportMap {
pub json: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SerializedResolverWorkspaceJsrPackage {
pub relative_base: String,
pub name: String,
pub version: Option<Version>,
pub exports: IndexMap<String, String>,
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
pub package_jsons: BTreeMap<String, serde_json::Value>,
pub pkg_json_resolution: PackageJsonDepResolution,
}
@ -96,6 +110,7 @@ pub struct Metadata {
pub ca_stores: Option<Vec<String>>,
pub ca_data: Option<Vec<u8>>,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub env_vars_from_env_file: HashMap<String, String>,
pub workspace_resolver: SerializedWorkspaceResolver,
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
@ -104,16 +119,19 @@ pub struct Metadata {
}
pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result<FileBackedVfs, AnyError> {
let file_path = current_exe().unwrap();
let mut file = std::fs::File::open(file_path)?;
file.seek(SeekFrom::End(-(TRAILER_SIZE as i64)))?;
let mut trailer = [0; TRAILER_SIZE];
file.read_exact(&mut trailer)?;
let trailer = Trailer::parse(&trailer)?.unwrap();
file.seek(SeekFrom::Start(trailer.npm_vfs_pos))?;
let mut vfs_data = vec![0; trailer.npm_vfs_len() as usize];
file.read_exact(&mut vfs_data)?;
let mut dir: VirtualDirectory = serde_json::from_slice(&vfs_data)?;
let data = libsui::find_section("d3n0l4nd").unwrap();
// We do the first part sync so it can complete quickly
let trailer: [u8; TRAILER_SIZE] = data[0..TRAILER_SIZE].try_into().unwrap();
let trailer = match Trailer::parse(&trailer)? {
None => panic!("Could not find trailer"),
Some(trailer) => trailer,
};
let data = &data[TRAILER_SIZE..];
let vfs_data =
&data[trailer.npm_vfs_pos as usize..trailer.npm_files_pos as usize];
let mut dir: VirtualDirectory = serde_json::from_slice(vfs_data)?;
// align the name of the directory with the root dir
dir.name = root_dir_path
@ -127,38 +145,32 @@ pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result<FileBackedVfs, AnyError> {
root_path: root_dir_path,
start_file_offset: trailer.npm_files_pos,
};
Ok(FileBackedVfs::new(file, fs_root))
Ok(FileBackedVfs::new(data.to_vec(), fs_root))
}
fn write_binary_bytes(
writer: &mut impl Write,
mut file_writer: File,
original_bin: Vec<u8>,
metadata: &Metadata,
eszip: eszip::EszipV2,
npm_vfs: Option<&VirtualDirectory>,
npm_files: &Vec<Vec<u8>>,
compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec();
let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec();
let eszip_archive = eszip.into_bytes();
writer.write_all(&original_bin)?;
writer.write_all(&eszip_archive)?;
writer.write_all(&metadata)?;
writer.write_all(&npm_vfs)?;
for file in npm_files {
writer.write_all(file)?;
}
let mut writer = Vec::new();
// write the trailer, which includes the positions
// of the data blocks in the file
writer.write_all(&{
let eszip_pos = original_bin.len() as u64;
let metadata_pos = eszip_pos + (eszip_archive.len() as u64);
let metadata_pos = eszip_archive.len() as u64;
let npm_vfs_pos = metadata_pos + (metadata.len() as u64);
let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64);
Trailer {
eszip_pos,
eszip_pos: 0,
metadata_pos,
npm_vfs_pos,
npm_files_pos,
@ -166,27 +178,36 @@ fn write_binary_bytes(
.as_bytes()
})?;
writer.write_all(&eszip_archive)?;
writer.write_all(&metadata)?;
writer.write_all(&npm_vfs)?;
for file in npm_files {
writer.write_all(file)?;
}
let target = compile_flags.resolve_target();
if target.contains("linux") {
libsui::Elf::new(&original_bin).append(&writer, &mut file_writer)?;
} else if target.contains("windows") {
libsui::PortableExecutable::from(&original_bin)?
.write_resource("d3n0l4nd", writer)?
.build(&mut file_writer)?;
} else if target.contains("darwin") {
libsui::Macho::from(original_bin)?
.write_section("d3n0l4nd", writer)?
.build_and_sign(&mut file_writer)?;
}
Ok(())
}
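A sketch of how the offsets written above relate to one another; they are measured from the end of the trailer, which is why eszip_pos is stored as 0 (the lengths below are made up, and this is only the arithmetic, not the writer itself):
fn section_offsets(
  eszip_len: u64,
  metadata_len: u64,
  npm_vfs_len: u64,
) -> (u64, u64, u64) {
  let metadata_pos = eszip_len;
  let npm_vfs_pos = metadata_pos + metadata_len;
  let npm_files_pos = npm_vfs_pos + npm_vfs_len;
  (metadata_pos, npm_vfs_pos, npm_files_pos)
}
fn main() {
  // e.g. a 1000-byte eszip, 200 bytes of metadata, a 300-byte vfs directory:
  assert_eq!(section_offsets(1000, 200, 300), (1000, 1200, 1500));
}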
pub fn is_standalone_binary(exe_path: &Path) -> bool {
let Ok(mut output_file) = std::fs::File::open(exe_path) else {
let Ok(data) = std::fs::read(exe_path) else {
return false;
};
if output_file
.seek(SeekFrom::End(-(TRAILER_SIZE as i64)))
.is_err()
{
// This seek may fail because the file is too small to possibly be
// `deno compile` output.
return false;
}
let mut trailer = [0; TRAILER_SIZE];
if output_file.read_exact(&mut trailer).is_err() {
return false;
};
let (magic_trailer, _) = trailer.split_at(8);
magic_trailer == MAGIC_TRAILER
libsui::utils::is_elf(&data)
|| libsui::utils::is_pe(&data)
|| libsui::utils::is_macho(&data)
}
/// This function will try to run this binary as a standalone binary
@ -195,40 +216,32 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
/// then checking for the magic trailer string `d3n0l4nd`. If found,
/// the bundle is executed. If not, this function exits with `Ok(None)`.
pub fn extract_standalone(
exe_path: &Path,
cli_args: Cow<Vec<OsString>>,
) -> Result<
Option<impl Future<Output = Result<(Metadata, eszip::EszipV2), AnyError>>>,
AnyError,
> {
let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None);
};
// We do the first part sync so it can complete quickly
let mut file = std::fs::File::open(exe_path)?;
file.seek(SeekFrom::End(-(TRAILER_SIZE as i64)))?;
let mut trailer = [0; TRAILER_SIZE];
file.read_exact(&mut trailer)?;
let trailer = match Trailer::parse(&trailer)? {
let trailer = match Trailer::parse(&data[0..TRAILER_SIZE])? {
None => return Ok(None),
Some(trailer) => trailer,
};
file.seek(SeekFrom::Start(trailer.eszip_pos))?;
let cli_args = cli_args.into_owned();
// If we have an eszip, read it out
Ok(Some(async move {
let bufreader =
deno_core::futures::io::BufReader::new(AllowStdIo::new(file));
deno_core::futures::io::BufReader::new(&data[TRAILER_SIZE..]);
let (eszip, loader) = eszip::EszipV2::parse(bufreader)
.await
.context("Failed to parse eszip header")?;
let mut bufreader =
loader.await.context("Failed to parse eszip archive")?;
bufreader
.seek(SeekFrom::Start(trailer.metadata_pos))
.await?;
let bufreader = loader.await.context("Failed to parse eszip archive")?;
let mut metadata = String::new();
@ -306,72 +319,6 @@ fn u64_from_bytes(arr: &[u8]) -> Result<u64, AnyError> {
Ok(u64::from_be_bytes(*fixed_arr))
}
pub fn unpack_into_dir(
exe_name: &str,
archive_name: &str,
archive_data: Vec<u8>,
is_windows: bool,
temp_dir: &tempfile::TempDir,
) -> Result<PathBuf, AnyError> {
let temp_dir_path = temp_dir.path();
let exe_ext = if is_windows { "exe" } else { "" };
let archive_path = temp_dir_path.join(exe_name).with_extension("zip");
let exe_path = temp_dir_path.join(exe_name).with_extension(exe_ext);
assert!(!exe_path.exists());
let archive_ext = Path::new(archive_name)
.extension()
.and_then(|ext| ext.to_str())
.unwrap();
let unpack_status = match archive_ext {
"zip" if cfg!(windows) => {
fs::write(&archive_path, &archive_data)?;
Command::new("tar.exe")
.arg("xf")
.arg(&archive_path)
.arg("-C")
.arg(temp_dir_path)
.spawn()
.map_err(|err| {
if err.kind() == std::io::ErrorKind::NotFound {
std::io::Error::new(
std::io::ErrorKind::NotFound,
"`tar.exe` was not found in your PATH",
)
} else {
err
}
})?
.wait()?
}
"zip" => {
fs::write(&archive_path, &archive_data)?;
Command::new("unzip")
.current_dir(temp_dir_path)
.arg(&archive_path)
.spawn()
.map_err(|err| {
if err.kind() == std::io::ErrorKind::NotFound {
std::io::Error::new(
std::io::ErrorKind::NotFound,
"`unzip` was not found in your PATH, please install `unzip`",
)
} else {
err
}
})?
.wait()?
}
ext => bail!("Unsupported archive type: '{ext}'"),
};
if !unpack_status.success() {
bail!("Failed to unpack archive.");
}
assert!(exe_path.exists());
fs::remove_file(&archive_path)?;
Ok(exe_path)
}
pub struct DenoCompileBinaryWriter<'a> {
deno_dir: &'a DenoDir,
file_fetcher: &'a FileFetcher,
@ -403,7 +350,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
pub async fn write_bin(
&self,
writer: &mut impl Write,
writer: File,
eszip: eszip::EszipV2,
root_dir_url: EszipRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
@ -468,13 +415,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
let archive_data = std::fs::read(binary_path)?;
let temp_dir = tempfile::TempDir::new()?;
let base_binary_path = unpack_into_dir(
"denort",
&binary_name,
archive_data,
target.contains("windows"),
&temp_dir,
)?;
let base_binary_path = archive::unpack_into_dir(archive::UnpackArgs {
exe_name: "denort",
archive_name: &binary_name,
archive_data: &archive_data,
is_windows: target.contains("windows"),
dest_path: temp_dir.path(),
})?;
let base_binary = std::fs::read(base_binary_path)?;
drop(temp_dir); // delete the temp dir
Ok(base_binary)
@ -493,7 +440,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
self
.http_client_provider
.get_or_create()?
.download_with_progress(download_url, None, &progress)
.download_with_progress(download_url.parse()?, None, &progress)
.await?
};
let bytes = match maybe_bytes {
@ -516,7 +463,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
#[allow(clippy::too_many_arguments)]
fn write_standalone_binary(
&self,
writer: &mut impl Write,
writer: File,
original_bin: Vec<u8>,
mut eszip: eszip::EszipV2,
root_dir_url: EszipRelativeFileBaseUrl<'_>,
@ -570,20 +517,29 @@ impl<'a> DenoCompileBinaryWriter<'a> {
Some(root_dir),
files,
Some(NodeModules::Byonm {
root_node_modules_dir: root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(
// will always be set for byonm
resolver.root_node_modules_path().unwrap(),
)
.unwrap(),
)
.into_owned(),
root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(node_modules_dir)
.unwrap(),
)
.into_owned()
},
),
}),
)
}
};
let env_vars_from_env_file = match cli_options.env_file_name() {
Some(env_filename) => {
log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename);
get_file_env_vars(env_filename.to_string())?
}
None => Default::default(),
};
let metadata = Metadata {
argv: compile_flags.args.clone(),
seed: cli_options.seed(),
@ -596,6 +552,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
log_level: cli_options.log_level(),
ca_stores: cli_options.ca_stores().clone(),
ca_data,
env_vars_from_env_file,
entrypoint_key: root_dir_url.specifier_key(entrypoint).into_owned(),
workspace_resolver: SerializedWorkspaceResolver {
import_map: self.workspace_resolver.maybe_import_map().map(|i| {
@ -609,6 +566,16 @@ impl<'a> DenoCompileBinaryWriter<'a> {
json: i.to_json(),
}
}),
jsr_pkgs: self
.workspace_resolver
.jsr_packages()
.map(|pkg| SerializedResolverWorkspaceJsrPackage {
relative_base: root_dir_url.specifier_key(&pkg.base).into_owned(),
name: pkg.name.clone(),
version: pkg.version.clone(),
exports: pkg.exports.clone(),
})
.collect(),
package_jsons: self
.workspace_resolver
.package_jsons()
@ -642,6 +609,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
eszip,
npm_vfs.as_ref(),
&npm_files,
compile_flags,
)
}
@ -721,18 +689,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
InnerCliNpmResolverRef::Byonm(_) => {
maybe_warn_different_system(&self.npm_system_info);
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
for pkg_json in cli_options.workspace.package_jsons() {
for pkg_json in cli_options.workspace().package_jsons() {
builder.add_file_at_path(&pkg_json.path)?;
}
// traverse and add all the node_modules directories in the workspace
let mut pending_dirs = VecDeque::new();
pending_dirs.push_back(
cli_options
.workspace
.root_folder()
.0
.to_file_path()
.unwrap(),
cli_options.workspace().root_dir().to_file_path().unwrap(),
);
while let Some(pending_dir) = pending_dirs.pop_front() {
let entries = fs::read_dir(&pending_dir).with_context(|| {
@ -757,6 +720,21 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}
}
/// This function returns the environment variables specified
/// in the passed environment file.
fn get_file_env_vars(
filename: String,
) -> Result<HashMap<String, String>, dotenvy::Error> {
let mut file_env_vars = HashMap::new();
for item in dotenvy::from_filename_iter(filename)? {
let Ok((key, val)) = item else {
continue; // this failure will be warned about on load
};
file_env_vars.insert(key, val);
}
Ok(file_env_vars)
}
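A usage sketch, assuming the function above is in scope and an env file named ".env" exists containing a line like API_KEY=abc123; as in the function, malformed lines are skipped rather than treated as errors:
fn main() -> Result<(), dotenvy::Error> {
  let vars = get_file_env_vars(".env".to_string())?;
  // prints Some("abc123") if the assumed .env file is present
  println!("{:?}", vars.get("API_KEY"));
  Ok(())
}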
/// This function sets the subsystem field in the PE header to 2 (GUI subsystem)
/// For more information about the PE header: https://learn.microsoft.com/en-us/windows/win32/debug/pe-format
fn set_windows_binary_to_gui(bin: &mut [u8]) -> Result<(), AnyError> {

View file

@ -5,6 +5,44 @@
#![allow(dead_code)]
#![allow(unused_imports)]
use deno_ast::MediaType;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionError;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::v8_set_flags;
use deno_core::FeatureChecker;
use deno_core::ModuleLoader;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use eszip::EszipRelativeFileBaseUrl;
use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::NodeResolutionMode;
use std::borrow::Cow;
use std::rc::Rc;
use std::sync::Arc;
use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::npm_pkg_req_ref_to_binary_command;
@ -33,41 +71,6 @@ use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions;
use crate::worker::ModuleLoaderAndSourceMapGetter;
use crate::worker::ModuleLoaderFactory;
use deno_ast::MediaType;
use deno_config::package_json::PackageJsonDepValue;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionError;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::v8_set_flags;
use deno_core::FeatureChecker;
use deno_core::ModuleLoader;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use eszip::EszipRelativeFileBaseUrl;
use import_map::parse_from_json;
use std::borrow::Cow;
use std::rc::Rc;
use std::sync::Arc;
pub mod binary;
mod file_system;
@ -88,7 +91,7 @@ struct WorkspaceEszipModule {
struct WorkspaceEszip {
eszip: eszip::EszipV2,
root_dir_url: ModuleSpecifier,
root_dir_url: Arc<ModuleSpecifier>,
}
impl WorkspaceEszip {
@ -129,6 +132,8 @@ struct EmbeddedModuleLoader {
dynamic_permissions: PermissionsContainer,
}
pub const MODULE_NOT_FOUND: &str = "Module not found";
impl ModuleLoader for EmbeddedModuleLoader {
fn resolve(
&self,
@ -151,21 +156,39 @@ impl ModuleLoader for EmbeddedModuleLoader {
})?
};
if let Some(result) = self.shared.node_resolver.resolve_if_in_npm_package(
specifier,
&referrer,
NodeResolutionMode::Execution,
) {
return match result? {
Some(res) => Ok(res.into_url()),
None => Err(generic_error("not found")),
};
if self.shared.node_resolver.in_npm_package(&referrer) {
return Ok(
self
.shared
.node_resolver
.resolve(specifier, &referrer, NodeResolutionMode::Execution)?
.into_url(),
);
}
let mapped_resolution =
self.shared.workspace_resolver.resolve(specifier, &referrer);
match mapped_resolution {
Ok(MappedResolution::WorkspaceJsrPackage { specifier, .. }) => {
Ok(specifier)
}
Ok(MappedResolution::WorkspaceNpmPackage {
target_pkg_json: pkg_json,
sub_path,
..
}) => Ok(
self
.shared
.node_resolver
.resolve_package_sub_path_from_deno_module(
pkg_json.dir_path(),
sub_path.as_deref(),
Some(&referrer),
NodeResolutionMode::Execution,
)?
.into_url(),
),
Ok(MappedResolution::PackageJson {
dep_result,
sub_path,
@ -197,7 +220,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
.resolve_package_sub_path_from_deno_module(
pkg_folder,
sub_path.as_deref(),
&referrer,
Some(&referrer),
NodeResolutionMode::Execution,
)?
.into_url(),
@ -226,22 +249,23 @@ impl ModuleLoader for EmbeddedModuleLoader {
}
}
self
.shared
.node_resolver
.handle_if_in_node_modules(specifier)
Ok(
self
.shared
.node_resolver
.handle_if_in_node_modules(&specifier)?
.unwrap_or(specifier),
)
}
Err(err)
if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
{
// todo(dsherret): return a better error from node resolution so that
// we can more easily tell whether to surface it or not
let node_result = self.shared.node_resolver.resolve(
let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg(
specifier,
&referrer,
NodeResolutionMode::Execution,
);
if let Ok(Some(res)) = node_result {
)?;
if let Some(res) = maybe_res {
return Ok(res.into_url());
}
Err(err.into())
@ -250,6 +274,19 @@ impl ModuleLoader for EmbeddedModuleLoader {
}
}
fn get_host_defined_options<'s>(
&self,
scope: &mut deno_core::v8::HandleScope<'s>,
name: &str,
) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> {
let name = deno_core::ModuleSpecifier::parse(name).ok()?;
if self.shared.node_resolver.in_npm_package(&name) {
Some(create_host_defined_options(scope))
} else {
None
}
}
fn load(
&self,
original_specifier: &ModuleSpecifier,
@ -305,7 +342,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
let Some(module) = self.shared.eszip.get_module(original_specifier) else {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!(
"Module not found: {}",
"{MODULE_NOT_FOUND}: {}",
original_specifier
))));
};
@ -369,7 +406,6 @@ impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
root_permissions,
dynamic_permissions,
}),
source_map_getter: None,
}
}
@ -384,7 +420,6 @@ impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
root_permissions,
dynamic_permissions,
}),
source_map_getter: None,
}
}
}
@ -427,7 +462,8 @@ pub async fn run(
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
let root_path =
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name));
let root_dir_url = ModuleSpecifier::from_directory_path(&root_path).unwrap();
let root_dir_url =
Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap());
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
let root_node_modules_path = root_path.join("node_modules");
let npm_cache_dir = NpmCacheDir::new(
@ -478,6 +514,7 @@ pub async fn run(
scopes: Default::default(),
registry_configs: Default::default(),
}),
lifecycle_scripts: Default::default(),
},
))
.await?;
@ -489,7 +526,8 @@ pub async fn run(
let vfs_root_dir_path = root_path.clone();
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
.context("Failed to load vfs.")?;
let root_node_modules_dir = vfs.root().join(root_node_modules_dir);
let root_node_modules_dir =
root_node_modules_dir.map(|p| vfs.root().join(p));
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
as Arc<dyn deno_fs::FileSystem>;
let npm_resolver = create_cli_npm_resolver(
@ -522,6 +560,7 @@ pub async fn run(
// Packages from different registries are already inlined in the ESZip,
// so no need to create actual `.npmrc` configuration.
npmrc: create_default_npmrc(),
lifecycle_scripts: Default::default(),
},
))
.await?;
@ -531,7 +570,7 @@ pub async fn run(
let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
let node_resolver = Arc::new(NodeResolver::new(
fs.clone(),
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
npm_resolver.clone().into_npm_resolver(),
));
let cjs_resolutions = Arc::new(CjsResolutionStore::default());
@ -541,7 +580,7 @@ pub async fn run(
CliCjsCodeAnalyzer::new(node_analysis_cache, fs.clone());
let node_code_translator = Arc::new(NodeCodeTranslator::new(
cjs_esm_code_analyzer,
fs.clone(),
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
node_resolver.clone(),
npm_resolver.clone().into_npm_resolver(),
));
@ -571,18 +610,30 @@ pub async fn run(
.to_file_path()
.unwrap();
let pkg_json =
deno_config::package_json::PackageJson::load_from_value(path, json);
deno_package_json::PackageJson::load_from_value(path, json);
Arc::new(pkg_json)
})
.collect();
WorkspaceResolver::new_raw(
root_dir_url.clone(),
import_map,
metadata
.workspace_resolver
.jsr_pkgs
.iter()
.map(|pkg| ResolverWorkspaceJsrPackage {
base: root_dir_url.join(&pkg.relative_base).unwrap(),
name: pkg.name.clone(),
version: pkg.version.clone(),
exports: pkg.exports.clone(),
})
.collect(),
pkg_jsons,
metadata.workspace_resolver.pkg_json_resolution,
)
};
let cli_node_resolver = Arc::new(CliNodeResolver::new(
Some(cjs_resolutions.clone()),
cjs_resolutions.clone(),
fs.clone(),
node_resolver.clone(),
npm_resolver.clone(),
@ -694,7 +745,7 @@ pub async fn run(
// Initialize v8 once from the main thread.
v8_set_flags(construct_v8_flags(&[], &metadata.v8_flags, vec![]));
deno_core::JsRuntime::init_platform(None);
deno_core::JsRuntime::init_platform(None, true);
let mut worker = worker_factory
.create_main_worker(WorkerExecutionMode::Run, main_module, permissions)

View file

@ -748,12 +748,12 @@ impl deno_io::fs::File for FileBackedVfsFile {
#[derive(Debug)]
pub struct FileBackedVfs {
file: Mutex<File>,
file: Mutex<Vec<u8>>,
fs_root: VfsRoot,
}
impl FileBackedVfs {
pub fn new(file: File, fs_root: VfsRoot) -> Self {
pub fn new(file: Vec<u8>, fs_root: VfsRoot) -> Self {
Self {
file: Mutex::new(file),
fs_root,
@ -836,11 +836,18 @@ impl FileBackedVfs {
pos: u64,
buf: &mut [u8],
) -> std::io::Result<usize> {
let mut fs_file = self.file.lock();
fs_file.seek(SeekFrom::Start(
self.fs_root.start_file_offset + file.offset + pos,
))?;
fs_file.read(buf)
let data = self.file.lock();
let start = self.fs_root.start_file_offset + file.offset + pos;
let end = start + buf.len() as u64;
if end > data.len() as u64 {
return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof,
"unexpected EOF",
));
}
buf.copy_from_slice(&data[start as usize..end as usize]);
Ok(buf.len())
}
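Unlike the previous File-backed read, which could legally return fewer bytes than requested, the in-memory version either fills the whole buffer or fails. A standalone sketch of that all-or-nothing check (offsets simplified; the backing data is made up):
fn read_at(data: &[u8], start: u64, buf: &mut [u8]) -> std::io::Result<usize> {
  let end = start + buf.len() as u64;
  if end > data.len() as u64 {
    // reject any read extending past the buffer instead of short-reading
    return Err(std::io::Error::new(
      std::io::ErrorKind::UnexpectedEof,
      "unexpected EOF",
    ));
  }
  buf.copy_from_slice(&data[start as usize..end as usize]);
  Ok(buf.len())
}
fn main() {
  let data = b"hello world";
  let mut buf = [0u8; 5];
  read_at(data, 6, &mut buf).unwrap();
  assert_eq!(&buf, b"world");
  assert!(read_at(data, 8, &mut buf).is_err());
}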
pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> {
@ -1016,12 +1023,12 @@ mod test {
file.write_all(file_data).unwrap();
}
}
let file = std::fs::File::open(&virtual_fs_file).unwrap();
let dest_path = temp_dir.path().join("dest");
let data = std::fs::read(&virtual_fs_file).unwrap();
(
dest_path.to_path_buf(),
FileBackedVfs::new(
file,
data,
VfsRoot {
dir: root_dir,
root_path: dest_path.to_path_buf(),

cli/task_runner.rs Normal file
View file

@ -0,0 +1,506 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use deno_ast::MediaType;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::future::LocalBoxFuture;
use deno_runtime::deno_node::NodeResolver;
use deno_semver::package::PackageNv;
use deno_task_shell::ExecutableCommand;
use deno_task_shell::ExecuteResult;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;
use lazy_regex::Lazy;
use regex::Regex;
use tokio::task::LocalSet;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::npm::ManagedCliNpmResolver;
pub fn get_script_with_args(script: &str, argv: &[String]) -> String {
let additional_args = argv
.iter()
// surround all the additional arguments in double quotes
// and sanitize any command substitution
.map(|a| format!("\"{}\"", a.replace('"', "\\\"").replace('$', "\\$")))
.collect::<Vec<_>>()
.join(" ");
let script = format!("{script} {additional_args}");
script.trim().to_owned()
}
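A quick sketch of the quoting behavior, assuming the function above is in scope: each extra argument is wrapped in double quotes, with embedded quotes and $ escaped so the shell treats them literally rather than as command substitution (the arguments below are made up):
fn main() {
  let argv = [
    "hello world".to_string(),
    "a\"b".to_string(),
    "$HOME".to_string(),
  ];
  let script = get_script_with_args("echo", &argv);
  assert_eq!(script, r#"echo "hello world" "a\"b" "\$HOME""#);
}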
pub struct RunTaskOptions<'a> {
pub task_name: &'a str,
pub script: &'a str,
pub cwd: &'a Path,
pub init_cwd: &'a Path,
pub env_vars: HashMap<String, String>,
pub argv: &'a [String],
pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
pub root_node_modules_dir: Option<&'a Path>,
}
pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>;
pub async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
let script = get_script_with_args(opts.script, opts.argv);
let seq_list = deno_task_shell::parser::parse(&script)
.with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
let env_vars =
prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
let local = LocalSet::new();
let future = deno_task_shell::execute(
seq_list,
env_vars,
opts.cwd,
opts.custom_commands,
);
Ok(local.run_until(future).await)
}
fn prepare_env_vars(
mut env_vars: HashMap<String, String>,
initial_cwd: &Path,
node_modules_dir: Option<&Path>,
) -> HashMap<String, String> {
const INIT_CWD_NAME: &str = "INIT_CWD";
if !env_vars.contains_key(INIT_CWD_NAME) {
// if not set, set an INIT_CWD env var that has the cwd
env_vars.insert(
INIT_CWD_NAME.to_string(),
initial_cwd.to_string_lossy().to_string(),
);
}
if let Some(node_modules_dir) = node_modules_dir {
prepend_to_path(
&mut env_vars,
node_modules_dir.join(".bin").to_string_lossy().to_string(),
);
}
env_vars
}
fn prepend_to_path(env_vars: &mut HashMap<String, String>, value: String) {
match env_vars.get_mut("PATH") {
Some(path) => {
if path.is_empty() {
*path = value;
} else {
*path =
format!("{}{}{}", value, if cfg!(windows) { ";" } else { ":" }, path);
}
}
None => {
env_vars.insert("PATH".to_string(), value);
}
}
}
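A usage sketch for the PATH handling above, assuming the function is in scope (the paths are made up); the separator is ; on Windows and : elsewhere:
fn main() {
  use std::collections::HashMap;
  let mut env_vars = HashMap::new();
  env_vars.insert("PATH".to_string(), "/usr/bin".to_string());
  prepend_to_path(&mut env_vars, "/project/node_modules/.bin".to_string());
  // on unix this yields "/project/node_modules/.bin:/usr/bin"
  assert!(env_vars["PATH"].starts_with("/project/node_modules/.bin"));
}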
pub fn real_env_vars() -> HashMap<String, String> {
std::env::vars()
.map(|(k, v)| {
if cfg!(windows) {
(k.to_uppercase(), v)
} else {
(k, v)
}
})
.collect::<HashMap<String, String>>()
}
// WARNING: Do not depend on this env var in user code. It's not stable API.
pub(crate) const USE_PKG_JSON_HIDDEN_ENV_VAR_NAME: &str =
"DENO_INTERNAL_TASK_USE_PKG_JSON";
pub struct NpmCommand;
impl ShellCommand for NpmCommand {
fn execute(
&self,
mut context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> {
if context.args.first().map(|s| s.as_str()) == Some("run")
&& context.args.len() > 2
// for now, don't run any npm scripts that have a flag because
// we don't handle stuff like `--workspaces` properly
&& !context.args.iter().any(|s| s.starts_with('-'))
{
// run with deno task instead
let mut args = Vec::with_capacity(context.args.len());
args.push("task".to_string());
args.extend(context.args.iter().skip(1).cloned());
let mut state = context.state;
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
return ExecutableCommand::new(
"deno".to_string(),
std::env::current_exe().unwrap(),
)
.execute(ShellCommandContext {
args,
state,
..context
});
}
// fallback to running the real npm command
let npm_path = match context.state.resolve_command_path("npm") {
Ok(path) => path,
Err(err) => {
let _ = context.stderr.write_line(&format!("{}", err));
return Box::pin(futures::future::ready(
ExecuteResult::from_exit_code(err.exit_code()),
));
}
};
ExecutableCommand::new("npm".to_string(), npm_path).execute(context)
}
}
pub struct NodeCommand;
impl ShellCommand for NodeCommand {
fn execute(
&self,
context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> {
// run with deno if it's a simple invocation, fall back to node
// if there are extra flags
let mut args = Vec::with_capacity(context.args.len());
if context.args.len() > 1
&& (
context.args[0].starts_with('-') // has a flag
|| !matches!(
MediaType::from_str(&context.args[0]),
MediaType::Cjs | MediaType::Mjs | MediaType::JavaScript
)
// not a script file
)
{
return ExecutableCommand::new(
"node".to_string(),
"node".to_string().into(),
)
.execute(context);
}
args.extend(["run", "-A"].into_iter().map(|s| s.to_string()));
args.extend(context.args.iter().cloned());
let mut state = context.state;
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap())
.execute(ShellCommandContext {
args,
state,
..context
})
}
}
pub struct NodeGypCommand;
impl ShellCommand for NodeGypCommand {
fn execute(
&self,
context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> {
// at the moment this shell command is just to give a warning if node-gyp is not found
// in the future, we could try to run/install node-gyp for the user with deno
if which::which("node-gyp").is_err() {
log::warn!("{}: node-gyp was used in a script, but was not listed as a dependency. Either add it as a dependency or install it globally (e.g. `npm install -g node-gyp`)", crate::colors::yellow("warning"));
}
ExecutableCommand::new(
"node-gyp".to_string(),
"node-gyp".to_string().into(),
)
.execute(context)
}
}
pub struct NpxCommand;
impl ShellCommand for NpxCommand {
fn execute(
&self,
mut context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> {
if let Some(first_arg) = context.args.first().cloned() {
if let Some(command) = context.state.resolve_custom_command(&first_arg) {
let context = ShellCommandContext {
args: context.args.iter().skip(1).cloned().collect::<Vec<_>>(),
..context
};
command.execute(context)
} else {
// can't find the command, so fallback to running the real npx command
let npx_path = match context.state.resolve_command_path("npx") {
Ok(npx) => npx,
Err(err) => {
let _ = context.stderr.write_line(&format!("{}", err));
return Box::pin(futures::future::ready(
ExecuteResult::from_exit_code(err.exit_code()),
));
}
};
ExecutableCommand::new("npx".to_string(), npx_path).execute(context)
}
} else {
let _ = context.stderr.write_line("npx: missing command");
Box::pin(futures::future::ready(ExecuteResult::from_exit_code(1)))
}
}
}
#[derive(Clone)]
struct NpmPackageBinCommand {
name: String,
npm_package: PackageNv,
}
impl ShellCommand for NpmPackageBinCommand {
fn execute(
&self,
context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> {
let mut args = vec![
"run".to_string(),
"-A".to_string(),
if self.npm_package.name == self.name {
format!("npm:{}", self.npm_package)
} else {
format!("npm:{}/{}", self.npm_package, self.name)
},
];
args.extend(context.args);
let executable_command = deno_task_shell::ExecutableCommand::new(
"deno".to_string(),
std::env::current_exe().unwrap(),
);
executable_command.execute(ShellCommandContext { args, ..context })
}
}
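// Illustrative sketch (not part of this commit): which npm: specifier the
// command above builds. For package cowsay@1.6.0, the bin "cowsay" maps to
// "npm:cowsay@1.6.0", while a secondary bin "cowthink" maps to
// "npm:cowsay@1.6.0/cowthink".
#[allow(dead_code)]
fn sketch_bin_specifier(pkg: &PackageNv, bin_name: &str) -> String {
  if pkg.name == bin_name {
    format!("npm:{}", pkg)
  } else {
    format!("npm:{}/{}", pkg, bin_name)
  }
}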
/// Runs a module in the node_modules folder.
#[derive(Clone)]
pub struct NodeModulesFileRunCommand {
pub command_name: String,
pub path: PathBuf,
}
impl ShellCommand for NodeModulesFileRunCommand {
fn execute(
&self,
mut context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> {
let mut args = vec![
"run".to_string(),
"--ext=js".to_string(),
"-A".to_string(),
self.path.to_string_lossy().to_string(),
];
args.extend(context.args);
let executable_command = deno_task_shell::ExecutableCommand::new(
"deno".to_string(),
std::env::current_exe().unwrap(),
);
// set this environment variable so that the launched process knows the npm command name
context
.state
.apply_env_var("DENO_INTERNAL_NPM_CMD_NAME", &self.command_name);
executable_command.execute(ShellCommandContext { args, ..context })
}
}
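// Illustrative sketch (not part of this commit): the argv the command above
// hands to the current deno executable, before the caller's own args are
// appended. `--ext=js` is needed because .bin entrypoints have no extension.
#[allow(dead_code)]
fn sketch_node_modules_run_argv(script_path: &Path) -> Vec<String> {
  vec![
    "run".to_string(),
    "--ext=js".to_string(),
    "-A".to_string(),
    script_path.to_string_lossy().to_string(),
  ]
}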
pub fn resolve_custom_commands(
npm_resolver: &dyn CliNpmResolver,
node_resolver: &NodeResolver,
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
let mut commands = match npm_resolver.as_inner() {
InnerCliNpmResolverRef::Byonm(npm_resolver) => {
let node_modules_dir = npm_resolver.root_node_modules_path().unwrap();
resolve_npm_commands_from_bin_dir(node_modules_dir)
}
InnerCliNpmResolverRef::Managed(npm_resolver) => {
resolve_managed_npm_commands(npm_resolver, node_resolver)?
}
};
commands.insert("npm".to_string(), Rc::new(NpmCommand));
Ok(commands)
}
pub fn resolve_npm_commands_from_bin_dir(
node_modules_dir: &Path,
) -> HashMap<String, Rc<dyn ShellCommand>> {
let mut result = HashMap::<String, Rc<dyn ShellCommand>>::new();
let bin_dir = node_modules_dir.join(".bin");
log::debug!("Resolving commands in '{}'.", bin_dir.display());
match std::fs::read_dir(&bin_dir) {
Ok(entries) => {
for entry in entries {
let Ok(entry) = entry else {
continue;
};
if let Some(command) = resolve_bin_dir_entry_command(entry) {
result.insert(command.command_name.clone(), Rc::new(command));
}
}
}
Err(err) => {
log::debug!("Failed read_dir for '{}': {:#}", bin_dir.display(), err);
}
}
result
}
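// Illustrative usage sketch (not part of this commit): enumerating the
// commands a project-local node_modules/.bin directory makes available.
#[allow(dead_code)]
fn sketch_list_bin_commands() {
  let commands = resolve_npm_commands_from_bin_dir(Path::new("node_modules"));
  for name in commands.keys() {
    log::debug!("npx-style command available: {name}");
  }
}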
fn resolve_bin_dir_entry_command(
entry: std::fs::DirEntry,
) -> Option<NodeModulesFileRunCommand> {
if entry.path().extension().is_some() {
return None; // only look at files without extensions (even on Windows)
}
let file_type = entry.file_type().ok()?;
let path = if file_type.is_file() {
entry.path()
} else if file_type.is_symlink() {
entry.path().canonicalize().ok()?
} else {
return None;
};
let text = std::fs::read_to_string(&path).ok()?;
let command_name = entry.file_name().to_string_lossy().to_string();
if let Some(path) = resolve_execution_path_from_npx_shim(path, &text) {
log::debug!(
"Resolved npx command '{}' to '{}'.",
command_name,
path.display()
);
Some(NodeModulesFileRunCommand { command_name, path })
} else {
log::debug!("Failed resolving npx command '{}'.", command_name);
None
}
}
/// Parsing the shim text like this is not ideal, but it works well enough:
/// it allows us to bypass the shebang and execute the script directly with Deno.
fn resolve_execution_path_from_npx_shim(
file_path: PathBuf,
text: &str,
) -> Option<PathBuf> {
static SCRIPT_PATH_RE: Lazy<Regex> =
lazy_regex::lazy_regex!(r#""\$basedir\/([^"]+)" "\$@""#);
if text.starts_with("#!/usr/bin/env node") {
// launch this file itself because it's a JS file
Some(file_path)
} else {
// Search for...
// > "$basedir/../next/dist/bin/next" "$@"
// ...which is what it will look like on Windows
SCRIPT_PATH_RE
.captures(text)
.and_then(|c| c.get(1))
.map(|relative_path| {
file_path.parent().unwrap().join(relative_path.as_str())
})
}
}
fn resolve_managed_npm_commands(
npm_resolver: &ManagedCliNpmResolver,
node_resolver: &NodeResolver,
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
let mut result = HashMap::new();
let snapshot = npm_resolver.snapshot();
for id in snapshot.top_level_packages() {
let package_folder = npm_resolver.resolve_pkg_folder_from_pkg_id(id)?;
let bin_commands =
node_resolver.resolve_binary_commands(&package_folder)?;
for bin_command in bin_commands {
result.insert(
bin_command.to_string(),
Rc::new(NpmPackageBinCommand {
name: bin_command,
npm_package: id.nv.clone(),
}) as Rc<dyn ShellCommand>,
);
}
}
if !result.contains_key("npx") {
result.insert("npx".to_string(), Rc::new(NpxCommand));
}
Ok(result)
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_prepend_to_path() {
let mut env_vars = HashMap::new();
prepend_to_path(&mut env_vars, "/example".to_string());
assert_eq!(
env_vars,
HashMap::from([("PATH".to_string(), "/example".to_string())])
);
prepend_to_path(&mut env_vars, "/example2".to_string());
let separator = if cfg!(windows) { ";" } else { ":" };
assert_eq!(
env_vars,
HashMap::from([(
"PATH".to_string(),
format!("/example2{}/example", separator)
)])
);
env_vars.get_mut("PATH").unwrap().clear();
prepend_to_path(&mut env_vars, "/example".to_string());
assert_eq!(
env_vars,
HashMap::from([("PATH".to_string(), "/example".to_string())])
);
}
#[test]
fn test_resolve_execution_path_from_npx_shim() {
// example shim on unix
let unix_shim = r#"#!/usr/bin/env node
"use strict";
console.log('Hi!');
"#;
let path = PathBuf::from("/node_modules/.bin/example");
assert_eq!(
resolve_execution_path_from_npx_shim(path.clone(), unix_shim).unwrap(),
path
);
// example shim on windows
let windows_shim = r#"#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../example/bin/example" "$@"
else
exec node "$basedir/../example/bin/example" "$@"
fi"#;
assert_eq!(
resolve_execution_path_from_npx_shim(path.clone(), windows_shim).unwrap(),
path.parent().unwrap().join("../example/bin/example")
);
}
}

View file

@ -1,12 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::BenchFlags;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::colors;
use crate::display::write_json_to_stdout;
use crate::factory::CliFactory;
use crate::factory::CliFactoryBuilder;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops;
use crate::tools::test::format_test_error;
@ -403,14 +401,13 @@ fn has_supported_bench_path_name(path: &Path) -> bool {
}
pub async fn run_benchmarks(
flags: Flags,
flags: Arc<Flags>,
bench_flags: BenchFlags,
) -> Result<(), AnyError> {
let cli_options = CliOptions::from_flags(flags)?;
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let workspace_bench_options =
cli_options.resolve_workspace_bench_options(&bench_flags);
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
// Various bench files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
@ -464,7 +461,7 @@ pub async fn run_benchmarks(
// TODO(bartlomieju): heavy duplication of code with `cli/tools/test.rs`
pub async fn run_benchmarks_with_watch(
flags: Flags,
flags: Arc<Flags>,
bench_flags: BenchFlags,
) -> Result<(), AnyError> {
file_watcher::watch_func(
@ -480,9 +477,11 @@ pub async fn run_benchmarks_with_watch(
move |flags, watcher_communicator, changed_paths| {
let bench_flags = bench_flags.clone();
Ok(async move {
let factory = CliFactoryBuilder::new()
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
let cli_options = factory.cli_options();
let factory = CliFactory::from_flags_for_watcher(
flags,
watcher_communicator.clone(),
);
let cli_options = factory.cli_options()?;
let workspace_bench_options =
cli_options.resolve_workspace_bench_options(&bench_flags);

View file

@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_graph::Module;
@ -11,13 +12,12 @@ use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::TsConfigType;
use crate::factory::CliFactory;
use crate::factory::CliFactoryBuilder;
use crate::graph_util::error_for_any_npm_specifier;
use crate::util;
use crate::util::display;
pub async fn bundle(
flags: Flags,
flags: Arc<Flags>,
bundle_flags: BundleFlags,
) -> Result<(), AnyError> {
log::info!(
@ -35,11 +35,11 @@ pub async fn bundle(
move |flags, watcher_communicator, _changed_paths| {
let bundle_flags = bundle_flags.clone();
Ok(async move {
let factory = CliFactoryBuilder::new().build_from_flags_for_watcher(
let factory = CliFactory::from_flags_for_watcher(
flags,
watcher_communicator.clone(),
)?;
let cli_options = factory.cli_options();
);
let cli_options = factory.cli_options()?;
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
bundle_action(factory, &bundle_flags).await?;
@ -49,7 +49,7 @@ pub async fn bundle(
)
.await?;
} else {
let factory = CliFactory::from_flags(flags)?;
let factory = CliFactory::from_flags(flags);
bundle_action(factory, &bundle_flags).await?;
}
@ -60,11 +60,11 @@ async fn bundle_action(
factory: CliFactory,
bundle_flags: &BundleFlags,
) -> Result<(), AnyError> {
let cli_options = factory.cli_options();
let cli_options = factory.cli_options()?;
let module_specifier = cli_options.resolve_main_module()?;
log::debug!(">>>>> bundle START");
let module_graph_creator = factory.module_graph_creator().await?;
let cli_options = factory.cli_options();
let cli_options = factory.cli_options()?;
let graph = module_graph_creator
.create_graph_and_maybe_check(vec![module_specifier.clone()])

View file

@ -22,11 +22,11 @@ use std::sync::Arc;
use super::installer::infer_name_from_url;
pub async fn compile(
flags: Flags,
flags: Arc<Flags>,
compile_flags: CompileFlags,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let module_graph_creator = factory.module_graph_creator().await?;
let parsed_source_cache = factory.parsed_source_cache();
let binary_writer = factory.create_compile_binary_writer().await?;
@ -47,7 +47,7 @@ pub async fn compile(
log::warn!(
concat!(
"{} Sloppy imports are not supported in deno compile. ",
"The compiled executable may encouter runtime errors.",
"The compiled executable may encounter runtime errors.",
),
crate::colors::yellow("Warning"),
);
@ -77,15 +77,15 @@ pub async fn compile(
graph
};
let ts_config_for_emit =
cli_options.resolve_ts_config_for_emit(deno_config::TsConfigType::Emit)?;
let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
let (transpile_options, emit_options) =
crate::args::ts_config_to_transpile_and_emit_options(
ts_config_for_emit.ts_config,
)?;
let parser = parsed_source_cache.as_capturing_parser();
let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace.root_folder().0,
cli_options.workspace().root_dir(),
graph.specifiers().map(|(s, _)| s).chain(
cli_options
.node_modules_dir_path()
@ -102,6 +102,7 @@ pub async fn compile(
emit_options,
// make all the modules relative to the root folder
relative_file_base: Some(root_dir_url),
npm_packages: None,
})?;
log::info!(
@ -123,12 +124,13 @@ pub async fn compile(
));
let temp_path = output_path.with_file_name(temp_filename);
let mut file = std::fs::File::create(&temp_path).with_context(|| {
let file = std::fs::File::create(&temp_path).with_context(|| {
format!("Opening temporary file '{}'", temp_path.display())
})?;
let write_result = binary_writer
.write_bin(
&mut file,
file,
eszip,
root_dir_url,
&module_specifier,
@ -139,7 +141,6 @@ pub async fn compile(
.with_context(|| {
format!("Writing temporary file '{}'", temp_path.display())
});
drop(file);
// set it as executable
#[cfg(unix)]

View file

@ -32,6 +32,7 @@ use std::io::BufWriter;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use text_lines::TextLines;
use uuid::Uuid;
@ -473,17 +474,17 @@ fn filter_coverages(
}
pub async fn cover_files(
flags: Flags,
flags: Arc<Flags>,
coverage_flags: CoverageFlags,
) -> Result<(), AnyError> {
if coverage_flags.files.include.is_empty() {
return Err(generic_error("No matching coverage profiles found"));
}
let factory = CliFactory::from_flags(flags)?;
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let npm_resolver = factory.npm_resolver().await?;
let file_fetcher = factory.file_fetcher()?;
let cli_options = factory.cli_options();
let emitter = factory.emitter()?;
assert!(!coverage_flags.files.include.is_empty());

View file

@ -29,6 +29,7 @@ use doc::DocDiagnostic;
use indexmap::IndexMap;
use std::collections::BTreeMap;
use std::rc::Rc;
use std::sync::Arc;
async fn generate_doc_nodes_for_builtin_types(
doc_flags: DocFlags,
@ -58,7 +59,6 @@ async fn generate_doc_nodes_for_builtin_types(
imports: Vec::new(),
is_dynamic: false,
passthrough_jsr_specifiers: false,
workspace_members: &[],
executor: Default::default(),
file_system: &NullFileSystem,
jsr_url_provider: Default::default(),
@ -83,9 +83,12 @@ async fn generate_doc_nodes_for_builtin_types(
Ok(IndexMap::from([(source_file_specifier, nodes)]))
}
pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
pub async fn doc(
flags: Arc<Flags>,
doc_flags: DocFlags,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let module_info_cache = factory.module_info_cache()?;
let parsed_source_cache = factory.parsed_source_cache();
let capturing_parser = parsed_source_cache.as_capturing_parser();
@ -102,7 +105,7 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
}
DocSourceFileFlag::Paths(ref source_files) => {
let module_graph_creator = factory.module_graph_creator().await?;
let maybe_lockfile = factory.maybe_lockfile();
let maybe_lockfile = cli_options.maybe_lockfile();
let module_specifiers = collect_specifiers(
FilePatterns {
@ -174,10 +177,10 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
.into_iter()
.map(|node| deno_doc::html::DocNodeWithContext {
origin: short_path.clone(),
ns_qualifiers: Rc::new(vec![]),
ns_qualifiers: Rc::new([]),
kind_with_drilldown:
deno_doc::html::DocNodeKindWithDrilldown::Other(node.kind),
inner: std::sync::Arc::new(node),
deno_doc::html::DocNodeKindWithDrilldown::Other(node.kind()),
inner: Rc::new(node),
drilldown_parent_kind: None,
parent: None,
})
@ -187,32 +190,31 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
Default::default()
};
let rewrite_map = if let Some(config_file) =
cli_options.workspace.resolve_start_ctx().maybe_deno_json()
{
let config = config_file.to_exports_config()?;
let rewrite_map =
if let Some(config_file) = cli_options.start_dir.maybe_deno_json() {
let config = config_file.to_exports_config()?;
let rewrite_map = config
.clone()
.into_map()
.into_keys()
.map(|key| {
Ok((
config.get_resolved(&key)?.unwrap(),
key
.strip_prefix('.')
.unwrap_or(&key)
.strip_prefix('/')
.unwrap_or(&key)
.to_owned(),
))
})
.collect::<Result<IndexMap<_, _>, AnyError>>()?;
let rewrite_map = config
.clone()
.into_map()
.into_keys()
.map(|key| {
Ok((
config.get_resolved(&key)?.unwrap(),
key
.strip_prefix('.')
.unwrap_or(&key)
.strip_prefix('/')
.unwrap_or(&key)
.to_owned(),
))
})
.collect::<Result<IndexMap<_, _>, AnyError>>()?;
Some(rewrite_map)
} else {
None
};
Some(rewrite_map)
} else {
None
};
generate_docs_directory(
doc_nodes_by_url,
@ -297,7 +299,36 @@ impl deno_doc::html::HrefResolver for DocResolver {
}
fn resolve_source(&self, location: &deno_doc::Location) -> Option<String> {
Some(location.filename.clone())
Some(location.filename.to_string())
}
fn resolve_external_jsdoc_module(
&self,
module: &str,
_symbol: Option<&str>,
) -> Option<(String, String)> {
if let Ok(url) = deno_core::url::Url::parse(module) {
match url.scheme() {
"npm" => {
let res =
deno_semver::npm::NpmPackageReqReference::from_str(module).ok()?;
let name = &res.req().name;
Some((
format!("https://www.npmjs.com/package/{name}"),
name.to_owned(),
))
}
"jsr" => {
let res =
deno_semver::jsr::JsrPackageReqReference::from_str(module).ok()?;
let name = &res.req().name;
Some((format!("https://jsr.io/{name}"), name.to_owned()))
}
_ => None,
}
} else {
None
}
}
}
@ -341,6 +372,14 @@ impl deno_doc::html::HrefResolver for DenoDocResolver {
fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> {
None
}
fn resolve_external_jsdoc_module(
&self,
_module: &str,
_symbol: Option<&str>,
) -> Option<(String, String)> {
None
}
}
struct NodeDocResolver(bool);
@ -385,6 +424,14 @@ impl deno_doc::html::HrefResolver for NodeDocResolver {
fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> {
None
}
fn resolve_external_jsdoc_module(
&self,
_module: &str,
_symbol: Option<&str>,
) -> Option<(String, String)> {
None
}
}
fn generate_docs_directory(
@ -489,9 +536,9 @@ fn print_docs_to_stdout(
doc_flags: DocFlags,
mut doc_nodes: Vec<deno_doc::DocNode>,
) -> Result<(), AnyError> {
doc_nodes.retain(|doc_node| doc_node.kind != doc::DocNodeKind::Import);
doc_nodes.retain(|doc_node| doc_node.kind() != doc::DocNodeKind::Import);
let details = if let Some(filter) = doc_flags.filter {
let nodes = doc::find_nodes_by_name_recursively(doc_nodes, filter.clone());
let nodes = doc::find_nodes_by_name_recursively(doc_nodes, &filter);
if nodes.is_empty() {
bail!("Node {} was not found!", filter);
}

View file

@ -13,6 +13,7 @@ use crate::args::FmtFlags;
use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap;
use crate::args::UnstableFmtOptions;
use crate::cache::Caches;
use crate::colors;
use crate::factory::CliFactory;
@ -49,12 +50,20 @@ use std::sync::Arc;
use crate::cache::IncrementalCache;
/// Format JavaScript/TypeScript files.
pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
pub async fn format(
flags: Arc<Flags>,
fmt_flags: FmtFlags,
) -> Result<(), AnyError> {
if fmt_flags.is_stdin() {
let cli_options = CliOptions::from_flags(flags)?;
let start_ctx = cli_options.workspace.resolve_start_ctx();
let fmt_options =
cli_options.resolve_fmt_options(&fmt_flags, &start_ctx)?;
let start_dir = &cli_options.start_dir;
let fmt_config = start_dir
.to_fmt_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
let fmt_options = FmtOptions::resolve(
fmt_config,
cli_options.resolve_config_unstable_fmt_options(),
&fmt_flags,
);
return format_stdin(
&fmt_flags,
fmt_options,
@ -73,8 +82,8 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
move |flags, watcher_communicator, changed_paths| {
let fmt_flags = fmt_flags.clone();
Ok(async move {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let caches = factory.caches()?;
let mut paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
@ -118,9 +127,9 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
)
.await?;
} else {
let factory = CliFactory::from_flags(flags)?;
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let caches = factory.caches()?;
let cli_options = factory.cli_options();
let paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
format_files(caches, &fmt_flags, paths_with_options_batches).await?;
@ -143,7 +152,7 @@ fn resolve_paths_with_options_batches(
cli_options.resolve_fmt_options_for_members(fmt_flags)?;
let mut paths_with_options_batches =
Vec::with_capacity(members_fmt_options.len());
for member_fmt_options in members_fmt_options {
for (_ctx, member_fmt_options) in members_fmt_options {
let files =
collect_fmt_files(cli_options, member_fmt_options.files.clone())?;
if !files.is_empty() {
@ -180,11 +189,16 @@ async fn format_files(
let paths = paths_with_options.paths;
let incremental_cache = Arc::new(IncrementalCache::new(
caches.fmt_incremental_cache_db(),
&fmt_options.options,
&(&fmt_options.options, &fmt_options.unstable), // cache key
&paths,
));
formatter
.handle_files(paths, fmt_options.options, incremental_cache.clone())
.handle_files(
paths,
fmt_options.options,
fmt_options.unstable,
incremental_cache.clone(),
)
.await?;
incremental_cache.wait_completion().await;
}
@ -208,6 +222,7 @@ fn collect_fmt_files(
fn format_markdown(
file_text: &str,
fmt_options: &FmtOptionsConfig,
unstable_options: &UnstableFmtOptions,
) -> Result<Option<String>, AnyError> {
let markdown_config = get_resolved_markdown_config(fmt_options);
dprint_plugin_markdown::format_text(
@ -229,6 +244,12 @@ fn format_markdown(
| "typescript"
| "json"
| "jsonc"
| "css"
| "scss"
| "sass"
| "less"
| "yml"
| "yaml"
) {
// It's important to tell dprint the proper file extension, otherwise
// it might parse the file twice.
@ -240,19 +261,41 @@ fn format_markdown(
let fake_filename =
PathBuf::from(format!("deno_fmt_stdin.{extension}"));
if matches!(extension, "json" | "jsonc") {
let mut json_config = get_resolved_json_config(fmt_options);
json_config.line_width = line_width;
dprint_plugin_json::format_text(&fake_filename, text, &json_config)
} else {
let mut codeblock_config =
get_resolved_typescript_config(fmt_options);
codeblock_config.line_width = line_width;
dprint_plugin_typescript::format_text(
&fake_filename,
text.to_string(),
&codeblock_config,
)
match extension {
"json" | "jsonc" => {
let mut json_config = get_resolved_json_config(fmt_options);
json_config.line_width = line_width;
dprint_plugin_json::format_text(&fake_filename, text, &json_config)
}
"css" | "scss" | "sass" | "less" => {
if unstable_options.css {
format_css(&fake_filename, text, fmt_options)
} else {
Ok(None)
}
}
"yml" | "yaml" => {
if unstable_options.yaml {
pretty_yaml::format_text(
text,
&get_resolved_yaml_config(fmt_options),
)
.map(Some)
.map_err(AnyError::from)
} else {
Ok(None)
}
}
_ => {
let mut codeblock_config =
get_resolved_typescript_config(fmt_options);
codeblock_config.line_width = line_width;
dprint_plugin_typescript::format_text(
&fake_filename,
text.to_string(),
&codeblock_config,
)
}
}
} else {
Ok(None)
@ -273,23 +316,57 @@ pub fn format_json(
dprint_plugin_json::format_text(file_path, file_text, &config)
}
pub fn format_css(
file_path: &Path,
file_text: &str,
fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
malva::format_text(
file_text,
malva::detect_syntax(file_path).unwrap_or(malva::Syntax::Css),
&get_resolved_malva_config(fmt_options),
)
.map(Some)
.map_err(AnyError::from)
}
/// Formats a single TS, TSX, JS, JSX, JSONC, JSON, MD, IPYNB, CSS, or YAML file.
pub fn format_file(
file_path: &Path,
file_text: &str,
fmt_options: &FmtOptionsConfig,
unstable_options: &UnstableFmtOptions,
) -> Result<Option<String>, AnyError> {
let ext = get_extension(file_path).unwrap_or_default();
match ext.as_str() {
"md" | "mkd" | "mkdn" | "mdwn" | "mdown" | "markdown" => {
format_markdown(file_text, fmt_options)
format_markdown(file_text, fmt_options, unstable_options)
}
"json" | "jsonc" => format_json(file_path, file_text, fmt_options),
"css" | "scss" | "sass" | "less" => {
if unstable_options.css {
format_css(file_path, file_text, fmt_options)
} else {
Ok(None)
}
}
"yml" | "yaml" => {
if unstable_options.yaml {
pretty_yaml::format_text(
file_text,
&get_resolved_yaml_config(fmt_options),
)
.map(Some)
.map_err(AnyError::from)
} else {
Ok(None)
}
}
"ipynb" => dprint_plugin_jupyter::format_text(
file_text,
|file_path: &Path, file_text: String| {
format_file(file_path, &file_text, fmt_options)
format_file(file_path, &file_text, fmt_options, unstable_options)
},
),
_ => {
@ -319,6 +396,7 @@ trait Formatter {
&self,
paths: Vec<PathBuf>,
fmt_options: FmtOptionsConfig,
unstable_options: UnstableFmtOptions,
incremental_cache: Arc<IncrementalCache>,
) -> Result<(), AnyError>;
@ -337,6 +415,7 @@ impl Formatter for CheckFormatter {
&self,
paths: Vec<PathBuf>,
fmt_options: FmtOptionsConfig,
unstable_options: UnstableFmtOptions,
incremental_cache: Arc<IncrementalCache>,
) -> Result<(), AnyError> {
// prevent threads outputting at the same time
@ -354,7 +433,12 @@ impl Formatter for CheckFormatter {
return Ok(());
}
match format_file(&file_path, &file_text, &fmt_options) {
match format_file(
&file_path,
&file_text,
&fmt_options,
&unstable_options,
) {
Ok(Some(formatted_text)) => {
not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
let _g = output_lock.lock();
@ -429,6 +513,7 @@ impl Formatter for RealFormatter {
&self,
paths: Vec<PathBuf>,
fmt_options: FmtOptionsConfig,
unstable_options: UnstableFmtOptions,
incremental_cache: Arc<IncrementalCache>,
) -> Result<(), AnyError> {
let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
@ -448,8 +533,9 @@ impl Formatter for RealFormatter {
match format_ensure_stable(
&file_path,
&file_contents.text,
&fmt_options,
format_file,
|file_path, file_text| {
format_file(file_path, file_text, &fmt_options, &unstable_options)
},
) {
Ok(Some(formatted_text)) => {
incremental_cache.update_file(&file_path, &formatted_text);
@ -506,20 +592,15 @@ impl Formatter for RealFormatter {
fn format_ensure_stable(
file_path: &Path,
file_text: &str,
fmt_options: &FmtOptionsConfig,
fmt_func: impl Fn(
&Path,
&str,
&FmtOptionsConfig,
) -> Result<Option<String>, AnyError>,
fmt_func: impl Fn(&Path, &str) -> Result<Option<String>, AnyError>,
) -> Result<Option<String>, AnyError> {
let formatted_text = fmt_func(file_path, file_text, fmt_options)?;
let formatted_text = fmt_func(file_path, file_text)?;
match formatted_text {
Some(mut current_text) => {
let mut count = 0;
loop {
match fmt_func(file_path, &current_text, fmt_options) {
match fmt_func(file_path, &current_text) {
Ok(Some(next_pass_text)) => {
// just in case
if next_pass_text == current_text {
@ -574,7 +655,12 @@ fn format_stdin(
bail!("Failed to read from stdin");
}
let file_path = PathBuf::from(format!("_stdin.{ext}"));
let formatted_text = format_file(&file_path, &source, &fmt_options.options)?;
let formatted_text = format_file(
&file_path,
&source,
&fmt_options.options,
&fmt_options.unstable,
)?;
if fmt_flags.check {
#[allow(clippy::print_stdout)]
if formatted_text.is_some() {
@ -683,6 +769,93 @@ fn get_resolved_json_config(
builder.build()
}
fn get_resolved_malva_config(
options: &FmtOptionsConfig,
) -> malva::config::FormatOptions {
use malva::config::*;
let layout_options = LayoutOptions {
print_width: options.line_width.unwrap_or(80) as usize,
use_tabs: options.use_tabs.unwrap_or_default(),
indent_width: options.indent_width.unwrap_or(2) as usize,
line_break: LineBreak::Lf,
};
let language_options = LanguageOptions {
hex_case: HexCase::Lower,
hex_color_length: None,
quotes: if let Some(true) = options.single_quote {
Quotes::PreferSingle
} else {
Quotes::PreferDouble
},
operator_linebreak: OperatorLineBreak::Before,
block_selector_linebreak: BlockSelectorLineBreak::Consistent,
omit_number_leading_zero: false,
trailing_comma: true,
format_comments: false,
linebreak_in_pseudo_parens: true,
declaration_order: None,
single_line_block_threshold: None,
keyframe_selector_notation: None,
attr_value_quotes: AttrValueQuotes::Always,
prefer_single_line: false,
selectors_prefer_single_line: None,
function_args_prefer_single_line: None,
sass_content_at_rule_prefer_single_line: None,
sass_include_at_rule_prefer_single_line: None,
sass_map_prefer_single_line: None,
sass_module_config_prefer_single_line: None,
sass_params_prefer_single_line: None,
less_import_options_prefer_single_line: None,
less_mixin_args_prefer_single_line: None,
less_mixin_params_prefer_single_line: None,
top_level_declarations_prefer_single_line: None,
selector_override_comment_directive: "deno-fmt-selector-override".into(),
ignore_comment_directive: "deno-fmt-ignore".into(),
};
FormatOptions {
layout: layout_options,
language: language_options,
}
}
fn get_resolved_yaml_config(
options: &FmtOptionsConfig,
) -> pretty_yaml::config::FormatOptions {
use pretty_yaml::config::*;
let layout_options = LayoutOptions {
print_width: options.line_width.unwrap_or(80) as usize,
use_tabs: options.use_tabs.unwrap_or_default(),
indent_width: options.indent_width.unwrap_or(2) as usize,
line_break: LineBreak::Lf,
};
let language_options = LanguageOptions {
quotes: if let Some(true) = options.single_quote {
Quotes::PreferSingle
} else {
Quotes::PreferDouble
},
trailing_comma: true,
format_comments: false,
indent_block_sequence_in_map: true,
brace_spacing: true,
bracket_spacing: false,
dash_spacing: DashSpacing::OneSpace,
trim_trailing_whitespaces: true,
trim_trailing_zero: false,
ignore_comment_directive: "deno-fmt-ignore".into(),
};
FormatOptions {
layout: layout_options,
language: language_options,
}
}
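// Illustrative usage sketch (not part of this commit): consuming the resolved
// config; this mirrors the "yml" | "yaml" arm of format_file above.
#[allow(dead_code)]
fn sketch_format_yaml(
  text: &str,
  fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
  pretty_yaml::format_text(text, &get_resolved_yaml_config(fmt_options))
    .map(Some)
    .map_err(AnyError::from)
}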
struct FileContents {
text: String,
had_bom: bool,
@ -775,12 +948,18 @@ fn is_supported_ext_fmt(path: &Path) -> bool {
| "mts"
| "json"
| "jsonc"
| "css"
| "scss"
| "sass"
| "less"
| "md"
| "mkd"
| "mkdn"
| "mdwn"
| "mdown"
| "markdown"
| "yml"
| "yaml"
| "ipynb"
)
})
@ -815,29 +994,35 @@ mod test {
assert!(is_supported_ext_fmt(Path::new("foo.JSONC")));
assert!(is_supported_ext_fmt(Path::new("foo.json")));
assert!(is_supported_ext_fmt(Path::new("foo.JsON")));
assert!(is_supported_ext_fmt(Path::new("foo.css")));
assert!(is_supported_ext_fmt(Path::new("foo.Css")));
assert!(is_supported_ext_fmt(Path::new("foo.scss")));
assert!(is_supported_ext_fmt(Path::new("foo.SCSS")));
assert!(is_supported_ext_fmt(Path::new("foo.sass")));
assert!(is_supported_ext_fmt(Path::new("foo.Sass")));
assert!(is_supported_ext_fmt(Path::new("foo.less")));
assert!(is_supported_ext_fmt(Path::new("foo.LeSS")));
assert!(is_supported_ext_fmt(Path::new("foo.yml")));
assert!(is_supported_ext_fmt(Path::new("foo.Yml")));
assert!(is_supported_ext_fmt(Path::new("foo.yaml")));
assert!(is_supported_ext_fmt(Path::new("foo.YaML")));
assert!(is_supported_ext_fmt(Path::new("foo.ipynb")));
}
#[test]
#[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")]
fn test_format_ensure_stable_unstable_format() {
format_ensure_stable(
&PathBuf::from("mod.ts"),
"1",
&Default::default(),
|_, file_text, _| Ok(Some(format!("1{file_text}"))),
)
format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
Ok(Some(format!("1{file_text}")))
})
.unwrap();
}
#[test]
fn test_format_ensure_stable_error_first() {
let err = format_ensure_stable(
&PathBuf::from("mod.ts"),
"1",
&Default::default(),
|_, _, _| bail!("Error formatting."),
)
let err = format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, _| {
bail!("Error formatting.")
})
.unwrap_err();
assert_eq!(err.to_string(), "Error formatting.");
@ -846,28 +1031,20 @@ mod test {
#[test]
#[should_panic(expected = "Formatting succeeded initially, but failed when")]
fn test_format_ensure_stable_error_second() {
format_ensure_stable(
&PathBuf::from("mod.ts"),
"1",
&Default::default(),
|_, file_text, _| {
if file_text == "1" {
Ok(Some("11".to_string()))
} else {
bail!("Error formatting.")
}
},
)
format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
if file_text == "1" {
Ok(Some("11".to_string()))
} else {
bail!("Error formatting.")
}
})
.unwrap();
}
#[test]
fn test_format_stable_after_two() {
let result = format_ensure_stable(
&PathBuf::from("mod.ts"),
"1",
&Default::default(),
|_, file_text, _| {
let result =
format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
if file_text == "1" {
Ok(Some("11".to_string()))
} else if file_text == "11" {
@ -875,9 +1052,8 @@ mod test {
} else {
unreachable!();
}
},
)
.unwrap();
})
.unwrap();
assert_eq!(result, Some("11".to_string()));
}
@ -891,6 +1067,7 @@ mod test {
single_quote: Some(true),
..Default::default()
},
&UnstableFmtOptions::default(),
)
.unwrap()
.unwrap();

View file

@ -4,6 +4,7 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt;
use std::fmt::Write;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
@ -34,14 +35,17 @@ use crate::npm::CliNpmResolver;
use crate::npm::ManagedCliNpmResolver;
use crate::util::checksum;
pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
pub async fn info(
flags: Arc<Flags>,
info_flags: InfoFlags,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
if let Some(specifier) = info_flags.file {
let module_graph_builder = factory.module_graph_builder().await?;
let module_graph_creator = factory.module_graph_creator().await?;
let npm_resolver = factory.npm_resolver().await?;
let maybe_lockfile = factory.maybe_lockfile();
let maybe_lockfile = cli_options.maybe_lockfile();
let resolver = factory.workspace_resolver().await?;
let maybe_import_specifier =

View file

@ -4,10 +4,243 @@ use crate::args::InitFlags;
use crate::colors;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json::json;
use log::info;
use std::io::Write;
use std::path::Path;
pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
let cwd =
std::env::current_dir().context("Can't read current working directory.")?;
let dir = if let Some(dir) = &init_flags.dir {
let dir = cwd.join(dir);
std::fs::create_dir_all(&dir)?;
dir
} else {
cwd
};
if init_flags.serve {
create_file(
&dir,
"main.ts",
r#"import { type Route, route, serveDir } from "@std/http";
const routes: Route[] = [
{
pattern: new URLPattern({ pathname: "/" }),
handler: () => new Response("Home page"),
},
{
pattern: new URLPattern({ pathname: "/users/:id" }),
handler: (_req, _info, params) => new Response(params?.pathname.groups.id),
},
{
pattern: new URLPattern({ pathname: "/static/*" }),
handler: (req) => serveDir(req, { urlRoot: "./" }),
},
];
function defaultHandler(_req: Request) {
return new Response("Not found", { status: 404 });
}
const handler = route(routes, defaultHandler);
export default {
fetch(req) {
return handler(req);
},
} satisfies Deno.ServeDefaultExport;
"#,
)?;
create_file(
&dir,
"main_test.ts",
r#"import { assertEquals } from "@std/assert";
import server from "./main.ts";
Deno.test(async function serverFetch() {
const req = new Request("https://deno.land");
const res = await server.fetch(req);
assertEquals(await res.text(), "Home page");
});
Deno.test(async function serverFetchNotFound() {
const req = new Request("https://deno.land/404");
const res = await server.fetch(req);
assertEquals(res.status, 404);
});
Deno.test(async function serverFetchUsers() {
const req = new Request("https://deno.land/users/123");
const res = await server.fetch(req);
assertEquals(await res.text(), "123");
});
Deno.test(async function serverFetchStatic() {
const req = new Request("https://deno.land/static/main.ts");
const res = await server.fetch(req);
assertEquals(res.headers.get("content-type"), "text/plain;charset=UTF-8");
});
"#,
)?;
create_json_file(
&dir,
"deno.json",
&json!({
"tasks": {
"dev": "deno serve --watch -R main.ts",
},
"imports": {
"@std/assert": "jsr:@std/assert@1",
"@std/http": "jsr:@std/http@1",
}
}),
)?;
} else if init_flags.lib {
// Extract the directory name to use as the project name
let project_name = dir
.file_name()
.unwrap_or_else(|| dir.as_os_str())
.to_str()
.unwrap();
create_file(
&dir,
"mod.ts",
r#"export function add(a: number, b: number): number {
return a + b;
}
"#,
)?;
create_file(
&dir,
"mod_test.ts",
r#"import { assertEquals } from "@std/assert";
import { add } from "./mod.ts";
Deno.test(function addTest() {
assertEquals(add(2, 3), 5);
});
"#,
)?;
create_json_file(
&dir,
"deno.json",
&json!({
"name": project_name,
"version": "0.1.0",
"exports": "./mod.ts",
"tasks": {
"dev": "deno test --watch mod.ts"
},
"imports": {
"@std/assert": "jsr:@std/assert@1"
},
}),
)?;
} else {
create_file(
&dir,
"main.ts",
r#"export function add(a: number, b: number): number {
return a + b;
}
// Learn more at https://docs.deno.com/runtime/manual/examples/module_metadata#concepts
if (import.meta.main) {
console.log("Add 2 + 3 =", add(2, 3));
}
"#,
)?;
create_file(
&dir,
"main_test.ts",
r#"import { assertEquals } from "@std/assert";
import { add } from "./main.ts";
Deno.test(function addTest() {
assertEquals(add(2, 3), 5);
});
"#,
)?;
create_json_file(
&dir,
"deno.json",
&json!({
"tasks": {
"dev": "deno run --watch main.ts"
},
"imports": {
"@std/assert": "jsr:@std/assert@1"
}
}),
)?;
}
info!("✅ {}", colors::green("Project initialized"));
info!("");
info!("{}", colors::gray("Run these commands to get started"));
info!("");
if let Some(dir) = init_flags.dir {
info!(" cd {}", dir);
info!("");
}
if init_flags.serve {
info!(" {}", colors::gray("# Run the server"));
info!(" deno serve -R main.ts");
info!("");
info!(
" {}",
colors::gray("# Run the server and watch for file changes")
);
info!(" deno task dev");
info!("");
info!(" {}", colors::gray("# Run the tests"));
info!(" deno -R test");
} else if init_flags.lib {
info!(" {}", colors::gray("# Run the tests"));
info!(" deno test");
info!("");
info!(
" {}",
colors::gray("# Run the tests and watch for file changes")
);
info!(" deno task dev");
info!("");
info!(" {}", colors::gray("# Publish to JSR (dry run)"));
info!(" deno publish --dry-run");
} else {
info!(" {}", colors::gray("# Run the program"));
info!(" deno run main.ts");
info!("");
info!(
" {}",
colors::gray("# Run the program and watch for file changes")
);
info!(" deno task dev");
info!("");
info!(" {}", colors::gray("# Run the tests"));
info!(" deno test");
}
Ok(())
}
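// Illustrative usage sketch (not part of this commit; assumes InitFlags
// implements Default and has the `dir`, `lib`, and `serve` fields read above):
// scaffolding a library project into ./my_lib.
#[allow(dead_code)]
fn sketch_init_lib() -> Result<(), AnyError> {
  init_project(InitFlags {
    dir: Some("my_lib".to_string()),
    lib: true,
    ..Default::default()
  })
}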
fn create_json_file(
dir: &Path,
filename: &str,
value: &deno_core::serde_json::Value,
) -> Result<(), AnyError> {
let mut text = deno_core::serde_json::to_string_pretty(value)?;
text.push('\n');
create_file(dir, filename, &text)
}
fn create_file(
dir: &Path,
filename: &str,
@ -30,46 +263,3 @@ fn create_file(
Ok(())
}
}
pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
let cwd =
std::env::current_dir().context("Can't read current working directory.")?;
let dir = if let Some(dir) = &init_flags.dir {
let dir = cwd.join(dir);
std::fs::create_dir_all(&dir)?;
dir
} else {
cwd
};
let main_ts = include_str!("./templates/main.ts");
create_file(&dir, "main.ts", main_ts)?;
create_file(
&dir,
"main_test.ts",
include_str!("./templates/main_test.ts"),
)?;
create_file(&dir, "deno.json", include_str!("./templates/deno.json"))?;
info!("✅ {}", colors::green("Project initialized"));
info!("");
info!("{}", colors::gray("Run these commands to get started"));
info!("");
if let Some(dir) = init_flags.dir {
info!(" cd {}", dir);
info!("");
}
info!(" {}", colors::gray("# Run the program"));
info!(" deno run main.ts");
info!("");
info!(
" {}",
colors::gray("# Run the program and watch for file changes")
);
info!(" deno task dev");
info!("");
info!(" {}", colors::gray("# Run the tests"));
info!(" deno test");
Ok(())
}

View file

@ -1,5 +0,0 @@
{
"tasks": {
"dev": "deno run --watch main.ts"
}
}

View file

@ -1,8 +0,0 @@
export function add(a: number, b: number): number {
return a + b;
}
// Learn more at https://deno.land/manual/examples/module_metadata#concepts
if (import.meta.main) {
console.log("Add 2 + 3 =", add(2, 3));
}

View file

@ -1,6 +0,0 @@
import { assertEquals } from "jsr:@std/assert";
import { add } from "./main.ts";
Deno.test(function addTest() {
assertEquals(add(2, 3), 5);
});

View file

@ -3,6 +3,7 @@
use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;
use crate::args::InstallFlagsGlobal;
@ -14,7 +15,6 @@ use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use deno_config::ConfigFlag;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
@ -35,6 +35,7 @@ use std::path::PathBuf;
#[cfg(not(windows))]
use std::os::unix::fs::PermissionsExt;
use std::sync::Arc;
static EXEC_NAME_RE: Lazy<Regex> = Lazy::new(|| {
RegexBuilder::new(r"^[a-z0-9][\w-]*$")
@ -261,17 +262,22 @@ pub fn uninstall(uninstall_flags: UninstallFlags) -> Result<(), AnyError> {
}
async fn install_local(
flags: Flags,
flags: Arc<Flags>,
maybe_add_flags: Option<AddFlags>,
) -> Result<(), AnyError> {
if let Some(add_flags) = maybe_add_flags {
return super::registry::add(flags, add_flags).await;
return super::registry::add(
flags,
add_flags,
super::registry::AddCommandName::Install,
)
.await;
}
let factory = CliFactory::from_flags(flags)?;
let factory = CliFactory::from_flags(flags);
crate::module_loader::load_top_level_deps(&factory).await?;
if let Some(lockfile) = factory.cli_options().maybe_lockfile() {
if let Some(lockfile) = factory.cli_options()?.maybe_lockfile() {
lockfile.write_if_changed()?;
}
@ -279,15 +285,15 @@ async fn install_local(
}
pub async fn install_command(
flags: Flags,
flags: Arc<Flags>,
install_flags: InstallFlags,
) -> Result<(), AnyError> {
if !install_flags.global {
log::warn!("⚠️ `deno install` behavior will change in Deno 2. To preserve the current behavior use the `-g` or `--global` flag.");
}
match install_flags.kind {
InstallKind::Global(global_flags) => {
if !install_flags.global {
log::warn!("⚠️ `deno install` behavior will change in Deno 2. To preserve the current behavior use the `-g` or `--global` flag.");
}
install_global(flags, global_flags).await
}
InstallKind::Local(maybe_add_flags) => {
@ -297,11 +303,11 @@ pub async fn install_command(
}
async fn install_global(
flags: Flags,
flags: Arc<Flags>,
install_flags_global: InstallFlagsGlobal,
) -> Result<(), AnyError> {
// ensure the module is cached
let factory = CliFactory::from_flags(flags.clone())?;
let factory = CliFactory::from_flags(flags.clone());
factory
.main_module_graph_container()
.await?
@ -310,16 +316,16 @@ async fn install_global(
let http_client = factory.http_client_provider();
// create the install shim
create_install_shim(http_client, flags, install_flags_global).await
create_install_shim(http_client, &flags, install_flags_global).await
}
async fn create_install_shim(
http_client_provider: &HttpClientProvider,
flags: Flags,
flags: &Flags,
install_flags_global: InstallFlagsGlobal,
) -> Result<(), AnyError> {
let shim_data =
resolve_shim_data(http_client_provider, &flags, &install_flags_global)
resolve_shim_data(http_client_provider, flags, &install_flags_global)
.await?;
// ensure directory exists
@ -571,11 +577,11 @@ fn is_in_path(dir: &Path) -> bool {
mod tests {
use super::*;
use crate::args::ConfigFlag;
use crate::args::PermissionFlags;
use crate::args::UninstallFlagsGlobal;
use crate::args::UnstableConfig;
use crate::util::fs::canonicalize_path;
use deno_config::ConfigFlag;
use std::process::Command;
use test_util::testdata_path;
use test_util::TempDir;
@ -778,7 +784,7 @@ mod tests {
create_install_shim(
&HttpClientProvider::new(None, None),
Flags {
&Flags {
unstable_config: UnstableConfig {
legacy_flag_enabled: true,
..Default::default()
@ -1173,7 +1179,7 @@ mod tests {
create_install_shim(
&HttpClientProvider::new(None, None),
Flags::default(),
&Flags::default(),
InstallFlagsGlobal {
module_url: local_module_str.to_string(),
args: vec![],
@ -1203,7 +1209,7 @@ mod tests {
create_install_shim(
&HttpClientProvider::new(None, None),
Flags::default(),
&Flags::default(),
InstallFlagsGlobal {
module_url: "http://localhost:4545/echo_server.ts".to_string(),
args: vec![],
@ -1224,7 +1230,7 @@ mod tests {
// No force. Install failed.
let no_force_result = create_install_shim(
&HttpClientProvider::new(None, None),
Flags::default(),
&Flags::default(),
InstallFlagsGlobal {
module_url: "http://localhost:4545/cat.ts".to_string(), // using a different URL
args: vec![],
@ -1246,7 +1252,7 @@ mod tests {
// Force. Install success.
let force_result = create_install_shim(
&HttpClientProvider::new(None, None),
Flags::default(),
&Flags::default(),
InstallFlagsGlobal {
module_url: "http://localhost:4545/cat.ts".to_string(), // using a different URL
args: vec![],
@ -1274,7 +1280,7 @@ mod tests {
let result = create_install_shim(
&HttpClientProvider::new(None, None),
Flags {
&Flags {
config_flag: ConfigFlag::Path(config_file_path.to_string()),
..Flags::default()
},
@ -1307,7 +1313,7 @@ mod tests {
create_install_shim(
&HttpClientProvider::new(None, None),
Flags::default(),
&Flags::default(),
InstallFlagsGlobal {
module_url: "http://localhost:4545/echo_server.ts".to_string(),
args: vec!["\"".to_string()],
@ -1348,7 +1354,7 @@ mod tests {
create_install_shim(
&HttpClientProvider::new(None, None),
Flags::default(),
&Flags::default(),
InstallFlagsGlobal {
module_url: local_module_str.to_string(),
args: vec![],
@ -1390,7 +1396,7 @@ mod tests {
let result = create_install_shim(
&HttpClientProvider::new(None, None),
Flags {
&Flags {
import_map_path: Some(import_map_path.to_string()),
..Flags::default()
},
@ -1436,7 +1442,7 @@ mod tests {
let result = create_install_shim(
&HttpClientProvider::new(None, None),
Flags::default(),
&Flags::default(),
InstallFlagsGlobal {
module_url: file_module_string.to_string(),
args: vec![],

View file

@ -1,5 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::sync::Arc;
use crate::args::Flags;
use crate::args::JupyterFlags;
use crate::cdp;
@ -36,7 +38,7 @@ mod install;
pub mod server;
pub async fn kernel(
flags: Flags,
flags: Arc<Flags>,
jupyter_flags: JupyterFlags,
) -> Result<(), AnyError> {
log::info!(
@ -56,8 +58,8 @@ pub async fn kernel(
let connection_filepath = jupyter_flags.conn_file.unwrap();
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let main_module =
resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd())
.unwrap();

View file

@ -21,6 +21,7 @@ use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::CancelFuture;
use deno_core::CancelHandle;
use jupyter_runtime::ExecutionCount;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
@ -34,11 +35,12 @@ use jupyter_runtime::KernelShellConnection;
use jupyter_runtime::ReplyError;
use jupyter_runtime::ReplyStatus;
use jupyter_runtime::StreamContent;
use uuid::Uuid;
use super::JupyterReplProxy;
pub struct JupyterServer {
execution_count: usize,
execution_count: ExecutionCount,
last_execution_request: Arc<Mutex<Option<JupyterMessage>>>,
iopub_connection: Arc<Mutex<KernelIoPubConnection>>,
repl_session_proxy: JupyterReplProxy,
@ -64,16 +66,22 @@ impl JupyterServer {
repl_session_proxy: JupyterReplProxy,
setup_tx: oneshot::Sender<StartupData>,
) -> Result<(), AnyError> {
let session_id = Uuid::new_v4().to_string();
let mut heartbeat =
connection_info.create_kernel_heartbeat_connection().await?;
let shell_connection =
connection_info.create_kernel_shell_connection().await?;
let control_connection =
connection_info.create_kernel_control_connection().await?;
let mut stdin_connection =
connection_info.create_kernel_stdin_connection().await?;
let iopub_connection =
connection_info.create_kernel_iopub_connection().await?;
let shell_connection = connection_info
.create_kernel_shell_connection(&session_id)
.await?;
let control_connection = connection_info
.create_kernel_control_connection(&session_id)
.await?;
let mut stdin_connection = connection_info
.create_kernel_stdin_connection(&session_id)
.await?;
let iopub_connection = connection_info
.create_kernel_iopub_connection(&session_id)
.await?;
let iopub_connection = Arc::new(Mutex::new(iopub_connection));
let last_execution_request = Arc::new(Mutex::new(None));
@ -100,7 +108,7 @@ impl JupyterServer {
let cancel_handle = CancelHandle::new_rc();
let mut server = Self {
execution_count: 0,
execution_count: ExecutionCount::new(0),
iopub_connection: iopub_connection.clone(),
last_execution_request: last_execution_request.clone(),
repl_session_proxy,
@ -481,14 +489,14 @@ impl JupyterServer {
connection: &mut KernelShellConnection,
) -> Result<(), AnyError> {
if !execute_request.silent && execute_request.store_history {
self.execution_count += 1;
self.execution_count.increment();
}
*self.last_execution_request.lock() = Some(parent_message.clone());
self
.send_iopub(
messaging::ExecuteInput {
execution_count: self.execution_count.into(),
execution_count: self.execution_count,
code: execute_request.code.clone(),
}
.as_child_of(parent_message),
@ -516,7 +524,7 @@ impl JupyterServer {
connection
.send(
messaging::ExecuteReply {
execution_count: self.execution_count.into(),
execution_count: self.execution_count,
status: ReplyStatus::Error,
payload: Default::default(),
user_expressions: None,
@ -545,7 +553,7 @@ impl JupyterServer {
connection
.send(
messaging::ExecuteReply {
execution_count: self.execution_count.into(),
execution_count: self.execution_count,
status: ReplyStatus::Ok,
user_expressions: None,
payload: Default::default(),
@ -645,7 +653,7 @@ impl JupyterServer {
connection
.send(
messaging::ExecuteReply {
execution_count: self.execution_count.into(),
execution_count: self.execution_count,
status: ReplyStatus::Error,
error: Some(Box::new(ReplyError {
ename,
@ -667,7 +675,7 @@ impl JupyterServer {
&mut self,
message: JupyterMessage,
) -> Result<(), AnyError> {
self.iopub_connection.lock().send(message).await
self.iopub_connection.lock().send(message.clone()).await
}
}
@ -699,10 +707,10 @@ fn kernel_info() -> messaging::KernelInfoReply {
async fn publish_result(
repl_session_proxy: &mut JupyterReplProxy,
evaluate_result: &cdp::RemoteObject,
execution_count: usize,
execution_count: ExecutionCount,
) -> Result<Option<HashMap<String, serde_json::Value>>, AnyError> {
let arg0 = cdp::CallArgument {
value: Some(serde_json::Value::Number(execution_count.into())),
value: Some(execution_count.into()),
unserializable_value: None,
object_id: None,
};

cli/tools/lint/linter.rs (new file, 242 lines)
View file

@ -0,0 +1,242 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::Path;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_lint::linter::LintFileOptions;
use deno_lint::linter::Linter as DenoLintLinter;
use deno_lint::linter::LinterOptions;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::specifier_from_file_path;
use super::rules::FileOrPackageLintRule;
use super::rules::PackageLintRule;
use super::ConfiguredRules;
pub struct CliLinterOptions {
pub configured_rules: ConfiguredRules,
pub fix: bool,
pub deno_lint_config: DenoLintConfig,
}
#[derive(Debug)]
pub struct CliLinter {
fix: bool,
package_rules: Vec<Box<dyn PackageLintRule>>,
linter: DenoLintLinter,
deno_lint_config: DenoLintConfig,
}
impl CliLinter {
pub fn new(options: CliLinterOptions) -> Self {
let rules = options.configured_rules.rules;
let mut deno_lint_rules = Vec::with_capacity(rules.len());
let mut package_rules = Vec::with_capacity(rules.len());
for rule in rules {
match rule.into_file_or_pkg_rule() {
FileOrPackageLintRule::File(rule) => {
deno_lint_rules.push(rule);
}
FileOrPackageLintRule::Package(rule) => {
package_rules.push(rule);
}
}
}
Self {
fix: options.fix,
package_rules,
linter: DenoLintLinter::new(LinterOptions {
rules: deno_lint_rules,
all_rule_codes: options.configured_rules.all_rule_codes,
custom_ignore_file_directive: None,
custom_ignore_diagnostic_directive: None,
}),
deno_lint_config: options.deno_lint_config,
}
}
pub fn has_package_rules(&self) -> bool {
!self.package_rules.is_empty()
}
pub fn lint_package(
&self,
graph: &ModuleGraph,
entrypoints: &[ModuleSpecifier],
) -> Vec<LintDiagnostic> {
let mut diagnostics = Vec::new();
for rule in &self.package_rules {
diagnostics.extend(rule.lint_package(graph, entrypoints));
}
diagnostics
}
pub fn lint_with_ast(
&self,
parsed_source: &ParsedSource,
) -> Vec<LintDiagnostic> {
self
.linter
.lint_with_ast(parsed_source, self.deno_lint_config.clone())
}
pub fn lint_file(
&self,
file_path: &Path,
source_code: String,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
let specifier = specifier_from_file_path(file_path)?;
let media_type = MediaType::from_specifier(&specifier);
if self.fix {
self.lint_file_and_fix(&specifier, media_type, source_code, file_path)
} else {
self
.linter
.lint_file(LintFileOptions {
specifier,
media_type,
source_code,
config: self.deno_lint_config.clone(),
})
.map_err(AnyError::from)
}
}
fn lint_file_and_fix(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_code: String,
file_path: &Path,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), deno_core::anyhow::Error> {
// initial lint
let (source, diagnostics) = self.linter.lint_file(LintFileOptions {
specifier: specifier.clone(),
media_type,
source_code,
config: self.deno_lint_config.clone(),
})?;
// Try applying fixes repeatedly until the file has none left or
// a maximum number of iterations is reached. This is necessary
// because lint fixes may overlap and so we can't always apply
// them in one pass.
let mut source = source;
let mut diagnostics = diagnostics;
let mut fix_iterations = 0;
loop {
let change = apply_lint_fixes_and_relint(
specifier,
media_type,
&self.linter,
self.deno_lint_config.clone(),
source.text_info_lazy(),
&diagnostics,
)?;
match change {
Some(change) => {
source = change.0;
diagnostics = change.1;
}
None => {
break;
}
}
fix_iterations += 1;
if fix_iterations > 5 {
log::warn!(
concat!(
"Reached maximum number of fix iterations for '{}'. There's ",
"probably a bug in Deno. Please fix this file manually.",
),
specifier,
);
break;
}
}
if fix_iterations > 0 {
// everything looks good and the file still parses, so write it out
atomic_write_file_with_retries(
file_path,
source.text().as_ref(),
crate::cache::CACHE_PERM,
)
.context("Failed writing fix to file.")?;
}
Ok((source, diagnostics))
}
}
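// Illustrative usage sketch (not part of this commit): linting a single file
// with fixes disabled, given rules and a lint config resolved elsewhere.
#[allow(dead_code)]
fn sketch_lint_one(
  rules: ConfiguredRules,
  lint_config: DenoLintConfig,
  path: &Path,
) -> Result<usize, AnyError> {
  let linter = CliLinter::new(CliLinterOptions {
    configured_rules: rules,
    fix: false,
    deno_lint_config: lint_config,
  });
  let source = std::fs::read_to_string(path)?;
  let (_parsed, diagnostics) = linter.lint_file(path, source)?;
  Ok(diagnostics.len())
}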
fn apply_lint_fixes_and_relint(
specifier: &ModuleSpecifier,
media_type: MediaType,
linter: &DenoLintLinter,
config: DenoLintConfig,
text_info: &SourceTextInfo,
diagnostics: &[LintDiagnostic],
) -> Result<Option<(ParsedSource, Vec<LintDiagnostic>)>, AnyError> {
let Some(new_text) = apply_lint_fixes(text_info, diagnostics) else {
return Ok(None);
};
linter
.lint_file(LintFileOptions {
specifier: specifier.clone(),
source_code: new_text,
media_type,
config,
})
.map(Some)
.context(
"An applied lint fix caused a syntax error. Please report this bug.",
)
}
fn apply_lint_fixes(
text_info: &SourceTextInfo,
diagnostics: &[LintDiagnostic],
) -> Option<String> {
if diagnostics.is_empty() {
return None;
}
let file_start = text_info.range().start;
let mut quick_fixes = diagnostics
.iter()
// use the first quick fix
.filter_map(|d| d.details.fixes.first())
.flat_map(|fix| fix.changes.iter())
.map(|change| deno_ast::TextChange {
range: change.range.as_byte_range(file_start),
new_text: change.new_text.to_string(),
})
.collect::<Vec<_>>();
if quick_fixes.is_empty() {
return None;
}
// remove any overlapping text changes; we'll circle
// back in another pass to fix the remaining ones
quick_fixes.sort_by_key(|change| change.range.start);
for i in (1..quick_fixes.len()).rev() {
let cur = &quick_fixes[i];
let previous = &quick_fixes[i - 1];
let is_overlapping = cur.range.start < previous.range.end;
if is_overlapping {
quick_fixes.remove(i);
}
}
let new_text =
deno_ast::apply_text_changes(text_info.text_str(), quick_fixes);
Some(new_text)
}
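// Illustrative worked sketch (not part of this commit): apply_text_changes
// resolves non-overlapping changes in one pass; overlapping ones are dropped
// above and picked up by the next iteration of lint_file_and_fix.
#[allow(dead_code)]
fn sketch_apply_text_changes() {
  let text = "let a = 1; let b = 2;";
  let changes = vec![
    deno_ast::TextChange {
      range: 4..5, // replaces `a`
      new_text: "x".to_string(),
    },
    deno_ast::TextChange {
      range: 15..16, // replaces `b`
      new_text: "y".to_string(),
    },
  ];
  assert_eq!(
    deno_ast::apply_text_changes(text, changes),
    "let x = 1; let y = 2;"
  );
}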

File diff suppressed because it is too large

View file

@ -1,38 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::diagnostics::Diagnostic;
use deno_ast::ModuleSpecifier;
use deno_graph::FastCheckDiagnostic;
use deno_graph::ModuleGraph;
/// Collects diagnostics from the module graph for the
/// given package's export URLs.
pub fn collect_no_slow_type_diagnostics(
package_export_urls: &[ModuleSpecifier],
graph: &ModuleGraph,
) -> Vec<FastCheckDiagnostic> {
let mut js_exports = package_export_urls
.iter()
.filter_map(|url| graph.get(url).and_then(|m| m.js()));
// fast check puts the same diagnostics in each entrypoint for the
// package (since it's all or nothing), so we only need to check
  // the first JS entrypoint
let Some(module) = js_exports.next() else {
// could happen if all the exports are JSON
return vec![];
};
if let Some(diagnostics) = module.fast_check_diagnostics() {
let mut diagnostics = diagnostics.clone();
diagnostics.sort_by_cached_key(|d| {
(
d.specifier().clone(),
d.range().map(|r| r.range),
d.code().to_string(),
)
});
diagnostics
} else {
Vec::new()
}
}

252
cli/tools/lint/reporters.rs Normal file
View file

@ -0,0 +1,252 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::diagnostics::Diagnostic;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_lint::diagnostic::LintDiagnostic;
use deno_runtime::colors;
use log::info;
use serde::Serialize;
use crate::args::LintReporterKind;
use super::LintError;
pub fn create_reporter(kind: LintReporterKind) -> Box<dyn LintReporter + Send> {
match kind {
LintReporterKind::Pretty => Box::new(PrettyLintReporter::new()),
LintReporterKind::Json => Box::new(JsonLintReporter::new()),
LintReporterKind::Compact => Box::new(CompactLintReporter::new()),
}
}
pub trait LintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic);
fn visit_error(&mut self, file_path: &str, err: &AnyError);
fn close(&mut self, check_count: usize);
}
struct PrettyLintReporter {
lint_count: u32,
fixable_diagnostics: u32,
}
impl PrettyLintReporter {
fn new() -> PrettyLintReporter {
PrettyLintReporter {
lint_count: 0,
fixable_diagnostics: 0,
}
}
}
impl LintReporter for PrettyLintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
self.lint_count += 1;
if !d.details.fixes.is_empty() {
self.fixable_diagnostics += 1;
}
log::error!("{}\n", d.display());
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
log::error!("Error linting: {file_path}");
log::error!(" {err}");
}
fn close(&mut self, check_count: usize) {
let fixable_suffix = if self.fixable_diagnostics > 0 {
colors::gray(format!(" ({} fixable via --fix)", self.fixable_diagnostics))
.to_string()
} else {
"".to_string()
};
match self.lint_count {
1 => info!("Found 1 problem{}", fixable_suffix),
n if n > 1 => {
info!("Found {} problems{}", self.lint_count, fixable_suffix)
}
_ => (),
}
match check_count {
1 => info!("Checked 1 file"),
n => info!("Checked {} files", n),
}
}
}
struct CompactLintReporter {
lint_count: u32,
}
impl CompactLintReporter {
fn new() -> CompactLintReporter {
CompactLintReporter { lint_count: 0 }
}
}
impl LintReporter for CompactLintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
self.lint_count += 1;
match &d.range {
Some(range) => {
let text_info = &range.text_info;
let range = &range.range;
let line_and_column = text_info.line_and_column_display(range.start);
log::error!(
"{}: line {}, col {} - {} ({})",
d.specifier,
line_and_column.line_number,
line_and_column.column_number,
d.message(),
d.code(),
)
}
None => {
log::error!("{}: {} ({})", d.specifier, d.message(), d.code())
}
}
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
log::error!("Error linting: {file_path}");
log::error!(" {err}");
}
fn close(&mut self, check_count: usize) {
match self.lint_count {
1 => info!("Found 1 problem"),
n if n > 1 => info!("Found {} problems", self.lint_count),
_ => (),
}
match check_count {
1 => info!("Checked 1 file"),
n => info!("Checked {} files", n),
}
}
}
// WARNING: Ensure this doesn't change because it's used in the JSON output
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct JsonDiagnosticLintPosition {
/// The 1-indexed line number.
pub line: usize,
/// The 0-indexed column index.
pub col: usize,
pub byte_pos: usize,
}
impl JsonDiagnosticLintPosition {
pub fn new(byte_index: usize, loc: deno_ast::LineAndColumnIndex) -> Self {
JsonDiagnosticLintPosition {
line: loc.line_index + 1,
col: loc.column_index,
byte_pos: byte_index,
}
}
}
// WARNING: Ensure this doesn't change because it's used in the JSON output
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
struct JsonLintDiagnosticRange {
pub start: JsonDiagnosticLintPosition,
pub end: JsonDiagnosticLintPosition,
}
// WARNING: Ensure this doesn't change because it's used in the JSON output
#[derive(Clone, Serialize)]
struct JsonLintDiagnostic {
pub filename: String,
pub range: Option<JsonLintDiagnosticRange>,
pub message: String,
pub code: String,
pub hint: Option<String>,
}
#[derive(Serialize)]
struct JsonLintReporter {
diagnostics: Vec<JsonLintDiagnostic>,
errors: Vec<LintError>,
}
impl JsonLintReporter {
fn new() -> JsonLintReporter {
JsonLintReporter {
diagnostics: Vec::new(),
errors: Vec::new(),
}
}
}
impl LintReporter for JsonLintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
self.diagnostics.push(JsonLintDiagnostic {
filename: d.specifier.to_string(),
range: d.range.as_ref().map(|range| {
let text_info = &range.text_info;
let range = range.range;
JsonLintDiagnosticRange {
start: JsonDiagnosticLintPosition::new(
range.start.as_byte_index(text_info.range().start),
text_info.line_and_column_index(range.start),
),
end: JsonDiagnosticLintPosition::new(
range.end.as_byte_index(text_info.range().start),
text_info.line_and_column_index(range.end),
),
}
}),
message: d.message().to_string(),
code: d.code().to_string(),
hint: d.hint().map(|h| h.to_string()),
});
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
self.errors.push(LintError {
file_path: file_path.to_string(),
message: err.to_string(),
});
}
fn close(&mut self, _check_count: usize) {
sort_diagnostics(&mut self.diagnostics);
let json = serde_json::to_string_pretty(&self);
#[allow(clippy::print_stdout)]
{
println!("{}", json.unwrap());
}
}
}
fn sort_diagnostics(diagnostics: &mut [JsonLintDiagnostic]) {
  // Sort so that we guarantee a deterministic output, which is useful for tests
diagnostics.sort_by(|a, b| {
use std::cmp::Ordering;
let file_order = a.filename.cmp(&b.filename);
match file_order {
Ordering::Equal => match &a.range {
Some(a_range) => match &b.range {
Some(b_range) => {
let line_order = a_range.start.line.cmp(&b_range.start.line);
match line_order {
Ordering::Equal => a_range.start.col.cmp(&b_range.start.col),
_ => line_order,
}
}
None => Ordering::Less,
},
None => match &b.range {
Some(_) => Ordering::Greater,
None => Ordering::Equal,
},
},
_ => file_order,
}
});
}

296
cli/tools/lint/rules/mod.rs Normal file
View file

@ -0,0 +1,296 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::LintRulesConfig;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::rules::LintRule;
use crate::resolver::SloppyImportsResolver;
mod no_sloppy_imports;
mod no_slow_types;
// used for publishing
pub use no_slow_types::collect_no_slow_type_diagnostics;
pub trait PackageLintRule: std::fmt::Debug + Send + Sync {
fn code(&self) -> &'static str;
fn tags(&self) -> &'static [&'static str] {
&[]
}
fn docs(&self) -> &'static str;
fn help_docs_url(&self) -> Cow<'static, str>;
fn lint_package(
&self,
graph: &ModuleGraph,
entrypoints: &[ModuleSpecifier],
) -> Vec<LintDiagnostic>;
}
pub(super) trait ExtendedLintRule: LintRule {
  /// Whether the rule supports the incremental cache.
fn supports_incremental_cache(&self) -> bool;
fn help_docs_url(&self) -> Cow<'static, str>;
fn into_base(self: Box<Self>) -> Box<dyn LintRule>;
}
pub enum FileOrPackageLintRule {
File(Box<dyn LintRule>),
Package(Box<dyn PackageLintRule>),
}
#[derive(Debug)]
enum CliLintRuleKind {
DenoLint(Box<dyn LintRule>),
Extended(Box<dyn ExtendedLintRule>),
Package(Box<dyn PackageLintRule>),
}
#[derive(Debug)]
pub struct CliLintRule(CliLintRuleKind);
impl CliLintRule {
pub fn code(&self) -> &'static str {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(rule) => rule.code(),
Extended(rule) => rule.code(),
Package(rule) => rule.code(),
}
}
pub fn tags(&self) -> &'static [&'static str] {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(rule) => rule.tags(),
Extended(rule) => rule.tags(),
Package(rule) => rule.tags(),
}
}
pub fn docs(&self) -> &'static str {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(rule) => rule.docs(),
Extended(rule) => rule.docs(),
Package(rule) => rule.docs(),
}
}
pub fn help_docs_url(&self) -> Cow<'static, str> {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(rule) => {
Cow::Owned(format!("https://lint.deno.land/rules/{}", rule.code()))
}
Extended(rule) => rule.help_docs_url(),
Package(rule) => rule.help_docs_url(),
}
}
pub fn supports_incremental_cache(&self) -> bool {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(_) => true,
Extended(rule) => rule.supports_incremental_cache(),
// graph rules don't go through the incremental cache, so allow it
Package(_) => true,
}
}
pub fn into_file_or_pkg_rule(self) -> FileOrPackageLintRule {
use CliLintRuleKind::*;
match self.0 {
DenoLint(rule) => FileOrPackageLintRule::File(rule),
Extended(rule) => FileOrPackageLintRule::File(rule.into_base()),
Package(rule) => FileOrPackageLintRule::Package(rule),
}
}
}
#[derive(Debug)]
pub struct ConfiguredRules {
pub all_rule_codes: HashSet<&'static str>,
pub rules: Vec<CliLintRule>,
}
impl ConfiguredRules {
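  /// Returns a stable value to hash into the incremental cache key, or
  /// `None` when any configured rule doesn't support the incremental cache.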
pub fn incremental_cache_state(&self) -> Option<impl std::hash::Hash> {
if self.rules.iter().any(|r| !r.supports_incremental_cache()) {
return None;
}
// use a hash of the rule names in order to bust the cache
let mut codes = self.rules.iter().map(|r| r.code()).collect::<Vec<_>>();
// ensure this is stable by sorting it
codes.sort_unstable();
Some(codes)
}
}
pub struct LintRuleProvider {
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
workspace_resolver: Option<Arc<WorkspaceResolver>>,
}
impl LintRuleProvider {
pub fn new(
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
workspace_resolver: Option<Arc<WorkspaceResolver>>,
) -> Self {
Self {
sloppy_imports_resolver,
workspace_resolver,
}
}
pub fn resolve_lint_rules_err_empty(
&self,
rules: LintRulesConfig,
maybe_config_file: Option<&ConfigFile>,
) -> Result<ConfiguredRules, AnyError> {
let lint_rules = self.resolve_lint_rules(rules, maybe_config_file);
if lint_rules.rules.is_empty() {
bail!("No rules have been configured")
}
Ok(lint_rules)
}
pub fn resolve_lint_rules(
&self,
rules: LintRulesConfig,
maybe_config_file: Option<&ConfigFile>,
) -> ConfiguredRules {
let deno_lint_rules = deno_lint::rules::get_all_rules();
let cli_lint_rules = vec![CliLintRule(CliLintRuleKind::Extended(
Box::new(no_sloppy_imports::NoSloppyImportsRule::new(
self.sloppy_imports_resolver.clone(),
self.workspace_resolver.clone(),
)),
))];
let cli_graph_rules = vec![CliLintRule(CliLintRuleKind::Package(
Box::new(no_slow_types::NoSlowTypesRule),
))];
let mut all_rule_names = HashSet::with_capacity(
deno_lint_rules.len() + cli_lint_rules.len() + cli_graph_rules.len(),
);
let all_rules = deno_lint_rules
.into_iter()
.map(|rule| CliLintRule(CliLintRuleKind::DenoLint(rule)))
.chain(cli_lint_rules)
.chain(cli_graph_rules)
.inspect(|rule| {
all_rule_names.insert(rule.code());
});
let rules = filtered_rules(
all_rules,
rules
.tags
.or_else(|| Some(get_default_tags(maybe_config_file))),
rules.exclude,
rules.include,
);
ConfiguredRules {
rules,
all_rule_codes: all_rule_names,
}
}
}
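/// The default tags: "recommended" always, plus "jsr" when the config
/// file defines a package.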
fn get_default_tags(maybe_config_file: Option<&ConfigFile>) -> Vec<String> {
let mut tags = Vec::with_capacity(2);
tags.push("recommended".to_string());
if maybe_config_file.map(|c| c.is_package()).unwrap_or(false) {
tags.push("jsr".to_string());
}
tags
}
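/// Filters the rules by tag, then applies the explicit includes and
/// excludes (an exclude always wins), returning the result sorted by
/// rule code.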
fn filtered_rules(
all_rules: impl Iterator<Item = CliLintRule>,
maybe_tags: Option<Vec<String>>,
maybe_exclude: Option<Vec<String>>,
maybe_include: Option<Vec<String>>,
) -> Vec<CliLintRule> {
let tags_set =
maybe_tags.map(|tags| tags.into_iter().collect::<HashSet<_>>());
let mut rules = all_rules
.filter(|rule| {
let mut passes = if let Some(tags_set) = &tags_set {
rule
.tags()
.iter()
.any(|t| tags_set.contains(&t.to_string()))
} else {
true
};
if let Some(includes) = &maybe_include {
if includes.contains(&rule.code().to_owned()) {
passes |= true;
}
}
if let Some(excludes) = &maybe_exclude {
if excludes.contains(&rule.code().to_owned()) {
passes &= false;
}
}
passes
})
.collect::<Vec<_>>();
rules.sort_by_key(|r| r.code());
rules
}
#[cfg(test)]
mod test {
use super::*;
use crate::args::LintRulesConfig;
#[test]
fn recommended_rules_when_no_tags_in_config() {
let rules_config = LintRulesConfig {
exclude: Some(vec!["no-debugger".to_string()]),
include: None,
tags: None,
};
let rules_provider = LintRuleProvider::new(None, None);
let rules = rules_provider.resolve_lint_rules(rules_config, None);
let mut rule_names = rules
.rules
.into_iter()
.map(|r| r.code().to_string())
.collect::<Vec<_>>();
rule_names.sort();
let mut recommended_rule_names = rules_provider
.resolve_lint_rules(Default::default(), None)
.rules
.into_iter()
.filter(|r| r.tags().iter().any(|t| *t == "recommended"))
.map(|r| r.code().to_string())
.filter(|n| n != "no-debugger")
.collect::<Vec<_>>();
recommended_rule_names.sort();
assert_eq!(rule_names, recommended_rule_names);
}
}

View file

@ -0,0 +1,20 @@
Enforces specifying explicit references to paths in module specifiers.
Non-explicit specifiers are ambiguous and require probing for the correct file
path on every run, which has a performance overhead.
Note: This lint rule is only active when using `--unstable-sloppy-imports`.
### Invalid:
```typescript
import { add } from "./math/add";
import { ConsoleLogger } from "./loggers";
```
### Valid:
```typescript
import { add } from "./math/add.ts";
import { ConsoleLogger } from "./loggers/index.ts";
```

View file

@ -0,0 +1,218 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;
use deno_ast::SourceRange;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::Range;
use deno_lint::diagnostic::LintDiagnosticDetails;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_lint::diagnostic::LintFix;
use deno_lint::diagnostic::LintFixChange;
use deno_lint::rules::LintRule;
use text_lines::LineAndColumnIndex;
use crate::graph_util::CliJsrUrlProvider;
use crate::resolver::SloppyImportsResolution;
use crate::resolver::SloppyImportsResolver;
use super::ExtendedLintRule;
#[derive(Debug)]
pub struct NoSloppyImportsRule {
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
  // `None` to make printing out the lint rules easy
workspace_resolver: Option<Arc<WorkspaceResolver>>,
}
impl NoSloppyImportsRule {
pub fn new(
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
workspace_resolver: Option<Arc<WorkspaceResolver>>,
) -> Self {
NoSloppyImportsRule {
sloppy_imports_resolver,
workspace_resolver,
}
}
}
const CODE: &str = "no-sloppy-imports";
const DOCS_URL: &str = "https://docs.deno.com/runtime/manual/tools/unstable_flags/#--unstable-sloppy-imports";
impl ExtendedLintRule for NoSloppyImportsRule {
fn supports_incremental_cache(&self) -> bool {
// only allow the incremental cache when we don't
// do sloppy import resolution because sloppy import
// resolution requires knowing about the surrounding files
// in addition to the current one
self.sloppy_imports_resolver.is_none() || self.workspace_resolver.is_none()
}
fn help_docs_url(&self) -> Cow<'static, str> {
Cow::Borrowed(DOCS_URL)
}
fn into_base(self: Box<Self>) -> Box<dyn LintRule> {
self
}
}
impl LintRule for NoSloppyImportsRule {
fn lint_program_with_ast_view<'view>(
&self,
context: &mut deno_lint::context::Context<'view>,
_program: deno_lint::Program<'view>,
) {
let Some(workspace_resolver) = &self.workspace_resolver else {
return;
};
let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver else {
return;
};
if context.specifier().scheme() != "file" {
return;
}
let resolver = SloppyImportCaptureResolver {
workspace_resolver,
sloppy_imports_resolver,
captures: Default::default(),
};
deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::All,
specifier: context.specifier().clone(),
maybe_headers: None,
parsed_source: context.parsed_source(),
// ignore resolving dynamic imports like import(`./dir/${something}`)
file_system: &deno_graph::source::NullFileSystem,
jsr_url_provider: &CliJsrUrlProvider,
maybe_resolver: Some(&resolver),
// don't bother resolving npm specifiers
maybe_npm_resolver: None,
});
for (range, sloppy_import) in resolver.captures.borrow_mut().drain() {
let start_range =
context.text_info().loc_to_source_pos(LineAndColumnIndex {
line_index: range.start.line,
column_index: range.start.character,
});
let end_range =
context.text_info().loc_to_source_pos(LineAndColumnIndex {
line_index: range.end.line,
column_index: range.end.character,
});
let source_range = SourceRange::new(start_range, end_range);
context.add_diagnostic_details(
Some(LintDiagnosticRange {
range: source_range,
description: None,
text_info: context.text_info().clone(),
}),
LintDiagnosticDetails {
message: "Sloppy imports are not allowed.".to_string(),
code: CODE.to_string(),
custom_docs_url: Some(DOCS_URL.to_string()),
fixes: context
.specifier()
.make_relative(sloppy_import.as_specifier())
.map(|relative| {
vec![LintFix {
description: Cow::Owned(sloppy_import.as_quick_fix_message()),
changes: vec![LintFixChange {
new_text: Cow::Owned({
let relative = if relative.starts_with("../") {
relative
} else {
format!("./{}", relative)
};
let current_text =
context.text_info().range_text(&source_range);
if current_text.starts_with('"') {
format!("\"{}\"", relative)
} else if current_text.starts_with('\'') {
format!("'{}'", relative)
} else {
relative
}
}),
range: source_range,
}],
}]
})
.unwrap_or_default(),
hint: None,
info: vec![],
},
);
}
}
fn code(&self) -> &'static str {
CODE
}
fn docs(&self) -> &'static str {
include_str!("no_sloppy_imports.md")
}
fn tags(&self) -> &'static [&'static str] {
&["recommended"]
}
}
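/// Resolver that delegates to the workspace resolver while recording
/// every sloppy import resolution it performs so the rule can report
/// them afterwards.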
#[derive(Debug)]
struct SloppyImportCaptureResolver<'a> {
workspace_resolver: &'a WorkspaceResolver,
sloppy_imports_resolver: &'a SloppyImportsResolver,
captures: RefCell<HashMap<Range, SloppyImportsResolution>>,
}
impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
fn resolve(
&self,
specifier_text: &str,
referrer_range: &Range,
mode: ResolutionMode,
) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> {
let resolution = self
.workspace_resolver
.resolve(specifier_text, &referrer_range.specifier)
.map_err(|err| ResolveError::Other(err.into()))?;
match resolution {
deno_config::workspace::MappedResolution::Normal(specifier)
| deno_config::workspace::MappedResolution::ImportMap(specifier) => {
match self.sloppy_imports_resolver.resolve(&specifier, mode) {
Some(res) => {
self
.captures
.borrow_mut()
.entry(referrer_range.clone())
.or_insert_with(|| res.clone());
Ok(res.into_specifier())
}
None => Ok(specifier),
}
}
deno_config::workspace::MappedResolution::WorkspaceJsrPackage {
..
}
| deno_config::workspace::MappedResolution::WorkspaceNpmPackage {
..
}
| deno_config::workspace::MappedResolution::PackageJson { .. } => {
// this error is ignored
Err(ResolveError::Other(anyhow!("")))
}
}
}
}

View file

@ -0,0 +1,3 @@
Enforces using types that are explicit or can be simply inferred.
Read more: https://jsr.io/docs/about-slow-types
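
For illustration, a minimal sketch of the kind of code this rule flags,
assuming (as the linked docs describe) that exported functions need
explicit return types; `createId` is a hypothetical example:

### Invalid:

```typescript
// the return type must be inferred from the function body,
// making this export a "slow type"
export function createId() {
  return crypto.randomUUID();
}
```

### Valid:

```typescript
export function createId(): string {
  return crypto.randomUUID();
}
```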

View file

@ -0,0 +1,98 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use deno_ast::diagnostics::Diagnostic;
use deno_ast::ModuleSpecifier;
use deno_graph::FastCheckDiagnostic;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::diagnostic::LintDiagnosticDetails;
use deno_lint::diagnostic::LintDiagnosticRange;
use super::PackageLintRule;
const CODE: &str = "no-slow-types";
#[derive(Debug)]
pub struct NoSlowTypesRule;
impl PackageLintRule for NoSlowTypesRule {
fn code(&self) -> &'static str {
CODE
}
fn tags(&self) -> &'static [&'static str] {
&["jsr"]
}
fn docs(&self) -> &'static str {
include_str!("no_slow_types.md")
}
fn help_docs_url(&self) -> Cow<'static, str> {
Cow::Borrowed("https://jsr.io/docs/about-slow-types")
}
fn lint_package(
&self,
graph: &ModuleGraph,
entrypoints: &[ModuleSpecifier],
) -> Vec<LintDiagnostic> {
collect_no_slow_type_diagnostics(graph, entrypoints)
.into_iter()
.map(|d| LintDiagnostic {
specifier: d.specifier().clone(),
range: d.range().map(|range| LintDiagnosticRange {
text_info: range.text_info.clone(),
range: range.range,
description: d.range_description().map(|r| r.to_string()),
}),
details: LintDiagnosticDetails {
message: d.message().to_string(),
code: CODE.to_string(),
hint: d.hint().map(|h| h.to_string()),
info: d
.info()
.iter()
.map(|info| Cow::Owned(info.to_string()))
.collect(),
fixes: vec![],
custom_docs_url: d.docs_url().map(|u| u.into_owned()),
},
})
.collect()
}
}
/// Collects diagnostics from the module graph for the
/// given package's export URLs.
pub fn collect_no_slow_type_diagnostics(
graph: &ModuleGraph,
package_export_urls: &[ModuleSpecifier],
) -> Vec<FastCheckDiagnostic> {
let mut js_exports = package_export_urls
.iter()
.filter_map(|url| graph.get(url).and_then(|m| m.js()));
// fast check puts the same diagnostics in each entrypoint for the
// package (since it's all or nothing), so we only need to check
  // the first JS entrypoint
let Some(module) = js_exports.next() else {
// could happen if all the exports are JSON
return vec![];
};
if let Some(diagnostics) = module.fast_check_diagnostics() {
let mut diagnostics = diagnostics.clone();
diagnostics.sort_by_cached_key(|d| {
(
d.specifier().clone(),
d.range().map(|r| r.range),
d.code().to_string(),
)
});
diagnostics
} else {
Vec::new()
}
}

View file

@ -1,8 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::http_util;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fetch;
use lsp_types::Url;
use serde::de::DeserializeOwned;
@ -82,7 +83,7 @@ impl std::fmt::Debug for ApiError {
impl std::error::Error for ApiError {}
pub async fn parse_response<T: DeserializeOwned>(
response: reqwest::Response,
response: http::Response<deno_fetch::ResBody>,
) -> Result<T, ApiError> {
let status = response.status();
let x_deno_ray = response
@ -90,7 +91,7 @@ pub async fn parse_response<T: DeserializeOwned>(
.get("x-deno-ray")
.and_then(|value| value.to_str().ok())
.map(|s| s.to_string());
let text = response.text().await.unwrap();
let text = http_util::body_to_string(response).await.unwrap();
if !status.is_success() {
match serde_json::from_str::<ApiError>(&text) {
@ -122,9 +123,9 @@ pub async fn get_scope(
client: &HttpClient,
registry_api_url: &Url,
scope: &str,
) -> Result<reqwest::Response, AnyError> {
) -> Result<http::Response<deno_fetch::ResBody>, AnyError> {
let scope_url = format!("{}scopes/{}", registry_api_url, scope);
let response = client.get(&scope_url).send().await?;
let response = client.get(scope_url.parse()?)?.send().await?;
Ok(response)
}
@ -141,9 +142,9 @@ pub async fn get_package(
registry_api_url: &Url,
scope: &str,
package: &str,
) -> Result<reqwest::Response, AnyError> {
) -> Result<http::Response<deno_fetch::ResBody>, AnyError> {
let package_url = get_package_api_url(registry_api_url, scope, package);
let response = client.get(&package_url).send().await?;
let response = client.get(package_url.parse()?)?.send().await?;
Ok(response)
}

View file

@ -14,6 +14,7 @@ use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
use deno_ast::diagnostics::DiagnosticSourcePos;
use deno_ast::diagnostics::DiagnosticSourceRange;
use deno_ast::swc::common::util::take::Take;
use deno_ast::ParseDiagnostic;
use deno_ast::SourcePos;
use deno_ast::SourceRange;
use deno_ast::SourceRanged;
@ -117,6 +118,11 @@ pub enum PublishDiagnostic {
text_info: SourceTextInfo,
range: SourceRange,
},
SyntaxError(ParseDiagnostic),
MissingLicense {
/// This only exists because diagnostics require a location.
expected_path: PathBuf,
},
}
impl PublishDiagnostic {
@ -165,6 +171,8 @@ impl Diagnostic for PublishDiagnostic {
ExcludedModule { .. } => DiagnosticLevel::Error,
MissingConstraint { .. } => DiagnosticLevel::Error,
BannedTripleSlashDirectives { .. } => DiagnosticLevel::Error,
SyntaxError { .. } => DiagnosticLevel::Error,
MissingLicense { .. } => DiagnosticLevel::Error,
}
}
@ -183,6 +191,8 @@ impl Diagnostic for PublishDiagnostic {
BannedTripleSlashDirectives { .. } => {
Cow::Borrowed("banned-triple-slash-directives")
}
SyntaxError { .. } => Cow::Borrowed("syntax-error"),
MissingLicense { .. } => Cow::Borrowed("missing-license"),
}
}
@ -203,6 +213,8 @@ impl Diagnostic for PublishDiagnostic {
ExcludedModule { .. } => Cow::Borrowed("module in package's module graph was excluded from publishing"),
MissingConstraint { specifier, .. } => Cow::Owned(format!("specifier '{}' is missing a version constraint", specifier)),
BannedTripleSlashDirectives { .. } => Cow::Borrowed("triple slash directives that modify globals are not allowed"),
SyntaxError(diagnostic) => diagnostic.message(),
MissingLicense { .. } => Cow::Borrowed("missing license file"),
}
}
@ -269,6 +281,10 @@ impl Diagnostic for PublishDiagnostic {
source_pos: DiagnosticSourcePos::SourcePos(range.start),
text_info: Cow::Borrowed(text_info),
},
SyntaxError(diagnostic) => diagnostic.location(),
MissingLicense { expected_path } => DiagnosticLocation::Path {
path: expected_path.clone(),
},
}
}
@ -348,6 +364,8 @@ impl Diagnostic for PublishDiagnostic {
description: Some("the triple slash directive".into()),
}],
}),
SyntaxError(diagnostic) => diagnostic.snippet(),
MissingLicense { .. } => None,
}
}
@ -380,6 +398,10 @@ impl Diagnostic for PublishDiagnostic {
BannedTripleSlashDirectives { .. } => Some(
Cow::Borrowed("remove the triple slash directive"),
),
SyntaxError(diagnostic) => diagnostic.hint(),
MissingLicense { .. } => Some(
Cow::Borrowed("add a LICENSE file to the package and ensure it is not ignored from being published"),
),
}
}
@ -407,7 +429,17 @@ impl Diagnostic for PublishDiagnostic {
None => None,
}
}
_ => None,
SyntaxError(diagnostic) => diagnostic.snippet_fixed(),
FastCheck(_)
| SpecifierUnfurl(_)
| InvalidPath { .. }
| DuplicatePath { .. }
| UnsupportedFileType { .. }
| UnsupportedJsxTsx { .. }
| ExcludedModule { .. }
| MissingConstraint { .. }
| BannedTripleSlashDirectives { .. }
| MissingLicense { .. } => None,
}
}
@ -456,6 +488,8 @@ impl Diagnostic for PublishDiagnostic {
Cow::Borrowed("instead instruct the user of your package to specify these directives"),
Cow::Borrowed("or set their 'lib' compiler option appropriately"),
]),
SyntaxError(diagnostic) => diagnostic.info(),
MissingLicense { .. } => Cow::Borrowed(&[]),
}
}
@ -488,6 +522,10 @@ impl Diagnostic for PublishDiagnostic {
BannedTripleSlashDirectives { .. } => Some(Cow::Borrowed(
"https://jsr.io/go/banned-triple-slash-directives",
)),
SyntaxError(diagnostic) => diagnostic.docs_url(),
MissingLicense { .. } => {
Some(Cow::Borrowed("https://jsr.io/go/missing-license"))
}
}
}
}

View file

@ -147,6 +147,13 @@ impl GraphDiagnosticsCollector {
let parsed_source = self
.parsed_source_cache
.get_parsed_source_from_js_module(module)?;
// surface syntax errors
for diagnostic in parsed_source.diagnostics() {
diagnostics_collector
.push(PublishDiagnostic::SyntaxError(diagnostic.clone()));
}
check_for_banned_triple_slash_directives(
&parsed_source,
diagnostics_collector,

View file

@ -4,6 +4,7 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::io::IsTerminal;
use std::path::Path;
use std::path::PathBuf;
use std::process::Stdio;
use std::rc::Rc;
use std::sync::Arc;
@ -13,6 +14,7 @@ use base64::Engine;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::Workspace;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@ -23,8 +25,8 @@ use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_runtime::deno_fetch::reqwest;
use deno_terminal::colors;
use http_body_util::BodyExt;
use lsp_types::Url;
use serde::Deserialize;
use serde::Serialize;
@ -43,7 +45,7 @@ use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
use crate::resolver::SloppyImportsResolver;
use crate::tools::check::CheckOptions;
use crate::tools::lint::no_slow_types;
use crate::tools::lint::collect_no_slow_type_diagnostics;
use crate::tools::registry::diagnostics::PublishDiagnostic;
use crate::tools::registry::diagnostics::PublishDiagnosticsCollector;
use crate::util::display::human_size;
@ -62,6 +64,8 @@ mod unfurl;
use auth::get_auth_method;
use auth::AuthMethod;
pub use pm::add;
pub use pm::remove;
pub use pm::AddCommandName;
use publish_order::PublishOrderGraph;
use unfurl::SpecifierUnfurler;
@ -72,19 +76,19 @@ use self::paths::CollectedPublishPath;
use self::tar::PublishableTarball;
pub async fn publish(
flags: Flags,
flags: Arc<Flags>,
publish_flags: PublishFlags,
) -> Result<(), AnyError> {
let cli_factory = CliFactory::from_flags(flags)?;
let cli_factory = CliFactory::from_flags(flags);
let auth_method =
get_auth_method(publish_flags.token, publish_flags.dry_run)?;
let directory_path = cli_factory.cli_options().initial_cwd();
let cli_options = cli_factory.cli_options();
let publish_configs = cli_options.workspace.jsr_packages_for_publish();
let cli_options = cli_factory.cli_options()?;
let directory_path = cli_options.initial_cwd();
let publish_configs = cli_options.start_dir.jsr_packages_for_publish();
if publish_configs.is_empty() {
match cli_options.workspace.resolve_start_ctx().maybe_deno_json() {
match cli_options.start_dir.maybe_deno_json() {
Some(deno_json) => {
debug_assert!(!deno_json.is_package());
bail!(
@ -121,7 +125,7 @@ pub async fn publish(
cli_factory.module_graph_creator().await?.clone(),
cli_factory.parsed_source_cache().clone(),
cli_factory.type_checker().await?.clone(),
cli_factory.cli_options().clone(),
cli_options.clone(),
specifier_unfurler,
);
@ -143,9 +147,13 @@ pub async fn publish(
.ok()
.is_none()
&& !publish_flags.allow_dirty
&& check_if_git_repo_dirty(cli_options.initial_cwd()).await
{
bail!("Aborting due to uncommitted changes. Check in source code or run with --allow-dirty");
if let Some(dirty_text) =
check_if_git_repo_dirty(cli_options.initial_cwd()).await
{
log::error!("\nUncommitted changes:\n\n{}\n", dirty_text);
bail!("Aborting due to uncommitted changes. Check in source code or run with --allow-dirty");
}
}
if publish_flags.dry_run {
@ -159,7 +167,7 @@ pub async fn publish(
log::info!(" {} ({})", file.specifier, human_size(file.size as f64),);
}
}
log::warn!("{} Aborting due to --dry-run", colors::yellow("Warning"));
log::warn!("{} Dry run complete", colors::green("Success"));
return Ok(());
}
@ -306,7 +314,10 @@ impl PublishPreparer {
} else if std::env::var("DENO_INTERNAL_FAST_CHECK_OVERWRITE").as_deref()
== Ok("1")
{
if check_if_git_repo_dirty(self.cli_options.initial_cwd()).await {
if check_if_git_repo_dirty(self.cli_options.initial_cwd())
.await
.is_some()
{
bail!("When using DENO_INTERNAL_FAST_CHECK_OVERWRITE, the git repo must be in a clean state.");
}
@ -332,7 +343,7 @@ impl PublishPreparer {
for package in package_configs {
let export_urls = package.config_file.resolve_export_value_urls()?;
let diagnostics =
no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph);
collect_no_slow_type_diagnostics(&graph, &export_urls);
if !diagnostics.is_empty() {
any_pkg_had_diagnostics = true;
for diagnostic in diagnostics {
@ -430,7 +441,7 @@ impl PublishPreparer {
let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
let file_patterns = package.member_ctx.to_publish_config()?.files;
let file_patterns = package.member_dir.to_publish_config()?.files;
let tarball = deno_core::unsync::spawn_blocking({
let diagnostics_collector = diagnostics_collector.clone();
@ -441,7 +452,7 @@ impl PublishPreparer {
move || {
let root_specifier =
ModuleSpecifier::from_directory_path(&root_dir).unwrap();
let publish_paths =
let mut publish_paths =
paths::collect_publish_paths(paths::CollectPublishPathsOptions {
root_dir: &root_dir,
cli_options: &cli_options,
@ -455,8 +466,30 @@ impl PublishPreparer {
&publish_paths,
&diagnostics_collector,
);
if !has_license_file(publish_paths.iter().map(|p| &p.specifier)) {
if let Some(license_path) =
resolve_license_file(&root_dir, cli_options.workspace())
{
// force including the license file from the package or workspace root
publish_paths.push(CollectedPublishPath {
specifier: ModuleSpecifier::from_file_path(&license_path)
.unwrap(),
relative_path: "/LICENSE".to_string(),
maybe_content: Some(std::fs::read(&license_path).with_context(
|| format!("failed reading '{}'.", license_path.display()),
)?),
path: license_path,
});
} else {
diagnostics_collector.push(PublishDiagnostic::MissingLicense {
expected_path: root_dir.join("LICENSE"),
});
}
}
tar::create_gzipped_tarball(
&publish_paths,
publish_paths,
LazyGraphSourceParser::new(&source_cache, &graph),
&diagnostics_collector,
&unfurler,
@ -532,11 +565,13 @@ async fn get_auth_headers(
let challenge = BASE64_STANDARD.encode(sha2::Sha256::digest(&verifier));
let response = client
.post(format!("{}authorizations", registry_url))
.json(&serde_json::json!({
"challenge": challenge,
"permissions": permissions,
}))
.post_json(
format!("{}authorizations", registry_url).parse()?,
&serde_json::json!({
"challenge": challenge,
"permissions": permissions,
}),
)?
.send()
.await
.context("Failed to create interactive authorization")?;
@ -566,11 +601,13 @@ async fn get_auth_headers(
loop {
tokio::time::sleep(interval).await;
let response = client
.post(format!("{}authorizations/exchange", registry_url))
.json(&serde_json::json!({
"exchangeToken": auth.exchange_token,
"verifier": verifier,
}))
.post_json(
format!("{}authorizations/exchange", registry_url).parse()?,
&serde_json::json!({
"exchangeToken": auth.exchange_token,
"verifier": verifier,
}),
)?
.send()
.await
.context("Failed to exchange authorization")?;
@ -627,15 +664,20 @@ async fn get_auth_headers(
);
let response = client
.get(url)
.bearer_auth(&oidc_config.token)
.get(url.parse()?)?
.header(
http::header::AUTHORIZATION,
format!("Bearer {}", oidc_config.token).parse()?,
)
.send()
.await
.context("Failed to get OIDC token")?;
let status = response.status();
let text = response.text().await.with_context(|| {
format!("Failed to get OIDC token: status {}", status)
})?;
let text = crate::http_util::body_to_string(response)
.await
.with_context(|| {
format!("Failed to get OIDC token: status {}", status)
})?;
if !status.is_success() {
bail!(
"Failed to get OIDC token: status {}, response: '{}'",
@ -763,7 +805,7 @@ async fn ensure_scopes_and_packages_exist(
loop {
tokio::time::sleep(std::time::Duration::from_secs(3)).await;
let response = client.get(&package_api_url).send().await?;
let response = client.get(package_api_url.parse()?)?.send().await?;
if response.status() == 200 {
let name = format!("@{}/{}", package.scope, package.package);
log::info!("Package {} created", colors::green(name));
@ -887,11 +929,19 @@ async fn publish_package(
package.config
);
let body = http_body_util::Full::new(package.tarball.bytes.clone())
.map_err(|never| match never {})
.boxed();
let response = http_client
.post(url)
.header(reqwest::header::AUTHORIZATION, authorization)
.header(reqwest::header::CONTENT_ENCODING, "gzip")
.body(package.tarball.bytes.clone())
.post(url.parse()?, body)?
.header(
http::header::AUTHORIZATION,
authorization.parse().map_err(http::Error::from)?,
)
.header(
http::header::CONTENT_ENCODING,
"gzip".parse().map_err(http::Error::from)?,
)
.send()
.await?;
@ -936,7 +986,7 @@ async fn publish_package(
while task.status != "success" && task.status != "failure" {
tokio::time::sleep(interval).await;
let resp = http_client
.get(format!("{}publish_status/{}", registry_api_url, task.id))
.get(format!("{}publish_status/{}", registry_api_url, task.id).parse()?)?
.send()
.await
.with_context(|| {
@ -966,14 +1016,6 @@ async fn publish_package(
);
}
log::info!(
"{} @{}/{}@{}",
colors::green("Successfully published"),
package.scope,
package.package,
package.version
);
let enable_provenance = std::env::var("DISABLE_JSR_PROVENANCE").is_err()
&& (auth::is_gha() && auth::gha_oidc_token().is_some() && provenance);
@ -985,7 +1027,8 @@ async fn publish_package(
package.scope, package.package, package.version
))?;
let meta_bytes = http_client.get(meta_url).send().await?.bytes().await?;
let resp = http_client.get(meta_url)?.send().await?;
let meta_bytes = resp.collect().await?.to_bytes();
if std::env::var("DISABLE_JSR_MANIFEST_VERIFICATION_FOR_TESTING").is_err() {
verify_version_manifest(&meta_bytes, &package)?;
@ -1016,13 +1059,20 @@ async fn publish_package(
registry_api_url, package.scope, package.package, package.version
);
http_client
.post(provenance_url)
.header(reqwest::header::AUTHORIZATION, authorization)
.json(&json!({ "bundle": bundle }))
.post_json(provenance_url.parse()?, &json!({ "bundle": bundle }))?
.header(http::header::AUTHORIZATION, authorization.parse()?)
.send()
.await?;
}
log::info!(
"{} @{}/{}@{}",
colors::green("Successfully published"),
package.scope,
package.package,
package.version
);
log::info!(
"{}",
colors::gray(format!(
@ -1130,10 +1180,10 @@ fn verify_version_manifest(
Ok(())
}
async fn check_if_git_repo_dirty(cwd: &Path) -> bool {
async fn check_if_git_repo_dirty(cwd: &Path) -> Option<String> {
let bin_name = if cfg!(windows) { "git.exe" } else { "git" };
  // Check if git exists
let git_exists = Command::new(bin_name)
.arg("--version")
.stderr(Stdio::null())
@ -1143,7 +1193,7 @@ async fn check_if_git_repo_dirty(cwd: &Path) -> bool {
.map_or(false, |status| status.success());
if !git_exists {
return false; // Git is not installed
return None; // Git is not installed
}
// Check if there are uncommitted changes
@ -1155,7 +1205,60 @@ async fn check_if_git_repo_dirty(cwd: &Path) -> bool {
.expect("Failed to execute command");
let output_str = String::from_utf8_lossy(&output.stdout);
!output_str.trim().is_empty()
let text = output_str.trim();
if text.is_empty() {
None
} else {
Some(text.to_string())
}
}
static SUPPORTED_LICENSE_FILE_NAMES: [&str; 6] = [
"LICENSE",
"LICENSE.md",
"LICENSE.txt",
"LICENCE",
"LICENCE.md",
"LICENCE.txt",
];
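/// Looks for a license file, preferring the package root over the
/// workspace root.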
fn resolve_license_file(
pkg_root_dir: &Path,
workspace: &Workspace,
) -> Option<PathBuf> {
let workspace_root_dir = workspace.root_dir_path();
let mut dirs = Vec::with_capacity(2);
dirs.push(pkg_root_dir);
if workspace_root_dir != pkg_root_dir {
dirs.push(&workspace_root_dir);
}
for dir in dirs {
for file_name in &SUPPORTED_LICENSE_FILE_NAMES {
let file_path = dir.join(file_name);
if file_path.exists() {
return Some(file_path);
}
}
}
None
}
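/// Whether any of the collected specifiers is a license file, compared
/// case-insensitively against the supported file names.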
fn has_license_file<'a>(
mut specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
) -> bool {
let supported_license_files = SUPPORTED_LICENSE_FILE_NAMES
.iter()
.map(|s| s.to_lowercase())
.collect::<HashSet<_>>();
specifiers.any(|specifier| {
specifier
.path()
.rsplit_once('/')
.map(|(_, file)| {
supported_license_files.contains(file.to_lowercase().as_str())
})
.unwrap_or(false)
})
}
#[allow(clippy::print_stderr)]
@ -1166,6 +1269,10 @@ fn ring_bell() {
#[cfg(test)]
mod tests {
use deno_ast::ModuleSpecifier;
use crate::tools::registry::has_license_file;
use super::tar::PublishableTarball;
use super::tar::PublishableTarballFile;
use super::verify_version_manifest;
@ -1267,4 +1374,31 @@ mod tests {
assert!(verify_version_manifest(meta_bytes, &package).is_err());
}
#[test]
fn test_has_license_files() {
fn has_license_file_str(expected: &[&str]) -> bool {
let specifiers = expected
.iter()
.map(|s| ModuleSpecifier::parse(s).unwrap())
.collect::<Vec<_>>();
has_license_file(specifiers.iter())
}
assert!(has_license_file_str(&["file:///LICENSE"]));
assert!(has_license_file_str(&["file:///license"]));
assert!(has_license_file_str(&["file:///LICENSE.txt"]));
assert!(has_license_file_str(&["file:///LICENSE.md"]));
assert!(has_license_file_str(&["file:///LICENCE"]));
assert!(has_license_file_str(&["file:///LICENCE.txt"]));
assert!(has_license_file_str(&["file:///LICENCE.md"]));
assert!(has_license_file_str(&[
"file:///other",
"file:///test/LICENCE.md"
]),);
assert!(!has_license_file_str(&[
"file:///other",
"file:///test/tLICENSE"
]),);
}
}

View file

@ -214,7 +214,10 @@ pub enum PackagePathValidationError {
pub struct CollectedPublishPath {
pub specifier: ModuleSpecifier,
pub path: PathBuf,
/// Relative path to use in the tarball. This should be prefixed with a `/`.
pub relative_path: String,
  /// The content to use for injected paths instead of reading it from disk.
pub maybe_content: Option<Vec<u8>>,
}
pub struct CollectPublishPathsOptions<'a> {
@ -307,6 +310,7 @@ pub fn collect_publish_paths(
specifier,
path,
relative_path,
maybe_content: None,
});
}

View file

@ -1,11 +1,12 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::TextChange;
use deno_config::FmtOptionsConfig;
use deno_config::deno_json::FmtOptionsConfig;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
@ -23,6 +24,7 @@ use jsonc_parser::ast::Value;
use crate::args::AddFlags;
use crate::args::CacheSetting;
use crate::args::Flags;
use crate::args::RemoveFlags;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::jsr::JsrFetchResolver;
@ -49,7 +51,7 @@ impl DenoConfigFormat {
}
enum DenoOrPackageJson {
Deno(Arc<deno_config::ConfigFile>, DenoConfigFormat),
Deno(Arc<deno_config::deno_json::ConfigFile>, DenoConfigFormat),
Npm(Arc<deno_node::PackageJson>, Option<FmtOptionsConfig>),
}
@ -119,12 +121,12 @@ impl DenoOrPackageJson {
/// creates a `deno.json` file - in this case
/// we also return a new `CliFactory` that knows about
/// the new config
fn from_flags(flags: Flags) -> Result<(Self, CliFactory), AnyError> {
let factory = CliFactory::from_flags(flags.clone())?;
let options = factory.cli_options();
let start_ctx = options.workspace.resolve_start_ctx();
fn from_flags(flags: Arc<Flags>) -> Result<(Self, CliFactory), AnyError> {
let factory = CliFactory::from_flags(flags.clone());
let options = factory.cli_options()?;
let start_dir = &options.start_dir;
match (start_ctx.maybe_deno_json(), start_ctx.maybe_pkg_json()) {
match (start_dir.maybe_deno_json(), start_dir.maybe_pkg_json()) {
// when both are present, for now,
// default to deno.json
(Some(deno), Some(_) | None) => Ok((
@ -140,13 +142,14 @@ impl DenoOrPackageJson {
(None, Some(_) | None) => {
std::fs::write(options.initial_cwd().join("deno.json"), "{}\n")
.context("Failed to create deno.json file")?;
drop(factory); // drop to prevent use
log::info!("Created deno.json configuration file.");
let factory = CliFactory::from_flags(flags.clone())?;
let options = factory.cli_options().clone();
let start_ctx = options.workspace.resolve_start_ctx();
let factory = CliFactory::from_flags(flags.clone());
let options = factory.cli_options()?.clone();
let start_dir = &options.start_dir;
Ok((
DenoOrPackageJson::Deno(
start_ctx.maybe_deno_json().cloned().ok_or_else(|| {
start_dir.maybe_deno_json().cloned().ok_or_else(|| {
anyhow!("config not found, but it was just created")
})?,
DenoConfigFormat::Json,
@ -174,7 +177,27 @@ fn package_json_dependency_entry(
}
}
pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
#[derive(Clone, Copy)]
/// The name of the subcommand invoking the `add` operation.
pub enum AddCommandName {
Add,
Install,
}
impl std::fmt::Display for AddCommandName {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
AddCommandName::Add => write!(f, "add"),
AddCommandName::Install => write!(f, "install"),
}
}
}
pub async fn add(
flags: Arc<Flags>,
add_flags: AddFlags,
cmd_name: AddCommandName,
) -> Result<(), AnyError> {
let (config_file, cli_factory) =
DenoOrPackageJson::from_flags(flags.clone())?;
@ -230,8 +253,16 @@ pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
let package_and_version = package_and_version_result?;
match package_and_version {
PackageAndVersion::NotFound(package_name) => {
bail!("{} was not found.", crate::colors::red(package_name));
PackageAndVersion::NotFound {
package: package_name,
found_npm_package,
package_req,
} => {
if found_npm_package {
bail!("{} was not found, but a matching npm package exists. Did you mean `{}`?", crate::colors::red(package_name), crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}")));
} else {
bail!("{} was not found.", crate::colors::red(package_name));
}
}
PackageAndVersion::Selected(selected) => {
selected_packages.push(selected);
@ -263,10 +294,10 @@ pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
let is_npm = config_file.is_npm();
for selected_package in selected_packages {
log::info!(
"Add {} - {}@{}",
crate::colors::green(&selected_package.import_name),
selected_package.package_name,
selected_package.version_req
"Add {}{}{}",
crate::colors::green(&selected_package.package_name),
crate::colors::gray("@"),
selected_package.selected_version
);
if is_npm {
@ -304,13 +335,11 @@ pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
.context("Failed to update configuration file")?;
// clear the previously cached package.json from memory before reloading it
deno_node::PackageJsonThreadLocalCache::clear();
node_resolver::PackageJsonThreadLocalCache::clear();
// make a new CliFactory to pick up the updated config file
let cli_factory = CliFactory::from_flags(flags)?;
let cli_factory = CliFactory::from_flags(flags);
// cache deps
if cli_factory.cli_options().enable_future_features() {
crate::module_loader::load_top_level_deps(&cli_factory).await?;
}
crate::module_loader::load_top_level_deps(&cli_factory).await?;
Ok(())
}
@ -319,10 +348,15 @@ struct SelectedPackage {
import_name: String,
package_name: String,
version_req: String,
selected_version: String,
}
enum PackageAndVersion {
NotFound(String),
NotFound {
package: String,
found_npm_package: bool,
package_req: PackageReq,
},
Selected(SelectedPackage),
}
@ -335,7 +369,19 @@ async fn find_package_and_select_version_for_req(
AddPackageReqValue::Jsr(req) => {
let jsr_prefixed_name = format!("jsr:{}", &req.name);
let Some(nv) = jsr_resolver.req_to_nv(&req).await else {
return Ok(PackageAndVersion::NotFound(jsr_prefixed_name));
if npm_resolver.req_to_nv(&req).await.is_some() {
return Ok(PackageAndVersion::NotFound {
package: jsr_prefixed_name,
found_npm_package: true,
package_req: req,
});
}
return Ok(PackageAndVersion::NotFound {
package: jsr_prefixed_name,
found_npm_package: false,
package_req: req,
});
};
let range_symbol = if req.version_req.version_text().starts_with('~') {
'~'
@ -346,12 +392,17 @@ async fn find_package_and_select_version_for_req(
import_name: add_package_req.alias,
package_name: jsr_prefixed_name,
version_req: format!("{}{}", range_symbol, &nv.version),
selected_version: nv.version.to_string(),
}))
}
AddPackageReqValue::Npm(req) => {
let npm_prefixed_name = format!("npm:{}", &req.name);
let Some(nv) = npm_resolver.req_to_nv(&req).await else {
return Ok(PackageAndVersion::NotFound(npm_prefixed_name));
return Ok(PackageAndVersion::NotFound {
package: npm_prefixed_name,
found_npm_package: false,
package_req: req,
});
};
let range_symbol = if req.version_req.version_text().starts_with('~') {
'~'
@ -362,6 +413,7 @@ async fn find_package_and_select_version_for_req(
import_name: add_package_req.alias,
package_name: npm_prefixed_name,
version_req: format!("{}{}", range_symbol, &nv.version),
selected_version: nv.version.to_string(),
}))
}
}
@ -461,6 +513,85 @@ fn generate_imports(packages_to_version: Vec<(String, String)>) -> String {
contents.join("\n")
}
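/// Removes the given packages from the listed top-level objects of a
/// JSON config file (e.g. "imports" or "dependencies"), recording what
/// was removed and rewriting the file with the configured formatting.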
fn remove_from_config(
config_path: &Path,
keys: &[&'static str],
packages_to_remove: &[String],
removed_packages: &mut Vec<String>,
fmt_options: &FmtOptionsConfig,
) -> Result<(), AnyError> {
let mut json: serde_json::Value =
serde_json::from_slice(&std::fs::read(config_path)?)?;
for key in keys {
let Some(obj) = json.get_mut(*key).and_then(|v| v.as_object_mut()) else {
continue;
};
for package in packages_to_remove {
if obj.shift_remove(package).is_some() {
removed_packages.push(package.clone());
}
}
}
let config = serde_json::to_string_pretty(&json)?;
let config =
crate::tools::fmt::format_json(config_path, &config, fmt_options)
.ok()
.flatten()
.unwrap_or(config);
std::fs::write(config_path, config)
.context("Failed to update configuration file")?;
Ok(())
}
pub async fn remove(
flags: Arc<Flags>,
remove_flags: RemoveFlags,
) -> Result<(), AnyError> {
let (config_file, factory) = DenoOrPackageJson::from_flags(flags.clone())?;
let options = factory.cli_options()?;
let start_dir = &options.start_dir;
let fmt_config_options = config_file.fmt_options();
let mut removed_packages = Vec::new();
if let Some(deno_json) = start_dir.maybe_deno_json() {
remove_from_config(
&deno_json.specifier.to_file_path().unwrap(),
&["imports"],
&remove_flags.packages,
&mut removed_packages,
&fmt_config_options,
)?;
}
if let Some(pkg_json) = start_dir.maybe_pkg_json() {
remove_from_config(
&pkg_json.path,
&["dependencies", "devDependencies"],
&remove_flags.packages,
&mut removed_packages,
&fmt_config_options,
)?;
}
if removed_packages.is_empty() {
log::info!("No packages were removed");
} else {
for package in &removed_packages {
log::info!("Removed {}", crate::colors::green(package));
}
// Update deno.lock
node_resolver::PackageJsonThreadLocalCache::clear();
let cli_factory = CliFactory::from_flags(flags);
crate::module_loader::load_top_level_deps(&cli_factory).await?;
}
Ok(())
}
fn update_config_file_content(
obj: jsonc_parser::ast::Object,
config_file_contents: &str,
@ -484,7 +615,7 @@ fn update_config_file_content(
text_changes.push(TextChange {
range: insert_position..insert_position,
// NOTE(bartlomieju): adding `\n` here to force the formatter to always
// produce a config file that is multline, like so:
// produce a config file that is multiline, like so:
// ```
// {
// "imports": {

View file

@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::http_util;
use crate::http_util::HttpClient;
use super::api::OidcTokenResponse;
@ -12,6 +13,8 @@ use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use http_body_util::BodyExt;
use once_cell::sync::Lazy;
use p256::elliptic_curve;
use p256::pkcs8::AssociatedOid;
@ -504,12 +507,12 @@ impl<'a> FulcioSigner<'a> {
let response = self
.http_client
.post(url)
.json(&request_body)
.post_json(url.parse()?, &request_body)?
.send()
.await?;
let body: SigningCertificateResponse = response.json().await?;
let body: SigningCertificateResponse =
http_util::body_to_json(response).await?;
let key = body
.signed_certificate_embedded_sct
@ -527,15 +530,23 @@ impl<'a> FulcioSigner<'a> {
bail!("No OIDC token available");
};
let res = self
let mut url = req_url.parse::<Url>()?;
url.query_pairs_mut().append_pair("audience", aud);
let res_bytes = self
.http_client
.get(&req_url)
.bearer_auth(token)
.query(&[("audience", aud)])
.get(url)?
.header(
http::header::AUTHORIZATION,
format!("Bearer {}", token)
.parse()
.map_err(http::Error::from)?,
)
.send()
.await?
.json::<OidcTokenResponse>()
.await?;
.collect()
.await?
.to_bytes();
let res: OidcTokenResponse = serde_json::from_slice(&res_bytes)?;
Ok(res.value)
}
}
@ -685,11 +696,10 @@ async fn testify(
let url = format!("{}/api/v1/log/entries", *DEFAULT_REKOR_URL);
let res = http_client
.post(&url)
.json(&proposed_intoto_entry)
.post_json(url.parse()?, &proposed_intoto_entry)?
.send()
.await?;
let body: RekorEntry = res.json().await?;
let body: RekorEntry = http_util::body_to_json(res).await?;
Ok(body)
}

View file

@ -34,7 +34,7 @@ pub struct PublishableTarball {
}
pub fn create_gzipped_tarball(
publish_paths: &[CollectedPublishPath],
publish_paths: Vec<CollectedPublishPath>,
source_parser: LazyGraphSourceParser,
diagnostics_collector: &PublishDiagnosticsCollector,
unfurler: &SpecifierUnfurler,
@ -45,15 +45,17 @@ pub fn create_gzipped_tarball(
for path in publish_paths {
let path_str = &path.relative_path;
let specifier = &path.specifier;
let path = &path.path;
let content = resolve_content_maybe_unfurling(
path,
specifier,
unfurler,
source_parser,
diagnostics_collector,
)?;
let content = match path.maybe_content {
Some(content) => content.clone(),
None => resolve_content_maybe_unfurling(
&path.path,
specifier,
unfurler,
source_parser,
diagnostics_collector,
)?,
};
files.push(PublishableTarballFile {
path_str: path_str.clone(),
@ -62,10 +64,11 @@ pub fn create_gzipped_tarball(
hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
size: content.len(),
});
assert!(path_str.starts_with('/'));
tar
.add_file(format!(".{}", path_str), &content)
.with_context(|| {
format!("Unable to add file to tarball '{}'", path.display())
format!("Unable to add file to tarball '{}'", path.path.display())
})?;
}

View file

@ -3,7 +3,6 @@
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_config::package_json::PackageJsonDepValue;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
@ -12,6 +11,7 @@ use deno_graph::DependencyDescriptor;
use deno_graph::DynamicTemplatePart;
use deno_graph::ParserModuleAnalyzer;
use deno_graph::TypeScriptReference;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_node::is_builtin_node_module;
use crate::resolver::SloppyImportsResolver;
@ -75,26 +75,62 @@ impl SpecifierUnfurler {
match resolved {
MappedResolution::Normal(specifier)
| MappedResolution::ImportMap(specifier) => Some(specifier),
MappedResolution::WorkspaceJsrPackage { pkg_req_ref, .. } => {
Some(ModuleSpecifier::parse(&pkg_req_ref.to_string()).unwrap())
}
MappedResolution::WorkspaceNpmPackage {
target_pkg_json: pkg_json,
pkg_name,
sub_path,
} => {
// todo(#24612): consider warning or error when this is also a jsr package?
ModuleSpecifier::parse(&format!(
"npm:{}{}{}",
pkg_name,
pkg_json
.version
.as_ref()
.map(|v| format!("@^{}", v))
.unwrap_or_default(),
sub_path
.as_ref()
.map(|s| format!("/{}", s))
.unwrap_or_default()
))
.ok()
}
MappedResolution::PackageJson {
alias,
sub_path,
dep_result,
..
} => match dep_result {
Ok(dep) => match dep {
PackageJsonDepValue::Req(req) => ModuleSpecifier::parse(&format!(
"npm:{}{}",
req,
sub_path
.as_ref()
.map(|s| format!("/{}", s))
.unwrap_or_default()
))
.ok(),
PackageJsonDepValue::Workspace(_) => {
log::warn!(
"package.json workspace entries are not implemented yet for publishing."
);
None
PackageJsonDepValue::Req(pkg_req) => {
// todo(#24612): consider warning or error when this is an npm workspace
// member that's also a jsr package?
ModuleSpecifier::parse(&format!(
"npm:{}{}",
pkg_req,
sub_path
.as_ref()
.map(|s| format!("/{}", s))
.unwrap_or_default()
))
.ok()
}
PackageJsonDepValue::Workspace(version_req) => {
// todo(#24612): consider warning or error when this is also a jsr package?
ModuleSpecifier::parse(&format!(
"npm:{}@{}{}",
alias,
version_req,
sub_path
.as_ref()
.map(|s| format!("/{}", s))
.unwrap_or_default()
))
.ok()
}
},
Err(err) => {
@ -144,8 +180,8 @@ impl SpecifierUnfurler {
if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
sloppy_imports_resolver
.resolve(&resolved, deno_graph::source::ResolutionMode::Execution)
.as_specifier()
.clone()
.map(|res| res.into_specifier())
.unwrap_or(resolved)
} else {
resolved
};
@ -355,11 +391,14 @@ mod tests {
use super::*;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_node::PackageJson;
use deno_semver::Version;
use import_map::ImportMapWithDiagnostics;
use indexmap::IndexMap;
use pretty_assertions::assert_eq;
use test_util::testdata_path;
@ -401,7 +440,15 @@ mod tests {
}),
);
let workspace_resolver = WorkspaceResolver::new_raw(
Arc::new(ModuleSpecifier::from_directory_path(&cwd).unwrap()),
Some(import_map),
vec![ResolverWorkspaceJsrPackage {
base: ModuleSpecifier::from_directory_path(cwd.join("jsr-package"))
.unwrap(),
name: "@denotest/example".to_string(),
version: Some(Version::parse_standard("1.0.0").unwrap()),
exports: IndexMap::from([(".".to_string(), "mod.ts".to_string())]),
}],
vec![Arc::new(package_json)],
deno_config::workspace::PackageJsonDepResolution::Enabled,
);
@ -424,6 +471,7 @@ import b from "./b.js";
import b2 from "./b";
import "./mod.ts";
import url from "url";
import "@denotest/example";
// TODO: unfurl these to jsr
// import "npm:@jsr/std__fs@1/file";
// import "npm:@jsr/std__fs@1";
@ -473,6 +521,7 @@ import b from "./b.ts";
import b2 from "./b.ts";
import "./mod.ts";
import url from "node:url";
import "jsr:@denotest/example@^1.0.0";
// TODO: unfurl these to jsr
// import "npm:@jsr/std__fs@1/file";
// import "npm:@jsr/std__fs@1";

View file

@ -156,9 +156,12 @@ async fn read_eval_file(
}
#[allow(clippy::print_stdout)]
pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
pub async fn run(
flags: Arc<Flags>,
repl_flags: ReplFlags,
) -> Result<i32, AnyError> {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let main_module = cli_options.resolve_main_module()?;
let permissions = PermissionsContainer::new(Permissions::from_options(
&cli_options.permissions_options()?,

Some files were not shown because too many files have changed in this diff