Mirror of https://github.com/denoland/deno.git, synced 2025-01-21 04:52:26 -05:00

Commit 587b09a6fa: Merge branch 'main' into Fix-UNC-Path-Permissions-Issue-on-Windows

641 changed files with 7213 additions and 13280 deletions
@@ -65,11 +65,11 @@
     "third_party"
   ],
   "plugins": [
-    "https://plugins.dprint.dev/typescript-0.91.6.wasm",
+    "https://plugins.dprint.dev/typescript-0.91.7.wasm",
     "https://plugins.dprint.dev/json-0.19.3.wasm",
-    "https://plugins.dprint.dev/markdown-0.17.5.wasm",
+    "https://plugins.dprint.dev/markdown-0.17.8.wasm",
     "https://plugins.dprint.dev/toml-0.6.2.wasm",
     "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
-    "https://plugins.dprint.dev/g-plane/pretty_yaml-v0.4.0.wasm"
+    "https://plugins.dprint.dev/g-plane/pretty_yaml-v0.5.0.wasm"
   ]
 }
.github/workflows/ci.generate.ts (vendored, 2 changes)

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 11;
+const cacheVersion = 14;

 const ubuntuX86Runner = "ubuntu-22.04";
 const ubuntuX86XlRunner = "ubuntu-22.04-xl";
.github/workflows/ci.yml (vendored, 8 changes)

@@ -367,8 +367,8 @@ jobs:
           path: |-
             ~/.cargo/registry/index
             ~/.cargo/registry/cache
-          key: '11-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '11-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+          key: '14-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '14-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
         if: '!(matrix.skip)'
       - name: Restore cache build output (PR)
         uses: actions/cache/restore@v4
@@ -381,7 +381,7 @@ jobs:
             !./target/*/*.zip
             !./target/*/*.tar.gz
           key: never_saved
-          restore-keys: '11-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '14-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
       - name: Apply and update mtime cache
         if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
         uses: ./.github/mtime_cache
@@ -670,7 +670,7 @@ jobs:
             !./target/*/gn_out
             !./target/*/*.zip
             !./target/*/*.tar.gz
-          key: '11-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '14-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
   publish-canary:
     name: publish canary
     runs-on: ubuntu-22.04
Cargo.lock (generated, 444 changes): diff suppressed because it is too large.
Cargo.toml (77 changes)

@@ -44,47 +44,48 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"

 [workspace.dependencies]
-deno_ast = { version = "=0.41.2", features = ["transpiling"] }
-deno_core = { version = "0.305.0" }
+deno_ast = { version = "=0.42.0", features = ["transpiling"] }
+deno_core = { version = "0.307.0" }

-deno_bench_util = { version = "0.158.0", path = "./bench_util" }
-deno_lockfile = "0.21.2"
+deno_bench_util = { version = "0.161.0", path = "./bench_util" }
+deno_lockfile = "=0.23.0"
 deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
-deno_permissions = { version = "0.24.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.173.0", path = "./runtime" }
+deno_permissions = { version = "0.27.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.176.0", path = "./runtime" }
 deno_semver = "=0.5.13"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.94.0", path = "./cli/napi/sym" }
+napi_sym = { version = "0.97.0", path = "./cli/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }

 denokv_proto = "0.8.1"
 denokv_remote = "0.8.1"
 # denokv_sqlite brings in bundled sqlite if we don't disable the default features
-denokv_sqlite = { default-features = false, version = "0.8.1" }
+denokv_sqlite = { default-features = false, version = "0.8.2" }

 # exts
-deno_broadcast_channel = { version = "0.158.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.96.0", path = "./ext/cache" }
-deno_canvas = { version = "0.33.0", path = "./ext/canvas" }
-deno_console = { version = "0.164.0", path = "./ext/console" }
-deno_cron = { version = "0.44.0", path = "./ext/cron" }
-deno_crypto = { version = "0.178.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.188.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.151.0", path = "./ext/ffi" }
-deno_fs = { version = "0.74.0", path = "./ext/fs" }
-deno_http = { version = "0.162.0", path = "./ext/http" }
-deno_io = { version = "0.74.0", path = "./ext/io" }
-deno_kv = { version = "0.72.0", path = "./ext/kv" }
-deno_napi = { version = "0.95.0", path = "./ext/napi" }
-deno_net = { version = "0.156.0", path = "./ext/net" }
-deno_node = { version = "0.101.0", path = "./ext/node" }
-deno_tls = { version = "0.151.0", path = "./ext/tls" }
-deno_url = { version = "0.164.0", path = "./ext/url" }
-deno_web = { version = "0.195.0", path = "./ext/web" }
-deno_webgpu = { version = "0.131.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.164.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.169.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.159.0", path = "./ext/webstorage" }
-node_resolver = { version = "0.3.0", path = "./ext/node_resolver" }
+deno_broadcast_channel = { version = "0.161.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.99.0", path = "./ext/cache" }
+deno_canvas = { version = "0.36.0", path = "./ext/canvas" }
+deno_console = { version = "0.167.0", path = "./ext/console" }
+deno_cron = { version = "0.47.0", path = "./ext/cron" }
+deno_crypto = { version = "0.181.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.191.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.154.0", path = "./ext/ffi" }
+deno_fs = { version = "0.77.0", path = "./ext/fs" }
+deno_http = { version = "0.165.0", path = "./ext/http" }
+deno_io = { version = "0.77.0", path = "./ext/io" }
+deno_kv = { version = "0.75.0", path = "./ext/kv" }
+deno_napi = { version = "0.98.0", path = "./ext/napi" }
+deno_net = { version = "0.159.0", path = "./ext/net" }
+deno_node = { version = "0.104.0", path = "./ext/node" }
+deno_tls = { version = "0.154.0", path = "./ext/tls" }
+deno_url = { version = "0.167.0", path = "./ext/url" }
+deno_web = { version = "0.198.0", path = "./ext/web" }
+deno_webgpu = { version = "0.134.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.167.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.172.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.162.0", path = "./ext/webstorage" }
+node_resolver = { version = "0.6.0", path = "./ext/node_resolver" }

 aes = "=0.8.3"
 anyhow = "1.0.57"
@@ -102,11 +103,11 @@ chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
 console_static_text = "=0.8.1"
 data-encoding = "2.3.3"
 data-url = "=0.3.0"
-deno_cache_dir = "=0.10.2"
+deno_cache_dir = "=0.11.1"
 deno_package_json = { version = "=0.1.1", default-features = false }
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
-elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
+elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
 encoding_rs = "=0.8.33"
 fast-socks5 = "0.9.6"
 faster-hex = "0.9"
@@ -133,7 +134,7 @@ lazy-regex = "3"
 libc = "0.2.126"
 libz-sys = { version = "1.1", default-features = false }
 log = "0.4.20"
-lsp-types = "=0.94.1" # used by tower-lsp and "proposed" feature is unstable in patch releases
+lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
 memmem = "0.1.1"
 monch = "=0.5.0"
 notify = "=6.1.1"
@@ -141,8 +142,8 @@ num-bigint = { version = "0.4", features = ["rand"] }
 once_cell = "1.17.1"
 os_pipe = { version = "=1.1.5", features = ["io_safety"] }
 p224 = { version = "0.13.0", features = ["ecdh"] }
-p256 = { version = "0.13.2", features = ["ecdh"] }
-p384 = { version = "0.13.0", features = ["ecdh"] }
+p256 = { version = "0.13.2", features = ["ecdh", "jwk"] }
+p384 = { version = "0.13.0", features = ["ecdh", "jwk"] }
 parking_lot = "0.12.0"
 percent-encoding = "2.3.0"
 phf = { version = "0.11", features = ["macros"] }
@@ -154,7 +155,7 @@ rand = "=0.8.5"
 regex = "^1.7.0"
 reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
 ring = "^0.17.0"
-rusqlite = { version = "=0.29.0", features = ["unlock_notify", "bundled"] }
+rusqlite = { version = "0.32.0", features = ["unlock_notify", "bundled"] }
 rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] }
 rustls-pemfile = "2"
 rustls-tokio-stream = "=0.3.0"
@@ -184,7 +185,7 @@ tokio-socks = "0.5.1"
 tokio-util = "0.7.4"
 tower = { version = "0.4.13", default-features = false, features = ["util"] }
 tower-http = { version = "0.5.2", features = ["decompression-br", "decompression-gzip"] }
-tower-lsp = { version = "=0.20.0", features = ["proposed"] }
+tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
 tower-service = "0.3.2"
 twox-hash = "=1.6.3"
 # Upgrading past 2.4.1 may cause WPT failures
Releases.md (309 changes)

@@ -6,6 +6,315 @@ https://github.com/denoland/deno/releases

We also have one-line install commands at:
https://github.com/denoland/deno_install

### 1.46.2 / 2024.08.29

- Revert "feat(fetch): accept async iterables for body" (#25207)
- fix(bench): Fix table column alignments and NO_COLOR=1 (#25190)
- fix(ext/crypto): throw DataError for invalid EC key import (#25181)
- fix(ext/fetch): percent decode userinfo when parsing proxies (#25229)
- fix(ext/node): emit `online` event after worker thread is initialized (#25243)
- fix(ext/node): export JWK public key (#25239)
- fix(ext/node): import EC JWK keys (#25266)
- fix(ext/node): import JWK octet key pairs (#25180)
- fix(ext/node): import RSA JWK keys (#25267)
- fix(ext/node): throw when loading `cpu-features` module (#25257)
- fix(ext/node): update aead-gcm-stream to 0.3 (#25261)
- fix(ext/webgpu): allow to build on unsupported platforms (#25202)
- fix(fmt): fix incorrect quotes in components (#25249)
- fix(fmt/markdown): fix regression with multi-line footnotes and inline math
  (#25222)
- fix(install): Use relative symlinks in deno install (#25164)
- fix(lsp): panic on url_to_uri() (#25238)
- fix(napi): Don't run microtasks in napi_resolve_deferred (#25246)
- fix(napi): Fix worker threads importing already-loaded NAPI addon (#25245)
- fix(node/cluster): improve stubs to make log4js work (#25146)
- fix(runtime/web_worker): populate `SnapshotOptions` for `WebWorker` when
  instantiated without snapshot (#25280)
- fix(task): support tasks with colons in name in `deno run` (#25233)
- fix: handle showing warnings while the progress bar is shown (#25187)
- fix: reland async context (#25140)
- fix: removed unstable-htttp from deno help (#25216)
- fix: replace `npm install` hint with `deno install` hint (#25244)
- fix: update deno_doc (#25290)
- fix: upgrade deno_core to 0.307.0 (#25287)
- perf(ext/node): reduce some allocations in require (#25197)

### 1.46.1 / 2024.08.22

- fix(ext/node): http2session ready state (#25143)
- fix(ext/node): register `node:wasi` built-in (#25134)
- fix(urlpattern): fallback to empty string for undefined group values (#25151)
- fix: trim space around DENO_AUTH_TOKENS (#25147)

### 1.46.0 / 2024.08.22

- BREAKING(temporal/unstable): Remove obsoleted Temporal APIs (#24836)
- BREAKING(webgpu/unstable): Replace async .requestAdapterInfo() with sync .info
  (#24783)
- feat: `deno compile --icon <ico>` (#25039)
- feat: `deno init --serve` (#24897)
- feat: `deno upgrade --rc` (#24905)
- feat: Add Deno.ServeDefaultExport type (#24879)
- feat: async context (#24402)
- feat: better help output (#24958)
- feat: codesign for deno compile binaries (#24604)
- feat: deno clean (#24950)
- feat: deno remove (#24952)
- feat: deno run <task> (#24891)
- feat: Deprecate "import assertions" with a warning (#24743)
- feat: glob and directory support for `deno check` and `deno cache` cli arg
  paths (#25001)
- feat: Print deprecation message for npm packages (#24992)
- feat: refresh "Download" progress bar with a spinner (#24913)
- feat: Rename --unstable-hmr to --watch-hmr (#24975)
- feat: support short flags for permissions (#24883)
- feat: treat bare deno command with run arguments as deno run (#24887)
- feat: upgrade deno_core (#24886)
- feat: upgrade deno_core (#25042)
- feat: upgrade V8 to 12.8 (#24693)
- feat: Upgrade V8 to 12.9 (#25138)
- feat: vm rewrite (#24596)
- feat(clean): add progress bar (#25026)
- feat(cli): Add --env-file as alternative to --env (#24555)
- feat(cli/tools): add a subcommand `--hide-stacktraces` for test (#24095)
- feat(config): Support frozen lockfile config option in deno.json (#25100)
- feat(config/jsr): add license field (#25056)
- feat(coverage): add breadcrumbs to deno coverage `--html` report (#24860)
- feat(ext/node): rewrite crypto keys (#24463)
- feat(ext/node): support http2session.socket (#24786)
- feat(fetch): accept async iterables for body (#24623)
- feat(flags): improve help output and make `deno run` list tasks (#25108)
- feat(fmt): support CSS, SCSS, Sass and Less (#24870)
- feat(fmt): support HTML, Svelte, Vue, Astro and Angular (#25019)
- feat(fmt): support YAML (#24717)
- feat(FUTURE): terse lockfile (v4) (#25059)
- feat(install): change 'Add ...' message (#24949)
- feat(lint): Add lint for usage of node globals (with autofix) (#25048)
- feat(lsp): node specifier completions (#24904)
- feat(lsp): registry completions for import-mapped specifiers (#24792)
- feat(node): support `username` and `_password` in `.npmrc` file (#24793)
- feat(permissions): link to docs in permission prompt (#24948)
- feat(publish): error on missing license file (#25011)
- feat(publish): suggest importing `jsr:@std/` for `deno.land/std` urls (#25046)
- feat(serve): Opt-in parallelism for `deno serve` (#24920)
- feat(test): rename --allow-none to --permit-no-files (#24809)
- feat(unstable): ability to use a local copy of jsr packages (#25068)
- feat(unstable/fmt): move yaml formatting behind unstable flag (#24848)
- feat(upgrade): refresh output (#24911)
- feat(upgrade): support `deno upgrade 1.46.0` (#25096)
- feat(urlpattern): add ignoreCase option & hasRegExpGroups property, and fix
  spec discrepancies (#24741)
- feat(watch): add watch paths to test subcommand (#24771)
- fix: `node:inspector` not being registered (#25007)
- fix: `rename` watch event missing (#24893)
- fix: actually add missing `node:readline/promises` module (#24772)
- fix: adapt to new jupyter runtime API and include session IDs (#24762)
- fix: add permission name when accessing a special file errors (#25085)
- fix: adjust suggestion for lockfile regeneration (#25107)
- fix: cache bust jsr meta file when version not found in dynamic branches
  (#24928)
- fix: CFunctionInfo and CTypeInfo leaks (#24634)
- fix: clean up flag help output (#24686)
- fix: correct JSON config schema to show vendor option as stable (#25090)
- fix: dd-trace http message compat (#25021)
- fix: deserialize lockfile v3 straight (#25121)
- fix: Don't panic if fail to handle JS stack frame (#25122)
- fix: Don't panic if failed to add system certificate (#24823)
- fix: Don't shell out to `unzip` in deno upgrade/compile (#24926)
- fix: enable the reporting of parsing related problems when running deno lint
  (#24332)
- fix: errors with CallSite methods (#24907)
- fix: include already seen deps in lockfile dep tracking (#24556)
- fix: log current version when using deno upgrade (#25079)
- fix: make `deno add` output more deterministic (#25083)
- fix: make vendor cache manifest more deterministic (#24658)
- fix: missing `emitWarning` import (#24587)
- fix: regressions around Error.prepareStackTrace (#24839)
- fix: stub `node:module.register()` (#24965)
- fix: support `npm:bindings` and `npm:callsites` packages (#24727)
- fix: unblock fsevents native module (#24542)
- fix: update deno_doc (#24972)
- fix: update dry run success message (#24885)
- fix: update lsp error message of 'relative import path' to 'use deno add' for
  npm/jsr packages (#24524)
- fix: upgrade deno_core to 0.298.0 (#24709)
- fix: warn about import assertions when using typescript (#25135)
- fix(add): better error message providing scoped pkg missing leading `@` symbol
  (#24961)
- fix(add): Better error message when missing npm specifier (#24970)
- fix(add): error when config file contains importMap field (#25115)
- fix(add): Handle packages without root exports (#25102)
- fix(add): Support dist tags in deno add (#24960)
- fix(cli): add NAPI support in standalone mode (#24642)
- fix(cli): Create child node_modules for conflicting dependency versions,
  respect aliases in package.json (#24609)
- fix(cli): Respect implied BYONM from DENO_FUTURE in `deno task` (#24652)
- fix(cli): shorten examples in help text (#24374)
- fix(cli): support --watch when running cjs npm packages (#25038)
- fix(cli): Unhide publish subcommand help string (#24787)
- fix(cli): update permission prompt message for compiled binaries (#24081)
- fix(cli/init): broken link in deno init sample template (#24545)
- fix(compile): adhoc codesign mach-o by default (#24824)
- fix(compile): make output more deterministic (#25092)
- fix(compile): support workspace members importing other members (#24909)
- fix(compile/windows): handle cjs re-export of relative path with parent
  component (#24795)
- fix(config): regression - should not discover npm workspace for nested
  deno.json not in workspace (#24559)
- fix(cron): improve error message for invalid cron names (#24644)
- fix(docs): fix some deno.land/manual broken urls (#24557)
- fix(ext/console): Error Cause Not Inspect-Formatted when printed (#24526)
- fix(ext/console): render properties of Intl.Locale (#24827)
- fix(ext/crypto): respect offsets when writing into ab views in randomFillSync
  (#24816)
- fix(ext/fetch): include TCP src/dst socket info in error messages (#24939)
- fix(ext/fetch): include URL and error details on fetch failures (#24910)
- fix(ext/fetch): respect authority from URL (#24705)
- fix(ext/fetch): use correct ALPN to proxies (#24696)
- fix(ext/fetch): use correct ALPN to socks5 proxies (#24817)
- fix(ext/http): correctly consume response body in `Deno.serve` (#24811)
- fix(ext/net): validate port in Deno.{connect,serve,listen} (#24399)
- fix(ext/node): add `CipherIv.setAutoPadding()` (#24940)
- fix(ext/node): add crypto.diffieHellman (#24938)
- fix(ext/node): client closing streaming request shouldn't terminate http
  server (#24946)
- fix(ext/node): createBrotliCompress params (#24984)
- fix(ext/node): do not expose `self` global in node (#24637)
- fix(ext/node): don't concat set-cookie in ServerResponse.appendHeader (#25000)
- fix(ext/node): don't throw when calling PerformanceObserver.observe (#25036)
- fix(ext/node): ed25519 signing and cipheriv autopadding fixes (#24957)
- fix(ext/node): fix prismjs compatibiliy in Web Worker (#25062)
- fix(ext/node): handle node child_process with --v8-options flag (#24804)
- fix(ext/node): handle prefix mapping for IPv4-mapped IPv6 addresses (#24546)
- fix(ext/node): http request uploads of subarray of buffer should work (#24603)
- fix(ext/node): improve shelljs compat with managed npm execution (#24912)
- fix(ext/node): node:zlib coerces quality 10 to 9.5 (#24850)
- fix(ext/node): pass content-disposition header as string instead of bytes
  (#25128)
- fix(ext/node): prevent panic in http2.connect with uppercase header names
  (#24780)
- fix(ext/node): read correct CPU usage stats on Linux (#24732)
- fix(ext/node): rewrite X509Certificate resource and add `publicKey()` (#24988)
- fix(ext/node): stat.mode on windows (#24434)
- fix(ext/node): support ieee-p1363 ECDSA signatures and pss salt len (#24981)
- fix(ext/node): use pem private keys in createPublicKey (#24969)
- fix(ext/node/net): emit `error` before `close` when connection is refused
  (#24656)
- fix(ext/web): make CompressionResource garbage collectable (#24884)
- fix(ext/web): make TextDecoderResource use cppgc (#24888)
- fix(ext/webgpu): assign missing `constants` property of shader about
  `GPUDevice.createRenderPipeline[Async]` (#24803)
- fix(ext/webgpu): don't crash while constructing GPUOutOfMemoryError (#24807)
- fix(ext/webgpu): GPUDevice.createRenderPipelineAsync should return a Promise
  (#24349)
- fix(ext/websocket): unhandled close rejection in WebsocketStream (#25125)
- fix(fmt): handle using stmt in for of stmt (#24834)
- fix(fmt): regression with pipe in code blocks in tables (#25098)
- fix(fmt): upgrade to dprint-plugin-markdown 0.17.4 (#25075)
- fix(fmt): was sometimes putting comments in front of commas in parameter lists
  (#24650)
- fix(future): Emit `deno install` warning less often, suggest `deno install` in
  error message (#24706)
- fix(http): Adjust hostname display for Windows when using 0.0.0.0 (#24698)
- fix(init): use bare specifier for `jsr:@std/assert` (#24581)
- fix(install): Properly handle dist tags when setting up node_modules (#24968)
- fix(lint): support linting tsx/jsx from stdin (#24955)
- fix(lsp): directly use file referrer when loading document (#24997)
- fix(lsp): don't always use byonm resolver when DENO_FUTURE=1 (#24865)
- fix(lsp): hang when caching failed (#24651)
- fix(lsp): import map lookup for jsr subpath auto import (#25025)
- fix(lsp): include scoped import map keys in completions (#25047)
- fix(lsp): resolve jsx import source with types mode (#25064)
- fix(lsp): rewrite import for 'infer return type' action (#24685)
- fix(lsp): scope attribution for asset documents (#24663)
- fix(lsp): support npm workspaces and fix some resolution issues (#24627)
- fix(node): better detection for when to surface node resolution errors
  (#24653)
- fix(node): cjs pkg dynamically importing esm-only pkg fails (#24730)
- fix(node): Create additional pipes for child processes (#25016)
- fix(node): Fix `--allow-scripts` with no `deno.json` (#24533)
- fix(node): Fix node IPC serialization for objects with undefined values
  (#24894)
- fix(node): revert invalid package target change (#24539)
- fix(node): Rework node:child_process IPC (#24763)
- fix(node): Run node compat tests listed in the `ignore` field (and fix the
  ones that fail) (#24631)
- fix(node): support `tty.hasColors()` and `tty.getColorDepth()` (#24619)
- fix(node): support wildcards in package.json imports (#24794)
- fix(node/crypto): Assign publicKey and privateKey with let instead of const
  (#24943)
- fix(node/fs): node:fs.read and write should accept typed arrays other than
  Uint8Array (#25030)
- fix(node/fs): Use correct offset and length in node:fs.read and write (#25049)
- fix(node/fs/promises): watch should be async iterable (#24805)
- fix(node/http): wrong `req.url` value (#25081)
- fix(node/inspector): Session constructor should not throw (#25041)
- fix(node/timers/promises): add scheduler APIs (#24802)
- fix(node/tty): fix `tty.WriteStream.hasColor` with different args (#25094)
- fix(node/util): add missing `debug` alias of `debuglog` (#24944)
- fix(node/worker_threads): support `port.once()` (#24725)
- fix(npm): handle packages with only pre-released 0.0.0 versions (#24563)
- fix(npm): use start directory deno.json as "root deno.json config" in npm
  workspace (#24538)
- fix(npmrc): skip loading .npmrc in home dir on permission error (#24758)
- fix(publish): show dirty files on dirty check failure (#24541)
- fix(publish): surface syntax errors when using --no-check (#24620)
- fix(publish): warn about missing license file (#24677)
- fix(publish): workspace included license file had incorrect path (#24747)
- fix(repl): Prevent panic on broken pipe (#21945)
- fix(runtime/windows): fix calculation of console size (#23873)
- fix(std/http2): release window capacity back to remote stream (#24576)
- fix(tls): print a warning if a system certificate can't be loaded (#25023)
- fix(types): Conform lib.deno_web.d.ts to lib.dom.d.ts and lib.webworker.d.ts
  (#24599)
- fix(types): fix streams types (#24770)
- fix(unstable): move sloppy-import warnings to lint rule (#24710)
- fix(unstable): panic when running deno install with DENO_FUTURE=1 (#24866)
- fix(unstable/compile): handle byonm import in sub dir (#24755)
- fix(upgrade): better error message when check_exe fails (#25133)
- fix(upgrade): correctly compute latest version based on current release
  channel (#25087)
- fix(upgrade): do not error if config in cwd invalid (#24689)
- fix(upgrade): fallback to Content-Length header for progress bar (#24923)
- fix(upgrade): return no RC versions if fetching fails (#25013)
- fix(upgrade): support RC release with --version flag (#25091)
- fix(upgrade): use proper version display (#25029)
- fix(urlpattern): correct typings for added APIs (#24881)
- fix(webgpu): Fix `GPUAdapter#isFallbackAdapter` and `GPUAdapter#info`
  properties (#24914)
- fix(workspace): do not resolve to self for npm pkg depending on matching req
  (#24591)
- fix(workspace): support resolving bare specifiers to npm pkgs within a
  workspace (#24611)
- fix(workspaces/publish): include the license file from the workspace root if
  not in pkg (#24714)
- perf: skip saving to emit cache after first failure (#24896)
- perf: update deno_ast to 0.41 (#24819)
- perf: update deno_doc (#24700)
- perf(ext/crypto): make randomUUID() 5x faster (#24510)
- perf(ext/fetch): speed up `resp.clone()` (#24812)
- perf(ext/http): Reduce size of `ResponseBytesInner` (#24840)
- perf(ext/node): improve `Buffer` from string performance (#24567)
- perf(ext/node): optimize fs.exists[Sync] (#24613)
- perf(lsp): remove fallback config scopes for workspace folders (#24868)
- refactor: `version` module exports a single const struct (#25014)
- refactor: decouple node resolution from deno_core (#24724)
- refactor: move importMap with imports/scopes diagnostic to deno_config
  (#24553)
- refactor: remove version::is_canary(), use ReleaseChannel instead (#25053)
- refactor: show release channel in `deno --version` (#25061)
- refactor: update to deno_config 0.25 (#24645)
- refactor: update to use deno_package_json (#24688)
- refactor(ext/node): create separate ops for node:http module (#24788)
- refactor(fetch): reimplement fetch with hyper instead of reqwest (#24237)
- refactor(lint): move reporters to separate module (#24757)
- refactor(node): internally add `.code()` to node resolution errors (#24610)
- refactor(upgrade): cleanup pass (#24954)
- refactor(upgrade): make fetching latest version async (#24919)
- Reland "fix: CFunctionInfo and CTypeInfo leaks (#24634)" (#24692)
- Reland "refactor(fetch): reimplement fetch with hyper instead of reqwest"
  (#24593)

### 1.45.5 / 2024.07.31

- fix(cli): Unhide publish subcommand help string (#24787)
@@ -2,7 +2,7 @@

 [package]
 name = "deno_bench_util"
-version = "0.158.0"
+version = "0.161.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -2,7 +2,7 @@

 [package]
 name = "deno"
-version = "1.46.0-rc.3"
+version = "2.0.0-rc.0"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -65,24 +65,25 @@ winres.workspace = true
 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
 deno_cache_dir = { workspace = true }
-deno_config = { version = "=0.30.1", features = ["workspace", "sync"] }
+deno_config = { version = "=0.32.0", features = ["workspace", "sync"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "0.146.0", features = ["html", "syntect"] }
-deno_emit = "=0.44.0"
-deno_graph = { version = "=0.81.2" }
-deno_lint = { version = "=0.63.1", features = ["docs"] }
+deno_doc = { version = "0.148.0", features = ["html", "syntect"] }
+deno_emit = "=0.45.0"
+deno_graph = { version = "=0.82.0" }
+deno_lint = { version = "=0.64.0", features = ["docs"] }
 deno_lockfile.workspace = true
-deno_npm = "=0.23.1"
+deno_npm = "=0.25.0"
 deno_package_json.workspace = true
 deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_semver = "=0.5.10"
+deno_semver.workspace = true
 deno_task_shell = "=0.17.0"
 deno_terminal.workspace = true
-eszip = "=0.75.0"
+eszip = "=0.78.0"
+libsui = "0.3.0"
 napi_sym.workspace = true
 node_resolver.workspace = true

 anstream = "0.6.14"
 async-trait.workspace = true
 base32.workspace = true
 base64.workspace = true
@@ -101,8 +102,8 @@ dissimilar = "=1.0.4"
 dotenvy = "0.15.7"
 dprint-plugin-json = "=0.19.3"
 dprint-plugin-jupyter = "=0.1.3"
-dprint-plugin-markdown = "=0.17.5"
-dprint-plugin-typescript = "=0.91.6"
+dprint-plugin-markdown = "=0.17.8"
+dprint-plugin-typescript = "=0.91.7"
 env_logger = "=0.10.0"
 fancy-regex = "=0.10.0"
 faster-hex.workspace = true
@@ -114,7 +115,7 @@ http.workspace = true
 http-body.workspace = true
 http-body-util.workspace = true
 hyper-util.workspace = true
-import_map = { version = "=0.20.0", features = ["ext"] }
+import_map = { version = "=0.20.1", features = ["ext"] }
 indexmap.workspace = true
 jsonc-parser.workspace = true
 jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
@@ -155,6 +156,7 @@ thiserror.workspace = true
 tokio.workspace = true
 tokio-util.workspace = true
 tower-lsp.workspace = true
+tracing = { version = "0.1", features = ["log", "default"] }
 twox-hash.workspace = true
 typed-arena = "=2.0.2"
 uuid = { workspace = true, features = ["serde"] }
cli/args/flags.rs (1520 changes): diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

-use std::collections::BTreeSet;
+use std::collections::HashSet;
 use std::path::PathBuf;

 use deno_config::deno_json::ConfigFile;
@@ -12,6 +12,7 @@ use deno_core::parking_lot::MutexGuard;
 use deno_lockfile::WorkspaceMemberConfig;
 use deno_package_json::PackageJsonDepValue;
 use deno_runtime::deno_node::PackageJson;
+use deno_semver::jsr::JsrDepPackageReq;

 use crate::cache;
 use crate::util::fs::atomic_write_file_with_retries;
@@ -98,7 +99,9 @@ impl CliLockfile {
     flags: &Flags,
     workspace: &Workspace,
   ) -> Result<Option<CliLockfile>, AnyError> {
-    fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> {
+    fn pkg_json_deps(
+      maybe_pkg_json: Option<&PackageJson>,
+    ) -> HashSet<JsrDepPackageReq> {
       let Some(pkg_json) = maybe_pkg_json else {
         return Default::default();
       };
@@ -107,21 +110,21 @@
         .values()
         .filter_map(|dep| dep.as_ref().ok())
         .filter_map(|dep| match dep {
-          PackageJsonDepValue::Req(req) => Some(req),
+          PackageJsonDepValue::Req(req) => {
+            Some(JsrDepPackageReq::npm(req.clone()))
+          }
           PackageJsonDepValue::Workspace(_) => None,
         })
-        .map(|r| format!("npm:{}", r))
         .collect()
     }

     fn deno_json_deps(
       maybe_deno_json: Option<&ConfigFile>,
-    ) -> BTreeSet<String> {
+    ) -> HashSet<JsrDepPackageReq> {
       maybe_deno_json
         .map(|c| {
           crate::args::deno_json::deno_json_deps(c)
             .into_iter()
-            .map(|req| req.to_string())
             .collect()
         })
         .unwrap_or_default()
@@ -157,15 +160,7 @@
         .unwrap_or(false)
     });

-    let lockfile = if flags.lock_write {
-      log::warn!(
-        "{} \"--lock-write\" flag is deprecated and will be removed in Deno 2.",
-        crate::colors::yellow("Warning")
-      );
-      CliLockfile::new(Lockfile::new_empty(filename, true), frozen)
-    } else {
-      Self::read_from_path(filename, frozen)?
-    };
+    let lockfile = Self::read_from_path(filename, frozen)?;

     // initialize the lockfile with the workspace's configuration
     let root_url = workspace.root_dir();
@@ -215,6 +210,7 @@

     Ok(Some(lockfile))
   }
+
   pub fn read_from_path(
     file_path: PathBuf,
     frozen: bool,
@@ -225,26 +221,12 @@
           file_path,
           content: &text,
           overwrite: false,
-          is_deno_future: *super::DENO_FUTURE,
         })?,
         frozen,
       )),
-      Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-        Ok(CliLockfile::new(
-          if *super::DENO_FUTURE {
-            // force version 4 for deno future
-            Lockfile::new(deno_lockfile::NewLockfileOptions {
-              file_path,
-              content: r#"{"version":"4"}"#,
-              overwrite: false,
-              is_deno_future: true,
-            })?
-          } else {
-            Lockfile::new_empty(file_path, false)
-          },
-          frozen,
-        ))
-      }
+      Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(
+        CliLockfile::new(Lockfile::new_empty(file_path, false), frozen),
+      ),
       Err(err) => Err(err).with_context(|| {
         format!("Failed reading lockfile '{}'", file_path.display())
       }),
@@ -257,12 +239,6 @@
     }
     let lockfile = self.lockfile.lock();
     if lockfile.has_content_changed {
-      let suggested = if *super::DENO_FUTURE {
-        "`deno cache --frozen=false`, `deno install --frozen=false`,"
-      } else {
-        "`deno cache --frozen=false`"
-      };
-
       let contents =
         std::fs::read_to_string(&lockfile.filename).unwrap_or_default();
       let new_contents = lockfile.as_json_string();
@@ -270,7 +246,7 @@
       // has an extra newline at the end
       let diff = diff.trim_end();
       Err(deno_core::anyhow::anyhow!(
-        "The lockfile is out of date. Run {suggested} or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
+        "The lockfile is out of date. Run `deno cache --frozen=false`, `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
       ))
     } else {
       Ok(())
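The hunks above switch dependency tracking from a BTreeSet<String> of formatted "npm:{req}" strings to a HashSet of typed JsrDepPackageReq values. A minimal standalone sketch of that typed-set pattern, using a made-up DepReq type in place of the real deno_semver one, might look like this:

use std::collections::HashSet;

// Hypothetical stand-in for deno_semver::jsr::JsrDepPackageReq.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct DepReq {
    kind: &'static str, // "npm" or "jsr"
    name: String,
    version_req: String,
}

impl DepReq {
    fn npm(name: &str, version_req: &str) -> Self {
        DepReq {
            kind: "npm",
            name: name.to_string(),
            version_req: version_req.to_string(),
        }
    }
}

fn main() {
    // Collecting typed values into a HashSet deduplicates requirements
    // without going through an intermediate "npm:name@req" string.
    let deps: HashSet<DepReq> = [
        DepReq::npm("chalk", "^5"),
        DepReq::npm("chalk", "^5"), // duplicate collapses
        DepReq::npm("express", "^4"),
    ]
    .into_iter()
    .collect();
    assert_eq!(deps.len(), 2);
}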
cli/args/mod.rs (195 changes)

@@ -8,7 +8,9 @@ mod lockfile;
 mod package_json;

 use deno_ast::SourceMapOption;
+use deno_config::deno_json::NodeModulesDirMode;
 use deno_config::workspace::CreateResolverOptions;
+use deno_config::workspace::FolderConfigs;
 use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::VendorEnablement;
 use deno_config::workspace::Workspace;
@@ -50,7 +52,6 @@ use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::serde_json;
 use deno_core::url::Url;
-use deno_runtime::deno_node::PackageJson;
 use deno_runtime::deno_permissions::PermissionsOptions;
 use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
 use deno_runtime::deno_tls::rustls;
@@ -63,6 +64,7 @@ use dotenvy::from_filename;
 use once_cell::sync::Lazy;
 use serde::Deserialize;
 use serde::Serialize;
+use std::borrow::Cow;
 use std::collections::HashMap;
 use std::env;
 use std::io::BufReader;
@@ -116,9 +118,6 @@ pub static DENO_DISABLE_PEDANTIC_NODE_WARNINGS: Lazy<bool> = Lazy::new(|| {
     .is_some()
 });

-pub static DENO_FUTURE: Lazy<bool> =
-  Lazy::new(|| std::env::var("DENO_FUTURE").ok().is_some());
-
 pub fn jsr_url() -> &'static Url {
   static JSR_URL: Lazy<Url> = Lazy::new(|| {
     let env_var_name = "JSR_URL";
@@ -370,7 +369,7 @@ pub struct WorkspaceTestOptions {
   pub doc: bool,
   pub no_run: bool,
   pub fail_fast: Option<NonZeroUsize>,
-  pub allow_none: bool,
+  pub permit_no_files: bool,
   pub filter: Option<String>,
   pub shuffle: Option<u64>,
   pub concurrent_jobs: NonZeroUsize,
@@ -383,7 +382,7 @@ pub struct WorkspaceTestOptions {
 impl WorkspaceTestOptions {
   pub fn resolve(test_flags: &TestFlags) -> Self {
     Self {
-      allow_none: test_flags.allow_none,
+      permit_no_files: test_flags.permit_no_files,
       concurrent_jobs: test_flags
         .concurrent_jobs
         .unwrap_or_else(|| NonZeroUsize::new(1).unwrap()),
@@ -781,8 +780,6 @@ pub struct CliOptions {
   maybe_lockfile: Option<Arc<CliLockfile>>,
   overrides: CliOptionOverrides,
   pub start_dir: Arc<WorkspaceDirectory>,
-  pub disable_deprecated_api_warning: bool,
-  pub verbose_deprecated_api_warning: bool,
   pub deno_dir_provider: Arc<DenoDirProvider>,
 }

@@ -813,27 +810,18 @@ impl CliOptions {
     }

     let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache);
-    let root_folder = start_dir.workspace.root_folder_configs();
     let deno_dir_provider =
       Arc::new(DenoDirProvider::new(flags.cache_path.clone()));
     let maybe_node_modules_folder = resolve_node_modules_folder(
       &initial_cwd,
       &flags,
-      root_folder.deno_json.as_deref(),
-      root_folder.pkg_json.as_deref(),
+      &start_dir.workspace,
       &deno_dir_provider,
     )
     .with_context(|| "Resolving node_modules folder.")?;

     load_env_variables_from_env_file(flags.env_file.as_ref());

-    let disable_deprecated_api_warning = flags.log_level
-      == Some(log::Level::Error)
-      || std::env::var("DENO_NO_DEPRECATION_WARNINGS").ok().is_some();
-
-    let verbose_deprecated_api_warning =
-      std::env::var("DENO_VERBOSE_WARNINGS").ok().is_some();
-
     Ok(Self {
       flags,
       initial_cwd,
@@ -842,8 +830,6 @@
       maybe_node_modules_folder,
       overrides: Default::default(),
       start_dir,
-      disable_deprecated_api_warning,
-      verbose_deprecated_api_warning,
       deno_dir_provider,
     })
   }
@@ -1137,15 +1123,8 @@
     self.flags.env_file.as_ref()
   }

-  pub fn enable_future_features(&self) -> bool {
-    *DENO_FUTURE
-  }
-
   pub fn resolve_main_module(&self) -> Result<ModuleSpecifier, AnyError> {
     let main_module = match &self.flags.subcommand {
-      DenoSubcommand::Bundle(bundle_flags) => {
-        resolve_url_or_path(&bundle_flags.source_file, self.initial_cwd())?
-      }
       DenoSubcommand::Compile(compile_flags) => {
         resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
       }
@@ -1227,11 +1206,6 @@
     NPM_PROCESS_STATE.is_some()
   }

-  /// Overrides the import map specifier to use.
-  pub fn set_import_map_specifier(&mut self, path: Option<ModuleSpecifier>) {
-    self.overrides.import_map_specifier = Some(path);
-  }
-
   pub fn has_node_modules_dir(&self) -> bool {
     self.maybe_node_modules_folder.is_some()
   }
@@ -1240,26 +1214,13 @@
     self.maybe_node_modules_folder.as_ref()
   }

-  pub fn with_node_modules_dir_path(&self, path: PathBuf) -> Self {
-    Self {
-      flags: self.flags.clone(),
-      initial_cwd: self.initial_cwd.clone(),
-      maybe_node_modules_folder: Some(path),
-      npmrc: self.npmrc.clone(),
-      maybe_lockfile: self.maybe_lockfile.clone(),
-      start_dir: self.start_dir.clone(),
-      overrides: self.overrides.clone(),
-      disable_deprecated_api_warning: self.disable_deprecated_api_warning,
-      verbose_deprecated_api_warning: self.verbose_deprecated_api_warning,
-      deno_dir_provider: self.deno_dir_provider.clone(),
+  pub fn node_modules_dir(
+    &self,
+  ) -> Result<Option<NodeModulesDirMode>, AnyError> {
+    if let Some(flag) = self.flags.node_modules_dir {
+      return Ok(Some(flag));
     }
-  }
-
-  pub fn node_modules_dir_enablement(&self) -> Option<bool> {
-    self
-      .flags
-      .node_modules_dir
-      .or_else(|| self.workspace().node_modules_dir())
+    self.workspace().node_modules_dir_mode().map_err(Into::into)
   }

   pub fn vendor_dir_path(&self) -> Option<&PathBuf> {
@@ -1270,23 +1231,7 @@
     &self,
     config_type: TsConfigType,
   ) -> Result<TsConfigForEmit, AnyError> {
-    let result = self.workspace().resolve_ts_config_for_emit(config_type);
-
-    match result {
-      Ok(mut ts_config_for_emit) => {
-        if matches!(self.flags.subcommand, DenoSubcommand::Bundle(..)) {
-          // For backwards compatibility, force `experimentalDecorators` setting
-          // to true.
-          *ts_config_for_emit
-            .ts_config
-            .0
-            .get_mut("experimentalDecorators")
-            .unwrap() = serde_json::Value::Bool(true);
-        }
-        Ok(ts_config_for_emit)
-      }
-      Err(err) => Err(err),
-    }
+    self.workspace().resolve_ts_config_for_emit(config_type)
   }

   pub fn resolve_inspector_server(
@@ -1611,9 +1556,18 @@
       || self.workspace().has_unstable("bare-node-builtins")
   }

+  fn byonm_enabled(&self) -> bool {
+    // check if enabled via unstable
+    self.node_modules_dir().ok().flatten() == Some(NodeModulesDirMode::Manual)
+      || NPM_PROCESS_STATE
+        .as_ref()
+        .map(|s| matches!(s.kind, NpmProcessStateKind::Byonm))
+        .unwrap_or(false)
+  }
+
   pub fn use_byonm(&self) -> bool {
-    if self.enable_future_features()
-      && self.node_modules_dir_enablement().is_none()
+    if self.node_modules_dir().ok().flatten().is_none()
       && self.maybe_node_modules_folder.is_some()
       && self
         .workspace()
         .config_folders()
@@ -1623,13 +1577,7 @@
       return true;
     }

-    // check if enabled via unstable
-    self.flags.unstable_config.byonm
-      || NPM_PROCESS_STATE
-        .as_ref()
-        .map(|s| matches!(s.kind, NpmProcessStateKind::Byonm))
-        .unwrap_or(false)
-      || self.workspace().has_unstable("byonm")
+    self.byonm_enabled()
   }

   pub fn unstable_sloppy_imports(&self) -> bool {
@@ -1651,25 +1599,24 @@
       }
     });

-    if *DENO_FUTURE {
-      let future_features = [
-        deno_runtime::deno_ffi::UNSTABLE_FEATURE_NAME.to_string(),
-        deno_runtime::deno_fs::UNSTABLE_FEATURE_NAME.to_string(),
-        deno_runtime::deno_webgpu::UNSTABLE_FEATURE_NAME.to_string(),
-      ];
-      future_features.iter().for_each(|future_feature| {
-        if !from_config_file.contains(future_feature) {
-          from_config_file.push(future_feature.to_string());
-        }
-      });
-    }
+    // TODO(2.0): remove this code and enable these features in `99_main.js` by default.
+    let future_features = [
+      deno_runtime::deno_ffi::UNSTABLE_FEATURE_NAME.to_string(),
+      deno_runtime::deno_fs::UNSTABLE_FEATURE_NAME.to_string(),
+      deno_runtime::deno_webgpu::UNSTABLE_FEATURE_NAME.to_string(),
+    ];
+    future_features.iter().for_each(|future_feature| {
+      if !from_config_file.contains(future_feature) {
+        from_config_file.push(future_feature.to_string());
+      }
+    });

     if !from_config_file.is_empty() {
       // collect unstable granular flags
       let mut all_valid_unstable_flags: Vec<&str> =
         crate::UNSTABLE_GRANULAR_FLAGS
           .iter()
-          .map(|granular_flag| granular_flag.0)
+          .map(|granular_flag| granular_flag.name)
           .collect();

       let mut another_unstable_flags = Vec::from([
@@ -1761,42 +1708,55 @@
 fn resolve_node_modules_folder(
   cwd: &Path,
   flags: &Flags,
-  maybe_config_file: Option<&ConfigFile>,
-  maybe_package_json: Option<&PackageJson>,
+  workspace: &Workspace,
   deno_dir_provider: &Arc<DenoDirProvider>,
 ) -> Result<Option<PathBuf>, AnyError> {
-  let use_node_modules_dir = flags
-    .node_modules_dir
-    .or_else(|| maybe_config_file.and_then(|c| c.json.node_modules_dir))
-    .or(flags.vendor)
-    .or_else(|| maybe_config_file.and_then(|c| c.json.vendor));
+  fn resolve_from_root(root_folder: &FolderConfigs, cwd: &Path) -> PathBuf {
+    root_folder
+      .deno_json
+      .as_ref()
+      .map(|c| Cow::Owned(c.dir_path()))
+      .or_else(|| {
+        root_folder
+          .pkg_json
+          .as_ref()
+          .map(|c| Cow::Borrowed(c.dir_path()))
+      })
+      .unwrap_or(Cow::Borrowed(cwd))
+      .join("node_modules")
+  }
+
+  let root_folder = workspace.root_folder_configs();
+  let use_node_modules_dir = if let Some(mode) = flags.node_modules_dir {
+    Some(mode.uses_node_modules_dir())
+  } else {
+    workspace
+      .node_modules_dir_mode()?
+      .map(|m| m.uses_node_modules_dir())
+      .or(flags.vendor)
+      .or_else(|| root_folder.deno_json.as_ref().and_then(|c| c.json.vendor))
+  };
   let path = if use_node_modules_dir == Some(false) {
     return Ok(None);
   } else if let Some(state) = &*NPM_PROCESS_STATE {
     return Ok(state.local_node_modules_path.as_ref().map(PathBuf::from));
-  } else if let Some(package_json_path) = maybe_package_json.map(|c| &c.path) {
+  } else if root_folder.pkg_json.is_some() {
+    let node_modules_dir = resolve_from_root(root_folder, cwd);
     if let Ok(deno_dir) = deno_dir_provider.get_or_create() {
       // `deno_dir.root` can be symlink in macOS
       if let Ok(root) = canonicalize_path_maybe_not_exists(&deno_dir.root) {
-        if package_json_path.starts_with(root) {
+        if node_modules_dir.starts_with(root) {
           // if the package.json is in deno_dir, then do not use node_modules
           // next to it as local node_modules dir
           return Ok(None);
         }
       }
     }
     // auto-discover the local_node_modules_folder when a package.json exists
     // and it's not in deno_dir
-    package_json_path.parent().unwrap().join("node_modules")
+    node_modules_dir
   } else if use_node_modules_dir.is_none() {
     return Ok(None);
-  } else if let Some(config_path) = maybe_config_file
-    .as_ref()
-    .and_then(|c| c.specifier.to_file_path().ok())
-  {
-    config_path.parent().unwrap().join("node_modules")
   } else {
-    cwd.join("node_modules")
+    resolve_from_root(root_folder, cwd)
   };
   Ok(Some(canonicalize_path_maybe_not_exists(&path)?))
 }
@@ -1886,19 +1846,18 @@ pub fn npm_pkg_req_ref_to_binary_command(
 pub fn config_to_deno_graph_workspace_member(
   config: &ConfigFile,
 ) -> Result<deno_graph::WorkspaceMember, AnyError> {
-  let nv = deno_semver::package::PackageNv {
-    name: match &config.json.name {
-      Some(name) => name.clone(),
-      None => bail!("Missing 'name' field in config file."),
-    },
-    version: match &config.json.version {
-      Some(name) => deno_semver::Version::parse_standard(name)?,
-      None => bail!("Missing 'version' field in config file."),
-    },
+  let name = match &config.json.name {
+    Some(name) => name.clone(),
+    None => bail!("Missing 'name' field in config file."),
+  };
+  let version = match &config.json.version {
+    Some(name) => Some(deno_semver::Version::parse_standard(name)?),
+    None => None,
   };
   Ok(deno_graph::WorkspaceMember {
     base: config.specifier.join("./").unwrap(),
-    nv,
+    name,
+    version,
     exports: config.to_exports_config()?.into_map(),
   })
 }
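The new resolve_from_root helper above falls back from the deno.json directory to the package.json directory to the cwd, using Cow to avoid cloning where it can. A self-contained sketch of the same fallback pattern, with hypothetical plain-PathBuf fields standing in for the real FolderConfigs type, could read:

use std::borrow::Cow;
use std::path::{Path, PathBuf};

// Hypothetical stand-ins for the deno.json / package.json folder configs.
struct FolderConfigs {
    deno_json_dir: Option<PathBuf>,
    pkg_json_dir: Option<PathBuf>,
}

// Prefer the deno.json directory, then the package.json directory,
// then fall back to the current working directory.
fn resolve_from_root(root: &FolderConfigs, cwd: &Path) -> PathBuf {
    root.deno_json_dir
        .as_deref()
        .map(Cow::Borrowed)
        .or_else(|| root.pkg_json_dir.as_deref().map(Cow::Borrowed))
        .unwrap_or(Cow::Borrowed(cwd))
        .join("node_modules")
}

fn main() {
    let root = FolderConfigs {
        deno_json_dir: None,
        pkg_json_dir: Some(PathBuf::from("/work/app")),
    };
    // No deno.json directory, so the package.json directory wins.
    let dir = resolve_from_root(&root, Path::new("/tmp"));
    assert_eq!(dir, PathBuf::from("/work/app/node_modules"));
}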
@@ -123,19 +123,19 @@ impl AuthTokens {
   pub fn new(maybe_tokens_str: Option<String>) -> Self {
     let mut tokens = Vec::new();
     if let Some(tokens_str) = maybe_tokens_str {
-      for token_str in tokens_str.split(';') {
+      for token_str in tokens_str.trim().split(';') {
         if token_str.contains('@') {
-          let pair: Vec<&str> = token_str.rsplitn(2, '@').collect();
-          let token = pair[1];
-          let host = AuthDomain::from(pair[0]);
+          let mut iter = token_str.rsplitn(2, '@');
+          let host = AuthDomain::from(iter.next().unwrap());
+          let token = iter.next().unwrap();
           if token.contains(':') {
-            let pair: Vec<&str> = token.rsplitn(2, ':').collect();
-            let username = pair[1].to_string();
-            let password = pair[0].to_string();
+            let mut iter = token.rsplitn(2, ':');
+            let password = iter.next().unwrap().to_owned();
+            let username = iter.next().unwrap().to_owned();
             tokens.push(AuthToken {
               host,
               token: AuthTokenData::Basic { username, password },
-            })
+            });
           } else {
             tokens.push(AuthToken {
               host,
@@ -211,6 +211,40 @@ mod tests {
     );
   }

+  #[test]
+  fn test_auth_tokens_space() {
+    let auth_tokens = AuthTokens::new(Some(
+      " abc123@deno.land;def456@example.com\t".to_string(),
+    ));
+    let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer abc123".to_string()
+    );
+    let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer def456".to_string()
+    );
+  }
+
+  #[test]
+  fn test_auth_tokens_newline() {
+    let auth_tokens = AuthTokens::new(Some(
+      "\nabc123@deno.land;def456@example.com\n".to_string(),
+    ));
+    let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer abc123".to_string()
+    );
+    let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
+    assert_eq!(
+      auth_tokens.get(&fixture).unwrap().to_string(),
+      "Bearer def456".to_string()
+    );
+  }
+
   #[test]
   fn test_auth_tokens_port() {
     let auth_tokens =
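The token-parsing rewrite above relies on rsplitn scanning from the right, so the iterator yields the text after the last separator first; that is why the new code reads the host (or password) before the token (or username). A quick standalone check of that ordering, with made-up token values:

fn main() {
    // "token@host": rsplitn(2, '@') yields the host first, then the token.
    let token_str = "abc123@deno.land";
    let mut iter = token_str.rsplitn(2, '@');
    assert_eq!(iter.next(), Some("deno.land"));
    assert_eq!(iter.next(), Some("abc123"));

    // Same trick for "username:password" credentials, split on ':'.
    let creds = "user:s3cret";
    let mut iter = creds.rsplitn(2, ':');
    let password = iter.next().unwrap();
    let username = iter.next().unwrap();
    assert_eq!((username, password), ("user", "s3cret"));
}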
@@ -4,9 +4,10 @@ use deno_core::serde::Deserialize;
 use deno_core::serde_json;
 use deno_core::serde_json::json;
 use deno_core::serde_json::Value;
-use deno_core::url::Url;
+use lsp_types::Uri;
 use std::collections::HashMap;
 use std::path::Path;
+use std::str::FromStr;
 use std::time::Duration;
 use test_util::lsp::LspClientBuilder;
 use test_util::PathRef;
@@ -91,7 +92,7 @@ fn bench_deco_apps_edits(deno_exe: &Path) -> Duration {
     .build();
   client.initialize(|c| {
     c.set_workspace_folders(vec![lsp_types::WorkspaceFolder {
-      uri: Url::from_file_path(&apps).unwrap(),
+      uri: apps.uri_dir(),
       name: "apps".to_string(),
     }]);
     c.set_deno_enable(true);
@@ -283,7 +284,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
     "textDocument/didChange",
     lsp::DidChangeTextDocumentParams {
       text_document: lsp::VersionedTextDocumentIdentifier {
-        uri: Url::parse(&file_name).unwrap(),
+        uri: Uri::from_str(&file_name).unwrap(),
         version: 2,
       },
       content_changes: vec![lsp::TextDocumentContentChangeEvent {
@@ -310,7 +311,7 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
     "textDocument/formatting",
     lsp::DocumentFormattingParams {
       text_document: lsp::TextDocumentIdentifier {
-        uri: Url::parse(&file_name).unwrap(),
+        uri: Uri::from_str(&file_name).unwrap(),
       },
       options: lsp::FormattingOptions {
         tab_size: 2,
@@ -143,29 +143,6 @@ const EXEC_TIME_BENCHMARKS: &[(&str, &[&str], Option<i32>)] = &[
     ],
     None,
   ),
-  (
-    "bundle",
-    &[
-      "bundle",
-      "--unstable",
-      "--config",
-      "tests/config/deno.json",
-      "tests/util/std/http/file_server_test.ts",
-    ],
-    None,
-  ),
-  (
-    "bundle_no_check",
-    &[
-      "bundle",
-      "--no-check",
-      "--unstable",
-      "--config",
-      "tests/config/deno.json",
-      "tests/util/std/http/file_server_test.ts",
-    ],
-    None,
-  ),
 ];

 const RESULT_KEYS: &[&str] =
@@ -314,40 +291,6 @@ fn get_binary_sizes(target_dir: &Path) -> Result<HashMap<String, i64>> {
   Ok(sizes)
 }

-const BUNDLES: &[(&str, &str)] = &[
-  ("file_server", "./tests/util/std/http/file_server.ts"),
-  ("welcome", "./tests/testdata/welcome.ts"),
-];
-fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
-  let mut sizes = HashMap::<String, i64>::new();
-
-  for (name, url) in BUNDLES {
-    let path = format!("{name}.bundle.js");
-    test_util::run(
-      &[
-        deno_exe.to_str().unwrap(),
-        "bundle",
-        "--unstable",
-        "--config",
-        "tests/config/deno.json",
-        url,
-        &path,
-      ],
-      None,
-      None,
-      None,
-      true,
-    );
-
-    let file = PathBuf::from(path);
-    assert!(file.is_file());
-    sizes.insert(name.to_string(), file.metadata()?.len() as i64);
-    let _ = fs::remove_file(file);
-  }
-
-  Ok(sizes)
-}
-
 fn run_max_mem_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
   let mut results = HashMap::<String, i64>::new();

@@ -415,7 +358,6 @@ async fn main() -> Result<()> {
   let mut args = env::args();

   let mut benchmarks = vec![
-    "bundle",
     "exec_time",
     "binary_size",
     "cargo_deps",
@@ -465,11 +407,6 @@ async fn main() -> Result<()> {
     ..Default::default()
   };

-  if benchmarks.contains(&"bundle") {
-    let bundle_size = bundle_benchmark(&deno_exe)?;
-    new_data.bundle_size = bundle_size;
-  }
-
   if benchmarks.contains(&"exec_time") {
     let exec_times = run_exec_time(&deno_exe, &target_dir)?;
     new_data.benchmark = exec_times;
165
cli/cache/emit.rs
vendored
|
@@ -5,33 +5,25 @@ use std::path::PathBuf;
|
|||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::unsync::sync::AtomicFlag;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::DiskCache;
|
||||
use super::FastInsecureHasher;
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
struct EmitMetadata {
|
||||
pub source_hash: u64,
|
||||
pub emit_hash: u64,
|
||||
}
|
||||
|
||||
/// The cache that stores previously emitted files.
|
||||
pub struct EmitCache {
|
||||
disk_cache: DiskCache,
|
||||
cli_version: &'static str,
|
||||
emit_failed_flag: AtomicFlag,
|
||||
file_serializer: EmitFileSerializer,
|
||||
}
|
||||
|
||||
impl EmitCache {
|
||||
pub fn new(disk_cache: DiskCache) -> Self {
|
||||
Self {
|
||||
disk_cache,
|
||||
cli_version: crate::version::DENO_VERSION_INFO.deno,
|
||||
emit_failed_flag: Default::default(),
|
||||
file_serializer: EmitFileSerializer {
|
||||
cli_version: crate::version::DENO_VERSION_INFO.deno,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -48,37 +40,11 @@ impl EmitCache {
|
|||
specifier: &ModuleSpecifier,
|
||||
expected_source_hash: u64,
|
||||
) -> Option<Vec<u8>> {
|
||||
let meta_filename = self.get_meta_filename(specifier)?;
|
||||
let emit_filename = self.get_emit_filename(specifier)?;
|
||||
|
||||
// load and verify the meta data file is for this source and CLI version
|
||||
let bytes = self.disk_cache.get(&meta_filename).ok()?;
|
||||
let meta: EmitMetadata = serde_json::from_slice(&bytes).ok()?;
|
||||
if meta.source_hash != expected_source_hash {
|
||||
return None;
|
||||
}
|
||||
|
||||
// load and verify the emit is for the meta data
|
||||
let emit_bytes = self.disk_cache.get(&emit_filename).ok()?;
|
||||
if meta.emit_hash != compute_emit_hash(&emit_bytes, self.cli_version) {
|
||||
return None;
|
||||
}
|
||||
|
||||
// everything looks good, return it
|
||||
Some(emit_bytes)
|
||||
}
|
||||
|
||||
/// Gets the filepath which stores the emit.
|
||||
pub fn get_emit_filepath(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<PathBuf> {
|
||||
Some(
|
||||
self
|
||||
.disk_cache
|
||||
.location
|
||||
.join(self.get_emit_filename(specifier)?),
|
||||
)
|
||||
let bytes = self.disk_cache.get(&emit_filename).ok()?;
|
||||
self
|
||||
.file_serializer
|
||||
.deserialize(bytes, expected_source_hash)
|
||||
}
|
||||
|
||||
/// Sets the emit code in the cache.
|
||||
|
@ -107,32 +73,26 @@ impl EmitCache {
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
let meta_filename = self
|
||||
.get_meta_filename(specifier)
|
||||
.ok_or_else(|| anyhow!("Could not get meta filename."))?;
|
||||
let emit_filename = self
|
||||
.get_emit_filename(specifier)
|
||||
.ok_or_else(|| anyhow!("Could not get emit filename."))?;
|
||||
|
||||
// save the metadata
|
||||
let metadata = EmitMetadata {
|
||||
source_hash,
|
||||
emit_hash: compute_emit_hash(code, self.cli_version),
|
||||
};
|
||||
self
|
||||
.disk_cache
|
||||
.set(&meta_filename, &serde_json::to_vec(&metadata)?)?;
|
||||
|
||||
// save the emit source
|
||||
self.disk_cache.set(&emit_filename, code)?;
|
||||
let cache_data = self.file_serializer.serialize(code, source_hash);
|
||||
self.disk_cache.set(&emit_filename, &cache_data)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_meta_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
|
||||
self
|
||||
.disk_cache
|
||||
.get_cache_filename_with_extension(specifier, "meta")
|
||||
/// Gets the filepath which stores the emit.
|
||||
pub fn get_emit_filepath(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<PathBuf> {
|
||||
Some(
|
||||
self
|
||||
.disk_cache
|
||||
.location
|
||||
.join(self.get_emit_filename(specifier)?),
|
||||
)
|
||||
}
|
||||
|
||||
fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
|
||||
|
@@ -142,15 +102,68 @@ impl EmitCache {
  }
}

fn compute_emit_hash(bytes: &[u8], cli_version: &str) -> u64 {
  // it's ok to use an insecure hash here because
  // if someone can change the emit source then they
  // can also change the version hash
  FastInsecureHasher::new_without_deno_version() // use cli_version param instead
    .write(bytes)
    // emit should not be re-used between cli versions
    .write_str(cli_version)
    .finish()
const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";

struct EmitFileSerializer {
  cli_version: &'static str,
}

impl EmitFileSerializer {
  pub fn deserialize(
    &self,
    mut bytes: Vec<u8>,
    expected_source_hash: u64,
  ) -> Option<Vec<u8>> {
    let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
    let (content, last_line) = bytes.split_at(last_newline_index);
    let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
    let hashes = String::from_utf8_lossy(hashes);
    let (source_hash, emit_hash) = hashes.split_once(',')?;

    // verify the meta data file is for this source and CLI version
    let source_hash = source_hash.parse::<u64>().ok()?;
    if source_hash != expected_source_hash {
      return None;
    }
    let emit_hash = emit_hash.parse::<u64>().ok()?;
    // prevent using an emit from a different cli version or emits that were tampered with
    if emit_hash != self.compute_emit_hash(content) {
      return None;
    }

    // everything looks good, truncate and return it
    bytes.truncate(content.len());
    Some(bytes)
  }

  pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
    let source_hash = source_hash.to_string();
    let emit_hash = self.compute_emit_hash(code).to_string();
    let capacity = code.len()
      + LAST_LINE_PREFIX.len()
      + source_hash.len()
      + 1
      + emit_hash.len();
    let mut cache_data = Vec::with_capacity(capacity);
    cache_data.extend(code);
    cache_data.extend(LAST_LINE_PREFIX.as_bytes());
    cache_data.extend(source_hash.as_bytes());
    cache_data.push(b',');
    cache_data.extend(emit_hash.as_bytes());
    debug_assert_eq!(cache_data.len(), capacity);
    cache_data
  }

  fn compute_emit_hash(&self, bytes: &[u8]) -> u64 {
    // it's ok to use an insecure hash here because
    // if someone can change the emit source then they
    // can also change the version hash
    crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
      .write(bytes)
      // emit should not be re-used between cli versions
      .write_str(self.cli_version)
      .finish()
  }
}

#[cfg(test)]
|
||||
|
@ -165,7 +178,9 @@ mod test {
|
|||
let disk_cache = DiskCache::new(temp_dir.path().as_path());
|
||||
let cache = EmitCache {
|
||||
disk_cache: disk_cache.clone(),
|
||||
cli_version: "1.0.0",
|
||||
file_serializer: EmitFileSerializer {
|
||||
cli_version: "1.0.0",
|
||||
},
|
||||
emit_failed_flag: Default::default(),
|
||||
};
|
||||
let to_string =
|
||||
|
@ -197,7 +212,9 @@ mod test {
|
|||
// try changing the cli version (should not load previous ones)
|
||||
let cache = EmitCache {
|
||||
disk_cache: disk_cache.clone(),
|
||||
cli_version: "2.0.0",
|
||||
file_serializer: EmitFileSerializer {
|
||||
cli_version: "2.0.0",
|
||||
},
|
||||
emit_failed_flag: Default::default(),
|
||||
};
|
||||
assert_eq!(cache.get_emit_code(&specifier1, 10), None);
|
||||
|
@ -206,7 +223,9 @@ mod test {
|
|||
// recreating the cache should still load the data because the CLI version is the same
|
||||
let cache = EmitCache {
|
||||
disk_cache,
|
||||
cli_version: "2.0.0",
|
||||
file_serializer: EmitFileSerializer {
|
||||
cli_version: "2.0.0",
|
||||
},
|
||||
emit_failed_flag: Default::default(),
|
||||
};
|
||||
assert_eq!(
|
||||
|
|
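For context on the cli/cache/emit.rs hunks above: the separate .meta JSON file is gone, and both hashes now ride on the last line of the emit file itself. A minimal round-trip sketch, assuming it runs inside cli/cache/emit.rs where EmitFileSerializer is in scope (the hash value and module body are made up):

let serializer = EmitFileSerializer { cli_version: "1.0.0" };
let code = b"export const a = 1;\n";
// serialize() appends exactly one line: "\n// denoCacheMetadata=<source_hash>,<emit_hash>"
let cached = serializer.serialize(code, 42);
// deserialize() checks both hashes, strips that line, and returns the original emit
let restored = serializer.deserialize(cached, 42);
assert_eq!(restored.as_deref(), Some(code.as_slice()));
// A mismatched source hash, another CLI version, or a tampered emit yields None instead.

Keeping the emit and its integrity data in one file means a partially written or hand-edited cache entry simply fails validation and gets re-transpiled.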
12
cli/cache/mod.rs
vendored
|
@@ -62,12 +62,8 @@ pub const CACHE_PERM: u32 = 0o644;
|
|||
pub struct RealDenoCacheEnv;
|
||||
|
||||
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Option<Vec<u8>>> {
|
||||
match std::fs::read(path) {
|
||||
Ok(s) => Ok(Some(s)),
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
|
||||
std::fs::read(path)
|
||||
}
|
||||
|
||||
fn atomic_write_file(
|
||||
|
@ -78,6 +74,10 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
|||
atomic_write_file_with_retries(path, bytes, CACHE_PERM)
|
||||
}
|
||||
|
||||
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
|
||||
std::fs::remove_file(path)
|
||||
}
|
||||
|
||||
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
|
||||
match std::fs::metadata(path) {
|
||||
Ok(metadata) => Ok(Some(
|
||||
|
|
|
@ -4,3 +4,6 @@ disallowed-methods = [
|
|||
disallowed-types = [
|
||||
{ path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },
|
||||
]
|
||||
ignore-interior-mutability = [
|
||||
"lsp_types::Uri",
|
||||
]
|
||||
|
|
74
cli/emit.rs
|
@@ -5,6 +5,9 @@ use crate::cache::FastInsecureHasher;
|
|||
use crate::cache::ParsedSourceCache;
|
||||
|
||||
use deno_ast::SourceMapOption;
|
||||
use deno_ast::SourceRange;
|
||||
use deno_ast::SourceRanged;
|
||||
use deno_ast::SourceRangedForSpanned;
|
||||
use deno_ast::TranspileResult;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::stream::FuturesUnordered;
|
||||
|
@ -259,6 +262,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
|
|||
// the cache in order to not transpile owned
|
||||
let parsed_source = parsed_source_cache
|
||||
.remove_or_parse_module(specifier, source, media_type)?;
|
||||
ensure_no_import_assertion(&parsed_source)?;
|
||||
Ok(parsed_source.transpile(transpile_options, emit_options)?)
|
||||
}
|
||||
|
||||
|
@@ -284,3 +288,73 @@ impl<'a> EmitParsedSourceHelper<'a> {
    transpiled_source.source.into_boxed_slice().into()
  }
}

// todo(dsherret): this is a temporary measure until we have swc erroring for this
fn ensure_no_import_assertion(
  parsed_source: &deno_ast::ParsedSource,
) -> Result<(), AnyError> {
  fn has_import_assertion(text: &str) -> bool {
    // good enough
    text.contains(" assert ") && !text.contains(" with ")
  }

  fn create_err(
    parsed_source: &deno_ast::ParsedSource,
    range: SourceRange,
  ) -> AnyError {
    let text_info = parsed_source.text_info_lazy();
    let loc = text_info.line_and_column_display(range.start);
    let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string();
    msg.push_str("\n\n");
    msg.push_str(range.text_fast(text_info));
    msg.push_str("\n\n");
    msg.push_str(&format!(
      " at {}:{}:{}\n",
      parsed_source.specifier(),
      loc.line_number,
      loc.column_number,
    ));
    deno_core::anyhow::anyhow!("{}", msg)
  }

  let Some(module) = parsed_source.program_ref().as_module() else {
    return Ok(());
  };

  for item in &module.body {
    match item {
      deno_ast::swc::ast::ModuleItem::ModuleDecl(decl) => match decl {
        deno_ast::swc::ast::ModuleDecl::Import(n) => {
          if n.with.is_some()
            && has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
          {
            return Err(create_err(parsed_source, n.range()));
          }
        }
        deno_ast::swc::ast::ModuleDecl::ExportAll(n) => {
          if n.with.is_some()
            && has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
          {
            return Err(create_err(parsed_source, n.range()));
          }
        }
        deno_ast::swc::ast::ModuleDecl::ExportNamed(n) => {
          if n.with.is_some()
            && has_import_assertion(n.text_fast(parsed_source.text_info_lazy()))
          {
            return Err(create_err(parsed_source, n.range()));
          }
        }
        deno_ast::swc::ast::ModuleDecl::ExportDecl(_)
        | deno_ast::swc::ast::ModuleDecl::ExportDefaultDecl(_)
        | deno_ast::swc::ast::ModuleDecl::ExportDefaultExpr(_)
        | deno_ast::swc::ast::ModuleDecl::TsImportEquals(_)
        | deno_ast::swc::ast::ModuleDecl::TsExportAssignment(_)
        | deno_ast::swc::ast::ModuleDecl::TsNamespaceExport(_) => {}
      },
      deno_ast::swc::ast::ModuleItem::Stmt(_) => {}
    }
  }

  Ok(())
}
|
|
|
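The new ensure_no_import_assertion() above rejects the deprecated assert keyword while letting the with keyword through. A small sketch of the text heuristic it leans on, using hypothetical module text (the real check additionally requires the parsed swc node to carry an assertion clause):

fn has_import_assertion(text: &str) -> bool {
  // same "good enough" heuristic as in the hunk above
  text.contains(" assert ") && !text.contains(" with ")
}

fn main() {
  assert!(has_import_assertion(
    r#"import data from "./data.json" assert { type: "json" };"#
  ));
  assert!(!has_import_assertion(
    r#"import data from "./data.json" with { type: "json" };"#
  ));
}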
@ -304,8 +304,11 @@ impl CliFactory {
|
|||
let global_cache = self.global_http_cache()?.clone();
|
||||
match self.cli_options()?.vendor_dir_path() {
|
||||
Some(local_path) => {
|
||||
let local_cache =
|
||||
LocalHttpCache::new(local_path.clone(), global_cache);
|
||||
let local_cache = LocalHttpCache::new(
|
||||
local_path.clone(),
|
||||
global_cache,
|
||||
deno_cache_dir::GlobalToLocalCopy::Allow,
|
||||
);
|
||||
Ok(Arc::new(local_cache))
|
||||
}
|
||||
None => Ok(global_cache),
|
||||
|
@ -720,9 +723,9 @@ impl CliFactory {
|
|||
checker.warn_on_legacy_unstable();
|
||||
}
|
||||
let unstable_features = cli_options.unstable_features();
|
||||
for (flag_name, _, _) in crate::UNSTABLE_GRANULAR_FLAGS {
|
||||
if unstable_features.contains(&flag_name.to_string()) {
|
||||
checker.enable_feature(flag_name);
|
||||
for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
|
||||
if unstable_features.contains(&granular_flag.name.to_string()) {
|
||||
checker.enable_feature(granular_flag.name);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -790,20 +793,12 @@ impl CliFactory {
|
|||
self.maybe_inspector_server()?.clone(),
|
||||
cli_options.maybe_lockfile().cloned(),
|
||||
self.feature_checker()?.clone(),
|
||||
self.create_cli_main_worker_options()?,
|
||||
cli_options.node_ipc_fd(),
|
||||
cli_options.serve_port(),
|
||||
cli_options.serve_host(),
|
||||
cli_options.enable_future_features(),
|
||||
// TODO(bartlomieju): temporarily disabled
|
||||
// cli_options.disable_deprecated_api_warning,
|
||||
true,
|
||||
cli_options.verbose_deprecated_api_warning,
|
||||
if cli_options.code_cache_enabled() {
|
||||
Some(self.code_cache()?.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
self.create_cli_main_worker_options()?,
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -868,6 +863,9 @@ impl CliFactory {
|
|||
unstable: cli_options.legacy_unstable_flag(),
|
||||
create_hmr_runner,
|
||||
create_coverage_collector,
|
||||
node_ipc: cli_options.node_ipc_fd(),
|
||||
serve_port: cli_options.serve_port(),
|
||||
serve_host: cli_options.serve_host(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -11,7 +11,6 @@ use crate::http_util::HttpClientProvider;
|
|||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::generic_error;
|
||||
|
@ -52,6 +51,25 @@ pub enum FileOrRedirect {
|
|||
Redirect(ModuleSpecifier),
|
||||
}
|
||||
|
||||
impl FileOrRedirect {
|
||||
fn from_deno_cache_entry(
|
||||
specifier: &ModuleSpecifier,
|
||||
cache_entry: deno_cache_dir::CacheEntry,
|
||||
) -> Result<Self, AnyError> {
|
||||
if let Some(redirect_to) = cache_entry.metadata.headers.get("location") {
|
||||
let redirect =
|
||||
deno_core::resolve_import(redirect_to, specifier.as_str())?;
|
||||
Ok(FileOrRedirect::Redirect(redirect))
|
||||
} else {
|
||||
Ok(FileOrRedirect::File(File {
|
||||
specifier: specifier.clone(),
|
||||
maybe_headers: Some(cache_entry.metadata.headers),
|
||||
source: Arc::from(cache_entry.content),
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A structure representing a source file.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct File {
|
||||
|
@ -238,45 +256,32 @@ impl FileFetcher {
|
|||
);
|
||||
|
||||
let cache_key = self.http_cache.cache_item_key(specifier)?; // compute this once
|
||||
let Some(headers) = self.http_cache.read_headers(&cache_key)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
if let Some(redirect_to) = headers.get("location") {
|
||||
let redirect =
|
||||
deno_core::resolve_import(redirect_to, specifier.as_str())?;
|
||||
return Ok(Some(FileOrRedirect::Redirect(redirect)));
|
||||
}
|
||||
let result = self.http_cache.read_file_bytes(
|
||||
let result = self.http_cache.get(
|
||||
&cache_key,
|
||||
maybe_checksum
|
||||
.as_ref()
|
||||
.map(|c| deno_cache_dir::Checksum::new(c.as_str())),
|
||||
deno_cache_dir::GlobalToLocalCopy::Allow,
|
||||
);
|
||||
let bytes = match result {
|
||||
Ok(Some(bytes)) => bytes,
|
||||
Ok(None) => return Ok(None),
|
||||
match result {
|
||||
Ok(Some(cache_data)) => Ok(Some(FileOrRedirect::from_deno_cache_entry(
|
||||
specifier, cache_data,
|
||||
)?)),
|
||||
Ok(None) => Ok(None),
|
||||
Err(err) => match err {
|
||||
deno_cache_dir::CacheReadFileError::Io(err) => return Err(err.into()),
|
||||
deno_cache_dir::CacheReadFileError::Io(err) => Err(err.into()),
|
||||
deno_cache_dir::CacheReadFileError::ChecksumIntegrity(err) => {
|
||||
// convert to the equivalent deno_graph error so that it
|
||||
// enhances it if this is passed to deno_graph
|
||||
return Err(
|
||||
Err(
|
||||
deno_graph::source::ChecksumIntegrityError {
|
||||
actual: err.actual,
|
||||
expected: err.expected,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
Ok(Some(FileOrRedirect::File(File {
|
||||
specifier: specifier.clone(),
|
||||
maybe_headers: Some(headers),
|
||||
source: Arc::from(bytes),
|
||||
})))
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a data URL into a file, resulting in an error if the URL is
|
||||
|
@ -363,12 +368,30 @@ impl FileFetcher {
|
|||
);
|
||||
}
|
||||
|
||||
let maybe_etag = self
|
||||
let maybe_etag_cache_entry = self
|
||||
.http_cache
|
||||
.cache_item_key(specifier)
|
||||
.ok()
|
||||
.and_then(|key| self.http_cache.read_headers(&key).ok().flatten())
|
||||
.and_then(|headers| headers.get("etag").cloned());
|
||||
.and_then(|key| {
|
||||
self
|
||||
.http_cache
|
||||
.get(
|
||||
&key,
|
||||
maybe_checksum
|
||||
.as_ref()
|
||||
.map(|c| deno_cache_dir::Checksum::new(c.as_str())),
|
||||
)
|
||||
.ok()
|
||||
.flatten()
|
||||
})
|
||||
.and_then(|cache_entry| {
|
||||
cache_entry
|
||||
.metadata
|
||||
.headers
|
||||
.get("etag")
|
||||
.cloned()
|
||||
.map(|etag| (cache_entry, etag))
|
||||
});
|
||||
let maybe_auth_token = self.auth_tokens.get(specifier);
|
||||
|
||||
async fn handle_request_or_server_error(
|
||||
|
@ -390,7 +413,6 @@ impl FileFetcher {
|
|||
}
|
||||
}
|
||||
|
||||
let mut maybe_etag = maybe_etag;
|
||||
let mut retried = false; // retry intermittent failures
|
||||
let result = loop {
|
||||
let result = match self
|
||||
|
@ -399,31 +421,17 @@ impl FileFetcher {
|
|||
.fetch_no_follow(FetchOnceArgs {
|
||||
url: specifier.clone(),
|
||||
maybe_accept: maybe_accept.map(ToOwned::to_owned),
|
||||
maybe_etag: maybe_etag.clone(),
|
||||
maybe_etag: maybe_etag_cache_entry
|
||||
.as_ref()
|
||||
.map(|(_, etag)| etag.clone()),
|
||||
maybe_auth_token: maybe_auth_token.clone(),
|
||||
maybe_progress_guard: maybe_progress_guard.as_ref(),
|
||||
})
|
||||
.await?
|
||||
{
|
||||
FetchOnceResult::NotModified => {
|
||||
let file_or_redirect =
|
||||
self.fetch_cached_no_follow(specifier, maybe_checksum)?;
|
||||
match file_or_redirect {
|
||||
Some(file_or_redirect) => Ok(file_or_redirect),
|
||||
None => {
|
||||
// Someone may have deleted the body from the cache since
|
||||
// it's currently stored in a separate file from the headers,
|
||||
// so delete the etag and try again
|
||||
if maybe_etag.is_some() {
|
||||
debug!("Cache body not found. Trying again without etag.");
|
||||
maybe_etag = None;
|
||||
continue;
|
||||
} else {
|
||||
// should never happen
|
||||
bail!("Your deno cache directory is in an unrecoverable state. Please delete it and try again.")
|
||||
}
|
||||
}
|
||||
}
|
||||
let (cache_entry, _) = maybe_etag_cache_entry.unwrap();
|
||||
FileOrRedirect::from_deno_cache_entry(specifier, cache_entry)
|
||||
}
|
||||
FetchOnceResult::Redirect(redirect_url, headers) => {
|
||||
self.http_cache.set(specifier, headers, &[])?;
|
||||
|
@ -1480,13 +1488,10 @@ mod tests {
|
|||
let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap();
|
||||
let bytes = file_fetcher
|
||||
.http_cache
|
||||
.read_file_bytes(
|
||||
&cache_key,
|
||||
None,
|
||||
deno_cache_dir::GlobalToLocalCopy::Allow,
|
||||
)
|
||||
.get(&cache_key, None)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
.unwrap()
|
||||
.content;
|
||||
String::from_utf8(bytes).unwrap()
|
||||
}
|
||||
|
||||
|
|
|
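The file_fetcher.rs changes above fold the separate read_headers() and read_file_bytes() calls into a single HttpCache::get() that returns headers and body together, and route redirects through FileOrRedirect::from_deno_cache_entry(). A rough sketch of the new cached-fetch path, assuming it runs inside a FileFetcher method where self.http_cache and specifier are in scope (error handling elided):

let key = self.http_cache.cache_item_key(specifier)?;
if let Some(entry) = self.http_cache.get(&key, None).ok().flatten() {
  // entry.metadata.headers and entry.content now arrive from one read;
  // a "location" header turns the result into FileOrRedirect::Redirect
  let file_or_redirect =
    FileOrRedirect::from_deno_cache_entry(specifier, entry)?;
  // ... hand file_or_redirect back to the caller ...
}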
@@ -26,7 +26,6 @@ use deno_graph::ModuleLoadError;
|
|||
use deno_graph::WorkspaceFastCheckOption;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
|
@ -35,7 +34,6 @@ use deno_graph::source::Loader;
|
|||
use deno_graph::source::ResolutionMode;
|
||||
use deno_graph::source::ResolveError;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::Module;
|
||||
use deno_graph::ModuleError;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::ModuleGraphError;
|
||||
|
@ -44,10 +42,13 @@ use deno_graph::SpecifierError;
|
|||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_semver::jsr::JsrDepPackageReq;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::Version;
|
||||
use import_map::ImportMapError;
|
||||
use std::collections::HashSet;
|
||||
use std::error::Error;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
|
@ -110,7 +111,7 @@ pub fn graph_valid(
|
|||
ModuleGraphError::ModuleError(error) => {
|
||||
enhanced_lockfile_error_message(error)
|
||||
.or_else(|| enhanced_sloppy_imports_error_message(fs, error))
|
||||
.unwrap_or_else(|| format!("{}", error))
|
||||
.unwrap_or_else(|| format_deno_graph_error(error))
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -164,7 +165,10 @@ pub fn graph_valid(
|
|||
} else {
|
||||
// finally surface the npm resolution result
|
||||
if let Err(err) = &graph.npm_dep_graph_result {
|
||||
return Err(custom_error(get_error_class_name(err), format!("{}", err)));
|
||||
return Err(custom_error(
|
||||
get_error_class_name(err),
|
||||
format_deno_graph_error(err.as_ref().deref()),
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -463,7 +467,7 @@ impl ModuleGraphBuilder {
|
|||
.content
|
||||
.packages
|
||||
.jsr
|
||||
.get(&package_nv.to_string())
|
||||
.get(package_nv)
|
||||
.map(|s| LoaderChecksum::new(s.integrity.clone()))
|
||||
}
|
||||
|
||||
|
@ -477,7 +481,7 @@ impl ModuleGraphBuilder {
|
|||
self
|
||||
.0
|
||||
.lock()
|
||||
.insert_package(package_nv.to_string(), checksum.into_string());
|
||||
.insert_package(package_nv.clone(), checksum.into_string());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -535,7 +539,12 @@ impl ModuleGraphBuilder {
|
|||
) -> Result<(), AnyError> {
|
||||
// ensure an "npm install" is done if the user has explicitly
|
||||
// opted into using a node_modules directory
|
||||
if self.options.node_modules_dir_enablement() == Some(true) {
|
||||
if self
|
||||
.options
|
||||
.node_modules_dir()?
|
||||
.map(|m| m.uses_node_modules_dir())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
|
||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||
}
|
||||
|
@ -556,16 +565,21 @@ impl ModuleGraphBuilder {
|
|||
}
|
||||
}
|
||||
}
|
||||
for (key, value) in &lockfile.content.packages.specifiers {
|
||||
if let Some(key) = key
|
||||
.strip_prefix("jsr:")
|
||||
.and_then(|key| PackageReq::from_str(key).ok())
|
||||
{
|
||||
if let Some(value) = value
|
||||
.strip_prefix("jsr:")
|
||||
.and_then(|value| PackageNv::from_str(value).ok())
|
||||
{
|
||||
graph.packages.add_nv(key, value);
|
||||
for (req_dep, value) in &lockfile.content.packages.specifiers {
|
||||
match req_dep.kind {
|
||||
deno_semver::package::PackageKind::Jsr => {
|
||||
if let Ok(version) = Version::parse_standard(value) {
|
||||
graph.packages.add_nv(
|
||||
req_dep.req.clone(),
|
||||
PackageNv {
|
||||
name: req_dep.req.name.clone(),
|
||||
version,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
deno_semver::package::PackageKind::Npm => {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -603,16 +617,15 @@ impl ModuleGraphBuilder {
|
|||
if has_jsr_package_mappings_changed {
|
||||
for (from, to) in graph.packages.mappings() {
|
||||
lockfile.insert_package_specifier(
|
||||
format!("jsr:{}", from),
|
||||
format!("jsr:{}", to),
|
||||
JsrDepPackageReq::jsr(from.clone()),
|
||||
to.version.to_string(),
|
||||
);
|
||||
}
|
||||
}
|
||||
// jsr packages
|
||||
if has_jsr_package_deps_changed {
|
||||
for (name, deps) in graph.packages.packages_with_deps() {
|
||||
lockfile
|
||||
.add_package_deps(&name.to_string(), deps.map(|s| s.to_string()));
|
||||
for (nv, deps) in graph.packages.packages_with_deps() {
|
||||
lockfile.add_package_deps(nv, deps.cloned());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -707,26 +720,9 @@ impl ModuleGraphBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn error_for_any_npm_specifier(
|
||||
graph: &ModuleGraph,
|
||||
) -> Result<(), AnyError> {
|
||||
for module in graph.modules() {
|
||||
match module {
|
||||
Module::Npm(module) => {
|
||||
bail!("npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: {}", module.specifier)
|
||||
}
|
||||
Module::Node(module) => {
|
||||
bail!("Node specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: node:{}", module.module_name)
|
||||
}
|
||||
Module::Js(_) | Module::Json(_) | Module::External(_) => {}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Adds more explanatory information to a resolution error.
|
||||
pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
|
||||
let mut message = format!("{error}");
|
||||
let mut message = format_deno_graph_error(error);
|
||||
|
||||
if let Some(specifier) = get_resolution_error_bare_node_specifier(error) {
|
||||
if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
|
||||
|
@@ -1022,6 +1018,49 @@ impl deno_graph::source::JsrUrlProvider for CliJsrUrlProvider {
  }
}

// todo(dsherret): We should change ModuleError to use thiserror so that
// we don't need to do this.
fn format_deno_graph_error(err: &dyn Error) -> String {
  use std::fmt::Write;

  let mut message = format!("{}", err);
  let mut maybe_source = err.source();

  if maybe_source.is_some() {
    let mut past_message = message.clone();
    let mut count = 0;
    let mut display_count = 0;
    while let Some(source) = maybe_source {
      let current_message = format!("{}", source);
      maybe_source = source.source();

      // sometimes an error might be repeated due to
      // being boxed multiple times in another AnyError
      if current_message != past_message {
        write!(message, "\n {}: ", display_count,).unwrap();
        for (i, line) in current_message.lines().enumerate() {
          if i > 0 {
            write!(message, "\n {}", line).unwrap();
          } else {
            write!(message, "{}", line).unwrap();
          }
        }
        display_count += 1;
      }

      if count > 8 {
        write!(message, "\n {}: ...", count).unwrap();
        break;
      }

      past_message = current_message;
      count += 1;
    }
  }

  message
}

#[cfg(test)]
|
||||
mod test {
|
||||
use std::sync::Arc;
|
||||
|
|
|
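The format_deno_graph_error() helper added above flattens a std::error::Error source() chain into an indented list so nested deno_graph failures stay readable. An illustrative sketch, assuming it runs inside cli/graph_util.rs where the function and deno_core::anyhow are in scope:

use deno_core::anyhow::anyhow;

// build a two-level chain: an outer context wrapping a root cause
let err = anyhow!("connection refused").context("failed to load module");
let formatted = format_deno_graph_error(&*err);
// expected shape (one indented line per distinct source in the chain):
//   failed to load module
//     0: connection refused
assert!(formatted.starts_with("failed to load module"));
assert!(formatted.contains("0: connection refused"));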
@@ -6,6 +6,7 @@ use super::documents::Documents;
|
|||
use super::language_server;
|
||||
use super::resolver::LspResolver;
|
||||
use super::tsc;
|
||||
use super::urls::url_to_uri;
|
||||
|
||||
use crate::args::jsr_url;
|
||||
use crate::tools::lint::CliLinter;
|
||||
|
@ -750,10 +751,11 @@ impl CodeActionCollection {
|
|||
.as_ref()
|
||||
.and_then(|d| serde_json::from_value::<Vec<DataQuickFix>>(d.clone()).ok())
|
||||
{
|
||||
let uri = url_to_uri(specifier)?;
|
||||
for quick_fix in data_quick_fixes {
|
||||
let mut changes = HashMap::new();
|
||||
changes.insert(
|
||||
specifier.clone(),
|
||||
uri.clone(),
|
||||
quick_fix
|
||||
.changes
|
||||
.into_iter()
|
||||
|
@ -795,6 +797,7 @@ impl CodeActionCollection {
|
|||
maybe_text_info: Option<&SourceTextInfo>,
|
||||
maybe_parsed_source: Option<&deno_ast::ParsedSource>,
|
||||
) -> Result<(), AnyError> {
|
||||
let uri = url_to_uri(specifier)?;
|
||||
let code = diagnostic
|
||||
.code
|
||||
.as_ref()
|
||||
|
@ -811,7 +814,7 @@ impl CodeActionCollection {
|
|||
|
||||
let mut changes = HashMap::new();
|
||||
changes.insert(
|
||||
specifier.clone(),
|
||||
uri.clone(),
|
||||
vec![lsp::TextEdit {
|
||||
new_text: prepend_whitespace(
|
||||
format!("// deno-lint-ignore {code}\n"),
|
||||
|
@ -892,7 +895,7 @@ impl CodeActionCollection {
|
|||
}
|
||||
|
||||
let mut changes = HashMap::new();
|
||||
changes.insert(specifier.clone(), vec![lsp::TextEdit { new_text, range }]);
|
||||
changes.insert(uri.clone(), vec![lsp::TextEdit { new_text, range }]);
|
||||
let ignore_file_action = lsp::CodeAction {
|
||||
title: format!("Disable {code} for the entire file"),
|
||||
kind: Some(lsp::CodeActionKind::QUICKFIX),
|
||||
|
@ -913,7 +916,7 @@ impl CodeActionCollection {
|
|||
|
||||
let mut changes = HashMap::new();
|
||||
changes.insert(
|
||||
specifier.clone(),
|
||||
uri,
|
||||
vec![lsp::TextEdit {
|
||||
new_text: "// deno-lint-ignore-file\n".to_string(),
|
||||
range: lsp::Range {
|
||||
|
|
|
@ -17,16 +17,6 @@ use std::path::Path;
|
|||
use std::sync::Arc;
|
||||
use std::time::SystemTime;
|
||||
|
||||
/// In the LSP, we disallow the cache from automatically copying from
|
||||
/// the global cache to the local cache for technical reasons.
|
||||
///
|
||||
/// 1. We need to verify the checksums from the lockfile are correct when
|
||||
/// moving from the global to the local cache.
|
||||
/// 2. We need to verify the checksums for JSR https specifiers match what
|
||||
/// is found in the package's manifest.
|
||||
pub const LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY: deno_cache_dir::GlobalToLocalCopy =
|
||||
deno_cache_dir::GlobalToLocalCopy::Disallow;
|
||||
|
||||
pub fn calculate_fs_version(
|
||||
cache: &LspCache,
|
||||
specifier: &ModuleSpecifier,
|
||||
|
|
|
@ -154,5 +154,7 @@ pub fn server_capabilities(
|
|||
// TODO(nayeemrmn): Support pull-based diagnostics.
|
||||
diagnostic_provider: None,
|
||||
inline_value_provider: None,
|
||||
inline_completion_provider: None,
|
||||
notebook_document_sync: None,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ use deno_core::anyhow::bail;
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::unsync::spawn;
|
||||
use lsp_types::Uri;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
use tower_lsp::lsp_types::ConfigurationItem;
|
||||
|
||||
|
@ -17,7 +18,6 @@ use super::config::WorkspaceSettings;
|
|||
use super::config::SETTINGS_SECTION;
|
||||
use super::lsp_custom;
|
||||
use super::testing::lsp_custom as testing_lsp_custom;
|
||||
use super::urls::LspClientUrl;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum TestingNotification {
|
||||
|
@ -52,14 +52,11 @@ impl Client {
|
|||
|
||||
pub async fn publish_diagnostics(
|
||||
&self,
|
||||
uri: LspClientUrl,
|
||||
uri: Uri,
|
||||
diags: Vec<lsp::Diagnostic>,
|
||||
version: Option<i32>,
|
||||
) {
|
||||
self
|
||||
.0
|
||||
.publish_diagnostics(uri.into_url(), diags, version)
|
||||
.await;
|
||||
self.0.publish_diagnostics(uri, diags, version).await;
|
||||
}
|
||||
|
||||
pub fn send_registry_state_notification(
|
||||
|
@ -149,7 +146,7 @@ impl OutsideLockClient {
|
|||
|
||||
pub async fn workspace_configuration(
|
||||
&self,
|
||||
scopes: Vec<Option<lsp::Url>>,
|
||||
scopes: Vec<Option<lsp::Uri>>,
|
||||
) -> Result<Vec<WorkspaceSettings>, AnyError> {
|
||||
self.0.workspace_configuration(scopes).await
|
||||
}
|
||||
|
@ -159,7 +156,7 @@ impl OutsideLockClient {
|
|||
trait ClientTrait: Send + Sync {
|
||||
async fn publish_diagnostics(
|
||||
&self,
|
||||
uri: lsp::Url,
|
||||
uri: lsp::Uri,
|
||||
diagnostics: Vec<lsp::Diagnostic>,
|
||||
version: Option<i32>,
|
||||
);
|
||||
|
@ -182,7 +179,7 @@ trait ClientTrait: Send + Sync {
|
|||
);
|
||||
async fn workspace_configuration(
|
||||
&self,
|
||||
scopes: Vec<Option<lsp::Url>>,
|
||||
scopes: Vec<Option<lsp::Uri>>,
|
||||
) -> Result<Vec<WorkspaceSettings>, AnyError>;
|
||||
async fn show_message(&self, message_type: lsp::MessageType, text: String);
|
||||
async fn register_capability(
|
||||
|
@ -198,7 +195,7 @@ struct TowerClient(tower_lsp::Client);
|
|||
impl ClientTrait for TowerClient {
|
||||
async fn publish_diagnostics(
|
||||
&self,
|
||||
uri: lsp::Url,
|
||||
uri: lsp::Uri,
|
||||
diagnostics: Vec<lsp::Diagnostic>,
|
||||
version: Option<i32>,
|
||||
) {
|
||||
|
@ -276,7 +273,7 @@ impl ClientTrait for TowerClient {
|
|||
|
||||
async fn workspace_configuration(
|
||||
&self,
|
||||
scopes: Vec<Option<lsp::Url>>,
|
||||
scopes: Vec<Option<lsp::Uri>>,
|
||||
) -> Result<Vec<WorkspaceSettings>, AnyError> {
|
||||
let config_response = self
|
||||
.0
|
||||
|
@ -349,7 +346,7 @@ struct ReplClient;
|
|||
impl ClientTrait for ReplClient {
|
||||
async fn publish_diagnostics(
|
||||
&self,
|
||||
_uri: lsp::Url,
|
||||
_uri: lsp::Uri,
|
||||
_diagnostics: Vec<lsp::Diagnostic>,
|
||||
_version: Option<i32>,
|
||||
) {
|
||||
|
@ -383,7 +380,7 @@ impl ClientTrait for ReplClient {
|
|||
|
||||
async fn workspace_configuration(
|
||||
&self,
|
||||
scopes: Vec<Option<lsp::Url>>,
|
||||
scopes: Vec<Option<lsp::Uri>>,
|
||||
) -> Result<Vec<WorkspaceSettings>, AnyError> {
|
||||
Ok(vec![get_repl_workspace_settings(); scopes.len()])
|
||||
}
|
||||
|
|
|
@ -838,7 +838,7 @@ mod tests {
|
|||
fs_sources: &[(&str, &str)],
|
||||
) -> Documents {
|
||||
let temp_dir = TempDir::new();
|
||||
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap()));
|
||||
let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
|
||||
let mut documents = Documents::default();
|
||||
documents.update_config(
|
||||
&Default::default(),
|
||||
|
@ -859,7 +859,7 @@ mod tests {
|
|||
.set(&specifier, HashMap::default(), source.as_bytes())
|
||||
.expect("could not cache file");
|
||||
let document = documents
|
||||
.get_or_load(&specifier, Some(&temp_dir.uri().join("$").unwrap()));
|
||||
.get_or_load(&specifier, Some(&temp_dir.url().join("$").unwrap()));
|
||||
assert!(document.is_some(), "source could not be setup");
|
||||
}
|
||||
documents
|
||||
|
|
|
@ -5,6 +5,7 @@ use deno_config::deno_json::DenoJsonCache;
|
|||
use deno_config::deno_json::FmtConfig;
|
||||
use deno_config::deno_json::FmtOptionsConfig;
|
||||
use deno_config::deno_json::LintConfig;
|
||||
use deno_config::deno_json::NodeModulesDirMode;
|
||||
use deno_config::deno_json::TestConfig;
|
||||
use deno_config::deno_json::TsConfig;
|
||||
use deno_config::fs::DenoConfigFs;
|
||||
|
@ -30,6 +31,7 @@ use deno_core::serde::Serialize;
|
|||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::serde_json::Value;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_lint::linter::LintConfig as DenoLintConfig;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
|
@ -38,7 +40,6 @@ use deno_runtime::deno_node::PackageJson;
|
|||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use indexmap::IndexSet;
|
||||
use lsp::Url;
|
||||
use lsp_types::ClientCapabilities;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
|
@ -54,7 +55,6 @@ use crate::args::CliLockfile;
|
|||
use crate::args::ConfigFile;
|
||||
use crate::args::LintFlags;
|
||||
use crate::args::LintOptions;
|
||||
use crate::args::DENO_FUTURE;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::lsp::logging::lsp_warn;
|
||||
|
@ -844,14 +844,17 @@ pub struct Config {
|
|||
|
||||
impl Config {
|
||||
#[cfg(test)]
|
||||
pub fn new_with_roots(root_uris: impl IntoIterator<Item = Url>) -> Self {
|
||||
pub fn new_with_roots(root_urls: impl IntoIterator<Item = Url>) -> Self {
|
||||
use super::urls::url_to_uri;
|
||||
|
||||
let mut config = Self::default();
|
||||
let mut folders = vec![];
|
||||
for root_uri in root_uris {
|
||||
let name = root_uri.path_segments().and_then(|s| s.last());
|
||||
for root_url in root_urls {
|
||||
let root_uri = url_to_uri(&root_url).unwrap();
|
||||
let name = root_url.path_segments().and_then(|s| s.last());
|
||||
let name = name.unwrap_or_default().to_string();
|
||||
folders.push((
|
||||
root_uri.clone(),
|
||||
root_url,
|
||||
lsp::WorkspaceFolder {
|
||||
uri: root_uri,
|
||||
name,
|
||||
|
@ -1384,11 +1387,14 @@ impl ConfigData {
|
|||
}
|
||||
}
|
||||
|
||||
let byonm = std::env::var("DENO_UNSTABLE_BYONM").is_ok()
|
||||
|| member_dir.workspace.has_unstable("byonm")
|
||||
|| (*DENO_FUTURE
|
||||
&& member_dir.workspace.package_jsons().next().is_some()
|
||||
&& member_dir.workspace.node_modules_dir().is_none());
|
||||
let node_modules_dir = member_dir
|
||||
.workspace
|
||||
.node_modules_dir_mode()
|
||||
.unwrap_or_default();
|
||||
let byonm = match node_modules_dir {
|
||||
Some(mode) => mode == NodeModulesDirMode::Manual,
|
||||
None => member_dir.workspace.root_pkg_json().is_some(),
|
||||
};
|
||||
if byonm {
|
||||
lsp_log!(" Enabled 'bring your own node_modules'.");
|
||||
}
|
||||
|
@ -1862,13 +1868,17 @@ fn resolve_node_modules_dir(
|
|||
// `nodeModulesDir: true` setting in the deno.json file. This is to
|
||||
// reduce the chance of modifying someone's node_modules directory
|
||||
// without them having asked us to do so.
|
||||
let explicitly_disabled = workspace.node_modules_dir() == Some(false);
|
||||
let node_modules_mode = workspace.node_modules_dir_mode().ok().flatten();
|
||||
let explicitly_disabled = node_modules_mode == Some(NodeModulesDirMode::None);
|
||||
if explicitly_disabled {
|
||||
return None;
|
||||
}
|
||||
let enabled = byonm
|
||||
|| workspace.node_modules_dir() == Some(true)
|
||||
|| node_modules_mode
|
||||
.map(|m| m.uses_node_modules_dir())
|
||||
.unwrap_or(false)
|
||||
|| workspace.vendor_dir_path().is_some();
|
||||
|
||||
if !enabled {
|
||||
return None;
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ use super::language_server::StateSnapshot;
|
|||
use super::performance::Performance;
|
||||
use super::tsc;
|
||||
use super::tsc::TsServer;
|
||||
use super::urls::LspClientUrl;
|
||||
use super::urls::url_to_uri;
|
||||
use super::urls::LspUrlMap;
|
||||
|
||||
use crate::graph_util;
|
||||
|
@ -37,6 +37,7 @@ use deno_core::serde_json::json;
|
|||
use deno_core::unsync::spawn;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_core::unsync::JoinHandle;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::source::ResolutionMode;
|
||||
use deno_graph::source::ResolveError;
|
||||
|
@ -52,9 +53,11 @@ use deno_semver::package::PackageReq;
|
|||
use import_map::ImportMap;
|
||||
use import_map::ImportMapError;
|
||||
use log::error;
|
||||
use lsp_types::Uri;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::Arc;
|
||||
use std::thread;
|
||||
|
@ -160,15 +163,14 @@ impl DiagnosticsPublisher {
|
|||
.state
|
||||
.update(&record.specifier, version, &all_specifier_diagnostics);
|
||||
let file_referrer = documents.get_file_referrer(&record.specifier);
|
||||
let Ok(uri) =
|
||||
url_map.specifier_to_uri(&record.specifier, file_referrer.as_deref())
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
self
|
||||
.client
|
||||
.publish_diagnostics(
|
||||
url_map
|
||||
.normalize_specifier(&record.specifier, file_referrer.as_deref())
|
||||
.unwrap_or(LspClientUrl::new(record.specifier)),
|
||||
all_specifier_diagnostics,
|
||||
version,
|
||||
)
|
||||
.publish_diagnostics(uri, all_specifier_diagnostics, version)
|
||||
.await;
|
||||
messages_sent += 1;
|
||||
}
|
||||
|
@ -191,15 +193,14 @@ impl DiagnosticsPublisher {
|
|||
// clear out any diagnostics for this specifier
|
||||
self.state.update(specifier, removed_value.version, &[]);
|
||||
let file_referrer = documents.get_file_referrer(specifier);
|
||||
let Ok(uri) =
|
||||
url_map.specifier_to_uri(specifier, file_referrer.as_deref())
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
self
|
||||
.client
|
||||
.publish_diagnostics(
|
||||
url_map
|
||||
.normalize_specifier(specifier, file_referrer.as_deref())
|
||||
.unwrap_or_else(|_| LspClientUrl::new(specifier.clone())),
|
||||
Vec::new(),
|
||||
removed_value.version,
|
||||
)
|
||||
.publish_diagnostics(uri, Vec::new(), removed_value.version)
|
||||
.await;
|
||||
messages_sent += 1;
|
||||
}
|
||||
|
@ -337,9 +338,9 @@ impl DiagnosticsState {
|
|||
if diagnostic.code
|
||||
== Some(lsp::NumberOrString::String("no-cache".to_string()))
|
||||
|| diagnostic.code
|
||||
== Some(lsp::NumberOrString::String("no-cache-jsr".to_string()))
|
||||
== Some(lsp::NumberOrString::String("not-installed-jsr".to_string()))
|
||||
|| diagnostic.code
|
||||
== Some(lsp::NumberOrString::String("no-cache-npm".to_string()))
|
||||
== Some(lsp::NumberOrString::String("not-installed-npm".to_string()))
|
||||
{
|
||||
no_cache_diagnostics.push(diagnostic.clone());
|
||||
}
|
||||
|
@ -737,7 +738,7 @@ fn to_lsp_related_information(
|
|||
if let (Some(file_name), Some(start), Some(end)) =
|
||||
(&ri.file_name, &ri.start, &ri.end)
|
||||
{
|
||||
let uri = lsp::Url::parse(file_name).unwrap();
|
||||
let uri = Uri::from_str(file_name).unwrap();
|
||||
Some(lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location {
|
||||
uri,
|
||||
|
@ -991,9 +992,9 @@ pub enum DenoDiagnostic {
|
|||
/// A remote module was not found in the cache.
|
||||
NoCache(ModuleSpecifier),
|
||||
/// A remote jsr package reference was not found in the cache.
|
||||
NoCacheJsr(PackageReq, ModuleSpecifier),
|
||||
NotInstalledJsr(PackageReq, ModuleSpecifier),
|
||||
/// A remote npm package reference was not found in the cache.
|
||||
NoCacheNpm(PackageReq, ModuleSpecifier),
|
||||
NotInstalledNpm(PackageReq, ModuleSpecifier),
|
||||
/// A local module was not found on the local file system.
|
||||
NoLocal(ModuleSpecifier),
|
||||
/// The specifier resolved to a remote specifier that was redirected to
|
||||
|
@ -1018,8 +1019,8 @@ impl DenoDiagnostic {
|
|||
Self::InvalidAttributeType(_) => "invalid-attribute-type",
|
||||
Self::NoAttributeType => "no-attribute-type",
|
||||
Self::NoCache(_) => "no-cache",
|
||||
Self::NoCacheJsr(_, _) => "no-cache-jsr",
|
||||
Self::NoCacheNpm(_, _) => "no-cache-npm",
|
||||
Self::NotInstalledJsr(_, _) => "not-installed-jsr",
|
||||
Self::NotInstalledNpm(_, _) => "not-installed-npm",
|
||||
Self::NoLocal(_) => "no-local",
|
||||
Self::Redirect { .. } => "redirect",
|
||||
Self::ResolutionError(err) => {
|
||||
|
@ -1070,7 +1071,7 @@ impl DenoDiagnostic {
|
|||
diagnostics: Some(vec![diagnostic.clone()]),
|
||||
edit: Some(lsp::WorkspaceEdit {
|
||||
changes: Some(HashMap::from([(
|
||||
specifier.clone(),
|
||||
url_to_uri(specifier)?,
|
||||
vec![lsp::TextEdit {
|
||||
new_text: format!("\"{to}\""),
|
||||
range: diagnostic.range,
|
||||
|
@ -1087,7 +1088,7 @@ impl DenoDiagnostic {
|
|||
diagnostics: Some(vec![diagnostic.clone()]),
|
||||
edit: Some(lsp::WorkspaceEdit {
|
||||
changes: Some(HashMap::from([(
|
||||
specifier.clone(),
|
||||
url_to_uri(specifier)?,
|
||||
vec![lsp::TextEdit {
|
||||
new_text: " with { type: \"json\" }".to_string(),
|
||||
range: lsp::Range {
|
||||
|
@ -1100,17 +1101,22 @@ impl DenoDiagnostic {
|
|||
}),
|
||||
..Default::default()
|
||||
},
|
||||
"no-cache" | "no-cache-jsr" | "no-cache-npm" => {
|
||||
"no-cache" | "not-installed-jsr" | "not-installed-npm" => {
|
||||
let data = diagnostic
|
||||
.data
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
|
||||
let data: DiagnosticDataSpecifier = serde_json::from_value(data)?;
|
||||
let title = if matches!(
|
||||
code.as_str(),
|
||||
"not-installed-jsr" | "not-installed-npm"
|
||||
) {
|
||||
format!("Install \"{}\" and its dependencies.", data.specifier)
|
||||
} else {
|
||||
format!("Cache \"{}\" and its dependencies.", data.specifier)
|
||||
};
|
||||
lsp::CodeAction {
|
||||
title: format!(
|
||||
"Cache \"{}\" and its dependencies.",
|
||||
data.specifier
|
||||
),
|
||||
title,
|
||||
kind: Some(lsp::CodeActionKind::QUICKFIX),
|
||||
diagnostics: Some(vec![diagnostic.clone()]),
|
||||
command: Some(lsp::Command {
|
||||
|
@ -1133,7 +1139,7 @@ impl DenoDiagnostic {
|
|||
diagnostics: Some(vec![diagnostic.clone()]),
|
||||
edit: Some(lsp::WorkspaceEdit {
|
||||
changes: Some(HashMap::from([(
|
||||
specifier.clone(),
|
||||
url_to_uri(specifier)?,
|
||||
vec![lsp::TextEdit {
|
||||
new_text: format!(
|
||||
"\"{}\"",
|
||||
|
@ -1159,7 +1165,7 @@ impl DenoDiagnostic {
|
|||
diagnostics: Some(vec![diagnostic.clone()]),
|
||||
edit: Some(lsp::WorkspaceEdit {
|
||||
changes: Some(HashMap::from([(
|
||||
specifier.clone(),
|
||||
url_to_uri(specifier)?,
|
||||
vec![lsp::TextEdit {
|
||||
new_text: format!(
|
||||
"\"{}\"",
|
||||
|
@ -1185,7 +1191,7 @@ impl DenoDiagnostic {
|
|||
diagnostics: Some(vec![diagnostic.clone()]),
|
||||
edit: Some(lsp::WorkspaceEdit {
|
||||
changes: Some(HashMap::from([(
|
||||
specifier.clone(),
|
||||
url_to_uri(specifier)?,
|
||||
vec![lsp::TextEdit {
|
||||
new_text: format!("\"node:{}\"", data.specifier),
|
||||
range: diagnostic.range,
|
||||
|
@ -1216,8 +1222,8 @@ impl DenoDiagnostic {
|
|||
match code.as_str() {
|
||||
"import-map-remap"
|
||||
| "no-cache"
|
||||
| "no-cache-jsr"
|
||||
| "no-cache-npm"
|
||||
| "not-installed-jsr"
|
||||
| "not-installed-npm"
|
||||
| "no-attribute-type"
|
||||
| "redirect"
|
||||
| "import-node-prefix-missing" => true,
|
||||
|
@ -1255,8 +1261,8 @@ impl DenoDiagnostic {
|
|||
Self::InvalidAttributeType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an attribute type of \"json\". Instead got \"{assert_type}\"."), None),
|
||||
Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None),
|
||||
Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))),
|
||||
Self::NoCacheJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing jsr package: {}", pkg_req), Some(json!({ "specifier": specifier }))),
|
||||
Self::NoCacheNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: {}", pkg_req), Some(json!({ "specifier": specifier }))),
|
||||
Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
|
||||
Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
|
||||
Self::NoLocal(specifier) => {
|
||||
let maybe_sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution);
|
||||
let data = maybe_sloppy_resolution.as_ref().map(|res| {
|
||||
|
@ -1303,10 +1309,7 @@ impl DenoDiagnostic {
|
|||
}
|
||||
}
|
||||
|
||||
fn specifier_text_for_redirected(
|
||||
redirect: &lsp::Url,
|
||||
referrer: &lsp::Url,
|
||||
) -> String {
|
||||
fn specifier_text_for_redirected(redirect: &Url, referrer: &Url) -> String {
|
||||
if redirect.scheme() == "file" && referrer.scheme() == "file" {
|
||||
// use a relative specifier when it's going to a file url
|
||||
relative_specifier(redirect, referrer)
|
||||
|
@ -1315,7 +1318,7 @@ fn specifier_text_for_redirected(
|
|||
}
|
||||
}
|
||||
|
||||
fn relative_specifier(specifier: &lsp::Url, referrer: &lsp::Url) -> String {
|
||||
fn relative_specifier(specifier: &Url, referrer: &Url) -> String {
|
||||
match referrer.make_relative(specifier) {
|
||||
Some(relative) => {
|
||||
if relative.starts_with('.') {
|
||||
|
@ -1410,7 +1413,8 @@ fn diagnose_resolution(
|
|||
JsrPackageReqReference::from_specifier(specifier)
|
||||
{
|
||||
let req = pkg_ref.into_inner().req;
|
||||
diagnostics.push(DenoDiagnostic::NoCacheJsr(req, specifier.clone()));
|
||||
diagnostics
|
||||
.push(DenoDiagnostic::NotInstalledJsr(req, specifier.clone()));
|
||||
} else if let Ok(pkg_ref) =
|
||||
NpmPackageReqReference::from_specifier(specifier)
|
||||
{
|
||||
|
@ -1419,7 +1423,7 @@ fn diagnose_resolution(
|
|||
let req = pkg_ref.into_inner().req;
|
||||
if !npm_resolver.is_pkg_req_folder_cached(&req) {
|
||||
diagnostics
|
||||
.push(DenoDiagnostic::NoCacheNpm(req, specifier.clone()));
|
||||
.push(DenoDiagnostic::NotInstalledNpm(req, specifier.clone()));
|
||||
}
|
||||
}
|
||||
} else if let Some(module_name) = specifier.as_str().strip_prefix("node:")
|
||||
|
@ -1445,7 +1449,7 @@ fn diagnose_resolution(
|
|||
// check that a @types/node package exists in the resolver
|
||||
let types_node_req = PackageReq::from_str("@types/node").unwrap();
|
||||
if !npm_resolver.is_pkg_req_folder_cached(&types_node_req) {
|
||||
diagnostics.push(DenoDiagnostic::NoCacheNpm(
|
||||
diagnostics.push(DenoDiagnostic::NotInstalledNpm(
|
||||
types_node_req,
|
||||
ModuleSpecifier::parse("npm:@types/node").unwrap(),
|
||||
));
|
||||
|
@ -1634,7 +1638,8 @@ mod tests {
|
|||
use test_util::TempDir;
|
||||
|
||||
fn mock_config() -> Config {
|
||||
let root_uri = resolve_url("file:///").unwrap();
|
||||
let root_url = resolve_url("file:///").unwrap();
|
||||
let root_uri = url_to_uri(&root_url).unwrap();
|
||||
Config {
|
||||
settings: Arc::new(Settings {
|
||||
unscoped: Arc::new(WorkspaceSettings {
|
||||
|
@ -1645,7 +1650,7 @@ mod tests {
|
|||
..Default::default()
|
||||
}),
|
||||
workspace_folders: Arc::new(vec![(
|
||||
root_uri.clone(),
|
||||
root_url,
|
||||
lsp::WorkspaceFolder {
|
||||
uri: root_uri,
|
||||
name: "".to_string(),
|
||||
|
@ -1660,7 +1665,7 @@ mod tests {
|
|||
maybe_import_map: Option<(&str, &str)>,
|
||||
) -> (TempDir, StateSnapshot) {
|
||||
let temp_dir = TempDir::new();
|
||||
let root_uri = temp_dir.uri();
|
||||
let root_uri = temp_dir.url();
|
||||
let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap()));
|
||||
let mut config = Config::new_with_roots([root_uri.clone()]);
|
||||
if let Some((relative_path, json_string)) = maybe_import_map {
|
||||
|
@ -1827,7 +1832,7 @@ let c: number = "a";
|
|||
assert_eq!(actual.len(), 2);
|
||||
for record in actual {
|
||||
let relative_specifier =
|
||||
temp_dir.uri().make_relative(&record.specifier).unwrap();
|
||||
temp_dir.url().make_relative(&record.specifier).unwrap();
|
||||
match relative_specifier.as_str() {
|
||||
"std/assert/mod.ts" => {
|
||||
assert_eq!(json!(record.versioned.diagnostics), json!([]))
|
||||
|
@ -2046,7 +2051,7 @@ let c: number = "a";
|
|||
"source": "deno",
|
||||
"message": format!(
|
||||
"Unable to load a local module: {}🦕.ts\nPlease check the file path.",
|
||||
temp_dir.uri(),
|
||||
temp_dir.url(),
|
||||
),
|
||||
}
|
||||
])
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
use super::cache::calculate_fs_version;
|
||||
use super::cache::LspCache;
|
||||
use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY;
|
||||
use super::config::Config;
|
||||
use super::resolver::LspResolver;
|
||||
use super::testing::TestCollector;
|
||||
|
@ -61,6 +60,9 @@ pub enum LanguageId {
|
|||
Json,
|
||||
JsonC,
|
||||
Markdown,
|
||||
Html,
|
||||
Css,
|
||||
Yaml,
|
||||
Unknown,
|
||||
}
|
||||
|
||||
|
@ -74,6 +76,9 @@ impl LanguageId {
|
|||
LanguageId::Json => Some("json"),
|
||||
LanguageId::JsonC => Some("jsonc"),
|
||||
LanguageId::Markdown => Some("md"),
|
||||
LanguageId::Html => Some("html"),
|
||||
LanguageId::Css => Some("css"),
|
||||
LanguageId::Yaml => Some("yaml"),
|
||||
LanguageId::Unknown => None,
|
||||
}
|
||||
}
|
||||
|
@ -86,6 +91,9 @@ impl LanguageId {
|
|||
LanguageId::Tsx => Some("text/tsx"),
|
||||
LanguageId::Json | LanguageId::JsonC => Some("application/json"),
|
||||
LanguageId::Markdown => Some("text/markdown"),
|
||||
LanguageId::Html => Some("text/html"),
|
||||
LanguageId::Css => Some("text/css"),
|
||||
LanguageId::Yaml => Some("application/yaml"),
|
||||
LanguageId::Unknown => None,
|
||||
}
|
||||
}
|
||||
|
@ -110,6 +118,9 @@ impl FromStr for LanguageId {
|
|||
"json" => Ok(Self::Json),
|
||||
"jsonc" => Ok(Self::JsonC),
|
||||
"markdown" => Ok(Self::Markdown),
|
||||
"html" => Ok(Self::Html),
|
||||
"css" => Ok(Self::Css),
|
||||
"yaml" => Ok(Self::Yaml),
|
||||
_ => Ok(Self::Unknown),
|
||||
}
|
||||
}
|
||||
|
@@ -872,22 +883,19 @@ impl FileSystemDocuments {
} else {
let http_cache = cache.for_specifier(file_referrer);
let cache_key = http_cache.cache_item_key(specifier).ok()?;
let bytes = http_cache
.read_file_bytes(&cache_key, None, LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY)
.ok()??;
let specifier_headers = http_cache.read_headers(&cache_key).ok()??;
let cached_file = http_cache.get(&cache_key, None).ok()??;
let (_, maybe_charset) =
deno_graph::source::resolve_media_type_and_charset_from_headers(
specifier,
Some(&specifier_headers),
Some(&cached_file.metadata.headers),
);
let content = deno_graph::source::decode_owned_source(
specifier,
bytes,
cached_file.content,
maybe_charset,
)
.ok()?;
let maybe_headers = Some(specifier_headers);
let maybe_headers = Some(cached_file.metadata.headers);
Document::new(
specifier.clone(),
content.into(),
@@ -1414,11 +1422,9 @@ impl Documents {
if let Some(lockfile) = config_data.lockfile.as_ref() {
let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
let lockfile = lockfile.lock();
for key in lockfile.content.packages.specifiers.keys() {
if let Some(key) = key.strip_prefix("npm:") {
if let Ok(req) = PackageReq::from_str(key) {
reqs.insert(req);
}
for dep_req in lockfile.content.packages.specifiers.keys() {
if dep_req.kind == deno_semver::package::PackageKind::Npm {
reqs.insert(dep_req.req.clone());
}
}
}
@@ -1607,7 +1613,7 @@ mod tests {
async fn setup() -> (Documents, LspCache, TempDir) {
let temp_dir = TempDir::new();
temp_dir.create_dir_all(".deno_dir");
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap()));
let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
let config = Config::default();
let resolver =
Arc::new(LspResolver::from_config(&config, &cache, None).await);
@@ -1690,7 +1696,7 @@ console.log(b, "hello deno");
// but we'll guard against it anyway
let (mut documents, _, temp_dir) = setup().await;
let file_path = temp_dir.path().join("file.ts");
let file_specifier = temp_dir.uri().join("file.ts").unwrap();
let file_specifier = temp_dir.url().join("file.ts").unwrap();
file_path.write("");

// open the document
@@ -1718,18 +1724,18 @@ console.log(b, "hello deno");
let (mut documents, cache, temp_dir) = setup().await;

let file1_path = temp_dir.path().join("file1.ts");
let file1_specifier = temp_dir.uri().join("file1.ts").unwrap();
let file1_specifier = temp_dir.url().join("file1.ts").unwrap();
fs::write(&file1_path, "").unwrap();

let file2_path = temp_dir.path().join("file2.ts");
let file2_specifier = temp_dir.uri().join("file2.ts").unwrap();
let file2_specifier = temp_dir.url().join("file2.ts").unwrap();
fs::write(&file2_path, "").unwrap();

let file3_path = temp_dir.path().join("file3.ts");
let file3_specifier = temp_dir.uri().join("file3.ts").unwrap();
let file3_specifier = temp_dir.url().join("file3.ts").unwrap();
fs::write(&file3_path, "").unwrap();

let mut config = Config::new_with_roots([temp_dir.uri()]);
let mut config = Config::new_with_roots([temp_dir.url()]);
let workspace_settings =
serde_json::from_str(r#"{ "enable": true }"#).unwrap();
config.set_workspace_settings(workspace_settings, vec![]);
@@ -92,20 +92,23 @@ impl JsrCacheResolver {
}
}
if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) {
for (req_url, nv_url) in &lockfile.lock().content.packages.specifiers {
let Some(req) = req_url.strip_prefix("jsr:") else {
for (dep_req, version) in &lockfile.lock().content.packages.specifiers {
let req = match dep_req.kind {
deno_semver::package::PackageKind::Jsr => &dep_req.req,
deno_semver::package::PackageKind::Npm => {
continue;
}
};
let Ok(version) = Version::parse_standard(version) else {
continue;
};
let Some(nv) = nv_url.strip_prefix("jsr:") else {
continue;
};
let Ok(req) = PackageReq::from_str(req) else {
continue;
};
let Ok(nv) = PackageNv::from_str(nv) else {
continue;
};
nv_by_req.insert(req, Some(nv));
nv_by_req.insert(
req.clone(),
Some(PackageNv {
name: req.name.clone(),
version,
}),
);
}
}
Self {
@@ -258,12 +261,9 @@ fn read_cached_url(
cache: &Arc<dyn HttpCache>,
) -> Option<Vec<u8>> {
cache
.read_file_bytes(
&cache.cache_item_key(url).ok()?,
None,
deno_cache_dir::GlobalToLocalCopy::Disallow,
)
.get(&cache.cache_item_key(url).ok()?, None)
.ok()?
.map(|f| f.content)
}

#[derive(Debug)]
@@ -12,6 +12,7 @@ use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_core::unsync::spawn;
use deno_core::url;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
@@ -31,6 +32,7 @@ use std::collections::VecDeque;
use std::env;
use std::fmt::Write as _;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
use tokio::sync::mpsc::unbounded_channel;
use tokio::sync::mpsc::UnboundedReceiver;
@@ -85,6 +87,8 @@ use super::tsc::ChangeKind;
use super::tsc::GetCompletionDetailsArgs;
use super::tsc::TsServer;
use super::urls;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::has_flag_env_var;
@@ -720,7 +724,9 @@ impl Inner {
.into_iter()
.map(|folder| {
(
self.url_map.normalize_url(&folder.uri, LspUrlKind::Folder),
self
.url_map
.uri_to_specifier(&folder.uri, LspUrlKind::Folder),
folder,
)
})
@@ -728,14 +734,17 @@ impl Inner {
}
// rootUri is deprecated by the LSP spec. If it's specified, merge it into
// workspace_folders.
#[allow(deprecated)]
if let Some(root_uri) = params.root_uri {
if !workspace_folders.iter().any(|(_, f)| f.uri == root_uri) {
let name = root_uri.path_segments().and_then(|s| s.last());
let root_url =
self.url_map.uri_to_specifier(&root_uri, LspUrlKind::Folder);
let name = root_url.path_segments().and_then(|s| s.last());
let name = name.unwrap_or_default().to_string();
workspace_folders.insert(
0,
(
self.url_map.normalize_url(&root_uri, LspUrlKind::Folder),
root_url,
WorkspaceFolder {
uri: root_uri,
name,
@@ -956,31 +965,27 @@ impl Inner {
.refresh(&self.config.settings, &self.workspace_files, &file_fetcher)
.await;
for config_file in self.config.tree.config_files() {
if let Ok((compiler_options, _)) = config_file.to_compiler_options() {
if let Some(compiler_options_obj) = compiler_options.as_object() {
if let Some(jsx_import_source) =
compiler_options_obj.get("jsxImportSource")
{
if let Some(jsx_import_source) = jsx_import_source.as_str() {
let specifiers = vec![Url::parse(&format!(
"data:application/typescript;base64,{}",
base64::engine::general_purpose::STANDARD
.encode(format!("import '{jsx_import_source}/jsx-runtime';"))
))
.unwrap()];
let referrer = config_file.specifier.clone();
self.task_queue.queue_task(Box::new(|ls: LanguageServer| {
spawn(async move {
if let Err(err) = ls.cache(specifiers, referrer, false).await
{
lsp_warn!("{:#}", err);
}
});
}));
(|| {
let compiler_options = config_file.to_compiler_options().ok()?.0;
let compiler_options_obj = compiler_options.as_object()?;
let jsx_import_source = compiler_options_obj.get("jsxImportSource")?;
let jsx_import_source = jsx_import_source.as_str()?;
let referrer = config_file.specifier.clone();
let specifier = Url::parse(&format!(
"data:application/typescript;base64,{}",
base64::engine::general_purpose::STANDARD
.encode(format!("import '{jsx_import_source}/jsx-runtime';"))
))
.unwrap();
self.task_queue.queue_task(Box::new(|ls: LanguageServer| {
spawn(async move {
if let Err(err) = ls.cache(vec![specifier], referrer, false).await {
lsp_warn!("{:#}", err);
}
}
}
}
});
}));
Some(())
})();
}
}
@ -1012,7 +1017,10 @@ impl Inner {
|
|||
|
||||
async fn did_open(&mut self, params: DidOpenTextDocumentParams) {
|
||||
let mark = self.performance.mark_with_args("lsp.did_open", ¶ms);
|
||||
if params.text_document.uri.scheme() == "deno" {
|
||||
let Some(scheme) = params.text_document.uri.scheme() else {
|
||||
return;
|
||||
};
|
||||
if scheme.as_str() == "deno" {
|
||||
// we can ignore virtual text documents opening, as they don't need to
|
||||
// be tracked in memory, as they are static assets that won't change
|
||||
// already managed by the language service
|
||||
|
@ -1031,16 +1039,14 @@ impl Inner {
|
|||
lsp_warn!(
|
||||
"Unsupported language id \"{}\" received for document \"{}\".",
|
||||
params.text_document.language_id,
|
||||
params.text_document.uri
|
||||
params.text_document.uri.as_str()
|
||||
);
|
||||
}
|
||||
let file_referrer = (self
|
||||
.documents
|
||||
.is_valid_file_referrer(¶ms.text_document.uri))
|
||||
.then(|| params.text_document.uri.clone());
|
||||
let file_referrer = Some(uri_to_url(¶ms.text_document.uri))
|
||||
.filter(|s| self.documents.is_valid_file_referrer(s));
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
let document = self.documents.open(
|
||||
specifier.clone(),
|
||||
params.text_document.version,
|
||||
|
@ -1062,7 +1068,7 @@ impl Inner {
|
|||
let mark = self.performance.mark_with_args("lsp.did_change", ¶ms);
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
match self.documents.change(
|
||||
&specifier,
|
||||
params.text_document.version,
|
||||
|
@ -1099,7 +1105,7 @@ impl Inner {
|
|||
let _mark = self.performance.measure_scope("lsp.did_save");
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
self.documents.save(&specifier);
|
||||
if !self
|
||||
.config
|
||||
|
@ -1134,8 +1140,10 @@ impl Inner {
|
|||
|
||||
async fn did_close(&mut self, params: DidCloseTextDocumentParams) {
|
||||
let mark = self.performance.mark_with_args("lsp.did_close", ¶ms);
|
||||
self.diagnostics_state.clear(¶ms.text_document.uri);
|
||||
if params.text_document.uri.scheme() == "deno" {
|
||||
let Some(scheme) = params.text_document.uri.scheme() else {
|
||||
return;
|
||||
};
|
||||
if scheme.as_str() == "deno" {
|
||||
// we can ignore virtual text documents closing, as they don't need to
|
||||
// be tracked in memory, as they are static assets that won't change
|
||||
// already managed by the language service
|
||||
|
@ -1143,7 +1151,8 @@ impl Inner {
|
|||
}
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
self.diagnostics_state.clear(&specifier);
|
||||
if self.is_diagnosable(&specifier) {
|
||||
self.refresh_npm_specifiers().await;
|
||||
self.diagnostics_server.invalidate(&[specifier.clone()]);
|
||||
|
@ -1196,7 +1205,7 @@ impl Inner {
|
|||
let changes = params
|
||||
.changes
|
||||
.into_iter()
|
||||
.map(|e| (self.url_map.normalize_url(&e.uri, LspUrlKind::File), e))
|
||||
.map(|e| (self.url_map.uri_to_specifier(&e.uri, LspUrlKind::File), e))
|
||||
.collect::<Vec<_>>();
|
||||
if changes
|
||||
.iter()
|
||||
|
@ -1215,7 +1224,7 @@ impl Inner {
|
|||
_ => return None,
|
||||
};
|
||||
Some(lsp_custom::DenoConfigurationChangeEvent {
|
||||
scope_uri: t.0.clone(),
|
||||
scope_uri: url_to_uri(t.0).ok()?,
|
||||
file_uri: e.uri.clone(),
|
||||
typ: lsp_custom::DenoConfigurationChangeType::from_file_change_type(
|
||||
e.typ,
|
||||
|
@ -1250,7 +1259,7 @@ impl Inner {
|
|||
_ => return None,
|
||||
};
|
||||
Some(lsp_custom::DenoConfigurationChangeEvent {
|
||||
scope_uri: t.0.clone(),
|
||||
scope_uri: url_to_uri(t.0).ok()?,
|
||||
file_uri: e.uri.clone(),
|
||||
typ: lsp_custom::DenoConfigurationChangeType::from_file_change_type(
|
||||
e.typ,
|
||||
|
@ -1276,7 +1285,7 @@ impl Inner {
|
|||
) -> LspResult<Option<DocumentSymbolResponse>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
{
|
||||
|
@ -1316,13 +1325,11 @@ impl Inner {
|
|||
&self,
|
||||
params: DocumentFormattingParams,
|
||||
) -> LspResult<Option<Vec<TextEdit>>> {
|
||||
let file_referrer = (self
|
||||
.documents
|
||||
.is_valid_file_referrer(¶ms.text_document.uri))
|
||||
.then(|| params.text_document.uri.clone());
|
||||
let file_referrer = Some(uri_to_url(¶ms.text_document.uri))
|
||||
.filter(|s| self.documents.is_valid_file_referrer(s));
|
||||
let mut specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
// skip formatting any files ignored by the config file
|
||||
if !self
|
||||
.config
|
||||
|
@ -1343,9 +1350,9 @@ impl Inner {
|
|||
// counterparts, but for formatting we want to favour the file URL.
|
||||
// TODO(nayeemrmn): Implement `Document::file_resource_path()` or similar.
|
||||
if specifier.scheme() != "file"
|
||||
&& params.text_document.uri.scheme() == "file"
|
||||
&& params.text_document.uri.scheme().map(|s| s.as_str()) == Some("file")
|
||||
{
|
||||
specifier = params.text_document.uri.clone();
|
||||
specifier = uri_to_url(¶ms.text_document.uri);
|
||||
}
|
||||
let file_path = specifier_to_file_path(&specifier).map_err(|err| {
|
||||
error!("{:#}", err);
|
||||
|
@ -1437,7 +1444,7 @@ impl Inner {
|
|||
}
|
||||
|
||||
async fn hover(&self, params: HoverParams) -> LspResult<Option<Hover>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position_params.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -1570,7 +1577,7 @@ impl Inner {
|
|||
) -> LspResult<Option<CodeActionResponse>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
{
|
||||
|
@ -1667,9 +1674,9 @@ impl Inner {
|
|||
if diagnostic.code
|
||||
== Some(NumberOrString::String("no-cache".to_string()))
|
||||
|| diagnostic.code
|
||||
== Some(NumberOrString::String("no-cache-jsr".to_string()))
|
||||
== Some(NumberOrString::String("not-installed-jsr".to_string()))
|
||||
|| diagnostic.code
|
||||
== Some(NumberOrString::String("no-cache-npm".to_string()))
|
||||
== Some(NumberOrString::String("not-installed-npm".to_string()))
|
||||
{
|
||||
includes_no_cache = true;
|
||||
}
|
||||
|
@ -1914,7 +1921,7 @@ impl Inner {
|
|||
) -> LspResult<Option<Vec<CodeLens>>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
{
|
||||
|
@ -2000,7 +2007,7 @@ impl Inner {
|
|||
&self,
|
||||
params: DocumentHighlightParams,
|
||||
) -> LspResult<Option<Vec<DocumentHighlight>>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position_params.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2044,7 +2051,7 @@ impl Inner {
|
|||
&self,
|
||||
params: ReferenceParams,
|
||||
) -> LspResult<Option<Vec<Location>>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2100,7 +2107,7 @@ impl Inner {
|
|||
&self,
|
||||
params: GotoDefinitionParams,
|
||||
) -> LspResult<Option<GotoDefinitionResponse>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position_params.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2139,7 +2146,7 @@ impl Inner {
|
|||
&self,
|
||||
params: GotoTypeDefinitionParams,
|
||||
) -> LspResult<Option<GotoTypeDefinitionResponse>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position_params.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2185,7 +2192,7 @@ impl Inner {
|
|||
&self,
|
||||
params: CompletionParams,
|
||||
) -> LspResult<Option<CompletionResponse>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2374,7 +2381,7 @@ impl Inner {
|
|||
&self,
|
||||
params: GotoImplementationParams,
|
||||
) -> LspResult<Option<GotoImplementationResponse>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position_params.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2425,7 +2432,7 @@ impl Inner {
|
|||
) -> LspResult<Option<Vec<FoldingRange>>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
{
|
||||
|
@ -2472,7 +2479,7 @@ impl Inner {
|
|||
) -> LspResult<Option<Vec<CallHierarchyIncomingCall>>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.item.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.item.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
{
|
||||
|
@ -2521,7 +2528,7 @@ impl Inner {
|
|||
) -> LspResult<Option<Vec<CallHierarchyOutgoingCall>>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.item.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.item.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
{
|
||||
|
@ -2566,7 +2573,7 @@ impl Inner {
|
|||
&self,
|
||||
params: CallHierarchyPrepareParams,
|
||||
) -> LspResult<Option<Vec<CallHierarchyItem>>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position_params.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2630,7 +2637,7 @@ impl Inner {
|
|||
&self,
|
||||
params: RenameParams,
|
||||
) -> LspResult<Option<WorkspaceEdit>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2679,7 +2686,7 @@ impl Inner {
|
|||
) -> LspResult<Option<Vec<SelectionRange>>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
{
|
||||
|
@ -2717,7 +2724,7 @@ impl Inner {
|
|||
) -> LspResult<Option<SemanticTokensResult>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier) {
|
||||
return Ok(None);
|
||||
}
|
||||
|
@ -2770,7 +2777,7 @@ impl Inner {
|
|||
) -> LspResult<Option<SemanticTokensRangeResult>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier) {
|
||||
return Ok(None);
|
||||
}
|
||||
|
@ -2819,7 +2826,7 @@ impl Inner {
|
|||
&self,
|
||||
params: SignatureHelpParams,
|
||||
) -> LspResult<Option<SignatureHelp>> {
|
||||
let specifier = self.url_map.normalize_url(
|
||||
let specifier = self.url_map.uri_to_specifier(
|
||||
¶ms.text_document_position_params.text_document.uri,
|
||||
LspUrlKind::File,
|
||||
);
|
||||
|
@ -2873,8 +2880,8 @@ impl Inner {
|
|||
) -> LspResult<Option<WorkspaceEdit>> {
|
||||
let mut changes = vec![];
|
||||
for rename in params.files {
|
||||
let old_specifier = self.url_map.normalize_url(
|
||||
&resolve_url(&rename.old_uri).unwrap(),
|
||||
let old_specifier = self.url_map.uri_to_specifier(
|
||||
&Uri::from_str(&rename.old_uri).unwrap(),
|
||||
LspUrlKind::File,
|
||||
);
|
||||
let options = self
|
||||
|
@ -2899,8 +2906,8 @@ impl Inner {
|
|||
.get_edits_for_file_rename(
|
||||
self.snapshot(),
|
||||
old_specifier,
|
||||
self.url_map.normalize_url(
|
||||
&resolve_url(&rename.new_uri).unwrap(),
|
||||
self.url_map.uri_to_specifier(
|
||||
&Uri::from_str(&rename.new_uri).unwrap(),
|
||||
LspUrlKind::File,
|
||||
),
|
||||
format_code_settings,
|
||||
|
@ -3498,22 +3505,29 @@ impl Inner {
|
|||
}
|
||||
|
||||
let mut config_events = vec![];
|
||||
for (scope_uri, config_data) in self.config.tree.data_by_scope().iter() {
|
||||
for (scope_url, config_data) in self.config.tree.data_by_scope().iter() {
|
||||
let Ok(scope_uri) = url_to_uri(scope_url) else {
|
||||
continue;
|
||||
};
|
||||
if let Some(config_file) = config_data.maybe_deno_json() {
|
||||
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
|
||||
scope_uri: scope_uri.clone(),
|
||||
file_uri: config_file.specifier.clone(),
|
||||
typ: lsp_custom::DenoConfigurationChangeType::Added,
|
||||
configuration_type: lsp_custom::DenoConfigurationType::DenoJson,
|
||||
});
|
||||
if let Ok(file_uri) = url_to_uri(&config_file.specifier) {
|
||||
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
|
||||
scope_uri: scope_uri.clone(),
|
||||
file_uri,
|
||||
typ: lsp_custom::DenoConfigurationChangeType::Added,
|
||||
configuration_type: lsp_custom::DenoConfigurationType::DenoJson,
|
||||
});
|
||||
}
|
||||
}
|
||||
if let Some(package_json) = config_data.maybe_pkg_json() {
|
||||
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
|
||||
scope_uri: scope_uri.clone(),
|
||||
file_uri: package_json.specifier(),
|
||||
typ: lsp_custom::DenoConfigurationChangeType::Added,
|
||||
configuration_type: lsp_custom::DenoConfigurationType::PackageJson,
|
||||
});
|
||||
if let Ok(file_uri) = url_to_uri(&package_json.specifier()) {
|
||||
config_events.push(lsp_custom::DenoConfigurationChangeEvent {
|
||||
scope_uri,
|
||||
file_uri,
|
||||
typ: lsp_custom::DenoConfigurationChangeType::Added,
|
||||
configuration_type: lsp_custom::DenoConfigurationType::PackageJson,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if !config_events.is_empty() {
|
||||
|
@ -3533,19 +3547,22 @@ impl Inner {
|
|||
force_global_cache: bool,
|
||||
) -> Result<PrepareCacheResult, AnyError> {
|
||||
let config_data = self.config.tree.data_for_specifier(&referrer);
|
||||
let byonm = config_data.map(|d| d.byonm).unwrap_or(false);
|
||||
let mut roots = if !specifiers.is_empty() {
|
||||
specifiers
|
||||
} else {
|
||||
vec![referrer.clone()]
|
||||
};
|
||||
|
||||
// always include the npm packages since resolution of one npm package
|
||||
// might affect the resolution of other npm packages
|
||||
if let Some(npm_reqs) = self
|
||||
if byonm {
|
||||
roots.retain(|s| s.scheme() != "npm");
|
||||
} else if let Some(npm_reqs) = self
|
||||
.documents
|
||||
.npm_reqs_by_scope()
|
||||
.get(&config_data.map(|d| d.scope.as_ref().clone()))
|
||||
{
|
||||
// always include the npm packages since resolution of one npm package
|
||||
// might affect the resolution of other npm packages
|
||||
roots.extend(
|
||||
npm_reqs
|
||||
.iter()
|
||||
|
@ -3594,11 +3611,6 @@ impl Inner {
|
|||
.as_ref()
|
||||
.map(|url| url.to_string())
|
||||
}),
|
||||
node_modules_dir: Some(
|
||||
config_data
|
||||
.and_then(|d| d.node_modules_dir.as_ref())
|
||||
.is_some(),
|
||||
),
|
||||
// bit of a hack to force the lsp to cache the @types/node package
|
||||
type_check_mode: crate::args::TypeCheckMode::Local,
|
||||
..Default::default()
|
||||
|
@ -3640,7 +3652,9 @@ impl Inner {
|
|||
.into_iter()
|
||||
.map(|folder| {
|
||||
(
|
||||
self.url_map.normalize_url(&folder.uri, LspUrlKind::Folder),
|
||||
self
|
||||
.url_map
|
||||
.uri_to_specifier(&folder.uri, LspUrlKind::Folder),
|
||||
folder,
|
||||
)
|
||||
})
|
||||
|
@ -3716,7 +3730,8 @@ impl Inner {
|
|||
result.push(TaskDefinition {
|
||||
name: name.clone(),
|
||||
command: command.to_string(),
|
||||
source_uri: config_file.specifier.clone(),
|
||||
source_uri: url_to_uri(&config_file.specifier)
|
||||
.map_err(|_| LspError::internal_error())?,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@ -3727,7 +3742,8 @@ impl Inner {
|
|||
result.push(TaskDefinition {
|
||||
name: name.clone(),
|
||||
command: command.clone(),
|
||||
source_uri: package_json.specifier(),
|
||||
source_uri: url_to_uri(&package_json.specifier())
|
||||
.map_err(|_| LspError::internal_error())?,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -3742,7 +3758,7 @@ impl Inner {
|
|||
) -> LspResult<Option<Vec<InlayHint>>> {
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
if !self.is_diagnosable(&specifier)
|
||||
|| !self.config.specifier_enabled(&specifier)
|
||||
|| !self.config.enabled_inlay_hints_for_specifier(&specifier)
|
||||
|
@ -3805,7 +3821,7 @@ impl Inner {
|
|||
.mark_with_args("lsp.virtual_text_document", ¶ms);
|
||||
let specifier = self
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
.uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File);
|
||||
let contents = if specifier.scheme() == "deno"
|
||||
&& specifier.path() == "/status.md"
|
||||
{
|
||||
|
@ -3957,11 +3973,11 @@ mod tests {
|
|||
temp_dir.write("root4_parent/root4/main.ts", ""); // yes, enabled
|
||||
|
||||
let mut config = Config::new_with_roots(vec![
|
||||
temp_dir.uri().join("root1/").unwrap(),
|
||||
temp_dir.uri().join("root2/").unwrap(),
|
||||
temp_dir.uri().join("root2/root2.1/").unwrap(),
|
||||
temp_dir.uri().join("root3/").unwrap(),
|
||||
temp_dir.uri().join("root4_parent/root4/").unwrap(),
|
||||
temp_dir.url().join("root1/").unwrap(),
|
||||
temp_dir.url().join("root2/").unwrap(),
|
||||
temp_dir.url().join("root2/root2.1/").unwrap(),
|
||||
temp_dir.url().join("root3/").unwrap(),
|
||||
temp_dir.url().join("root4_parent/root4/").unwrap(),
|
||||
]);
|
||||
config.set_client_capabilities(ClientCapabilities {
|
||||
workspace: Some(Default::default()),
|
||||
|
@ -3971,14 +3987,14 @@ mod tests {
|
|||
Default::default(),
|
||||
vec![
|
||||
(
|
||||
temp_dir.uri().join("root1/").unwrap(),
|
||||
temp_dir.url().join("root1/").unwrap(),
|
||||
WorkspaceSettings {
|
||||
enable: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
(
|
||||
temp_dir.uri().join("root2/").unwrap(),
|
||||
temp_dir.url().join("root2/").unwrap(),
|
||||
WorkspaceSettings {
|
||||
enable: Some(true),
|
||||
enable_paths: Some(vec![
|
||||
|
@ -3990,21 +4006,21 @@ mod tests {
|
|||
},
|
||||
),
|
||||
(
|
||||
temp_dir.uri().join("root2/root2.1/").unwrap(),
|
||||
temp_dir.url().join("root2/root2.1/").unwrap(),
|
||||
WorkspaceSettings {
|
||||
enable: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
(
|
||||
temp_dir.uri().join("root3/").unwrap(),
|
||||
temp_dir.url().join("root3/").unwrap(),
|
||||
WorkspaceSettings {
|
||||
enable: Some(false),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
(
|
||||
temp_dir.uri().join("root4_parent/root4/").unwrap(),
|
||||
temp_dir.url().join("root4_parent/root4/").unwrap(),
|
||||
WorkspaceSettings {
|
||||
enable: Some(true),
|
||||
..Default::default()
|
||||
|
@ -4018,22 +4034,22 @@ mod tests {
|
|||
assert_eq!(
|
||||
json!(workspace_files),
|
||||
json!([
|
||||
temp_dir.uri().join("root4_parent/deno.json").unwrap(),
|
||||
temp_dir.uri().join("root1/mod0.ts").unwrap(),
|
||||
temp_dir.uri().join("root1/mod1.js").unwrap(),
|
||||
temp_dir.uri().join("root1/mod2.tsx").unwrap(),
|
||||
temp_dir.uri().join("root1/mod3.d.ts").unwrap(),
|
||||
temp_dir.uri().join("root1/mod4.jsx").unwrap(),
|
||||
temp_dir.uri().join("root1/mod5.mjs").unwrap(),
|
||||
temp_dir.uri().join("root1/mod6.mts").unwrap(),
|
||||
temp_dir.uri().join("root1/mod7.d.mts").unwrap(),
|
||||
temp_dir.uri().join("root1/mod8.json").unwrap(),
|
||||
temp_dir.uri().join("root1/mod9.jsonc").unwrap(),
|
||||
temp_dir.uri().join("root2/file1.ts").unwrap(),
|
||||
temp_dir.uri().join("root4_parent/root4/main.ts").unwrap(),
|
||||
temp_dir.uri().join("root1/folder/mod.ts").unwrap(),
|
||||
temp_dir.uri().join("root2/folder/main.ts").unwrap(),
|
||||
temp_dir.uri().join("root2/root2.1/main.ts").unwrap(),
|
||||
temp_dir.url().join("root4_parent/deno.json").unwrap(),
|
||||
temp_dir.url().join("root1/mod0.ts").unwrap(),
|
||||
temp_dir.url().join("root1/mod1.js").unwrap(),
|
||||
temp_dir.url().join("root1/mod2.tsx").unwrap(),
|
||||
temp_dir.url().join("root1/mod3.d.ts").unwrap(),
|
||||
temp_dir.url().join("root1/mod4.jsx").unwrap(),
|
||||
temp_dir.url().join("root1/mod5.mjs").unwrap(),
|
||||
temp_dir.url().join("root1/mod6.mts").unwrap(),
|
||||
temp_dir.url().join("root1/mod7.d.mts").unwrap(),
|
||||
temp_dir.url().join("root1/mod8.json").unwrap(),
|
||||
temp_dir.url().join("root1/mod9.jsonc").unwrap(),
|
||||
temp_dir.url().join("root2/file1.ts").unwrap(),
|
||||
temp_dir.url().join("root4_parent/root4/main.ts").unwrap(),
|
||||
temp_dir.url().join("root1/folder/mod.ts").unwrap(),
|
||||
temp_dir.url().join("root2/folder/main.ts").unwrap(),
|
||||
temp_dir.url().join("root2/root2.1/main.ts").unwrap(),
|
||||
])
|
||||
);
|
||||
}
|
||||
|
|
|
@@ -17,7 +17,7 @@ pub struct TaskDefinition {
// TODO(nayeemrmn): Rename this to `command` in vscode_deno.
#[serde(rename = "detail")]
pub command: String,
pub source_uri: lsp::Url,
pub source_uri: lsp::Uri,
}

#[derive(Debug, Deserialize, Serialize)]
@@ -75,8 +75,8 @@ pub enum DenoConfigurationType {
#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DenoConfigurationChangeEvent {
pub scope_uri: lsp::Url,
pub file_uri: lsp::Url,
pub scope_uri: lsp::Uri,
pub file_uri: lsp::Uri,
#[serde(rename = "type")]
pub typ: DenoConfigurationChangeType,
pub configuration_type: DenoConfigurationType,
@ -1,6 +1,7 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
use deno_ast::LineAndColumnIndex;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
|
@ -8,6 +9,7 @@ use deno_ast::SourceTextInfo;
|
|||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use lsp_types::Uri;
|
||||
use tower_lsp::lsp_types::ClientCapabilities;
|
||||
use tower_lsp::lsp_types::ClientInfo;
|
||||
use tower_lsp::lsp_types::CompletionContext;
|
||||
|
@ -40,6 +42,7 @@ use super::config::LanguageWorkspaceSettings;
|
|||
use super::config::ObjectLiteralMethodSnippets;
|
||||
use super::config::TestingSettings;
|
||||
use super::config::WorkspaceSettings;
|
||||
use super::urls::url_to_uri;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ReplCompletionItem {
|
||||
|
@ -73,7 +76,7 @@ impl ReplLanguageServer {
|
|||
.initialize(InitializeParams {
|
||||
process_id: None,
|
||||
root_path: None,
|
||||
root_uri: Some(cwd_uri.clone()),
|
||||
root_uri: Some(url_to_uri(&cwd_uri).unwrap()),
|
||||
initialization_options: Some(
|
||||
serde_json::to_value(get_repl_workspace_settings()).unwrap(),
|
||||
),
|
||||
|
@ -84,6 +87,7 @@ impl ReplLanguageServer {
|
|||
general: None,
|
||||
experimental: None,
|
||||
offset_encoding: None,
|
||||
notebook_document: None,
|
||||
},
|
||||
trace: None,
|
||||
workspace_folders: None,
|
||||
|
@ -92,6 +96,7 @@ impl ReplLanguageServer {
|
|||
version: None,
|
||||
}),
|
||||
locale: None,
|
||||
work_done_progress_params: Default::default(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
|
@ -133,7 +138,7 @@ impl ReplLanguageServer {
|
|||
.completion(CompletionParams {
|
||||
text_document_position: TextDocumentPositionParams {
|
||||
text_document: TextDocumentIdentifier {
|
||||
uri: self.get_document_specifier(),
|
||||
uri: self.get_document_uri(),
|
||||
},
|
||||
position: Position {
|
||||
line: line_and_column.line_index as u32,
|
||||
|
@ -208,7 +213,7 @@ impl ReplLanguageServer {
|
|||
.language_server
|
||||
.did_change(DidChangeTextDocumentParams {
|
||||
text_document: VersionedTextDocumentIdentifier {
|
||||
uri: self.get_document_specifier(),
|
||||
uri: self.get_document_uri(),
|
||||
version: self.document_version,
|
||||
},
|
||||
content_changes: vec![TextDocumentContentChangeEvent {
|
||||
|
@ -233,7 +238,7 @@ impl ReplLanguageServer {
|
|||
.language_server
|
||||
.did_close(DidCloseTextDocumentParams {
|
||||
text_document: TextDocumentIdentifier {
|
||||
uri: self.get_document_specifier(),
|
||||
uri: self.get_document_uri(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
|
@ -248,7 +253,7 @@ impl ReplLanguageServer {
|
|||
.language_server
|
||||
.did_open(DidOpenTextDocumentParams {
|
||||
text_document: TextDocumentItem {
|
||||
uri: self.get_document_specifier(),
|
||||
uri: self.get_document_uri(),
|
||||
language_id: "typescript".to_string(),
|
||||
version: self.document_version,
|
||||
text: format!("{}{}", self.document_text, self.pending_text),
|
||||
|
@ -257,8 +262,8 @@ impl ReplLanguageServer {
|
|||
.await;
|
||||
}
|
||||
|
||||
fn get_document_specifier(&self) -> ModuleSpecifier {
|
||||
self.cwd_uri.join("$deno$repl.ts").unwrap()
|
||||
fn get_document_uri(&self) -> Uri {
|
||||
Uri::from_str(self.cwd_uri.join("$deno$repl.ts").unwrap().as_str()).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,10 +5,12 @@ use super::lsp_custom::TestData;
|
|||
|
||||
use crate::lsp::client::TestingNotification;
|
||||
use crate::lsp::logging::lsp_warn;
|
||||
use crate::lsp::urls::url_to_uri;
|
||||
use crate::tools::test::TestDescription;
|
||||
use crate::tools::test::TestStepDescription;
|
||||
use crate::util::checksum;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use lsp::Range;
|
||||
use std::collections::HashMap;
|
||||
|
@ -143,21 +145,23 @@ impl TestModule {
|
|||
pub fn as_replace_notification(
|
||||
&self,
|
||||
maybe_root_uri: Option<&ModuleSpecifier>,
|
||||
) -> TestingNotification {
|
||||
) -> Result<TestingNotification, AnyError> {
|
||||
let label = self.label(maybe_root_uri);
|
||||
TestingNotification::Module(lsp_custom::TestModuleNotificationParams {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: self.specifier.clone(),
|
||||
Ok(TestingNotification::Module(
|
||||
lsp_custom::TestModuleNotificationParams {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: url_to_uri(&self.specifier)?,
|
||||
},
|
||||
kind: lsp_custom::TestModuleNotificationKind::Replace,
|
||||
label,
|
||||
tests: self
|
||||
.defs
|
||||
.iter()
|
||||
.filter(|(_, def)| def.parent_id.is_none())
|
||||
.map(|(id, _)| self.get_test_data(id))
|
||||
.collect(),
|
||||
},
|
||||
kind: lsp_custom::TestModuleNotificationKind::Replace,
|
||||
label,
|
||||
tests: self
|
||||
.defs
|
||||
.iter()
|
||||
.filter(|(_, def)| def.parent_id.is_none())
|
||||
.map(|(id, _)| self.get_test_data(id))
|
||||
.collect(),
|
||||
})
|
||||
))
|
||||
}
|
||||
|
||||
pub fn label(&self, maybe_root_uri: Option<&ModuleSpecifier>) -> String {
|
||||
|
|
|
@ -12,6 +12,8 @@ use crate::lsp::client::Client;
|
|||
use crate::lsp::client::TestingNotification;
|
||||
use crate::lsp::config;
|
||||
use crate::lsp::logging::lsp_log;
|
||||
use crate::lsp::urls::uri_to_url;
|
||||
use crate::lsp::urls::url_to_uri;
|
||||
use crate::tools::test;
|
||||
use crate::tools::test::create_test_event_channel;
|
||||
use crate::tools::test::FailFastTracker;
|
||||
|
@ -30,9 +32,11 @@ use deno_core::ModuleSpecifier;
|
|||
use deno_runtime::deno_permissions::Permissions;
|
||||
use deno_runtime::tokio_util::create_and_run_current_thread;
|
||||
use indexmap::IndexMap;
|
||||
use lsp_types::Uri;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::num::NonZeroUsize;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
|
@ -53,12 +57,12 @@ fn as_queue_and_filters(
|
|||
|
||||
if let Some(include) = ¶ms.include {
|
||||
for item in include {
|
||||
if let Some((test_definitions, _)) = tests.get(&item.text_document.uri) {
|
||||
queue.insert(item.text_document.uri.clone());
|
||||
let url = uri_to_url(&item.text_document.uri);
|
||||
if let Some((test_definitions, _)) = tests.get(&url) {
|
||||
queue.insert(url.clone());
|
||||
if let Some(id) = &item.id {
|
||||
if let Some(test) = test_definitions.get(id) {
|
||||
let filter =
|
||||
filters.entry(item.text_document.uri.clone()).or_default();
|
||||
let filter = filters.entry(url).or_default();
|
||||
if let Some(include) = filter.include.as_mut() {
|
||||
include.insert(test.id.clone(), test.clone());
|
||||
} else {
|
||||
|
@ -75,19 +79,19 @@ fn as_queue_and_filters(
|
|||
}
|
||||
|
||||
for item in ¶ms.exclude {
|
||||
if let Some((test_definitions, _)) = tests.get(&item.text_document.uri) {
|
||||
let url = uri_to_url(&item.text_document.uri);
|
||||
if let Some((test_definitions, _)) = tests.get(&url) {
|
||||
if let Some(id) = &item.id {
|
||||
// there is no way to exclude a test step
|
||||
if item.step_id.is_none() {
|
||||
if let Some(test) = test_definitions.get(id) {
|
||||
let filter =
|
||||
filters.entry(item.text_document.uri.clone()).or_default();
|
||||
let filter = filters.entry(url.clone()).or_default();
|
||||
filter.exclude.insert(test.id.clone(), test.clone());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// the entire test module is excluded
|
||||
queue.remove(&item.text_document.uri);
|
||||
queue.remove(&url);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -182,7 +186,7 @@ impl TestRun {
|
|||
self
|
||||
.queue
|
||||
.iter()
|
||||
.map(|s| {
|
||||
.filter_map(|s| {
|
||||
let ids = if let Some((test_module, _)) = tests.get(s) {
|
||||
if let Some(filter) = self.filters.get(s) {
|
||||
filter.as_ids(test_module)
|
||||
|
@ -192,10 +196,12 @@ impl TestRun {
|
|||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
lsp_custom::EnqueuedTestModule {
|
||||
text_document: lsp::TextDocumentIdentifier { uri: s.clone() },
|
||||
Some(lsp_custom::EnqueuedTestModule {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: url_to_uri(s).ok()?,
|
||||
},
|
||||
ids,
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
@ -523,7 +529,7 @@ impl LspTestDescription {
|
|||
&self,
|
||||
tests: &IndexMap<usize, LspTestDescription>,
|
||||
) -> lsp_custom::TestIdentifier {
|
||||
let uri = ModuleSpecifier::parse(&self.location().file_name).unwrap();
|
||||
let uri = Uri::from_str(&self.location().file_name).unwrap();
|
||||
let static_id = self.static_id();
|
||||
let mut root_desc = self;
|
||||
while let Some(parent_id) = root_desc.parent_id() {
|
||||
|
@ -587,6 +593,9 @@ impl LspTestReporter {
|
|||
let (test_module, _) = files
|
||||
.entry(specifier.clone())
|
||||
.or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
|
||||
let Ok(uri) = url_to_uri(&test_module.specifier) else {
|
||||
return;
|
||||
};
|
||||
let (static_id, is_new) = test_module.register_dynamic(desc);
|
||||
self.tests.insert(
|
||||
desc.id,
|
||||
|
@ -597,9 +606,7 @@ impl LspTestReporter {
|
|||
.client
|
||||
.send_test_notification(TestingNotification::Module(
|
||||
lsp_custom::TestModuleNotificationParams {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: test_module.specifier.clone(),
|
||||
},
|
||||
text_document: lsp::TextDocumentIdentifier { uri },
|
||||
kind: lsp_custom::TestModuleNotificationKind::Insert,
|
||||
label: test_module.label(self.maybe_root_uri.as_ref()),
|
||||
tests: vec![test_module.get_test_data(&static_id)],
|
||||
|
@ -697,6 +704,9 @@ impl LspTestReporter {
|
|||
let (test_module, _) = files
|
||||
.entry(specifier.clone())
|
||||
.or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
|
||||
let Ok(uri) = url_to_uri(&test_module.specifier) else {
|
||||
return;
|
||||
};
|
||||
let (static_id, is_new) = test_module.register_step_dynamic(
|
||||
desc,
|
||||
self.tests.get(&desc.parent_id).unwrap().static_id(),
|
||||
|
@ -710,9 +720,7 @@ impl LspTestReporter {
|
|||
.client
|
||||
.send_test_notification(TestingNotification::Module(
|
||||
lsp_custom::TestModuleNotificationParams {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: test_module.specifier.clone(),
|
||||
},
|
||||
text_document: lsp::TextDocumentIdentifier { uri },
|
||||
kind: lsp_custom::TestModuleNotificationKind::Insert,
|
||||
label: test_module.label(self.maybe_root_uri.as_ref()),
|
||||
tests: vec![test_module.get_test_data(&static_id)],
|
||||
|
@ -796,14 +804,14 @@ mod tests {
|
|||
include: Some(vec![
|
||||
lsp_custom::TestIdentifier {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: specifier.clone(),
|
||||
uri: url_to_uri(&specifier).unwrap(),
|
||||
},
|
||||
id: None,
|
||||
step_id: None,
|
||||
},
|
||||
lsp_custom::TestIdentifier {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: non_test_specifier.clone(),
|
||||
uri: url_to_uri(&non_test_specifier).unwrap(),
|
||||
},
|
||||
id: None,
|
||||
step_id: None,
|
||||
|
@ -811,7 +819,7 @@ mod tests {
|
|||
]),
|
||||
exclude: vec![lsp_custom::TestIdentifier {
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: specifier.clone(),
|
||||
uri: url_to_uri(&specifier).unwrap(),
|
||||
},
|
||||
id: Some(
|
||||
"69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f"
|
||||
|
|
|
@ -10,6 +10,7 @@ use crate::lsp::config;
|
|||
use crate::lsp::documents::DocumentsFilter;
|
||||
use crate::lsp::language_server::StateSnapshot;
|
||||
use crate::lsp::performance::Performance;
|
||||
use crate::lsp::urls::url_to_uri;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
|
@ -26,12 +27,16 @@ use tower_lsp::jsonrpc::Error as LspError;
|
|||
use tower_lsp::jsonrpc::Result as LspResult;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
|
||||
fn as_delete_notification(uri: ModuleSpecifier) -> TestingNotification {
|
||||
TestingNotification::DeleteModule(
|
||||
fn as_delete_notification(
|
||||
url: &ModuleSpecifier,
|
||||
) -> Result<TestingNotification, AnyError> {
|
||||
Ok(TestingNotification::DeleteModule(
|
||||
lsp_custom::TestModuleDeleteNotificationParams {
|
||||
text_document: lsp::TextDocumentIdentifier { uri },
|
||||
text_document: lsp::TextDocumentIdentifier {
|
||||
uri: url_to_uri(url)?,
|
||||
},
|
||||
},
|
||||
)
|
||||
))
|
||||
}
|
||||
|
||||
pub type TestServerTests =
|
||||
|
@ -123,20 +128,24 @@ impl TestServer {
|
|||
.map(|tm| tm.as_ref().clone())
|
||||
.unwrap_or_else(|| TestModule::new(specifier.clone()));
|
||||
if !test_module.is_empty() {
|
||||
client.send_test_notification(
|
||||
test_module.as_replace_notification(mru.as_ref()),
|
||||
);
|
||||
if let Ok(params) =
|
||||
test_module.as_replace_notification(mru.as_ref())
|
||||
{
|
||||
client.send_test_notification(params);
|
||||
}
|
||||
} else if !was_empty {
|
||||
client.send_test_notification(as_delete_notification(
|
||||
specifier.clone(),
|
||||
));
|
||||
if let Ok(params) = as_delete_notification(specifier) {
|
||||
client.send_test_notification(params);
|
||||
}
|
||||
}
|
||||
tests
|
||||
.insert(specifier.clone(), (test_module, script_version));
|
||||
}
|
||||
}
|
||||
for key in keys {
|
||||
client.send_test_notification(as_delete_notification(key));
|
||||
for key in &keys {
|
||||
if let Ok(params) = as_delete_notification(key) {
|
||||
client.send_test_notification(params);
|
||||
}
|
||||
}
|
||||
performance.measure(mark);
|
||||
}
|
||||
|
|
104
cli/lsp/tsc.rs
@@ -19,8 +19,10 @@ use super::refactor::EXTRACT_TYPE;
use super::semantic_tokens;
use super::semantic_tokens::SemanticTokensBuilder;
use super::text::LineIndex;
use super::urls::LspClientUrl;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use super::urls::INVALID_SPECIFIER;
use super::urls::INVALID_URI;

use crate::args::jsr_url;
use crate::args::FmtOptionsConfig;
@ -2046,7 +2048,7 @@ impl DocumentSpan {
|
|||
let file_referrer = target_asset_or_doc.file_referrer();
|
||||
let target_uri = language_server
|
||||
.url_map
|
||||
.normalize_specifier(&target_specifier, file_referrer)
|
||||
.specifier_to_uri(&target_specifier, file_referrer)
|
||||
.ok()?;
|
||||
let (target_range, target_selection_range) =
|
||||
if let Some(context_span) = &self.context_span {
|
||||
|
@ -2071,7 +2073,7 @@ impl DocumentSpan {
|
|||
};
|
||||
let link = lsp::LocationLink {
|
||||
origin_selection_range,
|
||||
target_uri: target_uri.into_url(),
|
||||
target_uri,
|
||||
target_range,
|
||||
target_selection_range,
|
||||
};
|
||||
|
@ -2091,11 +2093,11 @@ impl DocumentSpan {
|
|||
let line_index = asset_or_doc.line_index();
|
||||
let range = self.text_span.to_range(line_index);
|
||||
let file_referrer = asset_or_doc.file_referrer();
|
||||
let mut target = language_server
|
||||
let target_uri = language_server
|
||||
.url_map
|
||||
.normalize_specifier(&specifier, file_referrer)
|
||||
.ok()?
|
||||
.into_url();
|
||||
.specifier_to_uri(&specifier, file_referrer)
|
||||
.ok()?;
|
||||
let mut target = uri_to_url(&target_uri);
|
||||
target.set_fragment(Some(&format!(
|
||||
"L{},{}",
|
||||
range.start.line + 1,
|
||||
|
@ -2154,13 +2156,10 @@ impl NavigateToItem {
|
|||
let file_referrer = asset_or_doc.file_referrer();
|
||||
let uri = language_server
|
||||
.url_map
|
||||
.normalize_specifier(&specifier, file_referrer)
|
||||
.specifier_to_uri(&specifier, file_referrer)
|
||||
.ok()?;
|
||||
let range = self.text_span.to_range(line_index);
|
||||
let location = lsp::Location {
|
||||
uri: uri.into_url(),
|
||||
range,
|
||||
};
|
||||
let location = lsp::Location { uri, range };
|
||||
|
||||
let mut tags: Option<Vec<lsp::SymbolTag>> = None;
|
||||
let kind_modifiers = parse_kind_modifier(&self.kind_modifiers);
|
||||
|
@ -2413,12 +2412,10 @@ impl ImplementationLocation {
|
|||
let file_referrer = language_server.documents.get_file_referrer(&specifier);
|
||||
let uri = language_server
|
||||
.url_map
|
||||
.normalize_specifier(&specifier, file_referrer.as_deref())
|
||||
.unwrap_or_else(|_| {
|
||||
LspClientUrl::new(ModuleSpecifier::parse("deno://invalid").unwrap())
|
||||
});
|
||||
.specifier_to_uri(&specifier, file_referrer.as_deref())
|
||||
.unwrap_or_else(|_| INVALID_URI.clone());
|
||||
lsp::Location {
|
||||
uri: uri.into_url(),
|
||||
uri,
|
||||
range: self.document_span.text_span.to_range(line_index),
|
||||
}
|
||||
}
|
||||
|
@ -2474,7 +2471,7 @@ impl RenameLocations {
|
|||
language_server.documents.get_file_referrer(&specifier);
|
||||
let uri = language_server
|
||||
.url_map
|
||||
.normalize_specifier(&specifier, file_referrer.as_deref())?;
|
||||
.specifier_to_uri(&specifier, file_referrer.as_deref())?;
|
||||
let asset_or_doc = language_server.get_asset_or_document(&specifier)?;
|
||||
|
||||
// ensure TextDocumentEdit for `location.file_name`.
|
||||
|
@ -2483,7 +2480,7 @@ impl RenameLocations {
|
|||
uri.clone(),
|
||||
lsp::TextDocumentEdit {
|
||||
text_document: lsp::OptionalVersionedTextDocumentIdentifier {
|
||||
uri: uri.as_url().clone(),
|
||||
uri: uri.clone(),
|
||||
version: asset_or_doc.document_lsp_version(),
|
||||
},
|
||||
edits:
|
||||
|
@ -2685,7 +2682,7 @@ impl FileTextChanges {
|
|||
.collect();
|
||||
Ok(lsp::TextDocumentEdit {
|
||||
text_document: lsp::OptionalVersionedTextDocumentIdentifier {
|
||||
uri: specifier,
|
||||
uri: url_to_uri(&specifier)?,
|
||||
version: asset_or_doc.document_lsp_version(),
|
||||
},
|
||||
edits,
|
||||
|
@ -2712,7 +2709,7 @@ impl FileTextChanges {
|
|||
if self.is_new_file.unwrap_or(false) {
|
||||
ops.push(lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(
|
||||
lsp::CreateFile {
|
||||
uri: specifier.clone(),
|
||||
uri: url_to_uri(&specifier)?,
|
||||
options: Some(lsp::CreateFileOptions {
|
||||
ignore_if_exists: Some(true),
|
||||
overwrite: None,
|
||||
|
@ -2729,7 +2726,7 @@ impl FileTextChanges {
|
|||
.collect();
|
||||
ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
|
||||
text_document: lsp::OptionalVersionedTextDocumentIdentifier {
|
||||
uri: specifier,
|
||||
uri: url_to_uri(&specifier)?,
|
||||
version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()),
|
||||
},
|
||||
edits,
|
||||
|
@ -3127,10 +3124,10 @@ impl ReferenceEntry {
|
|||
let file_referrer = language_server.documents.get_file_referrer(&specifier);
|
||||
let uri = language_server
|
||||
.url_map
|
||||
.normalize_specifier(&specifier, file_referrer.as_deref())
|
||||
.unwrap_or_else(|_| LspClientUrl::new(INVALID_SPECIFIER.clone()));
|
||||
.specifier_to_uri(&specifier, file_referrer.as_deref())
|
||||
.unwrap_or_else(|_| INVALID_URI.clone());
|
||||
lsp::Location {
|
||||
uri: uri.into_url(),
|
||||
uri,
|
||||
range: self.document_span.text_span.to_range(line_index),
|
||||
}
|
||||
}
|
||||
|
@ -3188,12 +3185,13 @@ impl CallHierarchyItem {
|
|||
.get_file_referrer(&target_specifier);
|
||||
let uri = language_server
|
||||
.url_map
|
||||
.normalize_specifier(&target_specifier, file_referrer.as_deref())
|
||||
.unwrap_or_else(|_| LspClientUrl::new(INVALID_SPECIFIER.clone()));
|
||||
.specifier_to_uri(&target_specifier, file_referrer.as_deref())
|
||||
.unwrap_or_else(|_| INVALID_URI.clone());
|
||||
|
||||
let use_file_name = self.is_source_file_item();
|
||||
let maybe_file_path = if uri.as_url().scheme() == "file" {
|
||||
specifier_to_file_path(uri.as_url()).ok()
|
||||
let maybe_file_path = if uri.scheme().is_some_and(|s| s.as_str() == "file")
|
||||
{
|
||||
specifier_to_file_path(&uri_to_url(&uri)).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
@ -3237,7 +3235,7 @@ impl CallHierarchyItem {
|
|||
lsp::CallHierarchyItem {
|
||||
name,
|
||||
tags,
|
||||
uri: uri.into_url(),
|
||||
uri,
|
||||
detail: Some(detail),
|
||||
kind: self.kind.clone().into(),
|
||||
range: self.span.to_range(line_index.clone()),
|
||||
|
@ -5398,7 +5396,7 @@ mod tests {
|
|||
sources: &[(&str, &str, i32, LanguageId)],
|
||||
) -> (TempDir, TsServer, Arc<StateSnapshot>, LspCache) {
|
||||
let temp_dir = TempDir::new();
|
||||
let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap()));
|
||||
let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
|
||||
let mut config = Config::default();
|
||||
config
|
||||
.tree
|
||||
|
@ -5408,7 +5406,7 @@ mod tests {
|
|||
"compilerOptions": ts_config,
|
||||
})
|
||||
.to_string(),
|
||||
temp_dir.uri().join("deno.json").unwrap(),
|
||||
temp_dir.url().join("deno.json").unwrap(),
|
||||
&Default::default(),
|
||||
)
|
||||
.unwrap(),
|
||||
|
@ -5419,7 +5417,7 @@ mod tests {
|
|||
let mut documents = Documents::default();
|
||||
documents.update_config(&config, &resolver, &cache, &Default::default());
|
||||
for (relative_specifier, source, version, language_id) in sources {
|
||||
let specifier = temp_dir.uri().join(relative_specifier).unwrap();
|
||||
let specifier = temp_dir.url().join(relative_specifier).unwrap();
|
||||
documents.open(specifier, *version, *language_id, (*source).into(), None);
|
||||
}
|
||||
let snapshot = Arc::new(StateSnapshot {
|
||||
|
@ -5489,7 +5487,7 @@ mod tests {
|
|||
)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
|
||||
.await
|
||||
|
@ -5536,7 +5534,7 @@ mod tests {
|
|||
)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
|
||||
.await
|
||||
|
@ -5567,7 +5565,7 @@ mod tests {
|
|||
)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
|
||||
.await
|
||||
|
@ -5594,7 +5592,7 @@ mod tests {
|
|||
)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
|
||||
.await
|
||||
|
@ -5644,7 +5642,7 @@ mod tests {
|
|||
)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
|
||||
.await
|
||||
|
@ -5678,7 +5676,7 @@ mod tests {
|
|||
)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
|
||||
.await
|
||||
|
@ -5736,7 +5734,7 @@ mod tests {
|
|||
)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(snapshot, vec![specifier.clone()], Default::default())
|
||||
.await
|
||||
|
@ -5829,7 +5827,7 @@ mod tests {
|
|||
b"export const b = \"b\";\n",
|
||||
)
|
||||
.unwrap();
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(
|
||||
snapshot.clone(),
|
||||
|
@ -5879,7 +5877,7 @@ mod tests {
|
|||
[(&specifier_dep, ChangeKind::Opened)],
|
||||
None,
|
||||
);
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let diagnostics = ts_server
|
||||
.get_diagnostics(
|
||||
snapshot.clone(),
|
||||
|
@ -5951,7 +5949,7 @@ mod tests {
|
|||
&[("a.ts", fixture, 1, LanguageId::TypeScript)],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let info = ts_server
|
||||
.get_completions(
|
||||
snapshot.clone(),
|
||||
|
@ -5966,7 +5964,7 @@ mod tests {
|
|||
trigger_kind: None,
|
||||
},
|
||||
Default::default(),
|
||||
Some(temp_dir.uri()),
|
||||
Some(temp_dir.url()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
@ -5983,7 +5981,7 @@ mod tests {
|
|||
preferences: None,
|
||||
data: None,
|
||||
},
|
||||
Some(temp_dir.uri()),
|
||||
Some(temp_dir.url()),
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
|
@ -6105,7 +6103,7 @@ mod tests {
|
|||
],
|
||||
)
|
||||
.await;
|
||||
let specifier = temp_dir.uri().join("a.ts").unwrap();
|
||||
let specifier = temp_dir.url().join("a.ts").unwrap();
|
||||
let fmt_options_config = FmtOptionsConfig {
|
||||
semi_colons: Some(false),
|
||||
single_quote: Some(true),
|
||||
|
@ -6126,7 +6124,7 @@ mod tests {
|
|||
..Default::default()
|
||||
},
|
||||
FormatCodeSettings::from(&fmt_options_config),
|
||||
Some(temp_dir.uri()),
|
||||
Some(temp_dir.url()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
@ -6152,7 +6150,7 @@ mod tests {
|
|||
}),
|
||||
data: entry.data.clone(),
|
||||
},
|
||||
Some(temp_dir.uri()),
|
||||
Some(temp_dir.url()),
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
|
@ -6217,8 +6215,8 @@ mod tests {
|
|||
let changes = ts_server
|
||||
.get_edits_for_file_rename(
|
||||
snapshot,
|
||||
temp_dir.uri().join("b.ts").unwrap(),
|
||||
temp_dir.uri().join("🦕.ts").unwrap(),
|
||||
temp_dir.url().join("b.ts").unwrap(),
|
||||
temp_dir.url().join("🦕.ts").unwrap(),
|
||||
FormatCodeSettings::default(),
|
||||
UserPreferences::default(),
|
||||
)
|
||||
|
@ -6227,7 +6225,7 @@ mod tests {
|
|||
assert_eq!(
|
||||
changes,
|
||||
vec![FileTextChanges {
|
||||
file_name: temp_dir.uri().join("a.ts").unwrap().to_string(),
|
||||
file_name: temp_dir.url().join("a.ts").unwrap().to_string(),
|
||||
text_changes: vec![TextChange {
|
||||
span: TextSpan {
|
||||
start: 8,
|
||||
|
@ -6286,7 +6284,7 @@ mod tests {
|
|||
let resolved = op_resolve_inner(
|
||||
&mut state,
|
||||
ResolveArgs {
|
||||
base: temp_dir.uri().join("a.ts").unwrap().to_string(),
|
||||
base: temp_dir.url().join("a.ts").unwrap().to_string(),
|
||||
is_base_cjs: false,
|
||||
specifiers: vec!["./b.ts".to_string()],
|
||||
},
|
||||
|
@ -6295,7 +6293,7 @@ mod tests {
|
|||
assert_eq!(
|
||||
resolved,
|
||||
vec![Some((
|
||||
temp_dir.uri().join("b.ts").unwrap().to_string(),
|
||||
temp_dir.url().join("b.ts").unwrap().to_string(),
|
||||
MediaType::TypeScript.as_ts_extension().to_string()
|
||||
))]
|
||||
);
|
||||
|
|
185 cli/lsp/urls.rs
|
@ -6,17 +6,25 @@ use deno_core::parking_lot::Mutex;
|
|||
use deno_core::url::Position;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use lsp_types::Uri;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::cache::LspCache;
|
||||
use super::logging::lsp_warn;
|
||||
|
||||
/// Used in situations where a default URL needs to be used where otherwise a
|
||||
/// panic is undesired.
|
||||
pub static INVALID_SPECIFIER: Lazy<ModuleSpecifier> =
|
||||
Lazy::new(|| ModuleSpecifier::parse("deno://invalid").unwrap());
|
||||
|
||||
/// Used in situations where a default URL needs to be used where otherwise a
|
||||
/// panic is undesired.
|
||||
pub static INVALID_URI: Lazy<Uri> =
|
||||
Lazy::new(|| Uri::from_str("deno://invalid").unwrap());
|
||||
|
||||
/// Matches the `encodeURIComponent()` encoding from JavaScript, which matches
|
||||
/// the component percent encoding set.
|
||||
///
|
||||
|
@ -56,7 +64,7 @@ fn hash_data_specifier(specifier: &ModuleSpecifier) -> String {
|
|||
crate::util::checksum::gen(&[file_name_str.as_bytes()])
|
||||
}
|
||||
|
||||
fn to_deno_url(specifier: &Url) -> String {
|
||||
fn to_deno_uri(specifier: &Url) -> String {
|
||||
let mut string = String::with_capacity(specifier.as_str().len() + 6);
|
||||
string.push_str("deno:/");
|
||||
string.push_str(specifier.scheme());
|
||||
|
@ -93,58 +101,37 @@ fn from_deno_url(url: &Url) -> Option<Url> {
|
|||
Url::parse(&string).ok()
|
||||
}
|
||||
|
||||
/// This exists to make it a little bit harder to accidentally use a `Url`
|
||||
/// in the wrong place where a client url should be used.
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
|
||||
pub struct LspClientUrl(Url);
|
||||
|
||||
impl LspClientUrl {
|
||||
pub fn new(url: Url) -> Self {
|
||||
Self(url)
|
||||
}
|
||||
|
||||
pub fn as_url(&self) -> &Url {
|
||||
&self.0
|
||||
}
|
||||
|
||||
pub fn into_url(self) -> Url {
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for LspClientUrl {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct LspUrlMapInner {
|
||||
specifier_to_url: HashMap<ModuleSpecifier, LspClientUrl>,
|
||||
url_to_specifier: HashMap<Url, ModuleSpecifier>,
|
||||
specifier_to_uri: HashMap<ModuleSpecifier, Uri>,
|
||||
uri_to_specifier: HashMap<Uri, ModuleSpecifier>,
|
||||
}
|
||||
|
||||
impl LspUrlMapInner {
|
||||
fn put(&mut self, specifier: ModuleSpecifier, url: LspClientUrl) {
|
||||
self
|
||||
.url_to_specifier
|
||||
.insert(url.as_url().clone(), specifier.clone());
|
||||
self.specifier_to_url.insert(specifier, url);
|
||||
fn put(&mut self, specifier: ModuleSpecifier, uri: Uri) {
|
||||
self.uri_to_specifier.insert(uri.clone(), specifier.clone());
|
||||
self.specifier_to_uri.insert(specifier, uri);
|
||||
}
|
||||
|
||||
fn get_url(&self, specifier: &ModuleSpecifier) -> Option<&LspClientUrl> {
|
||||
self.specifier_to_url.get(specifier)
|
||||
fn get_uri(&self, specifier: &ModuleSpecifier) -> Option<&Uri> {
|
||||
self.specifier_to_uri.get(specifier)
|
||||
}
|
||||
|
||||
fn get_specifier(&self, url: &Url) -> Option<&ModuleSpecifier> {
|
||||
self.url_to_specifier.get(url)
|
||||
fn get_specifier(&self, uri: &Uri) -> Option<&ModuleSpecifier> {
|
||||
self.uri_to_specifier.get(uri)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn url_to_uri(url: &Url) -> Result<Uri, AnyError> {
|
||||
Ok(Uri::from_str(url.as_str()).inspect_err(|err| {
|
||||
lsp_warn!("Could not convert URL \"{url}\" to URI: {err}")
|
||||
})?)
|
||||
}
|
||||
|
||||
pub fn uri_to_url(uri: &Uri) -> Url {
|
||||
Url::parse(uri.as_str()).unwrap()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum LspUrlKind {
|
||||
File,
|
||||
|
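
The hunk above drops the LspClientUrl newtype in favor of direct conversions between deno_core's Url and lsp_types::Uri. A minimal sketch of that conversion pair, assuming only the url and lsp-types crates (the lsp_warn! logging in the real url_to_uri is omitted):

use std::str::FromStr;

use lsp_types::Uri;
use url::Url;

// Internal specifier (URL) -> the URI type handed to the LSP client.
fn url_to_uri(url: &Url) -> Result<Uri, String> {
  Uri::from_str(url.as_str())
    .map_err(|err| format!("could not convert {url} to URI: {err}"))
}

// Client URI -> internal URL; both are serialized RFC 3986 strings, so
// re-parsing the string form is sufficient for well-formed input.
fn uri_to_url(uri: &Uri) -> Url {
  Url::parse(uri.as_str()).expect("client sent a malformed URI")
}

fn main() {
  let url = Url::parse("file:///tmp/a.ts").unwrap();
  let uri = url_to_uri(&url).unwrap();
  assert_eq!(uri_to_url(&uri), url);
}
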
@ -167,24 +154,24 @@ impl LspUrlMap {
|
|||
|
||||
/// Normalize a specifier that is used internally within Deno (or tsc) to a
|
||||
/// URL that can be handled as a "virtual" document by an LSP client.
|
||||
pub fn normalize_specifier(
|
||||
pub fn specifier_to_uri(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Result<LspClientUrl, AnyError> {
|
||||
) -> Result<Uri, AnyError> {
|
||||
if let Some(file_url) =
|
||||
self.cache.vendored_specifier(specifier, file_referrer)
|
||||
{
|
||||
return Ok(LspClientUrl(file_url));
|
||||
return url_to_uri(&file_url);
|
||||
}
|
||||
let mut inner = self.inner.lock();
|
||||
if let Some(url) = inner.get_url(specifier).cloned() {
|
||||
Ok(url)
|
||||
if let Some(uri) = inner.get_uri(specifier).cloned() {
|
||||
Ok(uri)
|
||||
} else {
|
||||
let url = if specifier.scheme() == "file" {
|
||||
LspClientUrl(specifier.clone())
|
||||
let uri = if specifier.scheme() == "file" {
|
||||
url_to_uri(specifier)?
|
||||
} else {
|
||||
let specifier_str = if specifier.scheme() == "asset" {
|
||||
let uri_str = if specifier.scheme() == "asset" {
|
||||
format!("deno:/asset{}", specifier.path())
|
||||
} else if specifier.scheme() == "data" {
|
||||
let data_url = deno_graph::source::RawDataUrl::parse(specifier)?;
|
||||
|
@ -200,13 +187,13 @@ impl LspUrlMap {
|
|||
extension
|
||||
)
|
||||
} else {
|
||||
to_deno_url(specifier)
|
||||
to_deno_uri(specifier)
|
||||
};
|
||||
let url = LspClientUrl(Url::parse(&specifier_str)?);
|
||||
inner.put(specifier.clone(), url.clone());
|
||||
url
|
||||
let uri = Uri::from_str(&uri_str)?;
|
||||
inner.put(specifier.clone(), uri.clone());
|
||||
uri
|
||||
};
|
||||
Ok(url)
|
||||
Ok(uri)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -218,12 +205,17 @@ impl LspUrlMap {
|
|||
/// Note: Sometimes the url provided by the client may not have a trailing slash,
|
||||
/// so we need to force it to in the mapping and need to explicitly state whether
|
||||
/// this is a file or directory url.
|
||||
pub fn normalize_url(&self, url: &Url, kind: LspUrlKind) -> ModuleSpecifier {
|
||||
if let Some(remote_url) = self.cache.unvendored_specifier(url) {
|
||||
pub fn uri_to_specifier(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
kind: LspUrlKind,
|
||||
) -> ModuleSpecifier {
|
||||
let url = uri_to_url(uri);
|
||||
if let Some(remote_url) = self.cache.unvendored_specifier(&url) {
|
||||
return remote_url;
|
||||
}
|
||||
let mut inner = self.inner.lock();
|
||||
if let Some(specifier) = inner.get_specifier(url).cloned() {
|
||||
if let Some(specifier) = inner.get_specifier(uri).cloned() {
|
||||
return specifier;
|
||||
}
|
||||
let mut specifier = None;
|
||||
|
@ -234,13 +226,13 @@ impl LspUrlMap {
|
|||
LspUrlKind::File => Url::from_file_path(path).unwrap(),
|
||||
});
|
||||
}
|
||||
} else if let Some(s) = file_like_to_file_specifier(url) {
|
||||
} else if let Some(s) = file_like_to_file_specifier(&url) {
|
||||
specifier = Some(s);
|
||||
} else if let Some(s) = from_deno_url(url) {
|
||||
} else if let Some(s) = from_deno_url(&url) {
|
||||
specifier = Some(s);
|
||||
}
|
||||
let specifier = specifier.unwrap_or_else(|| url.clone());
|
||||
inner.put(specifier.clone(), LspClientUrl(url.clone()));
|
||||
inner.put(specifier.clone(), uri.clone());
|
||||
specifier
|
||||
}
|
||||
}
|
||||
|
@ -288,15 +280,14 @@ mod tests {
|
|||
fn test_lsp_url_map() {
|
||||
let map = LspUrlMap::default();
|
||||
let fixture = resolve_url("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap();
|
||||
let actual_url = map
|
||||
.normalize_specifier(&fixture, None)
|
||||
let actual_uri = map
|
||||
.specifier_to_uri(&fixture, None)
|
||||
.expect("could not handle specifier");
|
||||
let expected_url =
|
||||
Url::parse("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap();
|
||||
assert_eq!(actual_url.as_url(), &expected_url);
|
||||
|
||||
let actual_specifier =
|
||||
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
|
||||
assert_eq!(
|
||||
actual_uri.as_str(),
|
||||
"deno:/https/deno.land/x/pkg%401.0.0/mod.ts"
|
||||
);
|
||||
let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
|
||||
assert_eq!(actual_specifier, fixture);
|
||||
}
|
||||
|
||||
|
@ -304,18 +295,14 @@ mod tests {
|
|||
fn test_lsp_url_reverse() {
|
||||
let map = LspUrlMap::default();
|
||||
let fixture =
|
||||
resolve_url("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap();
|
||||
let actual_specifier = map.normalize_url(&fixture, LspUrlKind::File);
|
||||
Uri::from_str("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap();
|
||||
let actual_specifier = map.uri_to_specifier(&fixture, LspUrlKind::File);
|
||||
let expected_specifier =
|
||||
Url::parse("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap();
|
||||
assert_eq!(&actual_specifier, &expected_specifier);
|
||||
|
||||
let actual_url = map
|
||||
.normalize_specifier(&actual_specifier, None)
|
||||
.unwrap()
|
||||
.as_url()
|
||||
.clone();
|
||||
assert_eq!(actual_url, fixture);
|
||||
let actual_uri = map.specifier_to_uri(&actual_specifier, None).unwrap();
|
||||
assert_eq!(actual_uri, fixture);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -323,14 +310,11 @@ mod tests {
|
|||
// Test fix for #9741 - not properly encoding certain URLs
|
||||
let map = LspUrlMap::default();
|
||||
let fixture = resolve_url("https://cdn.skypack.dev/-/postcss@v8.2.9-E4SktPp9c0AtxrJHp8iV/dist=es2020,mode=types/lib/postcss.d.ts").unwrap();
|
||||
let actual_url = map
|
||||
.normalize_specifier(&fixture, None)
|
||||
let actual_uri = map
|
||||
.specifier_to_uri(&fixture, None)
|
||||
.expect("could not handle specifier");
|
||||
let expected_url = Url::parse("deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts").unwrap();
|
||||
assert_eq!(actual_url.as_url(), &expected_url);
|
||||
|
||||
let actual_specifier =
|
||||
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
|
||||
assert_eq!(actual_uri.as_str(), "deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts");
|
||||
let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
|
||||
assert_eq!(actual_specifier, fixture);
|
||||
}
|
||||
|
||||
|
@ -338,14 +322,13 @@ mod tests {
|
|||
fn test_lsp_url_map_data() {
|
||||
let map = LspUrlMap::default();
|
||||
let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap();
|
||||
let actual_url = map
|
||||
.normalize_specifier(&fixture, None)
|
||||
let actual_uri = map
|
||||
.specifier_to_uri(&fixture, None)
|
||||
.expect("could not handle specifier");
|
||||
let expected_url = Url::parse("deno:/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37/data_url.ts").unwrap();
|
||||
assert_eq!(actual_url.as_url(), &expected_url);
|
||||
assert_eq!(&uri_to_url(&actual_uri), &expected_url);
|
||||
|
||||
let actual_specifier =
|
||||
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
|
||||
let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
|
||||
assert_eq!(actual_specifier, fixture);
|
||||
}
|
||||
|
||||
|
@ -353,15 +336,11 @@ mod tests {
|
|||
fn test_lsp_url_map_host_with_port() {
|
||||
let map = LspUrlMap::default();
|
||||
let fixture = resolve_url("http://localhost:8000/mod.ts").unwrap();
|
||||
let actual_url = map
|
||||
.normalize_specifier(&fixture, None)
|
||||
let actual_uri = map
|
||||
.specifier_to_uri(&fixture, None)
|
||||
.expect("could not handle specifier");
|
||||
let expected_url =
|
||||
Url::parse("deno:/http/localhost%3A8000/mod.ts").unwrap();
|
||||
assert_eq!(actual_url.as_url(), &expected_url);
|
||||
|
||||
let actual_specifier =
|
||||
map.normalize_url(actual_url.as_url(), LspUrlKind::File);
|
||||
assert_eq!(actual_uri.as_str(), "deno:/http/localhost%3A8000/mod.ts");
|
||||
let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File);
|
||||
assert_eq!(actual_specifier, fixture);
|
||||
}
|
||||
|
||||
|
@ -369,11 +348,11 @@ mod tests {
|
|||
#[test]
|
||||
fn test_normalize_windows_path() {
|
||||
let map = LspUrlMap::default();
|
||||
let fixture = resolve_url(
|
||||
let fixture = Uri::from_str(
|
||||
"file:///c%3A/Users/deno/Desktop/file%20with%20spaces%20in%20name.txt",
|
||||
)
|
||||
.unwrap();
|
||||
let actual = map.normalize_url(&fixture, LspUrlKind::File);
|
||||
let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
|
||||
let expected =
|
||||
Url::parse("file:///C:/Users/deno/Desktop/file with spaces in name.txt")
|
||||
.unwrap();
|
||||
|
@ -384,11 +363,11 @@ mod tests {
|
|||
#[test]
|
||||
fn test_normalize_percent_encoded_path() {
|
||||
let map = LspUrlMap::default();
|
||||
let fixture = resolve_url(
|
||||
let fixture = Uri::from_str(
|
||||
"file:///Users/deno/Desktop/file%20with%20spaces%20in%20name.txt",
|
||||
)
|
||||
.unwrap();
|
||||
let actual = map.normalize_url(&fixture, LspUrlKind::File);
|
||||
let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
|
||||
let expected =
|
||||
Url::parse("file:///Users/deno/Desktop/file with spaces in name.txt")
|
||||
.unwrap();
|
||||
|
@ -398,9 +377,9 @@ mod tests {
|
|||
#[test]
|
||||
fn test_normalize_deno_status() {
|
||||
let map = LspUrlMap::default();
|
||||
let fixture = resolve_url("deno:/status.md").unwrap();
|
||||
let actual = map.normalize_url(&fixture, LspUrlKind::File);
|
||||
assert_eq!(actual, fixture);
|
||||
let fixture = Uri::from_str("deno:/status.md").unwrap();
|
||||
let actual = map.uri_to_specifier(&fixture, LspUrlKind::File);
|
||||
assert_eq!(actual.as_str(), fixture.as_str());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
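
The Windows-path and percent-encoding tests above boil down to round-tripping a file URI through a filesystem path. A small illustration of that normalization using only the url crate, assuming a Unix-style host path (the real uri_to_specifier also consults the vendor cache and the deno:/ scheme handling):

use url::Url;

fn main() {
  let input =
    Url::parse("file:///Users/deno/Desktop/file%20with%20spaces%20in%20name.txt").unwrap();
  // Converting to a path decodes percent-escapes such as %20
  // (assumes a Unix-style absolute path; Windows drive handling is elided).
  let path = input.to_file_path().expect("not a file URL");
  assert!(path.ends_with("file with spaces in name.txt"));
  // Converting back re-encodes them consistently, which is what keeps the
  // specifier comparisons in the tests above stable.
  let normalized = Url::from_file_path(&path).unwrap();
  assert_eq!(normalized.as_str(), input.as_str());
}
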
61 cli/main.rs
|
@ -32,7 +32,6 @@ mod worker;
|
|||
use crate::args::flags_from_vec;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::args::Flags;
|
||||
use crate::args::DENO_FUTURE;
|
||||
use crate::graph_container::ModuleGraphContainer;
|
||||
use crate::util::display;
|
||||
use crate::util::v8::get_v8_flags_from_env;
|
||||
|
@ -53,6 +52,7 @@ use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
|
|||
use deno_terminal::colors;
|
||||
use factory::CliFactory;
|
||||
use standalone::MODULE_NOT_FOUND;
|
||||
use standalone::UNSUPPORTED_SCHEME;
|
||||
use std::env;
|
||||
use std::future::Future;
|
||||
use std::ops::Deref;
|
||||
|
@ -110,9 +110,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
tools::bench::run_benchmarks(flags, bench_flags).await
|
||||
}
|
||||
}),
|
||||
DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async {
|
||||
tools::bundle::bundle(flags, bundle_flags).await
|
||||
}),
|
||||
DenoSubcommand::Bundle => exit_with_message("⚠️ `deno bundle` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations", 1),
|
||||
DenoSubcommand::Doc(doc_flags) => {
|
||||
spawn_subcommand(async { tools::doc::doc(flags, doc_flags).await })
|
||||
}
|
||||
|
@ -196,7 +194,8 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
match result {
|
||||
Ok(v) => Ok(v),
|
||||
Err(script_err) => {
|
||||
if script_err.to_string().starts_with(MODULE_NOT_FOUND) {
|
||||
let script_err_msg = script_err.to_string();
|
||||
if script_err_msg.starts_with(MODULE_NOT_FOUND) || script_err_msg.starts_with(UNSUPPORTED_SCHEME) {
|
||||
if run_flags.bare {
|
||||
let mut cmd = args::clap_root();
|
||||
cmd.build();
|
||||
|
@ -218,9 +217,10 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
let task_flags = TaskFlags {
|
||||
cwd: None,
|
||||
task: Some(run_flags.script.clone()),
|
||||
is_run: true,
|
||||
};
|
||||
new_flags.subcommand = DenoSubcommand::Task(task_flags.clone());
|
||||
let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone(), true).await;
|
||||
let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone()).await;
|
||||
match result {
|
||||
Ok(v) => Ok(v),
|
||||
Err(_) => {
|
||||
|
@ -240,7 +240,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
tools::serve::serve(flags, serve_flags).await
|
||||
}),
|
||||
DenoSubcommand::Task(task_flags) => spawn_subcommand(async {
|
||||
tools::task::execute_script(flags, task_flags, false).await
|
||||
tools::task::execute_script(flags, task_flags).await
|
||||
}),
|
||||
DenoSubcommand::Test(test_flags) => {
|
||||
spawn_subcommand(async {
|
||||
|
@ -283,14 +283,26 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
"This deno was built without the \"upgrade\" feature. Please upgrade using the installation method originally used to install Deno.",
|
||||
1,
|
||||
),
|
||||
DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async {
|
||||
tools::vendor::vendor(flags, vendor_flags).await
|
||||
}),
|
||||
DenoSubcommand::Vendor => exit_with_message("⚠️ `deno vendor` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations", 1),
|
||||
DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
|
||||
tools::registry::publish(flags, publish_flags).await
|
||||
}),
|
||||
DenoSubcommand::Help(help_flags) => spawn_subcommand(async move {
|
||||
display::write_to_stdout_ignore_sigpipe(help_flags.help.ansi().to_string().as_bytes())
|
||||
use std::io::Write;
|
||||
|
||||
let mut stream = anstream::AutoStream::new(std::io::stdout(), if colors::use_color() {
|
||||
anstream::ColorChoice::Auto
|
||||
} else {
|
||||
anstream::ColorChoice::Never
|
||||
});
|
||||
|
||||
match stream.write_all(help_flags.help.ansi().to_string().as_bytes()) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(e) => match e.kind() {
|
||||
std::io::ErrorKind::BrokenPipe => Ok(()),
|
||||
_ => Err(e),
|
||||
},
|
||||
}
|
||||
}),
|
||||
};
|
||||
|
||||
|
@ -437,30 +449,19 @@ fn resolve_flags_and_init(
|
|||
// https://github.com/microsoft/vscode/blob/48d4ba271686e8072fc6674137415bc80d936bc7/extensions/typescript-language-features/src/configuration/configuration.ts#L213-L214
|
||||
DenoSubcommand::Lsp => vec!["--max-old-space-size=3072".to_string()],
|
||||
_ => {
|
||||
if *DENO_FUTURE {
|
||||
// TODO(bartlomieju): I think this can be removed as it's handled by `deno_core`
|
||||
// and its settings.
|
||||
// deno_ast removes TypeScript `assert` keywords, so this flag only affects JavaScript
|
||||
// TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast
|
||||
vec!["--no-harmony-import-assertions".to_string()]
|
||||
} else {
|
||||
vec![
|
||||
// TODO(bartlomieju): I think this can be removed as it's handled by `deno_core`
|
||||
// and its settings.
|
||||
// If we're still in v1.X version we want to support import assertions.
|
||||
// V8 12.6 unshipped the support by default, so force it by passing a
|
||||
// flag.
|
||||
"--harmony-import-assertions".to_string(),
|
||||
// Verify with DENO_FUTURE for now.
|
||||
"--no-maglev".to_string(),
|
||||
]
|
||||
}
|
||||
// TODO(bartlomieju): I think this can be removed as it's handled by `deno_core`
|
||||
// and its settings.
|
||||
// deno_ast removes TypeScript `assert` keywords, so this flag only affects JavaScript
|
||||
// TODO(petamoriken): Need to check TypeScript `assert` keywords in deno_ast
|
||||
vec!["--no-harmony-import-assertions".to_string()]
|
||||
}
|
||||
};
|
||||
|
||||
init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env());
|
||||
// TODO(bartlomieju): remove last argument in Deno 2.
|
||||
deno_core::JsRuntime::init_platform(None, !*DENO_FUTURE);
|
||||
deno_core::JsRuntime::init_platform(
|
||||
None, /* import assertions enabled */ false,
|
||||
);
|
||||
util::logger::init(flags.log_level);
|
||||
|
||||
Ok(flags)
|
||||
|
|
|
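
The help-output change above routes clap's pre-rendered ANSI help through anstream, so color is stripped for non-terminals and a broken pipe exits cleanly. A hedged sketch of the same pattern as a standalone helper (the use_color flag is a stand-in for colors::use_color()):

use std::io::Write;

use anstream::AutoStream;
use anstream::ColorChoice;

// Write possibly-ANSI-styled text to stdout; `deno --help | head` style
// broken pipes are treated as success instead of surfacing an error.
fn write_help(text: &str, use_color: bool) -> std::io::Result<()> {
  let choice = if use_color { ColorChoice::Auto } else { ColorChoice::Never };
  let mut stream = AutoStream::new(std::io::stdout(), choice);
  match stream.write_all(text.as_bytes()) {
    Ok(()) => Ok(()),
    Err(err) if err.kind() == std::io::ErrorKind::BrokenPipe => Ok(()),
    Err(err) => Err(err),
  }
}

fn main() -> std::io::Result<()> {
  write_help("\x1b[1mUsage:\x1b[0m deno [OPTIONS] [COMMAND]\n", true)
}
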
@ -18,7 +18,6 @@ use crate::cache::CodeCache;
|
|||
use crate::cache::FastInsecureHasher;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::emit::Emitter;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::graph_container::MainModuleGraphContainer;
|
||||
use crate::graph_container::ModuleGraphContainer;
|
||||
use crate::graph_container::ModuleGraphUpdatePermit;
|
||||
|
@ -70,54 +69,6 @@ use deno_runtime::deno_permissions::PermissionsContainer;
|
|||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use node_resolver::NodeResolutionMode;
|
||||
|
||||
pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
|
||||
let npm_resolver = factory.npm_resolver().await?;
|
||||
let cli_options = factory.cli_options()?;
|
||||
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
||||
if !npm_resolver.ensure_top_level_package_json_install().await? {
|
||||
if let Some(lockfile) = cli_options.maybe_lockfile() {
|
||||
lockfile.error_if_changed()?;
|
||||
}
|
||||
|
||||
npm_resolver.cache_packages().await?;
|
||||
}
|
||||
}
|
||||
// cache as many entries in the import map as we can
|
||||
let resolver = factory.workspace_resolver().await?;
|
||||
if let Some(import_map) = resolver.maybe_import_map() {
|
||||
let roots = import_map
|
||||
.imports()
|
||||
.entries()
|
||||
.filter_map(|entry| {
|
||||
if entry.key.ends_with('/') {
|
||||
None
|
||||
} else {
|
||||
entry.value.cloned()
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let mut graph_permit = factory
|
||||
.main_module_graph_container()
|
||||
.await?
|
||||
.acquire_update_permit()
|
||||
.await;
|
||||
let graph = graph_permit.graph_mut();
|
||||
factory
|
||||
.module_load_preparer()
|
||||
.await?
|
||||
.prepare_module_load(
|
||||
graph,
|
||||
&roots,
|
||||
false,
|
||||
factory.cli_options()?.ts_type_lib_window(),
|
||||
deno_runtime::deno_permissions::PermissionsContainer::allow_all(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub struct ModuleLoadPreparer {
|
||||
options: Arc<CliOptions>,
|
||||
lockfile: Option<Arc<CliLockfile>>,
|
||||
|
|
|
@ -3307,19 +3307,30 @@ fn napi_resolve_deferred(
|
|||
check_arg!(env, result);
|
||||
check_arg!(env, deferred);
|
||||
|
||||
// Make sure microtasks don't run and call back into JS
|
||||
env
|
||||
.scope()
|
||||
.set_microtasks_policy(v8::MicrotasksPolicy::Explicit);
|
||||
|
||||
let deferred_ptr =
|
||||
unsafe { NonNull::new_unchecked(deferred as *mut v8::PromiseResolver) };
|
||||
let global = unsafe { v8::Global::from_raw(env.isolate(), deferred_ptr) };
|
||||
let resolver = v8::Local::new(&mut env.scope(), global);
|
||||
|
||||
if !resolver
|
||||
let success = resolver
|
||||
.resolve(&mut env.scope(), result.unwrap())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return napi_generic_failure;
|
||||
}
|
||||
.unwrap_or(false);
|
||||
|
||||
napi_ok
|
||||
// Restore policy
|
||||
env
|
||||
.scope()
|
||||
.set_microtasks_policy(v8::MicrotasksPolicy::Auto);
|
||||
|
||||
if success {
|
||||
napi_ok
|
||||
} else {
|
||||
napi_generic_failure
|
||||
}
|
||||
}
|
||||
|
||||
#[napi_sym]
|
||||
|
|
|
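
The napi_resolve_deferred change above brackets the promise resolution with an explicit microtasks policy so resolving cannot synchronously re-enter JavaScript, then restores the automatic policy regardless of whether the resolve succeeded. A minimal sketch of that guard, assuming deno_core's re-exported v8 bindings and a live isolate supplied by the caller:

use deno_core::v8;

// Run `f` while microtasks are suspended, then restore the default policy.
// This mirrors the Explicit -> work -> Auto sequence in the hunk above.
fn with_suspended_microtasks<T>(
  isolate: &mut v8::Isolate,
  f: impl FnOnce(&mut v8::Isolate) -> T,
) -> T {
  isolate.set_microtasks_policy(v8::MicrotasksPolicy::Explicit);
  let result = f(isolate);
  isolate.set_microtasks_policy(v8::MicrotasksPolicy::Auto);
  result
}
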
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "napi_sym"
|
||||
version = "0.94.0"
|
||||
version = "0.97.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -307,14 +307,9 @@ impl CliNpmResolver for ByonmCliNpmResolver {
|
|||
concat!(
|
||||
"Could not find \"{}\" in a node_modules folder. ",
|
||||
"Deno expects the node_modules/ directory to be up to date. ",
|
||||
"Did you forget to run `{}`?"
|
||||
"Did you forget to run `deno install`?"
|
||||
),
|
||||
alias,
|
||||
if *crate::args::DENO_FUTURE {
|
||||
"deno install"
|
||||
} else {
|
||||
"npm install"
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -406,8 +406,7 @@ impl ManagedCliNpmResolver {
|
|||
}
|
||||
}
|
||||
if result.dependencies_result.is_ok() {
|
||||
result.dependencies_result =
|
||||
self.cache_packages().await.map_err(AnyError::from);
|
||||
result.dependencies_result = self.cache_packages().await;
|
||||
}
|
||||
|
||||
result
|
||||
|
|
|
@ -22,6 +22,7 @@ use deno_npm::NpmPackageCacheFolderId;
|
|||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_semver::jsr::JsrDepPackageReq;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::VersionReq;
|
||||
|
@ -329,16 +330,10 @@ fn populate_lockfile_from_snapshot(
|
|||
) {
|
||||
let mut lockfile = lockfile.lock();
|
||||
for (package_req, nv) in snapshot.package_reqs() {
|
||||
let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
|
||||
lockfile.insert_package_specifier(
|
||||
format!("npm:{}", package_req),
|
||||
format!(
|
||||
"npm:{}",
|
||||
snapshot
|
||||
.resolve_package_from_deno_module(nv)
|
||||
.unwrap()
|
||||
.id
|
||||
.as_serialized()
|
||||
),
|
||||
JsrDepPackageReq::npm(package_req.clone()),
|
||||
format!("{}{}", id.nv.version, id.peer_deps_serialized()),
|
||||
);
|
||||
}
|
||||
for package in snapshot.all_packages_for_every_system() {
|
||||
|
|
|
@ -831,22 +831,14 @@ async fn sync_resolution_with_fs(
|
|||
}
|
||||
|
||||
if !packages_with_scripts_not_run.is_empty() {
|
||||
let (maybe_install, maybe_install_example) = if *crate::args::DENO_FUTURE {
|
||||
(
|
||||
" or `deno install`",
|
||||
" or `deno install --allow-scripts=pkg1,pkg2`",
|
||||
)
|
||||
} else {
|
||||
("", "")
|
||||
};
|
||||
let packages = packages_with_scripts_not_run
|
||||
.iter()
|
||||
.map(|(_, p)| format!("npm:{p}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
log::warn!("{}: Packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed.
|
||||
This may cause the packages to not work correctly. To run them, use the `--allow-scripts` flag with `deno cache`{maybe_install}
|
||||
(e.g. `deno cache --allow-scripts=pkg1,pkg2 <entrypoint>`{maybe_install_example}):\n {packages}", crate::colors::yellow("warning"));
|
||||
log::warn!("{} Packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed.
|
||||
This may cause the packages to not work correctly. To run them, use the `--allow-scripts` flag with `deno cache` or `deno install`
|
||||
(e.g. `deno cache --allow-scripts=pkg1,pkg2 <entrypoint>` or `deno install --allow-scripts=pkg1,pkg2`):\n {packages}", crate::colors::yellow("Warning"));
|
||||
for (scripts_warned_path, _) in packages_with_scripts_not_run {
|
||||
let _ignore_err = fs::write(scripts_warned_path, "");
|
||||
}
|
||||
|
@ -1048,42 +1040,50 @@ fn symlink_package_dir(
|
|||
// need to delete the previous symlink before creating a new one
|
||||
let _ignore = fs::remove_dir_all(new_path);
|
||||
|
||||
let old_path_relative =
|
||||
crate::util::path::relative_path(new_parent, old_path)
|
||||
.unwrap_or_else(|| old_path.to_path_buf());
|
||||
|
||||
#[cfg(windows)]
|
||||
return junction_or_symlink_dir(old_path, new_path);
|
||||
{
|
||||
junction_or_symlink_dir(&old_path_relative, old_path, new_path)
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
symlink_dir(old_path, new_path)
|
||||
{
|
||||
symlink_dir(&old_path_relative, new_path).map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn junction_or_symlink_dir(
|
||||
old_path_relative: &Path,
|
||||
old_path: &Path,
|
||||
new_path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
use deno_core::anyhow::bail;
|
||||
// Use junctions because they're supported on ntfs file systems without
|
||||
// needing to elevate privileges on Windows
|
||||
static USE_JUNCTIONS: std::sync::atomic::AtomicBool =
|
||||
std::sync::atomic::AtomicBool::new(false);
|
||||
|
||||
match junction::create(old_path, new_path) {
|
||||
if USE_JUNCTIONS.load(std::sync::atomic::Ordering::Relaxed) {
|
||||
// Use junctions because they're supported on ntfs file systems without
|
||||
// needing to elevate privileges on Windows.
|
||||
// Note: junctions don't support relative paths, so we need to use the
|
||||
// absolute path here.
|
||||
return junction::create(old_path, new_path)
|
||||
.context("Failed creating junction in node_modules folder");
|
||||
}
|
||||
|
||||
match symlink_dir(old_path_relative, new_path) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(junction_err) => {
|
||||
if cfg!(debug_assertions) {
|
||||
// When running the tests, junctions should be created, but if not then
|
||||
// surface this error.
|
||||
log::warn!("Error creating junction. {:#}", junction_err);
|
||||
}
|
||||
|
||||
match symlink_dir(old_path, new_path) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(symlink_err) => bail!(
|
||||
concat!(
|
||||
"Failed creating junction and fallback symlink in node_modules folder.\n\n",
|
||||
"{:#}\n\n{:#}",
|
||||
),
|
||||
junction_err,
|
||||
symlink_err,
|
||||
),
|
||||
}
|
||||
Err(symlink_err)
|
||||
if symlink_err.kind() == std::io::ErrorKind::PermissionDenied =>
|
||||
{
|
||||
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
|
||||
junction::create(old_path, new_path).map_err(Into::into)
|
||||
}
|
||||
Err(symlink_err) => Err(
|
||||
AnyError::from(symlink_err)
|
||||
.context("Failed creating symlink in node_modules folder"),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
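
symlink_package_dir above now prefers a relative directory symlink and remembers, via a process-wide AtomicBool, to fall back to junctions once a PermissionDenied error is seen, since junctions need no elevated privileges but only accept absolute targets. A Windows-only sketch of that fallback; the junction crate dependency and the caller-supplied paths are assumptions, and the error-context wrapping from the real code is omitted:

#[cfg(windows)]
fn link_package_dir(
  old_path: &std::path::Path,          // absolute target, required by junctions
  old_path_relative: &std::path::Path, // relative target, preferred for symlinks
  new_path: &std::path::Path,
) -> std::io::Result<()> {
  use std::sync::atomic::{AtomicBool, Ordering};
  static USE_JUNCTIONS: AtomicBool = AtomicBool::new(false);

  if USE_JUNCTIONS.load(Ordering::Relaxed) {
    return junction::create(old_path, new_path);
  }
  match std::os::windows::fs::symlink_dir(old_path_relative, new_path) {
    Ok(()) => Ok(()),
    Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => {
      // Unprivileged symlink creation failed; stick to junctions from now on.
      USE_JUNCTIONS.store(true, Ordering::Relaxed);
      junction::create(old_path, new_path)
    }
    Err(err) => Err(err),
  }
}
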
@ -146,7 +146,7 @@ impl CliNodeResolver {
|
|||
concat!(
|
||||
"Could not resolve \"{}\", but found it in a package.json. ",
|
||||
"Deno expects the node_modules/ directory to be up to date. ",
|
||||
"Did you forget to run `npm install`?"
|
||||
"Did you forget to run `deno install`?"
|
||||
),
|
||||
specifier
|
||||
));
|
||||
|
@ -225,13 +225,8 @@ impl CliNodeResolver {
|
|||
let package_json_path = package_folder.join("package.json");
|
||||
if !self.fs.exists_sync(&package_json_path) {
|
||||
return Err(anyhow!(
|
||||
"Could not find '{}'. Deno expects the node_modules/ directory to be up to date. Did you forget to run `{}`?",
|
||||
"Could not find '{}'. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?",
|
||||
package_json_path.display(),
|
||||
if *crate::args::DENO_FUTURE {
|
||||
"deno install"
|
||||
} else {
|
||||
"npm install"
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
|
@ -754,7 +749,7 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
|
|||
let line = start.line + 1;
|
||||
let column = start.character + 1;
|
||||
if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
|
||||
log::warn!("Warning: Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{line}:{column}. If you want to use a built-in Node module, add a \"node:\" prefix.")
|
||||
log::warn!("{} Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{line}:{column}. If you want to use a built-in Node module, add a \"node:\" prefix.", colors::yellow("Warning"))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1246,15 +1241,15 @@ mod test {
|
|||
for (ext_from, ext_to) in [("js", "ts"), ("js", "tsx"), ("mjs", "mts")] {
|
||||
let ts_file = temp_dir.join(format!("file.{}", ext_to));
|
||||
ts_file.write("");
|
||||
assert_eq!(resolve(&ts_file.uri_file()), None);
|
||||
assert_eq!(resolve(&ts_file.url_file()), None);
|
||||
assert_eq!(
|
||||
resolve(
|
||||
&temp_dir
|
||||
.uri_dir()
|
||||
.url_dir()
|
||||
.join(&format!("file.{}", ext_from))
|
||||
.unwrap()
|
||||
),
|
||||
Some(SloppyImportsResolution::JsToTs(ts_file.uri_file())),
|
||||
Some(SloppyImportsResolution::JsToTs(ts_file.url_file())),
|
||||
);
|
||||
ts_file.remove_file();
|
||||
}
|
||||
|
@ -1266,11 +1261,11 @@ mod test {
|
|||
assert_eq!(
|
||||
resolve(
|
||||
&temp_dir
|
||||
.uri_dir()
|
||||
.url_dir()
|
||||
.join("file") // no ext
|
||||
.unwrap()
|
||||
),
|
||||
Some(SloppyImportsResolution::NoExtension(file.uri_file()))
|
||||
Some(SloppyImportsResolution::NoExtension(file.url_file()))
|
||||
);
|
||||
file.remove_file();
|
||||
}
|
||||
|
@ -1281,15 +1276,15 @@ mod test {
|
|||
ts_file.write("");
|
||||
let js_file = temp_dir.join("file.js");
|
||||
js_file.write("");
|
||||
assert_eq!(resolve(&js_file.uri_file()), None);
|
||||
assert_eq!(resolve(&js_file.url_file()), None);
|
||||
}
|
||||
|
||||
// only js exists, .js specified
|
||||
{
|
||||
let js_only_file = temp_dir.join("js_only.js");
|
||||
js_only_file.write("");
|
||||
assert_eq!(resolve(&js_only_file.uri_file()), None);
|
||||
assert_eq!(resolve_types(&js_only_file.uri_file()), None);
|
||||
assert_eq!(resolve(&js_only_file.url_file()), None);
|
||||
assert_eq!(resolve_types(&js_only_file.url_file()), None);
|
||||
}
|
||||
|
||||
// resolving a directory to an index file
|
||||
|
@ -1299,8 +1294,8 @@ mod test {
|
|||
let index_file = routes_dir.join("index.ts");
|
||||
index_file.write("");
|
||||
assert_eq!(
|
||||
resolve(&routes_dir.uri_file()),
|
||||
Some(SloppyImportsResolution::Directory(index_file.uri_file())),
|
||||
resolve(&routes_dir.url_file()),
|
||||
Some(SloppyImportsResolution::Directory(index_file.url_file())),
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1313,8 +1308,8 @@ mod test {
|
|||
let api_file = temp_dir.join("api.ts");
|
||||
api_file.write("");
|
||||
assert_eq!(
|
||||
resolve(&api_dir.uri_file()),
|
||||
Some(SloppyImportsResolution::NoExtension(api_file.uri_file())),
|
||||
resolve(&api_dir.url_file()),
|
||||
Some(SloppyImportsResolution::NoExtension(api_file.url_file())),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
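
The sloppy-imports tests above exercise resolutions such as JsToTs (a "./mod.js" specifier served by mod.ts) and NoExtension. A simplified, filesystem-probing illustration of the JsToTs case; this is not the real SloppyImportsResolver, only the probing idea the tests check:

use std::path::{Path, PathBuf};

// If the ".js"-style path does not exist, look for a TypeScript sibling.
fn probe_js_to_ts(path: &Path) -> Option<PathBuf> {
  if path.exists() {
    return None; // the specifier already resolves; leave it untouched
  }
  let candidates: &[&str] = match path.extension()?.to_str()? {
    "js" => &["ts", "tsx"],
    "mjs" => &["mts"],
    "cjs" => &["cts"],
    _ => return None,
  };
  candidates
    .iter()
    .map(|ext| path.with_extension(ext))
    .find(|candidate| candidate.is_file())
}

fn main() {
  // Hypothetical layout: only src/mod.ts exists, but code imports "./mod.js".
  println!("{:?}", probe_js_to_ts(Path::new("src/mod.js")));
}
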
@ -41,10 +41,10 @@
|
|||
"markdownDescription": "Differentiate between undefined and not present when type checking\n\nSee more: https://www.typescriptlang.org/tsconfig#exactOptionalPropertyTypes"
|
||||
},
|
||||
"experimentalDecorators": {
|
||||
"description": "Enable experimental support for TC39 stage 2 draft decorators.",
|
||||
"description": "Enable experimental support for legacy experimental decorators.",
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
"markdownDescription": "Enable experimental support for TC39 stage 2 draft decorators.\n\nSee more: https://www.typescriptlang.org/tsconfig#experimentalDecorators"
|
||||
"default": false,
|
||||
"markdownDescription": "Enable experimental support for legacy experimental decorators.\n\nSee more: https://www.typescriptlang.org/tsconfig#experimentalDecorators"
|
||||
},
|
||||
"jsx": {
|
||||
"description": "Specify what JSX code is generated.",
|
||||
|
@ -119,10 +119,10 @@
|
|||
"markdownDescription": "Enable error reporting for fallthrough cases in switch statements.\n\nSee more: https://www.typescriptlang.org/tsconfig#noFallthroughCasesInSwitch"
|
||||
},
|
||||
"noImplicitAny": {
|
||||
"description": "Enable error reporting for expressions and declarations with an implied `any` type..",
|
||||
"description": "Enable error reporting for expressions and declarations with an implied `any` type.",
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
"markdownDescription": "Enable error reporting for expressions and declarations with an implied `any` type..\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitAny"
|
||||
"markdownDescription": "Enable error reporting for expressions and declarations with an implied `any` type.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitAny"
|
||||
},
|
||||
"noImplicitOverride": {
|
||||
"description": "Ensure overriding members in derived classes are marked with an override modifier.",
|
||||
|
@ -148,6 +148,12 @@
|
|||
"default": true,
|
||||
"markdownDescription": "Disable adding 'use strict' directives in emitted JavaScript files.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitUseStrict"
|
||||
},
|
||||
"noPropertyAccessFromIndexSignature": {
|
||||
"description": "Enforces using indexed accessors for keys declared using an indexed type.",
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
"markdownDescription": "Enforces using indexed accessors for keys declared using an indexed type.\n\nSee more: https://www.typescriptlang.org/tsconfig#noPropertyAccessFromIndexSignature"
|
||||
},
|
||||
"noStrictGenericChecks": {
|
||||
"description": "Disable strict checking of generic signatures in function types.",
|
||||
"type": "boolean",
|
||||
|
@ -213,6 +219,12 @@
|
|||
"type": "boolean",
|
||||
"default": false,
|
||||
"markdownDescription": "Suppress `noImplicitAny` errors when indexing objects that lack index signatures.\n\nSee more: https://www.typescriptlang.org/tsconfig#suppressImplicitAnyIndexErrors"
|
||||
},
|
||||
"useUnknownInCatchVariables": {
|
||||
"description": "Default catch clause variables as `unknown` instead of `any`.",
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
"markdownDescription": "Default catch clause variables as `unknown` instead of `any`.\n\nSee more: https://www.typescriptlang.org/tsconfig#useUnknownInCatchVariables"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -117,7 +117,6 @@ pub struct Metadata {
|
|||
pub workspace_resolver: SerializedWorkspaceResolver,
|
||||
pub entrypoint_key: String,
|
||||
pub node_modules: Option<NodeModules>,
|
||||
pub disable_deprecated_api_warning: bool,
|
||||
pub unstable_config: UnstableConfig,
|
||||
}
|
||||
|
||||
|
@ -624,12 +623,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
pkg_json_resolution: self.workspace_resolver.pkg_json_dep_resolution(),
|
||||
},
|
||||
node_modules,
|
||||
disable_deprecated_api_warning: cli_options
|
||||
.disable_deprecated_api_warning,
|
||||
unstable_config: UnstableConfig {
|
||||
legacy_flag_enabled: cli_options.legacy_unstable_flag(),
|
||||
bare_node_builtins: cli_options.unstable_bare_node_builtins(),
|
||||
byonm: cli_options.use_byonm(),
|
||||
sloppy_imports: cli_options.unstable_sloppy_imports(),
|
||||
features: cli_options.unstable_features(),
|
||||
},
|
||||
|
|
|
@ -133,6 +133,7 @@ struct EmbeddedModuleLoader {
|
|||
}
|
||||
|
||||
pub const MODULE_NOT_FOUND: &str = "Module not found";
|
||||
pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
|
||||
|
||||
impl ModuleLoader for EmbeddedModuleLoader {
|
||||
fn resolve(
|
||||
|
@ -705,6 +706,8 @@ pub async fn run(
|
|||
None,
|
||||
None,
|
||||
feature_checker,
|
||||
// Code cache is not supported for standalone binary yet.
|
||||
None,
|
||||
CliMainWorkerOptions {
|
||||
argv: metadata.argv,
|
||||
log_level: WorkerLogLevel::Info,
|
||||
|
@ -731,17 +734,10 @@ pub async fn run(
|
|||
unstable: metadata.unstable_config.legacy_flag_enabled,
|
||||
create_hmr_runner: None,
|
||||
create_coverage_collector: None,
|
||||
node_ipc: None,
|
||||
serve_port: None,
|
||||
serve_host: None,
|
||||
},
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
false,
|
||||
// TODO(bartlomieju): temporarily disabled
|
||||
// metadata.disable_deprecated_api_warning,
|
||||
true,
|
||||
false,
|
||||
// Code cache is not supported for standalone binary yet.
|
||||
None,
|
||||
);
|
||||
|
||||
// Initialize v8 once from the main thread.
|
||||
|
|
|
@ -7,10 +7,21 @@
|
|||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
use crate::colors;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Taken from https://stackoverflow.com/a/76572321
|
||||
fn precision_f64(x: f64, decimals: u32) -> f64 {
|
||||
if x == 0. || decimals == 0 {
|
||||
0.
|
||||
} else {
|
||||
let shift = decimals as i32 - x.abs().log10().ceil() as i32;
|
||||
let shift_factor = 10_f64.powi(shift);
|
||||
|
||||
(x * shift_factor).round() / shift_factor
|
||||
}
|
||||
}
|
||||
|
||||
fn avg_to_iter_per_s(time: f64) -> String {
|
||||
let iter_per_s = 1e9 / time;
|
||||
let iter_per_s = precision_f64(1e9 / time, 4);
|
||||
let (decimals, fractional) = into_decimal_and_fractional_parts(iter_per_s);
|
||||
human_readable_decimal_with_fractional(decimals, fractional)
|
||||
}
|
||||
|
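
avg_to_iter_per_s now rounds to four significant digits before formatting, using the precision_f64 helper added above. A self-contained check of what that rounding produces for the 55.85 ns case asserted in the updated tests:

// Same significant-digit rounding as the helper introduced in the hunk above.
fn precision_f64(x: f64, decimals: u32) -> f64 {
  if x == 0.0 || decimals == 0 {
    0.0
  } else {
    let shift = decimals as i32 - x.abs().log10().ceil() as i32;
    let shift_factor = 10_f64.powi(shift);
    (x * shift_factor).round() / shift_factor
  }
}

fn main() {
  // 55.85 ns per iteration is roughly 17,905,103 iterations per second;
  // four significant digits turn that into 17,910,000, which the reporter
  // then renders with thousands separators as "17,910,000".
  let iter_per_s = precision_f64(1e9 / 55.85, 4);
  assert!((iter_per_s - 17_910_000.0).abs() < 1.0);
}
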
@ -47,55 +58,34 @@ fn human_readable_decimal_with_fractional(
|
|||
.unwrap()
|
||||
.join(",");
|
||||
|
||||
format!("{}.{}", fmt_decimal, fractional)
|
||||
if fmt_decimal.len() >= 4 {
|
||||
fmt_decimal
|
||||
} else {
|
||||
format!("{}.{}", fmt_decimal, fractional)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmt_duration(time: f64) -> String {
|
||||
// SAFETY: this is safe since its just reformatting numbers
|
||||
unsafe {
|
||||
if time < 1e0 {
|
||||
return format!(
|
||||
"{} ps",
|
||||
f64::from_str(&format!("{:.2}", time * 1e3)).unwrap_unchecked()
|
||||
);
|
||||
}
|
||||
|
||||
if time < 1e3 {
|
||||
return format!(
|
||||
"{} ns",
|
||||
f64::from_str(&format!("{:.2}", time)).unwrap_unchecked()
|
||||
);
|
||||
}
|
||||
if time < 1e6 {
|
||||
return format!(
|
||||
"{} µs",
|
||||
f64::from_str(&format!("{:.2}", time / 1e3)).unwrap_unchecked()
|
||||
);
|
||||
}
|
||||
if time < 1e9 {
|
||||
return format!(
|
||||
"{} ms",
|
||||
f64::from_str(&format!("{:.2}", time / 1e6)).unwrap_unchecked()
|
||||
);
|
||||
}
|
||||
if time < 1e12 {
|
||||
return format!(
|
||||
"{} s",
|
||||
f64::from_str(&format!("{:.2}", time / 1e9)).unwrap_unchecked()
|
||||
);
|
||||
}
|
||||
if time < 36e11 {
|
||||
return format!(
|
||||
"{} m",
|
||||
f64::from_str(&format!("{:.2}", time / 60e9)).unwrap_unchecked()
|
||||
);
|
||||
}
|
||||
|
||||
format!(
|
||||
"{} h",
|
||||
f64::from_str(&format!("{:.2}", time / 36e11)).unwrap_unchecked()
|
||||
)
|
||||
if time < 1e0 {
|
||||
return format!("{:.1} ps", time * 1e3);
|
||||
}
|
||||
if time < 1e3 {
|
||||
return format!("{:.1} ns", time);
|
||||
}
|
||||
if time < 1e6 {
|
||||
return format!("{:.1} µs", time / 1e3);
|
||||
}
|
||||
if time < 1e9 {
|
||||
return format!("{:.1} ms", time / 1e6);
|
||||
}
|
||||
if time < 1e12 {
|
||||
return format!("{:.1} s", time / 1e9);
|
||||
}
|
||||
if time < 36e11 {
|
||||
return format!("{:.1} m", time / 60e9);
|
||||
}
|
||||
|
||||
format!("{:.1} h", time / 36e11)
|
||||
}
|
||||
|
||||
pub mod cpu {
|
||||
|
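
fmt_duration above is rewritten without the unsafe unwrap_unchecked string round-trips: it simply picks the largest readable unit and prints one decimal place. A standalone copy with a few exact-valued calls to show the thresholds:

// Format a nanosecond duration with one decimal and the largest fitting unit.
fn fmt_duration(time_ns: f64) -> String {
  if time_ns < 1e0 {
    format!("{:.1} ps", time_ns * 1e3)
  } else if time_ns < 1e3 {
    format!("{:.1} ns", time_ns)
  } else if time_ns < 1e6 {
    format!("{:.1} µs", time_ns / 1e3)
  } else if time_ns < 1e9 {
    format!("{:.1} ms", time_ns / 1e6)
  } else if time_ns < 1e12 {
    format!("{:.1} s", time_ns / 1e9)
  } else if time_ns < 36e11 {
    format!("{:.1} m", time_ns / 60e9)
  } else {
    format!("{:.1} h", time_ns / 36e11)
  }
}

fn main() {
  assert_eq!(fmt_duration(25.0), "25.0 ns");
  assert_eq!(fmt_duration(1_500_000.0), "1.5 ms");
  assert_eq!(fmt_duration(2e12), "33.3 m");
}
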
@ -231,16 +221,19 @@ pub mod reporter {
|
|||
pub fn br(options: &Options) -> String {
|
||||
let mut s = String::new();
|
||||
|
||||
s.push_str(&"-".repeat(
|
||||
options.size
|
||||
+ 14 * options.avg as usize
|
||||
+ 14 * options.avg as usize
|
||||
+ 24 * options.min_max as usize,
|
||||
));
|
||||
s.push_str(&"-".repeat(options.size));
|
||||
|
||||
if options.avg {
|
||||
s.push(' ');
|
||||
s.push_str(&"-".repeat(15 + 1 + 13));
|
||||
}
|
||||
if options.min_max {
|
||||
s.push(' ');
|
||||
s.push_str(&"-".repeat(21));
|
||||
}
|
||||
if options.percentiles {
|
||||
s.push(' ');
|
||||
s.push_str(&"-".repeat(9 + 10 + 10));
|
||||
s.push_str(&"-".repeat(8 + 1 + 8 + 1 + 8));
|
||||
}
|
||||
|
||||
s
|
||||
|
@ -251,7 +244,7 @@ pub mod reporter {
|
|||
let mut s = String::new();
|
||||
|
||||
s.push_str(&format!("{:<size$}", n));
|
||||
s.push_str(&format!("{}: {}", colors::red("error"), e.message));
|
||||
s.push_str(&format!(" {}: {}", colors::red("error"), e.message));
|
||||
|
||||
if let Some(ref stack) = e.stack {
|
||||
s.push('\n');
|
||||
|
@ -268,14 +261,14 @@ pub mod reporter {
|
|||
|
||||
s.push_str(&format!("{:<size$}", "benchmark"));
|
||||
if options.avg {
|
||||
s.push_str(&format!("{:>14}", "time (avg)"));
|
||||
s.push_str(&format!("{:>14}", "iter/s"));
|
||||
s.push_str(&format!(" {:<15}", "time/iter (avg)"));
|
||||
s.push_str(&format!(" {:>13}", "iter/s"));
|
||||
}
|
||||
if options.min_max {
|
||||
s.push_str(&format!("{:>24}", "(min … max)"));
|
||||
s.push_str(&format!(" {:^21}", "(min … max)"));
|
||||
}
|
||||
if options.percentiles {
|
||||
s.push_str(&format!(" {:>9} {:>9} {:>9}", "p75", "p99", "p995"));
|
||||
s.push_str(&format!(" {:>8} {:>8} {:>8}", "p75", "p99", "p995"));
|
||||
}
|
||||
|
||||
s
|
||||
|
@ -293,28 +286,28 @@ pub mod reporter {
|
|||
|
||||
if options.avg {
|
||||
s.push_str(&format!(
|
||||
"{:>30}",
|
||||
format!("{}/iter", colors::yellow(fmt_duration(stats.avg)))
|
||||
" {}",
|
||||
colors::yellow(&format!("{:>15}", fmt_duration(stats.avg)))
|
||||
));
|
||||
s.push_str(&format!("{:>14}", avg_to_iter_per_s(stats.avg)));
|
||||
s.push_str(&format!(" {:>13}", &avg_to_iter_per_s(stats.avg)));
|
||||
}
|
||||
if options.min_max {
|
||||
s.push_str(&format!(
|
||||
"{:>50}",
|
||||
format!(
|
||||
"({} … {})",
|
||||
colors::cyan(fmt_duration(stats.min)),
|
||||
colors::magenta(fmt_duration(stats.max))
|
||||
)
|
||||
" ({} … {})",
|
||||
colors::cyan(format!("{:>8}", fmt_duration(stats.min))),
|
||||
colors::magenta(format!("{:>8}", fmt_duration(stats.max)))
|
||||
));
|
||||
}
|
||||
if options.percentiles {
|
||||
s.push_str(&format!(
|
||||
" {:>22} {:>22} {:>22}",
|
||||
colors::magenta(fmt_duration(stats.p75)),
|
||||
colors::magenta(fmt_duration(stats.p99)),
|
||||
colors::magenta(fmt_duration(stats.p995))
|
||||
));
|
||||
s.push_str(
|
||||
&colors::magenta(format!(
|
||||
" {:>8} {:>8} {:>8}",
|
||||
fmt_duration(stats.p75),
|
||||
fmt_duration(stats.p99),
|
||||
fmt_duration(stats.p995)
|
||||
))
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
|
||||
s
|
||||
|
@ -337,22 +330,25 @@ pub mod reporter {
|
|||
|
||||
for b in benchmarks.iter().filter(|b| *b != baseline) {
|
||||
let faster = b.stats.avg >= baseline.stats.avg;
|
||||
let diff = f64::from_str(&format!(
|
||||
"{:.2}",
|
||||
1.0 / baseline.stats.avg * b.stats.avg
|
||||
))
|
||||
.unwrap();
|
||||
let inv_diff = f64::from_str(&format!(
|
||||
"{:.2}",
|
||||
1.0 / b.stats.avg * baseline.stats.avg
|
||||
))
|
||||
.unwrap();
|
||||
s.push_str(&format!(
|
||||
"\n {}x {} than {}",
|
||||
let x_faster = precision_f64(
|
||||
if faster {
|
||||
colors::green(diff.to_string()).to_string()
|
||||
b.stats.avg / baseline.stats.avg
|
||||
} else {
|
||||
colors::red(inv_diff.to_string()).to_string()
|
||||
baseline.stats.avg / b.stats.avg
|
||||
},
|
||||
4,
|
||||
);
|
||||
let diff = if x_faster > 1000. {
|
||||
&format!("{:>9.0}", x_faster)
|
||||
} else {
|
||||
&format!("{:>9.2}", x_faster)
|
||||
};
|
||||
s.push_str(&format!(
|
||||
"\n{}x {} than {}",
|
||||
if faster {
|
||||
colors::green(diff)
|
||||
} else {
|
||||
colors::red(diff)
|
||||
},
|
||||
if faster { "faster" } else { "slower" },
|
||||
colors::cyan_bold(&b.name)
|
||||
|
@ -384,9 +380,10 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_avg_to_iter_per_s() {
|
||||
assert_eq!(avg_to_iter_per_s(55.85), "17,905,103.0");
|
||||
assert_eq!(avg_to_iter_per_s(55.85), "17,910,000");
|
||||
assert_eq!(avg_to_iter_per_s(64_870_000.0), "15.4");
|
||||
assert_eq!(avg_to_iter_per_s(104_370_000.0), "9.6");
|
||||
assert_eq!(avg_to_iter_per_s(640_000.0), "1,563");
|
||||
assert_eq!(avg_to_iter_per_s(6_400_000.0), "156.3");
|
||||
assert_eq!(avg_to_iter_per_s(46_890_000.0), "21.3");
|
||||
assert_eq!(avg_to_iter_per_s(100_000_000.0), "10.0");
|
||||
|
|
|
@ -152,11 +152,14 @@ impl BenchReporter for ConsoleReporter {
|
|||
.compare_exchange(true, false, Ordering::SeqCst, Ordering::SeqCst)
|
||||
.is_ok()
|
||||
{
|
||||
println!("{}", colors::gray(format!("cpu: {}", mitata::cpu::name())));
|
||||
println!(
|
||||
"{}",
|
||||
colors::gray(format!(" CPU | {}", mitata::cpu::name()))
|
||||
);
|
||||
println!(
|
||||
"{}\n",
|
||||
colors::gray(format!(
|
||||
"runtime: deno {} ({})",
|
||||
"Runtime | Deno {} ({})",
|
||||
crate::version::DENO_VERSION_INFO.deno,
|
||||
env!("TARGET")
|
||||
))
|
||||
|
@ -166,7 +169,7 @@ impl BenchReporter for ConsoleReporter {
|
|||
}
|
||||
|
||||
println!(
|
||||
"{}\n{}\n{}",
|
||||
"{}\n\n{}\n{}",
|
||||
colors::gray(&plan.origin),
|
||||
mitata::reporter::header(options),
|
||||
mitata::reporter::br(options)
|
||||
|
|
|
@ -1,164 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_graph::Module;
|
||||
use deno_terminal::colors;
|
||||
|
||||
use crate::args::BundleFlags;
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::Flags;
|
||||
use crate::args::TsConfigType;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::graph_util::error_for_any_npm_specifier;
|
||||
use crate::util;
|
||||
use crate::util::display;
|
||||
|
||||
pub async fn bundle(
|
||||
flags: Arc<Flags>,
|
||||
bundle_flags: BundleFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
log::info!(
|
||||
"{}",
|
||||
colors::yellow("⚠️ Warning: `deno bundle` is deprecated and will be removed in Deno 2.0.\nUse an alternative bundler like \"deno_emit\", \"esbuild\" or \"rollup\" instead."),
|
||||
);
|
||||
|
||||
if let Some(watch_flags) = &bundle_flags.watch {
|
||||
util::file_watcher::watch_func(
|
||||
flags,
|
||||
util::file_watcher::PrintConfig::new(
|
||||
"Bundle",
|
||||
!watch_flags.no_clear_screen,
|
||||
),
|
||||
move |flags, watcher_communicator, _changed_paths| {
|
||||
let bundle_flags = bundle_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactory::from_flags_for_watcher(
|
||||
flags,
|
||||
watcher_communicator.clone(),
|
||||
);
|
||||
let cli_options = factory.cli_options()?;
|
||||
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
|
||||
bundle_action(factory, &bundle_flags).await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
} else {
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
bundle_action(factory, &bundle_flags).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn bundle_action(
|
||||
factory: CliFactory,
|
||||
bundle_flags: &BundleFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let cli_options = factory.cli_options()?;
|
||||
let module_specifier = cli_options.resolve_main_module()?;
|
||||
log::debug!(">>>>> bundle START");
|
||||
let module_graph_creator = factory.module_graph_creator().await?;
|
||||
let cli_options = factory.cli_options()?;
|
||||
|
||||
let graph = module_graph_creator
|
||||
.create_graph_and_maybe_check(vec![module_specifier.clone()])
|
||||
.await?;
|
||||
|
||||
let mut paths_to_watch: Vec<PathBuf> = graph
|
||||
.specifiers()
|
||||
.filter_map(|(_, r)| {
|
||||
r.ok().and_then(|module| match module {
|
||||
Module::Js(m) => m.specifier.to_file_path().ok(),
|
||||
Module::Json(m) => m.specifier.to_file_path().ok(),
|
||||
// nothing to watch
|
||||
Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
if let Ok(Some(import_map_path)) = cli_options
|
||||
.resolve_specified_import_map_specifier()
|
||||
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
|
||||
{
|
||||
paths_to_watch.push(import_map_path);
|
||||
}
|
||||
|
||||
// at the moment, we don't support npm specifiers in deno bundle, so show an error
|
||||
error_for_any_npm_specifier(&graph)?;
|
||||
|
||||
let bundle_output = bundle_module_graph(graph.as_ref(), cli_options)?;
|
||||
log::debug!(">>>>> bundle END");
|
||||
let out_file = &bundle_flags.out_file;
|
||||
|
||||
if let Some(out_file) = out_file {
|
||||
let out_file = cli_options.initial_cwd().join(out_file);
|
||||
let output_bytes = bundle_output.code.as_bytes();
|
||||
let output_len = output_bytes.len();
|
||||
util::fs::write_file(&out_file, output_bytes, 0o644)?;
|
||||
log::info!(
|
||||
"{} {:?} ({})",
|
||||
colors::green("Emit"),
|
||||
out_file,
|
||||
colors::gray(display::human_size(output_len as f64))
|
||||
);
|
||||
if let Some(bundle_map) = bundle_output.maybe_map {
|
||||
let map_bytes = bundle_map.as_bytes();
|
||||
let map_len = map_bytes.len();
|
||||
let ext = if let Some(curr_ext) = out_file.extension() {
|
||||
format!("{}.map", curr_ext.to_string_lossy())
|
||||
} else {
|
||||
"map".to_string()
|
||||
};
|
||||
let map_out_file = out_file.with_extension(ext);
|
||||
util::fs::write_file(&map_out_file, map_bytes, 0o644)?;
|
||||
log::info!(
|
||||
"{} {:?} ({})",
|
||||
colors::green("Emit"),
|
||||
map_out_file,
|
||||
colors::gray(display::human_size(map_len as f64))
|
||||
);
|
||||
}
|
||||
} else {
|
||||
#[allow(clippy::print_stdout)]
|
||||
{
|
||||
println!("{}", bundle_output.code);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn bundle_module_graph(
|
||||
graph: &deno_graph::ModuleGraph,
|
||||
cli_options: &CliOptions,
|
||||
) -> Result<deno_emit::BundleEmit, AnyError> {
|
||||
log::info!("{} {}", colors::green("Bundle"), graph.roots[0]);
|
||||
|
||||
let ts_config_result =
|
||||
cli_options.resolve_ts_config_for_emit(TsConfigType::Bundle)?;
|
||||
if !cli_options.type_check_mode().is_true() {
|
||||
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
|
||||
log::warn!("{}", ignored_options);
|
||||
}
|
||||
}
|
||||
|
||||
let (transpile_options, emit_options) =
|
||||
crate::args::ts_config_to_transpile_and_emit_options(
|
||||
ts_config_result.ts_config,
|
||||
)?;
|
||||
deno_emit::bundle_graph(
|
||||
graph,
|
||||
deno_emit::BundleOptions {
|
||||
minify: false,
|
||||
bundle_type: deno_emit::BundleType::Module,
|
||||
emit_options,
|
||||
emit_ignore_directives: true,
|
||||
transpile_options,
|
||||
},
|
||||
)
|
||||
}
|
|
@ -427,7 +427,7 @@ fn get_tsc_roots(
|
|||
|
||||
// now walk the graph that only includes the fast check dependencies
|
||||
while let Some(specifier) = pending.pop_front() {
|
||||
let Some(module) = graph.get(&specifier) else {
|
||||
let Some(module) = graph.get(specifier) else {
|
||||
continue;
|
||||
};
|
||||
if let Some(entry) = maybe_get_check_entry(module, check_js) {
|
||||
|
|
|
@ -403,15 +403,6 @@ pub fn format_html(
|
|||
let mut typescript_config =
|
||||
get_resolved_typescript_config(fmt_options);
|
||||
typescript_config.line_width = hints.print_width as u32;
|
||||
if hints.attr {
|
||||
typescript_config.quote_style = if let Some(true) =
|
||||
fmt_options.single_quote
|
||||
{
|
||||
dprint_plugin_typescript::configuration::QuoteStyle::AlwaysDouble
|
||||
} else {
|
||||
dprint_plugin_typescript::configuration::QuoteStyle::AlwaysSingle
|
||||
};
|
||||
}
|
||||
dprint_plugin_typescript::format_text(
|
||||
&path,
|
||||
text.to_string(),
|
||||
|
|
|
@ -440,7 +440,7 @@ impl<'a> GraphDisplayContext<'a> {
|
|||
}
|
||||
|
||||
let root_specifier = self.graph.resolve(&self.graph.roots[0]);
|
||||
match self.graph.try_get(&root_specifier) {
|
||||
match self.graph.try_get(root_specifier) {
|
||||
Ok(Some(root)) => {
|
||||
let maybe_cache_info = match root {
|
||||
Module::Js(module) => module.maybe_cache_info.as_ref(),
|
||||
|
@ -694,9 +694,9 @@ impl<'a> GraphDisplayContext<'a> {
|
|||
Resolution::Ok(resolved) => {
|
||||
let specifier = &resolved.specifier;
|
||||
let resolved_specifier = self.graph.resolve(specifier);
|
||||
Some(match self.graph.try_get(&resolved_specifier) {
|
||||
Some(match self.graph.try_get(resolved_specifier) {
|
||||
Ok(Some(module)) => self.build_module_info(module, type_dep),
|
||||
Err(err) => self.build_error_info(err, &resolved_specifier),
|
||||
Err(err) => self.build_error_info(err, resolved_specifier),
|
||||
Ok(None) => TreeNode::from_text(format!(
|
||||
"{} {}",
|
||||
colors::red(specifier),
|
||||
|
|
|
@ -37,7 +37,7 @@ const routes: Route[] = [
|
|||
},
|
||||
{
|
||||
pattern: new URLPattern({ pathname: "/static/*" }),
|
||||
handler: (req) => serveDir(req, { urlRoot: "./" }),
|
||||
handler: (req) => serveDir(req),
|
||||
},
|
||||
];
|
||||
|
||||
|
@@ -52,7 +52,6 @@ export default {
    return handler(req);
  },
} satisfies Deno.ServeDefaultExport;

"#,
  )?;
  create_file(
@@ -80,13 +79,23 @@ Deno.test(async function serverFetchUsers() {
});

Deno.test(async function serverFetchStatic() {
- const req = new Request("https://deno.land/static/main.ts");
+ const req = new Request("https://deno.land/static/hello.js");
  const res = await server.fetch(req);
- assertEquals(res.headers.get("content-type"), "text/plain;charset=UTF-8");
+ assertEquals(await res.text(), 'console.log("Hello, world!");\n');
+ assertEquals(res.headers.get("content-type"), "text/javascript; charset=UTF-8");
});
"#,
  )?;

+ let static_dir = dir.join("static");
+ std::fs::create_dir_all(&static_dir)?;
+ create_file(
+   &static_dir,
+   "hello.js",
+   r#"console.log("Hello, world!");
+"#,
+ )?;

  create_json_file(
    &dir,
    "deno.json",
@@ -203,7 +212,7 @@ Deno.test(function addTest() {
    info!("  deno task dev");
    info!("");
    info!("  {}", colors::gray("# Run the tests"));
-   info!("  deno -R test");
+   info!("  deno test -R");
  } else if init_flags.lib {
    info!("  {}", colors::gray("# Run the tests"));
    info!("  deno test");
@@ -275,7 +275,7 @@ async fn install_local(
  }

  let factory = CliFactory::from_flags(flags);
- crate::module_loader::load_top_level_deps(&factory).await?;
+ crate::tools::registry::cache_top_level_deps(&factory, None).await?;

  if let Some(lockfile) = factory.cli_options()?.maybe_lockfile() {
    lockfile.write_if_changed()?;
@@ -464,10 +464,6 @@ async fn resolve_shim_data(
    executable_args.push("--no-npm".to_string());
  }

- if flags.lock_write {
-   executable_args.push("--lock-write".to_string());
- }
-
  if flags.cached_only {
    executable_args.push("--cached-only".to_string());
  }
@ -1,7 +1,6 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
pub mod bench;
|
||||
pub mod bundle;
|
||||
pub mod check;
|
||||
pub mod clean;
|
||||
pub mod compile;
|
||||
|
@ -20,4 +19,3 @@ pub mod serve;
|
|||
pub mod task;
|
||||
pub mod test;
|
||||
pub mod upgrade;
|
||||
pub mod vendor;
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
use crate::http_util;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_runtime::deno_fetch;
|
||||
use lsp_types::Url;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
use crate::http_util::HttpClient;
|
||||
|
|
|
@ -21,9 +21,9 @@ use deno_ast::SourceRanged;
|
|||
use deno_ast::SourceTextInfo;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::url::Url;
|
||||
use deno_graph::FastCheckDiagnostic;
|
||||
use deno_semver::Version;
|
||||
use lsp_types::Url;
|
||||
|
||||
use super::unfurl::SpecifierUnfurlerDiagnostic;
|
||||
|
||||
|
|
|
@ -8,13 +8,13 @@ use deno_ast::ParsedSource;
|
|||
use deno_ast::SourceRangedForSpanned;
|
||||
use deno_ast::SourceTextInfo;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::url::Url;
|
||||
use deno_graph::ModuleEntryRef;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::ResolutionResolved;
|
||||
use deno_graph::WalkOptions;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use lsp_types::Url;
|
||||
|
||||
use crate::cache::ParsedSourceCache;
|
||||
|
||||
|
|
|
@ -25,9 +25,9 @@ use deno_core::futures::StreamExt;
|
|||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::serde_json::Value;
|
||||
use deno_core::url::Url;
|
||||
use deno_terminal::colors;
|
||||
use http_body_util::BodyExt;
|
||||
use lsp_types::Url;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use sha2::Digest;
|
||||
|
@ -52,6 +52,7 @@ use crate::util::display::human_size;
|
|||
|
||||
mod api;
|
||||
mod auth;
|
||||
|
||||
mod diagnostics;
|
||||
mod graph;
|
||||
mod paths;
|
||||
|
@ -64,6 +65,7 @@ mod unfurl;
|
|||
use auth::get_auth_method;
|
||||
use auth::AuthMethod;
|
||||
pub use pm::add;
|
||||
pub use pm::cache_top_level_deps;
|
||||
pub use pm::remove;
|
||||
pub use pm::AddCommandName;
|
||||
use publish_order::PublishOrderGraph;
|
||||
|
@@ -1047,7 +1049,8 @@ async fn publish_package(
      sha256: faster_hex::hex_string(&sha2::Sha256::digest(&meta_bytes)),
    },
  };
- let bundle = provenance::generate_provenance(http_client, subject).await?;
+ let bundle =
+   provenance::generate_provenance(http_client, vec![subject]).await?;

  let tlog_entry = &bundle.verification_material.tlog_entries[0];
  log::info!("{}",
@@ -1,5 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

+mod cache_deps;
+
+pub use cache_deps::cache_top_level_deps;

use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
@ -136,10 +140,10 @@ impl DenoOrPackageJson {
|
|||
),
|
||||
factory,
|
||||
)),
|
||||
(None, Some(package_json)) if options.enable_future_features() => {
|
||||
(None, Some(package_json)) => {
|
||||
Ok((DenoOrPackageJson::Npm(package_json.clone(), None), factory))
|
||||
}
|
||||
(None, Some(_) | None) => {
|
||||
(None, None) => {
|
||||
std::fs::write(options.initial_cwd().join("deno.json"), "{}\n")
|
||||
.context("Failed to create deno.json file")?;
|
||||
drop(factory); // drop to prevent use
|
||||
|
@@ -236,13 +240,16 @@ pub async fn add(

  let package_futures = package_reqs
    .into_iter()
-   .map(move |package_req| {
-     find_package_and_select_version_for_req(
-       jsr_resolver.clone(),
-       npm_resolver.clone(),
-       package_req,
-     )
-     .boxed_local()
+   .map({
+     let jsr_resolver = jsr_resolver.clone();
+     move |package_req| {
+       find_package_and_select_version_for_req(
+         jsr_resolver.clone(),
+         npm_resolver.clone(),
+         package_req,
+       )
+       .boxed_local()
+     }
    })
    .collect::<Vec<_>>();

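The hunk above wraps the .map closure in a block expression so that a clone of jsr_resolver moves into the closure while the original Arc stays usable afterwards (it is later handed to the new caching step). A minimal sketch of that clone-before-move pattern, with illustrative names:

// Sketch of the clone-before-move pattern used above (illustrative names).
use std::sync::Arc;

fn label_all(resolver: Arc<str>, names: Vec<String>) -> (Vec<String>, Arc<str>) {
  let labelled: Vec<String> = names
    .into_iter()
    .map({
      // The block evaluates to the closure; only this clone is moved in.
      let resolver = resolver.clone();
      move |name| format!("{name} via {resolver}")
    })
    .collect();
  // The outer `resolver` handle is still owned here for later use.
  (labelled, resolver)
}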
@@ -350,7 +357,7 @@ pub async fn add(
  // make a new CliFactory to pick up the updated config file
  let cli_factory = CliFactory::from_flags(flags);
  // cache deps
- crate::module_loader::load_top_level_deps(&cli_factory).await?;
+ cache_deps::cache_top_level_deps(&cli_factory, Some(jsr_resolver)).await?;

  Ok(())
}
@@ -597,7 +604,7 @@ pub async fn remove(
    // Update deno.lock
    node_resolver::PackageJsonThreadLocalCache::clear();
    let cli_factory = CliFactory::from_flags(flags);
-   crate::module_loader::load_top_level_deps(&cli_factory).await?;
+   cache_deps::cache_top_level_deps(&cli_factory, None).await?;
  }

  Ok(())
115 cli/tools/registry/pm/cache_deps.rs (new file)
@ -0,0 +1,115 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::factory::CliFactory;
|
||||
use crate::graph_container::ModuleGraphContainer;
|
||||
use crate::graph_container::ModuleGraphUpdatePermit;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::stream::FuturesUnordered;
|
||||
use deno_core::futures::StreamExt;
|
||||
use deno_semver::package::PackageReq;
|
||||
|
||||
pub async fn cache_top_level_deps(
|
||||
factory: &CliFactory,
|
||||
jsr_resolver: Option<Arc<crate::jsr::JsrFetchResolver>>,
|
||||
) -> Result<(), AnyError> {
|
||||
let npm_resolver = factory.npm_resolver().await?;
|
||||
let cli_options = factory.cli_options()?;
|
||||
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
||||
if !npm_resolver.ensure_top_level_package_json_install().await? {
|
||||
if let Some(lockfile) = cli_options.maybe_lockfile() {
|
||||
lockfile.error_if_changed()?;
|
||||
}
|
||||
|
||||
npm_resolver.cache_packages().await?;
|
||||
}
|
||||
}
|
||||
// cache as many entries in the import map as we can
|
||||
let resolver = factory.workspace_resolver().await?;
|
||||
if let Some(import_map) = resolver.maybe_import_map() {
|
||||
let jsr_resolver = if let Some(resolver) = jsr_resolver {
|
||||
resolver
|
||||
} else {
|
||||
Arc::new(crate::jsr::JsrFetchResolver::new(
|
||||
factory.file_fetcher()?.clone(),
|
||||
))
|
||||
};
|
||||
|
||||
let mut roots = Vec::new();
|
||||
|
||||
let mut info_futures = FuturesUnordered::new();
|
||||
|
||||
let mut seen_reqs = std::collections::HashSet::new();
|
||||
|
||||
for entry in import_map.imports().entries() {
|
||||
let Some(specifier) = entry.value else {
|
||||
continue;
|
||||
};
|
||||
|
||||
match specifier.scheme() {
|
||||
"jsr" => {
|
||||
let specifier_str = specifier.as_str();
|
||||
let specifier_str =
|
||||
specifier_str.strip_prefix("jsr:").unwrap_or(specifier_str);
|
||||
if let Ok(req) = PackageReq::from_str(specifier_str) {
|
||||
if !seen_reqs.insert(req.clone()) {
|
||||
continue;
|
||||
}
|
||||
let jsr_resolver = jsr_resolver.clone();
|
||||
info_futures.push(async move {
|
||||
if let Some(nv) = jsr_resolver.req_to_nv(&req).await {
|
||||
if let Some(info) = jsr_resolver.package_version_info(&nv).await
|
||||
{
|
||||
return Some((specifier.clone(), info));
|
||||
}
|
||||
}
|
||||
None
|
||||
});
|
||||
}
|
||||
}
|
||||
"npm" => roots.push(specifier.clone()),
|
||||
_ => {
|
||||
if entry.key.ends_with('/') && specifier.as_str().ends_with('/') {
|
||||
continue;
|
||||
}
|
||||
roots.push(specifier.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(info_future) = info_futures.next().await {
|
||||
if let Some((specifier, info)) = info_future {
|
||||
if info.export(".").is_some() {
|
||||
roots.push(specifier.clone());
|
||||
continue;
|
||||
}
|
||||
let exports = info.exports();
|
||||
for (k, _) in exports {
|
||||
if let Ok(spec) = specifier.join(k) {
|
||||
roots.push(spec);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut graph_permit = factory
|
||||
.main_module_graph_container()
|
||||
.await?
|
||||
.acquire_update_permit()
|
||||
.await;
|
||||
let graph = graph_permit.graph_mut();
|
||||
factory
|
||||
.module_load_preparer()
|
||||
.await?
|
||||
.prepare_module_load(
|
||||
graph,
|
||||
&roots,
|
||||
false,
|
||||
deno_config::deno_json::TsTypeLib::DenoWorker,
|
||||
deno_runtime::deno_permissions::PermissionsContainer::allow_all(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
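The new cache_top_level_deps above walks the import map, dedupes jsr: requirements with a HashSet, and resolves their version info concurrently through a FuturesUnordered before preparing the module load. A minimal sketch of that dedupe-and-fan-out shape, with a stand-in async lookup in place of Deno's JsrFetchResolver:

// Sketch of the dedupe-then-fan-out pattern used in cache_deps.rs above.
// `lookup_version` stands in for the JSR resolver calls.
use std::collections::HashSet;
use futures::stream::{FuturesUnordered, StreamExt};

async fn lookup_version(req: String) -> Option<(String, String)> {
  Some((req, "1.0.0".to_string())) // placeholder result
}

async fn resolve_unique(reqs: Vec<String>) -> Vec<(String, String)> {
  let mut seen = HashSet::new();
  let mut futures = FuturesUnordered::new();
  for req in reqs {
    // Skip requirements that were already queued.
    if !seen.insert(req.clone()) {
      continue;
    }
    futures.push(lookup_version(req));
  }
  let mut resolved = Vec::new();
  while let Some(result) = futures.next().await {
    if let Some(pair) = result {
      resolved.push(pair);
    }
  }
  resolved
}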
@@ -229,16 +229,16 @@ impl Predicate {
struct ProvenanceAttestation {
  #[serde(rename = "type")]
  _type: &'static str,
- subject: Subject,
+ subject: Vec<Subject>,
  predicate_type: &'static str,
  predicate: Predicate,
}

impl ProvenanceAttestation {
- pub fn new_github_actions(subject: Subject) -> Self {
+ pub fn new_github_actions(subjects: Vec<Subject>) -> Self {
    Self {
      _type: INTOTO_STATEMENT_TYPE,
-     subject,
+     subject: subjects,
      predicate_type: SLSA_PREDICATE_TYPE,
      predicate: Predicate::new_github_actions(),
    }
@@ -296,7 +296,7 @@ pub struct ProvenanceBundle {

pub async fn generate_provenance(
  http_client: &HttpClient,
- subject: Subject,
+ subjects: Vec<Subject>,
) -> Result<ProvenanceBundle, AnyError> {
  if !is_gha() {
    bail!("Automatic provenance is only available in GitHub Actions");
@@ -308,7 +308,7 @@ pub async fn generate_provenance(
    );
  };

- let slsa = ProvenanceAttestation::new_github_actions(subject);
+ let slsa = ProvenanceAttestation::new_github_actions(subjects);

  let attestation = serde_json::to_string(&slsa)?;
  let bundle = attest(http_client, &attestation, INTOTO_PAYLOAD_TYPE).await?;
@@ -738,8 +738,13 @@ mod tests {
        sha256: "yourmom".to_string(),
      },
    };
-   let slsa = ProvenanceAttestation::new_github_actions(subject);
-   assert_eq!(slsa.subject.name, "jsr:@divy/sdl2@0.0.1");
-   assert_eq!(slsa.subject.digest.sha256, "yourmom");
+   let slsa = ProvenanceAttestation::new_github_actions(vec![subject]);
+   assert_eq!(
+     slsa.subject.len(),
+     1,
+     "Subject should be an array per the in-toto specification"
+   );
+   assert_eq!(slsa.subject[0].name, "jsr:@divy/sdl2@0.0.1");
+   assert_eq!(slsa.subject[0].digest.sha256, "yourmom");
  }
}
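The provenance hunks above switch the in-toto statement's subject from a single value to a Vec, since the specification models subject as an array even when only one package is attested. A small serde sketch of that shape; the struct layout follows the diff, while the URL constants here are illustrative placeholders rather than Deno's exact definitions:

// Illustrative sketch: an in-toto style statement whose `subject` is an array.
use serde::Serialize;

#[derive(Serialize)]
struct SubjectDigest {
  sha256: String,
}

#[derive(Serialize)]
struct Subject {
  name: String,
  digest: SubjectDigest,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Statement {
  #[serde(rename = "type")]
  _type: &'static str,
  subject: Vec<Subject>, // always an array, even for a single package
  predicate_type: &'static str,
}

fn statement_json(subjects: Vec<Subject>) -> Result<String, serde_json::Error> {
  serde_json::to_string(&Statement {
    _type: "https://in-toto.io/Statement/v1",
    subject: subjects,
    predicate_type: "https://slsa.dev/provenance/v1",
  })
}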
@ -203,7 +203,7 @@ impl SpecifierUnfurler {
|
|||
/// or `false` when the import was not analyzable.
|
||||
fn try_unfurl_dynamic_dep(
|
||||
&self,
|
||||
module_url: &lsp_types::Url,
|
||||
module_url: &ModuleSpecifier,
|
||||
text_info: &SourceTextInfo,
|
||||
dep: &deno_graph::DynamicDependencyDescriptor,
|
||||
text_changes: &mut Vec<deno_ast::TextChange>,
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_config::deno_json::NodeModulesDirMode;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_runtime::deno_permissions::Permissions;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
|
@ -194,7 +195,9 @@ pub async fn eval_command(
|
|||
pub async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> {
|
||||
// ensure an "npm install" is done if the user has explicitly
|
||||
// opted into using a managed node_modules directory
|
||||
if factory.cli_options()?.node_modules_dir_enablement() == Some(true) {
|
||||
if factory.cli_options()?.node_modules_dir()?
|
||||
== Some(NodeModulesDirMode::Auto)
|
||||
{
|
||||
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
|
||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||
}
|
||||
|
|
|
@@ -29,13 +29,24 @@ use std::sync::Arc;
pub async fn execute_script(
  flags: Arc<Flags>,
  task_flags: TaskFlags,
- using_run: bool,
) -> Result<i32, AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let start_dir = &cli_options.start_dir;
  if !start_dir.has_deno_or_pkg_json() {
-   bail!("deno task couldn't find deno.json(c). See https://docs.deno.com/go/config")
+   if task_flags.is_run {
+     bail!(
+       r#"deno run couldn't find deno.json(c).
+If you meant to run a script, specify it, e.g., `deno run ./script.ts`.
+To run a task, ensure the config file exists.
+Examples:
+- Script: `deno run ./script.ts`
+- Task: `deno run dev`
+See https://docs.deno.com/go/config"#
+     )
+   } else {
+     bail!("deno task couldn't find deno.json(c). See https://docs.deno.com/go/config")
+   }
  }
  let force_use_pkg_json =
    std::env::var_os(crate::task_runner::USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
@@ -142,7 +153,7 @@ pub async fn execute_script(
        }
      },
      None => {
-       if using_run {
+       if task_flags.is_run {
          return Err(anyhow!("Task not found: {}", task_name));
        }
        log::error!("Task not found: {}", task_name);
@ -320,13 +320,11 @@ pub const OP_DETAILS: phf::Map<&'static str, [&'static str; 2]> = phf_map! {
|
|||
"op_fs_events_poll" => ["get the next file system event", "breaking out of a for await loop looping over `Deno.FsEvents`"],
|
||||
"op_fs_fdatasync_async" => ["flush pending data operations for a file to disk", "awaiting the result of a `Deno.fdatasync` or `Deno.FsFile.syncData` call"],
|
||||
"op_fs_file_stat_async" => ["get file metadata", "awaiting the result of a `Deno.fstat` or `Deno.FsFile.stat` call"],
|
||||
"op_fs_flock_async_unstable" => ["lock a file", "awaiting the result of a `Deno.flock` call"],
|
||||
"op_fs_flock_async" => ["lock a file", "awaiting the result of a `Deno.FsFile.lock` call"],
|
||||
"op_fs_fsync_async" => ["flush pending data operations for a file to disk", "awaiting the result of a `Deno.fsync` or `Deno.FsFile.sync` call"],
|
||||
"op_fs_ftruncate_async" => ["truncate a file", "awaiting the result of a `Deno.ftruncate` or `Deno.FsFile.truncate` call"],
|
||||
"op_fs_funlock_async_unstable" => ["unlock a file", "awaiting the result of a `Deno.funlock` call"],
|
||||
"op_fs_funlock_async" => ["unlock a file", "awaiting the result of a `Deno.FsFile.unlock` call"],
|
||||
"op_fs_futime_async" => ["change file timestamps", "awaiting the result of a `Deno.futime` or `Deno.FsFile.utime` call"],
|
||||
"op_fs_link_async" => ["create a hard link", "awaiting the result of a `Deno.link` call"],
|
||||
"op_fs_lstat_async" => ["get file metadata", "awaiting the result of a `Deno.lstat` call"],
|
||||
"op_fs_make_temp_dir_async" => ["create a temporary directory", "awaiting the result of a `Deno.makeTempDir` call"],
|
||||
|
|
|
@ -1778,7 +1778,8 @@ pub async fn run_tests(
|
|||
)
|
||||
.await?;
|
||||
|
||||
if !workspace_test_options.allow_none && specifiers_with_mode.is_empty() {
|
||||
if !workspace_test_options.permit_no_files && specifiers_with_mode.is_empty()
|
||||
{
|
||||
return Err(generic_error("No test modules found"));
|
||||
}
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ use crate::util::progress_bar::ProgressBarStyle;
|
|||
use crate::version;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use color_print::cstr;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
|
@ -37,6 +38,8 @@ const RELEASE_URL: &str = "https://github.com/denoland/deno/releases";
|
|||
const CANARY_URL: &str = "https://dl.deno.land/canary";
|
||||
const RC_URL: &str = "https://dl.deno.land/release";
|
||||
|
||||
static EXAMPLE_USAGE: &str = cstr!("Example usage:\n <p(245)>deno upgrade | deno upgrade 1.46 | deno upgrade canary</>");
|
||||
|
||||
pub static ARCHIVE_NAME: Lazy<String> =
|
||||
Lazy::new(|| format!("deno-{}.zip", env!("TARGET")));
|
||||
|
||||
|
@ -226,15 +229,70 @@ impl<
|
|||
}
|
||||
}
|
||||
|
||||
fn get_minor_version(version: &str) -> &str {
|
||||
version.rsplitn(2, '.').collect::<Vec<&str>>()[1]
|
||||
fn get_minor_version_blog_post_url(semver: &Version) -> String {
|
||||
format!("https://deno.com/blog/v{}.{}", semver.major, semver.minor)
|
||||
}
|
||||
|
||||
fn print_release_notes(current_version: &str, new_version: &str) {
|
||||
// TODO(bartlomieju): we might want to reconsider this one for RC releases.
|
||||
// TODO(bartlomieju): also maybe just parse using `Version::standard` instead
|
||||
// of using `get_minor_version`?
|
||||
if get_minor_version(current_version) == get_minor_version(new_version) {
|
||||
fn get_rc_version_blog_post_url(semver: &Version) -> String {
|
||||
format!(
|
||||
"https://deno.com/blog/v{}.{}-rc-{}",
|
||||
semver.major, semver.minor, semver.pre[1]
|
||||
)
|
||||
}
|
||||
|
||||
async fn print_release_notes(
|
||||
current_version: &str,
|
||||
new_version: &str,
|
||||
client: &HttpClient,
|
||||
) {
|
||||
let Ok(current_semver) = Version::parse_standard(current_version) else {
|
||||
return;
|
||||
};
|
||||
let Ok(new_semver) = Version::parse_standard(new_version) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let is_switching_from_deno1_to_deno2 =
|
||||
new_semver.major == 2 && current_semver.major == 1;
|
||||
let is_deno_2_rc = new_semver.major == 2
|
||||
&& new_semver.minor == 0
|
||||
&& new_semver.patch == 0
|
||||
&& new_semver.pre.first() == Some(&"rc".to_string());
|
||||
|
||||
if is_deno_2_rc || is_switching_from_deno1_to_deno2 {
|
||||
log::info!(
|
||||
"{}\n\n {}\n",
|
||||
colors::gray("Migration guide:"),
|
||||
colors::bold(
|
||||
"https://docs.deno.com/runtime/manual/advanced/migrate_deprecations"
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if is_deno_2_rc {
|
||||
log::info!(
|
||||
"{}\n\n {}\n",
|
||||
colors::gray("If you find a bug, please report to:"),
|
||||
colors::bold("https://github.com/denoland/deno/issues/new")
|
||||
);
|
||||
|
||||
// Check if there's blog post entry for this release
|
||||
let blog_url_str = get_rc_version_blog_post_url(&new_semver);
|
||||
let blog_url = Url::parse(&blog_url_str).unwrap();
|
||||
if client.download(blog_url).await.is_ok() {
|
||||
log::info!(
|
||||
"{}\n\n {}\n",
|
||||
colors::gray("Blog post:"),
|
||||
colors::bold(blog_url_str)
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let should_print = current_semver.major != new_semver.major
|
||||
|| current_semver.minor != new_semver.minor;
|
||||
|
||||
if !should_print {
|
||||
return;
|
||||
}
|
||||
|
||||
|
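The reworked print_release_notes above parses both versions with Version::parse_standard, special-cases a Deno 2.0 release candidate, and otherwise only links a blog post when the major or minor version changed. A condensed sketch of those checks, reusing the deno_semver calls that appear in the diff (indexing pre[1] assumes an rc number follows the "rc" tag, as in the original):

// Condensed sketch of the version checks in print_release_notes above.
use deno_semver::Version;

fn release_blog_url(current: &str, new: &str) -> Option<String> {
  let current = Version::parse_standard(current).ok()?;
  let new = Version::parse_standard(new).ok()?;

  let is_deno_2_rc = new.major == 2
    && new.minor == 0
    && new.patch == 0
    && new.pre.first() == Some(&"rc".to_string());

  if is_deno_2_rc {
    // e.g. 2.0.0-rc.2 -> https://deno.com/blog/v2.0-rc-2
    return Some(format!(
      "https://deno.com/blog/v{}.{}-rc-{}",
      new.major, new.minor, new.pre[1]
    ));
  }

  // Only link a blog post when the major or minor version changed.
  if current.major != new.major || current.minor != new.minor {
    return Some(format!("https://deno.com/blog/v{}.{}", new.major, new.minor));
  }
  None
}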
@ -249,10 +307,7 @@ fn print_release_notes(current_version: &str, new_version: &str) {
|
|||
log::info!(
|
||||
"{}\n\n {}\n",
|
||||
colors::gray("Blog post:"),
|
||||
colors::bold(format!(
|
||||
"https://deno.com/blog/v{}",
|
||||
get_minor_version(new_version)
|
||||
))
|
||||
colors::bold(get_minor_version_blog_post_url(&new_semver))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -320,14 +375,14 @@ pub fn check_for_upgrades(
|
|||
log::info!(
|
||||
"{} {}",
|
||||
colors::green("A new canary release of Deno is available."),
|
||||
colors::italic_gray("Run `deno upgrade --canary` to install it.")
|
||||
colors::italic_gray("Run `deno upgrade canary` to install it.")
|
||||
);
|
||||
}
|
||||
ReleaseChannel::Rc => {
|
||||
log::info!(
|
||||
"{} {}",
|
||||
colors::green("A new release candidate of Deno is available."),
|
||||
colors::italic_gray("Run `deno upgrade --rc` to install it.")
|
||||
colors::italic_gray("Run `deno upgrade rc` to install it.")
|
||||
);
|
||||
}
|
||||
// TODO(bartlomieju)
|
||||
|
@ -512,7 +567,9 @@ pub async fn upgrade(
|
|||
print_release_notes(
|
||||
version::DENO_VERSION_INFO.deno,
|
||||
&selected_version_to_upgrade.version_or_hash,
|
||||
);
|
||||
&client,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
drop(temp_dir);
|
||||
return Ok(());
|
||||
|
@ -540,7 +597,9 @@ pub async fn upgrade(
|
|||
print_release_notes(
|
||||
version::DENO_VERSION_INFO.deno,
|
||||
&selected_version_to_upgrade.version_or_hash,
|
||||
);
|
||||
&client,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
drop(temp_dir); // delete the temp dir
|
||||
|
@ -591,12 +650,20 @@ impl RequestedVersion {
|
|||
|
||||
let (channel, passed_version) = if is_canary {
|
||||
if !re_hash.is_match(&passed_version) {
|
||||
bail!("Invalid commit hash passed");
|
||||
bail!(
|
||||
"Invalid commit hash passed ({})\n\n{}",
|
||||
colors::gray(passed_version),
|
||||
EXAMPLE_USAGE
|
||||
);
|
||||
}
|
||||
(ReleaseChannel::Canary, passed_version)
|
||||
} else {
|
||||
let Ok(semver) = Version::parse_standard(&passed_version) else {
|
||||
bail!("Invalid version passed");
|
||||
bail!(
|
||||
"Invalid version passed ({})\n\n{}",
|
||||
colors::gray(passed_version),
|
||||
EXAMPLE_USAGE
|
||||
);
|
||||
};
|
||||
|
||||
if semver.pre.contains(&"rc".to_string()) {
|
||||
|
@@ -972,8 +1039,13 @@ fn check_exe(exe_path: &Path) -> Result<(), AnyError> {
    .arg("-V")
    .stderr(std::process::Stdio::inherit())
    .output()?;
- assert!(output.status.success());
- Ok(())
+ if !output.status.success() {
+   bail!(
+     "Failed to validate Deno executable. This may be because your OS is unsupported or the executable is corrupted"
+   )
+ } else {
+   Ok(())
+ }
}

#[derive(Debug)]
@ -1687,4 +1759,31 @@ mod test {
|
|||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn blog_post_links() {
|
||||
let version = Version::parse_standard("1.46.0").unwrap();
|
||||
assert_eq!(
|
||||
get_minor_version_blog_post_url(&version),
|
||||
"https://deno.com/blog/v1.46"
|
||||
);
|
||||
|
||||
let version = Version::parse_standard("2.1.1").unwrap();
|
||||
assert_eq!(
|
||||
get_minor_version_blog_post_url(&version),
|
||||
"https://deno.com/blog/v2.1"
|
||||
);
|
||||
|
||||
let version = Version::parse_standard("2.0.0-rc.0").unwrap();
|
||||
assert_eq!(
|
||||
get_rc_version_blog_post_url(&version),
|
||||
"https://deno.com/blog/v2.0-rc-0"
|
||||
);
|
||||
|
||||
let version = Version::parse_standard("2.0.0-rc.2").unwrap();
|
||||
assert_eq!(
|
||||
get_rc_version_blog_post_url(&version),
|
||||
"https://deno.com/blog/v2.0-rc-2"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
113 cli/tools/vendor/analyze.rs (vendored, deleted)
@ -1,113 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_ast::swc::ast::ExportDefaultDecl;
|
||||
use deno_ast::swc::ast::ExportSpecifier;
|
||||
use deno_ast::swc::ast::ModuleExportName;
|
||||
use deno_ast::swc::ast::NamedExport;
|
||||
use deno_ast::swc::ast::Program;
|
||||
use deno_ast::swc::visit::noop_visit_type;
|
||||
use deno_ast::swc::visit::Visit;
|
||||
use deno_ast::swc::visit::VisitWith;
|
||||
use deno_ast::ParsedSource;
|
||||
|
||||
/// Gets if the parsed source has a default export.
|
||||
pub fn has_default_export(source: &ParsedSource) -> bool {
|
||||
let mut visitor = DefaultExportFinder {
|
||||
has_default_export: false,
|
||||
};
|
||||
let program = source.program();
|
||||
let program: &Program = &program;
|
||||
program.visit_with(&mut visitor);
|
||||
visitor.has_default_export
|
||||
}
|
||||
|
||||
struct DefaultExportFinder {
|
||||
has_default_export: bool,
|
||||
}
|
||||
|
||||
impl Visit for DefaultExportFinder {
|
||||
noop_visit_type!();
|
||||
|
||||
fn visit_export_default_decl(&mut self, _: &ExportDefaultDecl) {
|
||||
self.has_default_export = true;
|
||||
}
|
||||
|
||||
fn visit_named_export(&mut self, named_export: &NamedExport) {
|
||||
if named_export
|
||||
.specifiers
|
||||
.iter()
|
||||
.any(export_specifier_has_default)
|
||||
{
|
||||
self.has_default_export = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn export_specifier_has_default(s: &ExportSpecifier) -> bool {
|
||||
match s {
|
||||
ExportSpecifier::Default(_) => true,
|
||||
ExportSpecifier::Namespace(_) => false,
|
||||
ExportSpecifier::Named(named) => {
|
||||
let export_name = named.exported.as_ref().unwrap_or(&named.orig);
|
||||
|
||||
match export_name {
|
||||
ModuleExportName::Str(_) => false,
|
||||
ModuleExportName::Ident(ident) => &*ident.sym == "default",
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_ast::ParseParams;
|
||||
use deno_ast::ParsedSource;
|
||||
|
||||
use super::has_default_export;
|
||||
|
||||
#[test]
|
||||
fn has_default_when_export_default_decl() {
|
||||
let parsed_source = parse_module("export default class Class {}");
|
||||
assert!(has_default_export(&parsed_source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn has_default_when_named_export() {
|
||||
let parsed_source = parse_module("export {default} from './test.ts';");
|
||||
assert!(has_default_export(&parsed_source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn has_default_when_named_export_alias() {
|
||||
let parsed_source =
|
||||
parse_module("export {test as default} from './test.ts';");
|
||||
assert!(has_default_export(&parsed_source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_has_default_when_named_export_not_exported() {
|
||||
let parsed_source =
|
||||
parse_module("export {default as test} from './test.ts';");
|
||||
assert!(!has_default_export(&parsed_source));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_has_default_when_not() {
|
||||
let parsed_source = parse_module("export {test} from './test.ts'; export class Test{} export * from './test';");
|
||||
assert!(!has_default_export(&parsed_source));
|
||||
}
|
||||
|
||||
fn parse_module(text: &str) -> ParsedSource {
|
||||
deno_ast::parse_module(ParseParams {
|
||||
specifier: ModuleSpecifier::parse("file:///mod.ts").unwrap(),
|
||||
capture_tokens: false,
|
||||
maybe_syntax: None,
|
||||
media_type: MediaType::TypeScript,
|
||||
scope_analysis: false,
|
||||
text: text.into(),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
}
|
1330 cli/tools/vendor/build.rs (vendored, deleted)
File diff suppressed because it is too large
508 cli/tools/vendor/import_map.rs (vendored, deleted)
@ -1,508 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_ast::LineAndColumnIndex;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_ast::SourceTextInfo;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_graph::source::ResolutionMode;
|
||||
use deno_graph::Module;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::Position;
|
||||
use deno_graph::Range;
|
||||
use deno_graph::Resolution;
|
||||
use import_map::ImportMap;
|
||||
use import_map::SpecifierMap;
|
||||
use indexmap::IndexMap;
|
||||
use log::warn;
|
||||
|
||||
use crate::args::JsxImportSourceConfig;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
|
||||
use super::mappings::Mappings;
|
||||
use super::specifiers::is_remote_specifier;
|
||||
use super::specifiers::is_remote_specifier_text;
|
||||
|
||||
struct ImportMapBuilder<'a> {
|
||||
base_dir: &'a ModuleSpecifier,
|
||||
mappings: &'a Mappings,
|
||||
imports: ImportsBuilder<'a>,
|
||||
scopes: IndexMap<String, ImportsBuilder<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> ImportMapBuilder<'a> {
|
||||
pub fn new(base_dir: &'a ModuleSpecifier, mappings: &'a Mappings) -> Self {
|
||||
ImportMapBuilder {
|
||||
base_dir,
|
||||
mappings,
|
||||
imports: ImportsBuilder::new(base_dir, mappings),
|
||||
scopes: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn base_dir(&self) -> &ModuleSpecifier {
|
||||
self.base_dir
|
||||
}
|
||||
|
||||
pub fn scope(
|
||||
&mut self,
|
||||
base_specifier: &ModuleSpecifier,
|
||||
) -> &mut ImportsBuilder<'a> {
|
||||
self
|
||||
.scopes
|
||||
.entry(
|
||||
self
|
||||
.mappings
|
||||
.relative_specifier_text(self.base_dir, base_specifier),
|
||||
)
|
||||
.or_insert_with(|| ImportsBuilder::new(self.base_dir, self.mappings))
|
||||
}
|
||||
|
||||
pub fn into_import_map(
|
||||
self,
|
||||
maybe_original_import_map: Option<&ImportMap>,
|
||||
) -> ImportMap {
|
||||
fn get_local_imports(
|
||||
new_relative_path: &str,
|
||||
original_imports: &SpecifierMap,
|
||||
) -> Vec<(String, String)> {
|
||||
let mut result = Vec::new();
|
||||
for entry in original_imports.entries() {
|
||||
if let Some(raw_value) = entry.raw_value {
|
||||
if raw_value.starts_with("./") || raw_value.starts_with("../") {
|
||||
let sub_index = raw_value.find('/').unwrap() + 1;
|
||||
result.push((
|
||||
entry.raw_key.to_string(),
|
||||
format!("{}{}", new_relative_path, &raw_value[sub_index..]),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn add_local_imports<'a>(
|
||||
new_relative_path: &str,
|
||||
original_imports: &SpecifierMap,
|
||||
get_new_imports: impl FnOnce() -> &'a mut SpecifierMap,
|
||||
) {
|
||||
let local_imports =
|
||||
get_local_imports(new_relative_path, original_imports);
|
||||
if !local_imports.is_empty() {
|
||||
let new_imports = get_new_imports();
|
||||
for (key, value) in local_imports {
|
||||
if let Err(warning) = new_imports.append(key, value) {
|
||||
warn!("{}", warning);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut import_map = ImportMap::new(self.base_dir.clone());
|
||||
|
||||
if let Some(original_im) = maybe_original_import_map {
|
||||
let original_base_dir = ModuleSpecifier::from_directory_path(
|
||||
original_im
|
||||
.base_url()
|
||||
.to_file_path()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
let new_relative_path = self
|
||||
.mappings
|
||||
.relative_specifier_text(self.base_dir, &original_base_dir);
|
||||
// add the imports
|
||||
add_local_imports(&new_relative_path, original_im.imports(), || {
|
||||
import_map.imports_mut()
|
||||
});
|
||||
|
||||
for scope in original_im.scopes() {
|
||||
if scope.raw_key.starts_with("./") || scope.raw_key.starts_with("../") {
|
||||
let sub_index = scope.raw_key.find('/').unwrap() + 1;
|
||||
let new_key =
|
||||
format!("{}{}", new_relative_path, &scope.raw_key[sub_index..]);
|
||||
add_local_imports(&new_relative_path, scope.imports, || {
|
||||
import_map.get_or_append_scope_mut(&new_key).unwrap()
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let imports = import_map.imports_mut();
|
||||
for (key, value) in self.imports.imports {
|
||||
if !imports.contains(&key) {
|
||||
imports.append(key, value).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
for (scope_key, scope_value) in self.scopes {
|
||||
if !scope_value.imports.is_empty() {
|
||||
let imports = import_map.get_or_append_scope_mut(&scope_key).unwrap();
|
||||
for (key, value) in scope_value.imports {
|
||||
if !imports.contains(&key) {
|
||||
imports.append(key, value).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
import_map
|
||||
}
|
||||
}
|
||||
|
||||
struct ImportsBuilder<'a> {
|
||||
base_dir: &'a ModuleSpecifier,
|
||||
mappings: &'a Mappings,
|
||||
imports: IndexMap<String, String>,
|
||||
}
|
||||
|
||||
impl<'a> ImportsBuilder<'a> {
|
||||
pub fn new(base_dir: &'a ModuleSpecifier, mappings: &'a Mappings) -> Self {
|
||||
Self {
|
||||
base_dir,
|
||||
mappings,
|
||||
imports: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add(&mut self, key: String, specifier: &ModuleSpecifier) {
|
||||
let value = self
|
||||
.mappings
|
||||
.relative_specifier_text(self.base_dir, specifier);
|
||||
|
||||
// skip creating identity entries
|
||||
if key != value {
|
||||
self.imports.insert(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BuildImportMapInput<'a> {
|
||||
pub base_dir: &'a ModuleSpecifier,
|
||||
pub modules: &'a [&'a Module],
|
||||
pub graph: &'a ModuleGraph,
|
||||
pub mappings: &'a Mappings,
|
||||
pub maybe_original_import_map: Option<&'a ImportMap>,
|
||||
pub maybe_jsx_import_source: Option<&'a JsxImportSourceConfig>,
|
||||
pub resolver: &'a dyn deno_graph::source::Resolver,
|
||||
pub parsed_source_cache: &'a ParsedSourceCache,
|
||||
}
|
||||
|
||||
pub fn build_import_map(
|
||||
input: BuildImportMapInput<'_>,
|
||||
) -> Result<String, AnyError> {
|
||||
let BuildImportMapInput {
|
||||
base_dir,
|
||||
modules,
|
||||
graph,
|
||||
mappings,
|
||||
maybe_original_import_map,
|
||||
maybe_jsx_import_source,
|
||||
resolver,
|
||||
parsed_source_cache,
|
||||
} = input;
|
||||
let mut builder = ImportMapBuilder::new(base_dir, mappings);
|
||||
visit_modules(graph, modules, mappings, &mut builder, parsed_source_cache)?;
|
||||
|
||||
for base_specifier in mappings.base_specifiers() {
|
||||
builder
|
||||
.imports
|
||||
.add(base_specifier.to_string(), base_specifier);
|
||||
}
|
||||
|
||||
// add the jsx import source to the destination import map, if mapped in the original import map
|
||||
if let Some(jsx_import_source) = maybe_jsx_import_source {
|
||||
if let Some(specifier_text) = jsx_import_source.maybe_specifier_text() {
|
||||
if let Ok(resolved_url) = resolver.resolve(
|
||||
&specifier_text,
|
||||
&deno_graph::Range {
|
||||
specifier: jsx_import_source.base_url.clone(),
|
||||
start: deno_graph::Position::zeroed(),
|
||||
end: deno_graph::Position::zeroed(),
|
||||
},
|
||||
ResolutionMode::Execution,
|
||||
) {
|
||||
builder.imports.add(specifier_text, &resolved_url);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(builder.into_import_map(maybe_original_import_map).to_json())
|
||||
}
|
||||
|
||||
fn visit_modules(
|
||||
graph: &ModuleGraph,
|
||||
modules: &[&Module],
|
||||
mappings: &Mappings,
|
||||
import_map: &mut ImportMapBuilder,
|
||||
parsed_source_cache: &ParsedSourceCache,
|
||||
) -> Result<(), AnyError> {
|
||||
for module in modules {
|
||||
let module = match module {
|
||||
Module::Js(module) => module,
|
||||
// skip visiting Json modules as they are leaves
|
||||
Module::Json(_)
|
||||
| Module::Npm(_)
|
||||
| Module::Node(_)
|
||||
| Module::External(_) => continue,
|
||||
};
|
||||
|
||||
let parsed_source =
|
||||
parsed_source_cache.get_parsed_source_from_js_module(module)?;
|
||||
let text_info = parsed_source.text_info_lazy().clone();
|
||||
|
||||
for dep in module.dependencies.values() {
|
||||
visit_resolution(
|
||||
&dep.maybe_code,
|
||||
graph,
|
||||
import_map,
|
||||
&module.specifier,
|
||||
mappings,
|
||||
&text_info,
|
||||
&module.source,
|
||||
);
|
||||
visit_resolution(
|
||||
&dep.maybe_type,
|
||||
graph,
|
||||
import_map,
|
||||
&module.specifier,
|
||||
mappings,
|
||||
&text_info,
|
||||
&module.source,
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(types_dep) = &module.maybe_types_dependency {
|
||||
visit_resolution(
|
||||
&types_dep.dependency,
|
||||
graph,
|
||||
import_map,
|
||||
&module.specifier,
|
||||
mappings,
|
||||
&text_info,
|
||||
&module.source,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn visit_resolution(
|
||||
resolution: &Resolution,
|
||||
graph: &ModuleGraph,
|
||||
import_map: &mut ImportMapBuilder,
|
||||
referrer: &ModuleSpecifier,
|
||||
mappings: &Mappings,
|
||||
text_info: &SourceTextInfo,
|
||||
source_text: &str,
|
||||
) {
|
||||
if let Some(resolved) = resolution.ok() {
|
||||
let text = text_from_range(text_info, source_text, &resolved.range);
|
||||
// if the text is empty then it's probably an x-TypeScript-types
|
||||
if !text.is_empty() {
|
||||
handle_dep_specifier(
|
||||
text,
|
||||
&resolved.specifier,
|
||||
graph,
|
||||
import_map,
|
||||
referrer,
|
||||
mappings,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_dep_specifier(
|
||||
text: &str,
|
||||
unresolved_specifier: &ModuleSpecifier,
|
||||
graph: &ModuleGraph,
|
||||
import_map: &mut ImportMapBuilder,
|
||||
referrer: &ModuleSpecifier,
|
||||
mappings: &Mappings,
|
||||
) {
|
||||
let specifier = match graph.get(unresolved_specifier) {
|
||||
Some(module) => module.specifier().clone(),
|
||||
// Ignore when None. The graph was previous validated so this is a
|
||||
// dynamic import that was missing and is ignored for vendoring
|
||||
None => return,
|
||||
};
|
||||
// check if it's referencing a remote module
|
||||
if is_remote_specifier(&specifier) {
|
||||
handle_remote_dep_specifier(
|
||||
text,
|
||||
unresolved_specifier,
|
||||
&specifier,
|
||||
import_map,
|
||||
referrer,
|
||||
mappings,
|
||||
)
|
||||
} else if specifier.scheme() == "file" {
|
||||
handle_local_dep_specifier(
|
||||
text,
|
||||
unresolved_specifier,
|
||||
&specifier,
|
||||
import_map,
|
||||
referrer,
|
||||
mappings,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_remote_dep_specifier(
|
||||
text: &str,
|
||||
unresolved_specifier: &ModuleSpecifier,
|
||||
specifier: &ModuleSpecifier,
|
||||
import_map: &mut ImportMapBuilder,
|
||||
referrer: &ModuleSpecifier,
|
||||
mappings: &Mappings,
|
||||
) {
|
||||
if is_remote_specifier_text(text) {
|
||||
let base_specifier = mappings.base_specifier(specifier);
|
||||
if text.starts_with(base_specifier.as_str()) {
|
||||
let sub_path = &text[base_specifier.as_str().len()..];
|
||||
let relative_text =
|
||||
mappings.relative_specifier_text(base_specifier, specifier);
|
||||
let expected_sub_path = relative_text.trim_start_matches("./");
|
||||
if expected_sub_path != sub_path {
|
||||
import_map.imports.add(text.to_string(), specifier);
|
||||
}
|
||||
} else {
|
||||
// it's probably a redirect. Add it explicitly to the import map
|
||||
import_map.imports.add(text.to_string(), specifier);
|
||||
}
|
||||
} else {
|
||||
let expected_relative_specifier_text =
|
||||
mappings.relative_specifier_text(referrer, specifier);
|
||||
if expected_relative_specifier_text == text {
|
||||
return;
|
||||
}
|
||||
|
||||
if !is_remote_specifier(referrer) {
|
||||
// local module referencing a remote module using
|
||||
// non-remote specifier text means it was something in
|
||||
// the original import map, so add a mapping to it
|
||||
import_map.imports.add(text.to_string(), specifier);
|
||||
return;
|
||||
}
|
||||
|
||||
let base_referrer = mappings.base_specifier(referrer);
|
||||
let base_dir = import_map.base_dir().clone();
|
||||
let imports = import_map.scope(base_referrer);
|
||||
if text.starts_with("./") || text.starts_with("../") {
|
||||
// resolve relative specifier key
|
||||
let mut local_base_specifier = mappings.local_uri(base_referrer);
|
||||
local_base_specifier = local_base_specifier
|
||||
// path includes "/" so make it relative
|
||||
.join(&format!(".{}", unresolved_specifier.path()))
|
||||
.unwrap_or_else(|_| {
|
||||
panic!(
|
||||
"Error joining {} to {}",
|
||||
unresolved_specifier.path(),
|
||||
local_base_specifier
|
||||
)
|
||||
});
|
||||
local_base_specifier.set_query(unresolved_specifier.query());
|
||||
|
||||
imports.add(
|
||||
mappings.relative_specifier_text(&base_dir, &local_base_specifier),
|
||||
specifier,
|
||||
);
|
||||
|
||||
// add a mapping that uses the local directory name and the remote
|
||||
// filename in order to support files importing this relatively
|
||||
imports.add(
|
||||
{
|
||||
let local_path = mappings.local_path(specifier);
|
||||
let mut value =
|
||||
ModuleSpecifier::from_directory_path(local_path.parent().unwrap())
|
||||
.unwrap();
|
||||
value.set_query(specifier.query());
|
||||
value.set_path(&format!(
|
||||
"{}{}",
|
||||
value.path(),
|
||||
specifier.path_segments().unwrap().last().unwrap(),
|
||||
));
|
||||
mappings.relative_specifier_text(&base_dir, &value)
|
||||
},
|
||||
specifier,
|
||||
);
|
||||
} else {
|
||||
// absolute (`/`) or bare specifier should be left as-is
|
||||
imports.add(text.to_string(), specifier);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_local_dep_specifier(
|
||||
text: &str,
|
||||
unresolved_specifier: &ModuleSpecifier,
|
||||
specifier: &ModuleSpecifier,
|
||||
import_map: &mut ImportMapBuilder,
|
||||
referrer: &ModuleSpecifier,
|
||||
mappings: &Mappings,
|
||||
) {
|
||||
if !is_remote_specifier(referrer) {
|
||||
// do not handle local modules referencing local modules
|
||||
return;
|
||||
}
|
||||
|
||||
// The remote module is referencing a local file. This could occur via an
|
||||
// existing import map. In this case, we'll have to add an import map
|
||||
// entry in order to map the path back to the local path once vendored.
|
||||
let base_dir = import_map.base_dir().clone();
|
||||
let base_specifier = mappings.base_specifier(referrer);
|
||||
let imports = import_map.scope(base_specifier);
|
||||
|
||||
if text.starts_with("./") || text.starts_with("../") {
|
||||
let referrer_local_uri = mappings.local_uri(referrer);
|
||||
let mut specifier_local_uri =
|
||||
referrer_local_uri.join(text).unwrap_or_else(|_| {
|
||||
panic!(
|
||||
"Error joining {} to {}",
|
||||
unresolved_specifier.path(),
|
||||
referrer_local_uri
|
||||
)
|
||||
});
|
||||
specifier_local_uri.set_query(unresolved_specifier.query());
|
||||
|
||||
imports.add(
|
||||
mappings.relative_specifier_text(&base_dir, &specifier_local_uri),
|
||||
specifier,
|
||||
);
|
||||
} else {
|
||||
imports.add(text.to_string(), specifier);
|
||||
}
|
||||
}
|
||||
|
||||
fn text_from_range<'a>(
|
||||
text_info: &SourceTextInfo,
|
||||
text: &'a str,
|
||||
range: &Range,
|
||||
) -> &'a str {
|
||||
let result = &text[byte_range(text_info, range)];
|
||||
if result.starts_with('"') || result.starts_with('\'') {
|
||||
// remove the quotes
|
||||
&result[1..result.len() - 1]
|
||||
} else {
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
fn byte_range(
|
||||
text_info: &SourceTextInfo,
|
||||
range: &Range,
|
||||
) -> std::ops::Range<usize> {
|
||||
let start = byte_index(text_info, &range.start);
|
||||
let end = byte_index(text_info, &range.end);
|
||||
start..end
|
||||
}
|
||||
|
||||
fn byte_index(text_info: &SourceTextInfo, pos: &Position) -> usize {
|
||||
// todo(https://github.com/denoland/deno_graph/issues/79): use byte indexes all the way down
|
||||
text_info.loc_to_source_pos(LineAndColumnIndex {
|
||||
line_index: pos.line,
|
||||
column_index: pos.character,
|
||||
}) - text_info.range().start
|
||||
}
|
255 cli/tools/vendor/mappings.rs (vendored, deleted)
@ -1,255 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_graph::Module;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::Position;
|
||||
|
||||
use crate::util::path::path_with_stem_suffix;
|
||||
use crate::util::path::relative_specifier;
|
||||
|
||||
use super::specifiers::dir_name_for_root;
|
||||
use super::specifiers::get_unique_path;
|
||||
use super::specifiers::make_url_relative;
|
||||
use super::specifiers::partition_by_root_specifiers;
|
||||
use super::specifiers::sanitize_filepath;
|
||||
|
||||
pub struct ProxiedModule {
|
||||
pub output_path: PathBuf,
|
||||
pub declaration_specifier: ModuleSpecifier,
|
||||
}
|
||||
|
||||
/// Constructs and holds the remote specifier to local path mappings.
|
||||
pub struct Mappings {
|
||||
mappings: HashMap<ModuleSpecifier, PathBuf>,
|
||||
base_specifiers: Vec<ModuleSpecifier>,
|
||||
proxies: HashMap<ModuleSpecifier, ProxiedModule>,
|
||||
}
|
||||
|
||||
impl Mappings {
|
||||
pub fn from_remote_modules(
|
||||
graph: &ModuleGraph,
|
||||
remote_modules: &[&Module],
|
||||
output_dir: &Path,
|
||||
) -> Result<Self, AnyError> {
|
||||
let partitioned_specifiers = partition_by_root_specifiers(
|
||||
remote_modules.iter().map(|m| m.specifier()),
|
||||
);
|
||||
let mut mapped_paths = HashSet::new();
|
||||
let mut mappings = HashMap::new();
|
||||
let mut proxies = HashMap::new();
|
||||
let mut base_specifiers = Vec::new();
|
||||
|
||||
for (root, specifiers) in partitioned_specifiers.into_iter() {
|
||||
let base_dir = get_unique_path(
|
||||
output_dir.join(dir_name_for_root(&root)),
|
||||
&mut mapped_paths,
|
||||
);
|
||||
for specifier in specifiers {
|
||||
let module = graph.get(&specifier).unwrap();
|
||||
let media_type = match module {
|
||||
Module::Js(module) => module.media_type,
|
||||
Module::Json(_) => MediaType::Json,
|
||||
Module::Node(_) | Module::Npm(_) | Module::External(_) => continue,
|
||||
};
|
||||
let sub_path = sanitize_filepath(&make_url_relative(&root, &{
|
||||
let mut specifier = specifier.clone();
|
||||
specifier.set_query(None);
|
||||
specifier
|
||||
})?);
|
||||
let new_path = path_with_extension(
|
||||
&base_dir.join(if cfg!(windows) {
|
||||
sub_path.replace('/', "\\")
|
||||
} else {
|
||||
sub_path
|
||||
}),
|
||||
&media_type.as_ts_extension()[1..],
|
||||
);
|
||||
mappings
|
||||
.insert(specifier, get_unique_path(new_path, &mut mapped_paths));
|
||||
}
|
||||
base_specifiers.push(root.clone());
|
||||
mappings.insert(root, base_dir);
|
||||
}
|
||||
|
||||
// resolve all the "proxy" paths to use for when an x-typescript-types header is specified
|
||||
for module in remote_modules {
|
||||
if let Some(module) = module.js() {
|
||||
if let Some(resolved) = &module
|
||||
.maybe_types_dependency
|
||||
.as_ref()
|
||||
.and_then(|d| d.dependency.ok())
|
||||
{
|
||||
let range = &resolved.range;
|
||||
// hack to tell if it's an x-typescript-types header
|
||||
let is_ts_types_header = range.start == Position::zeroed()
|
||||
&& range.end == Position::zeroed();
|
||||
if is_ts_types_header {
|
||||
let module_path = mappings.get(&module.specifier).unwrap();
|
||||
let proxied_path = get_unique_path(
|
||||
path_with_stem_suffix(module_path, ".proxied"),
|
||||
&mut mapped_paths,
|
||||
);
|
||||
proxies.insert(
|
||||
module.specifier.clone(),
|
||||
ProxiedModule {
|
||||
output_path: proxied_path,
|
||||
declaration_specifier: resolved.specifier.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
mappings,
|
||||
base_specifiers,
|
||||
proxies,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn local_uri(&self, specifier: &ModuleSpecifier) -> ModuleSpecifier {
|
||||
if specifier.scheme() == "file" {
|
||||
specifier.clone()
|
||||
} else {
|
||||
let local_path = self.local_path(specifier);
|
||||
if specifier.path().ends_with('/') {
|
||||
ModuleSpecifier::from_directory_path(&local_path)
|
||||
} else {
|
||||
ModuleSpecifier::from_file_path(&local_path)
|
||||
}
|
||||
.unwrap_or_else(|_| {
|
||||
panic!("Could not convert {} to uri.", local_path.display())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn local_path(&self, specifier: &ModuleSpecifier) -> PathBuf {
|
||||
if specifier.scheme() == "file" {
|
||||
specifier.to_file_path().unwrap()
|
||||
} else {
|
||||
self
|
||||
.mappings
|
||||
.get(specifier)
|
||||
.unwrap_or_else(|| panic!("Could not find local path for {specifier}"))
|
||||
.to_path_buf()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn relative_specifier_text(
|
||||
&self,
|
||||
from: &ModuleSpecifier,
|
||||
to: &ModuleSpecifier,
|
||||
) -> String {
|
||||
let from = self.local_uri(from);
|
||||
let to = self.local_uri(to);
|
||||
relative_specifier(&from, &to).unwrap()
|
||||
}
|
||||
|
||||
pub fn base_specifiers(&self) -> &Vec<ModuleSpecifier> {
|
||||
&self.base_specifiers
|
||||
}
|
||||
|
||||
pub fn base_specifier(
|
||||
&self,
|
||||
child_specifier: &ModuleSpecifier,
|
||||
) -> &ModuleSpecifier {
|
||||
self
|
||||
.base_specifiers
|
||||
.iter()
|
||||
.find(|s| child_specifier.as_str().starts_with(s.as_str()))
|
||||
.unwrap_or_else(|| {
|
||||
panic!("Could not find base specifier for {child_specifier}")
|
||||
})
|
||||
}
|
||||
|
||||
pub fn proxied_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
|
||||
self.proxies.get(specifier).map(|s| s.output_path.clone())
|
||||
}
|
||||
|
||||
pub fn proxied_modules(
|
||||
&self,
|
||||
) -> std::collections::hash_map::Iter<'_, ModuleSpecifier, ProxiedModule> {
|
||||
self.proxies.iter()
|
||||
}
|
||||
}
|
||||
|
||||
fn path_with_extension(path: &Path, new_ext: &str) -> PathBuf {
|
||||
if let Some(file_stem) = path.file_stem().map(|f| f.to_string_lossy()) {
|
||||
if let Some(old_ext) = path.extension().map(|f| f.to_string_lossy()) {
|
||||
if file_stem.to_lowercase().ends_with(".d") {
|
||||
if new_ext.to_lowercase() == format!("d.{}", old_ext.to_lowercase()) {
|
||||
// maintain casing
|
||||
return path.to_path_buf();
|
||||
}
|
||||
return path.with_file_name(format!(
|
||||
"{}.{}",
|
||||
&file_stem[..file_stem.len() - ".d".len()],
|
||||
new_ext
|
||||
));
|
||||
}
|
||||
if new_ext.to_lowercase() == old_ext.to_lowercase() {
|
||||
// maintain casing
|
||||
return path.to_path_buf();
|
||||
}
|
||||
let media_type = MediaType::from_path(path);
|
||||
if media_type == MediaType::Unknown {
|
||||
return path.with_file_name(format!(
|
||||
"{}.{}",
|
||||
path.file_name().unwrap().to_string_lossy(),
|
||||
new_ext
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
path.with_extension(new_ext)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_path_with_extension() {
|
||||
assert_eq!(
|
||||
path_with_extension(&PathBuf::from("/test.D.TS"), "ts"),
|
||||
PathBuf::from("/test.ts")
|
||||
);
|
||||
assert_eq!(
|
||||
path_with_extension(&PathBuf::from("/test.D.MTS"), "js"),
|
||||
PathBuf::from("/test.js")
|
||||
);
|
||||
assert_eq!(
|
||||
path_with_extension(&PathBuf::from("/test.D.TS"), "d.ts"),
|
||||
// maintains casing
|
||||
PathBuf::from("/test.D.TS"),
|
||||
);
|
||||
assert_eq!(
|
||||
path_with_extension(&PathBuf::from("/test.TS"), "ts"),
|
||||
// maintains casing
|
||||
PathBuf::from("/test.TS"),
|
||||
);
|
||||
assert_eq!(
|
||||
path_with_extension(&PathBuf::from("/test.ts"), "js"),
|
||||
PathBuf::from("/test.js")
|
||||
);
|
||||
assert_eq!(
|
||||
path_with_extension(&PathBuf::from("/test.js"), "js"),
|
||||
PathBuf::from("/test.js")
|
||||
);
|
||||
assert_eq!(
|
||||
path_with_extension(&PathBuf::from("/chai@1.2.3"), "js"),
|
||||
PathBuf::from("/chai@1.2.3.js")
|
||||
);
|
||||
}
|
||||
}
|
575 cli/tools/vendor/mod.rs (vendored, deleted)
@ -1,575 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_ast::TextChange;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::resolve_url_or_path;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_runtime::colors;
|
||||
use log::warn;
|
||||
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::ConfigFile;
|
||||
use crate::args::Flags;
|
||||
use crate::args::FmtOptionsConfig;
|
||||
use crate::args::VendorFlags;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::tools::fmt::format_json;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use crate::util::fs::resolve_from_cwd;
|
||||
use crate::util::path::relative_specifier;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
|
||||
mod analyze;
|
||||
mod build;
|
||||
mod import_map;
|
||||
mod mappings;
|
||||
mod specifiers;
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
pub async fn vendor(
|
||||
flags: Arc<Flags>,
|
||||
vendor_flags: VendorFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
log::info!(
|
||||
"{}",
|
||||
colors::yellow("⚠️ Warning: `deno vendor` is deprecated and will be removed in Deno 2.0.\nAdd `\"vendor\": true` to your `deno.json` or use the `--vendor` flag instead."),
|
||||
);
|
||||
let mut cli_options = CliOptions::from_flags(flags)?;
|
||||
let raw_output_dir = match &vendor_flags.output_path {
|
||||
Some(output_path) => PathBuf::from(output_path).to_owned(),
|
||||
None => PathBuf::from("vendor/"),
|
||||
};
|
||||
let output_dir = resolve_from_cwd(&raw_output_dir)?;
|
||||
validate_output_dir(&output_dir, &vendor_flags)?;
|
||||
validate_options(&mut cli_options, &output_dir)?;
|
||||
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
|
||||
let cli_options = factory.cli_options()?;
|
||||
if cli_options.workspace().config_folders().len() > 1 {
|
||||
bail!("deno vendor is not supported in a workspace. Set `\"vendor\": true` in the workspace deno.json file instead");
|
||||
}
|
||||
let entry_points =
|
||||
resolve_entry_points(&vendor_flags, cli_options.initial_cwd())?;
|
||||
let jsx_import_source = cli_options
|
||||
.workspace()
|
||||
.to_maybe_jsx_import_source_config()?;
|
||||
let module_graph_creator = factory.module_graph_creator().await?.clone();
|
||||
let workspace_resolver = factory.workspace_resolver().await?;
|
||||
let root_folder = cli_options.workspace().root_folder_configs();
|
||||
let maybe_config_file = root_folder.deno_json.as_ref();
|
||||
let output = build::build(build::BuildInput {
|
||||
entry_points,
|
||||
build_graph: move |entry_points| {
|
||||
async move {
|
||||
module_graph_creator
|
||||
.create_graph(GraphKind::All, entry_points)
|
||||
.await
|
||||
}
|
||||
.boxed_local()
|
||||
},
|
||||
parsed_source_cache: factory.parsed_source_cache(),
|
||||
output_dir: &output_dir,
|
||||
maybe_original_import_map: workspace_resolver.maybe_import_map(),
|
||||
maybe_jsx_import_source: jsx_import_source.as_ref(),
|
||||
resolver: factory.resolver().await?.as_graph_resolver(),
|
||||
environment: &build::RealVendorEnvironment,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let vendored_count = output.vendored_count;
|
||||
let graph = output.graph;
|
||||
let npm_package_count = graph.npm_packages.len();
|
||||
let try_add_node_modules_dir = npm_package_count > 0
|
||||
&& cli_options.node_modules_dir_enablement().unwrap_or(true);
|
||||
|
||||
log::info!(
|
||||
concat!("Vendored {} {} into {} directory.",),
|
||||
vendored_count,
|
||||
if vendored_count == 1 {
|
||||
"module"
|
||||
} else {
|
||||
"modules"
|
||||
},
|
||||
raw_output_dir.display(),
|
||||
);
|
||||
|
||||
let try_add_import_map = vendored_count > 0;
|
||||
let modified_result = maybe_update_config_file(
|
||||
&output_dir,
|
||||
maybe_config_file,
|
||||
try_add_import_map,
|
||||
try_add_node_modules_dir,
|
||||
);
|
||||
|
||||
// cache the node_modules folder when it's been added to the config file
|
||||
if modified_result.added_node_modules_dir {
|
||||
let node_modules_path =
|
||||
cli_options.node_modules_dir_path().cloned().or_else(|| {
|
||||
maybe_config_file
|
||||
.as_ref()
|
||||
.map(|d| &d.specifier)
|
||||
.filter(|c| c.scheme() == "file")
|
||||
.and_then(|c| c.to_file_path().ok())
|
||||
.map(|config_path| config_path.parent().unwrap().join("node_modules"))
|
||||
});
|
||||
if let Some(node_modules_path) = node_modules_path {
|
||||
let cli_options =
|
||||
cli_options.with_node_modules_dir_path(node_modules_path);
|
||||
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
|
||||
if let Some(managed) = factory.npm_resolver().await?.as_managed() {
|
||||
managed.cache_packages().await?;
|
||||
}
|
||||
}
|
||||
log::info!(
|
||||
concat!(
|
||||
"Vendored {} npm {} into node_modules directory. Set `nodeModulesDir: false` ",
|
||||
"in the Deno configuration file to disable vendoring npm packages in the future.",
|
||||
),
|
||||
npm_package_count,
|
||||
if npm_package_count == 1 {
|
||||
"package"
|
||||
} else {
|
||||
"packages"
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
if vendored_count > 0 {
|
||||
let import_map_path = raw_output_dir.join("import_map.json");
|
||||
if modified_result.updated_import_map {
|
||||
log::info!(
|
||||
concat!(
|
||||
"\nUpdated your local Deno configuration file with a reference to the ",
|
||||
"new vendored import map at {}. Invoking Deno subcommands will now ",
|
||||
"automatically resolve using the vendored modules. You may override ",
|
||||
"this by providing the `--import-map <other-import-map>` flag or by ",
|
||||
"manually editing your Deno configuration file.",
|
||||
),
|
||||
import_map_path.display(),
|
||||
);
|
||||
} else {
|
||||
log::info!(
|
||||
concat!(
|
||||
"\nTo use vendored modules, specify the `--import-map {}` flag when ",
|
||||
r#"invoking Deno subcommands or add an `"importMap": "<path_to_vendored_import_map>"` "#,
|
||||
"entry to a deno.json file.",
|
||||
),
|
||||
import_map_path.display(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_output_dir(
|
||||
output_dir: &Path,
|
||||
flags: &VendorFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
if !flags.force && !is_dir_empty(output_dir)? {
|
||||
bail!(concat!(
|
||||
"Output directory was not empty. Please specify an empty directory or use ",
|
||||
"--force to ignore this error and potentially overwrite its contents.",
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_options(
|
||||
options: &mut CliOptions,
|
||||
output_dir: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
let import_map_specifier = options
|
||||
.resolve_specified_import_map_specifier()?
|
||||
.or_else(|| {
|
||||
let config_file = options.workspace().root_deno_json()?;
|
||||
config_file
|
||||
.to_import_map_specifier()
|
||||
.ok()
|
||||
.flatten()
|
||||
.or_else(|| {
|
||||
if config_file.is_an_import_map() {
|
||||
Some(config_file.specifier.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
});
|
||||
// check the import map
|
||||
if let Some(import_map_path) = import_map_specifier
|
||||
.and_then(|p| specifier_to_file_path(&p).ok())
|
||||
.and_then(|p| canonicalize_path(&p).ok())
|
||||
{
|
||||
// make the output directory in order to canonicalize it for the check below
|
||||
std::fs::create_dir_all(output_dir)?;
|
||||
let output_dir = canonicalize_path(output_dir).with_context(|| {
|
||||
format!("Failed to canonicalize: {}", output_dir.display())
|
||||
})?;
|
||||
|
||||
if import_map_path.starts_with(output_dir) {
|
||||
// canonicalize to make the test for this pass on the CI
|
||||
let cwd = canonicalize_path(&std::env::current_dir()?)?;
|
||||
// We don't allow using the output directory to help generate the
|
||||
// new state because this may lead to cryptic error messages.
|
||||
log::warn!(
|
||||
concat!(
|
||||
"Ignoring import map. Specifying an import map file ({}) in the ",
|
||||
"deno vendor output directory is not supported. If you wish to use ",
|
||||
"an import map while vendoring, please specify one located outside ",
|
||||
"this directory."
|
||||
),
|
||||
import_map_path
|
||||
.strip_prefix(&cwd)
|
||||
.unwrap_or(&import_map_path)
|
||||
.display()
|
||||
.to_string(),
|
||||
);
|
||||
|
||||
// don't use an import map in the config
|
||||
options.set_import_map_specifier(None);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn maybe_update_config_file(
|
||||
output_dir: &Path,
|
||||
maybe_config_file: Option<&Arc<ConfigFile>>,
|
||||
try_add_import_map: bool,
|
||||
try_add_node_modules_dir: bool,
|
||||
) -> ModifiedResult {
|
||||
assert!(output_dir.is_absolute());
|
||||
let config_file = match maybe_config_file {
|
||||
Some(config_file) => config_file,
|
||||
None => return ModifiedResult::default(),
|
||||
};
|
||||
if config_file.specifier.scheme() != "file" {
|
||||
return ModifiedResult::default();
|
||||
}
|
||||
|
||||
let fmt_config_options = config_file
|
||||
.to_fmt_config()
|
||||
.ok()
|
||||
.map(|config| config.options)
|
||||
.unwrap_or_default();
|
||||
let result = update_config_file(
|
||||
config_file,
|
||||
&fmt_config_options,
|
||||
if try_add_import_map {
|
||||
Some(
|
||||
ModuleSpecifier::from_file_path(output_dir.join("import_map.json"))
|
||||
.unwrap(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
try_add_node_modules_dir,
|
||||
);
|
||||
match result {
|
||||
Ok(modified_result) => modified_result,
|
||||
Err(err) => {
|
||||
warn!("Error updating config file. {:#}", err);
|
||||
ModifiedResult::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn update_config_file(
|
||||
config_file: &ConfigFile,
|
||||
fmt_options: &FmtOptionsConfig,
|
||||
import_map_specifier: Option<ModuleSpecifier>,
|
||||
try_add_node_modules_dir: bool,
|
||||
) -> Result<ModifiedResult, AnyError> {
|
||||
let config_path = specifier_to_file_path(&config_file.specifier)?;
|
||||
let config_text = std::fs::read_to_string(&config_path)?;
|
||||
let import_map_specifier =
|
||||
import_map_specifier.and_then(|import_map_specifier| {
|
||||
relative_specifier(&config_file.specifier, &import_map_specifier)
|
||||
});
|
||||
let modified_result = update_config_text(
|
||||
&config_text,
|
||||
fmt_options,
|
||||
import_map_specifier.as_deref(),
|
||||
try_add_node_modules_dir,
|
||||
)?;
|
||||
if let Some(new_text) = &modified_result.new_text {
|
||||
std::fs::write(config_path, new_text)?;
|
||||
}
|
||||
Ok(modified_result)
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct ModifiedResult {
|
||||
updated_import_map: bool,
|
||||
added_node_modules_dir: bool,
|
||||
new_text: Option<String>,
|
||||
}
|
||||
|
||||
fn update_config_text(
|
||||
text: &str,
|
||||
fmt_options: &FmtOptionsConfig,
|
||||
import_map_specifier: Option<&str>,
|
||||
try_add_node_modules_dir: bool,
|
||||
) -> Result<ModifiedResult, AnyError> {
|
||||
use jsonc_parser::ast::ObjectProp;
|
||||
use jsonc_parser::ast::Value;
|
||||
let text = if text.trim().is_empty() { "{}\n" } else { text };
|
||||
let ast =
|
||||
jsonc_parser::parse_to_ast(text, &Default::default(), &Default::default())?;
|
||||
let obj = match ast.value {
|
||||
Some(Value::Object(obj)) => obj,
|
||||
_ => bail!("Failed updating config file due to no object."),
|
||||
};
|
||||
let mut modified_result = ModifiedResult::default();
|
||||
let mut text_changes = Vec::new();
|
||||
let mut should_format = false;
|
||||
|
||||
if try_add_node_modules_dir {
|
||||
// Only modify the nodeModulesDir property if it's not set
|
||||
// as this allows people to opt-out of this when vendoring
|
||||
// by specifying `nodeModulesDir: false`
|
||||
if obj.get("nodeModulesDir").is_none() {
|
||||
let insert_position = obj.range.end - 1;
|
||||
text_changes.push(TextChange {
|
||||
range: insert_position..insert_position,
|
||||
new_text: r#""nodeModulesDir": true"#.to_string(),
|
||||
});
|
||||
should_format = true;
|
||||
modified_result.added_node_modules_dir = true;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(import_map_specifier) = import_map_specifier {
|
||||
let import_map_specifier = import_map_specifier.replace('\"', "\\\"");
|
||||
match obj.get("importMap") {
|
||||
Some(ObjectProp {
|
||||
value: Value::StringLit(lit),
|
||||
..
|
||||
}) => {
|
||||
text_changes.push(TextChange {
|
||||
range: lit.range.start..lit.range.end,
|
||||
new_text: format!("\"{}\"", import_map_specifier),
|
||||
});
|
||||
modified_result.updated_import_map = true;
|
||||
}
|
||||
None => {
|
||||
// insert it crudely at a position that won't cause any issues
|
||||
// with comments and format after to make it look nice
|
||||
let insert_position = obj.range.end - 1;
|
||||
text_changes.push(TextChange {
|
||||
range: insert_position..insert_position,
|
||||
new_text: format!(r#""importMap": "{}""#, import_map_specifier),
|
||||
});
|
||||
should_format = true;
|
||||
modified_result.updated_import_map = true;
|
||||
}
|
||||
// shouldn't happen
|
||||
Some(_) => {
|
||||
bail!("Failed updating importMap in config file due to invalid type.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if text_changes.is_empty() {
|
||||
return Ok(modified_result);
|
||||
}
|
||||
|
||||
let new_text = deno_ast::apply_text_changes(text, text_changes);
|
||||
modified_result.new_text = if should_format {
|
||||
format_json(&PathBuf::from("deno.json"), &new_text, fmt_options)
|
||||
.ok()
|
||||
.map(|formatted_text| formatted_text.unwrap_or(new_text))
|
||||
} else {
|
||||
Some(new_text)
|
||||
};
|
||||
Ok(modified_result)
|
||||
}
|
||||
|
||||
fn is_dir_empty(dir_path: &Path) -> Result<bool, AnyError> {
|
||||
match std::fs::read_dir(dir_path) {
|
||||
Ok(mut dir) => Ok(dir.next().is_none()),
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(true),
|
||||
Err(err) => {
|
||||
bail!("Error reading directory {}: {}", dir_path.display(), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_entry_points(
|
||||
flags: &VendorFlags,
|
||||
initial_cwd: &Path,
|
||||
) -> Result<Vec<ModuleSpecifier>, AnyError> {
|
||||
flags
|
||||
.specifiers
|
||||
.iter()
|
||||
.map(|p| resolve_url_or_path(p, initial_cwd).map_err(|e| e.into()))
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod internal_test {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn update_config_text_no_existing_props_add_prop() {
|
||||
let result = update_config_text(
|
||||
"{\n}",
|
||||
&Default::default(),
|
||||
Some("./vendor/import_map.json"),
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
assert!(result.updated_import_map);
|
||||
assert!(!result.added_node_modules_dir);
|
||||
assert_eq!(
|
||||
result.new_text.unwrap(),
|
||||
r#"{
|
||||
"importMap": "./vendor/import_map.json"
|
||||
}
|
||||
"#
|
||||
);
|
||||
|
||||
let result = update_config_text(
|
||||
"{\n}",
|
||||
&Default::default(),
|
||||
Some("./vendor/import_map.json"),
|
||||
true,
|
||||
)
|
||||
.unwrap();
|
||||
assert!(result.updated_import_map);
|
||||
assert!(result.added_node_modules_dir);
|
||||
assert_eq!(
|
||||
result.new_text.unwrap(),
|
||||
r#"{
|
||||
"nodeModulesDir": true,
|
||||
"importMap": "./vendor/import_map.json"
|
||||
}
|
||||
"#
|
||||
);
|
||||
|
||||
let result =
|
||||
update_config_text("{\n}", &Default::default(), None, true).unwrap();
|
||||
assert!(!result.updated_import_map);
|
||||
assert!(result.added_node_modules_dir);
|
||||
assert_eq!(
|
||||
result.new_text.unwrap(),
|
||||
r#"{
|
||||
"nodeModulesDir": true
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn update_config_text_existing_props_add_prop() {
|
||||
let result = update_config_text(
|
||||
r#"{
|
||||
"tasks": {
|
||||
"task1": "other"
|
||||
}
|
||||
}
|
||||
"#,
|
||||
&Default::default(),
|
||||
Some("./vendor/import_map.json"),
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
result.new_text.unwrap(),
|
||||
r#"{
|
||||
"tasks": {
|
||||
"task1": "other"
|
||||
},
|
||||
"importMap": "./vendor/import_map.json"
|
||||
}
|
||||
"#
|
||||
);
|
||||
|
||||
// trailing comma
|
||||
let result = update_config_text(
|
||||
r#"{
|
||||
"tasks": {
|
||||
"task1": "other"
|
||||
},
|
||||
}
|
||||
"#,
|
||||
&Default::default(),
|
||||
Some("./vendor/import_map.json"),
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
result.new_text.unwrap(),
|
||||
r#"{
|
||||
"tasks": {
|
||||
"task1": "other"
|
||||
},
|
||||
"importMap": "./vendor/import_map.json"
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn update_config_text_update_prop() {
|
||||
let result = update_config_text(
|
||||
r#"{
|
||||
"importMap": "./local.json"
|
||||
}
|
||||
"#,
|
||||
&Default::default(),
|
||||
Some("./vendor/import_map.json"),
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
result.new_text.unwrap(),
|
||||
r#"{
|
||||
"importMap": "./vendor/import_map.json"
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_update_node_modules_dir() {
|
||||
// will not update if this is already set (even if it's false)
|
||||
let result = update_config_text(
|
||||
r#"{
|
||||
"nodeModulesDir": false
|
||||
}
|
||||
"#,
|
||||
&Default::default(),
|
||||
None,
|
||||
true,
|
||||
)
|
||||
.unwrap();
|
||||
assert!(!result.added_node_modules_dir);
|
||||
assert!(!result.updated_import_map);
|
||||
assert_eq!(result.new_text, None);
|
||||
|
||||
let result = update_config_text(
|
||||
r#"{
|
||||
"nodeModulesDir": true
|
||||
}
|
||||
"#,
|
||||
&Default::default(),
|
||||
None,
|
||||
true,
|
||||
)
|
||||
.unwrap();
|
||||
assert!(!result.added_node_modules_dir);
|
||||
assert!(!result.updated_import_map);
|
||||
assert_eq!(result.new_text, None);
|
||||
}
|
||||
}
|
208
cli/tools/vendor/specifiers.rs
vendored
|
@ -1,208 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
|
||||
use crate::util::path::is_banned_path_char;
|
||||
use crate::util::path::path_with_stem_suffix;
|
||||
use crate::util::path::root_url_to_safe_local_dirname;
|
||||
|
||||
/// Partitions the provided specifiers by the non-path and non-query parts of a specifier.
|
||||
pub fn partition_by_root_specifiers<'a>(
|
||||
specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
|
||||
) -> BTreeMap<ModuleSpecifier, Vec<ModuleSpecifier>> {
|
||||
let mut root_specifiers: BTreeMap<ModuleSpecifier, Vec<ModuleSpecifier>> =
|
||||
Default::default();
|
||||
for remote_specifier in specifiers {
|
||||
let mut root_specifier = remote_specifier.clone();
|
||||
root_specifier.set_query(None);
|
||||
root_specifier.set_path("/");
|
||||
|
||||
let specifiers = root_specifiers.entry(root_specifier).or_default();
|
||||
specifiers.push(remote_specifier.clone());
|
||||
}
|
||||
root_specifiers
|
||||
}
|
||||
|
||||
/// Gets the directory name to use for the provided root.
|
||||
pub fn dir_name_for_root(root: &ModuleSpecifier) -> PathBuf {
|
||||
root_url_to_safe_local_dirname(root)
|
||||
}
|
||||
|
||||
/// Gets a unique file path given the provided file path
|
||||
/// and the set of existing file paths. Inserts to the
|
||||
/// set when finding a unique path.
|
||||
pub fn get_unique_path(
|
||||
mut path: PathBuf,
|
||||
unique_set: &mut HashSet<String>,
|
||||
) -> PathBuf {
|
||||
let original_path = path.clone();
|
||||
let mut count = 2;
|
||||
// case insensitive comparison so the output works on case insensitive file systems
|
||||
while !unique_set.insert(path.to_string_lossy().to_lowercase()) {
|
||||
path = path_with_stem_suffix(&original_path, &format!("_{count}"));
|
||||
count += 1;
|
||||
}
|
||||
path
|
||||
}
|
||||
|
||||
pub fn make_url_relative(
|
||||
root: &ModuleSpecifier,
|
||||
url: &ModuleSpecifier,
|
||||
) -> Result<String, AnyError> {
|
||||
root.make_relative(url).ok_or_else(|| {
|
||||
anyhow!(
|
||||
"Error making url ({}) relative to root: {}",
|
||||
url.to_string(),
|
||||
root.to_string()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_remote_specifier(specifier: &ModuleSpecifier) -> bool {
|
||||
matches!(specifier.scheme().to_lowercase().as_str(), "http" | "https")
|
||||
}
|
||||
|
||||
pub fn is_remote_specifier_text(text: &str) -> bool {
|
||||
let text = text.trim_start().to_lowercase();
|
||||
text.starts_with("http:") || text.starts_with("https:")
|
||||
}
|
||||
|
||||
pub fn sanitize_filepath(text: &str) -> String {
|
||||
text
|
||||
.chars()
|
||||
.map(|c| if is_banned_path_char(c) { '_' } else { c })
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn partition_by_root_specifiers_same_sub_folder() {
|
||||
run_partition_by_root_specifiers_test(
|
||||
vec![
|
||||
"https://deno.land/x/mod/A.ts",
|
||||
"https://deno.land/x/mod/other/A.ts",
|
||||
],
|
||||
vec![(
|
||||
"https://deno.land/",
|
||||
vec![
|
||||
"https://deno.land/x/mod/A.ts",
|
||||
"https://deno.land/x/mod/other/A.ts",
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn partition_by_root_specifiers_different_sub_folder() {
|
||||
run_partition_by_root_specifiers_test(
|
||||
vec![
|
||||
"https://deno.land/x/mod/A.ts",
|
||||
"https://deno.land/x/other/A.ts",
|
||||
],
|
||||
vec![(
|
||||
"https://deno.land/",
|
||||
vec![
|
||||
"https://deno.land/x/mod/A.ts",
|
||||
"https://deno.land/x/other/A.ts",
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn partition_by_root_specifiers_different_hosts() {
|
||||
run_partition_by_root_specifiers_test(
|
||||
vec![
|
||||
"https://deno.land/mod/A.ts",
|
||||
"http://deno.land/B.ts",
|
||||
"https://deno.land:8080/C.ts",
|
||||
"https://localhost/mod/A.ts",
|
||||
"https://other/A.ts",
|
||||
],
|
||||
vec![
|
||||
("http://deno.land/", vec!["http://deno.land/B.ts"]),
|
||||
("https://deno.land/", vec!["https://deno.land/mod/A.ts"]),
|
||||
(
|
||||
"https://deno.land:8080/",
|
||||
vec!["https://deno.land:8080/C.ts"],
|
||||
),
|
||||
("https://localhost/", vec!["https://localhost/mod/A.ts"]),
|
||||
("https://other/", vec!["https://other/A.ts"]),
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
fn run_partition_by_root_specifiers_test(
|
||||
input: Vec<&str>,
|
||||
expected: Vec<(&str, Vec<&str>)>,
|
||||
) {
|
||||
let input = input
|
||||
.iter()
|
||||
.map(|s| ModuleSpecifier::parse(s).unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
let output = partition_by_root_specifiers(input.iter());
|
||||
// the assertion is much easier to compare when everything is strings
|
||||
let output = output
|
||||
.into_iter()
|
||||
.map(|(s, vec)| {
|
||||
(
|
||||
s.to_string(),
|
||||
vec.into_iter().map(|s| s.to_string()).collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let expected = expected
|
||||
.into_iter()
|
||||
.map(|(s, vec)| {
|
||||
(
|
||||
s.to_string(),
|
||||
vec.into_iter().map(|s| s.to_string()).collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_unique_path() {
|
||||
let mut paths = HashSet::new();
|
||||
assert_eq!(
|
||||
get_unique_path(PathBuf::from("/test"), &mut paths),
|
||||
PathBuf::from("/test")
|
||||
);
|
||||
assert_eq!(
|
||||
get_unique_path(PathBuf::from("/test"), &mut paths),
|
||||
PathBuf::from("/test_2")
|
||||
);
|
||||
assert_eq!(
|
||||
get_unique_path(PathBuf::from("/test"), &mut paths),
|
||||
PathBuf::from("/test_3")
|
||||
);
|
||||
assert_eq!(
|
||||
get_unique_path(PathBuf::from("/TEST"), &mut paths),
|
||||
PathBuf::from("/TEST_4")
|
||||
);
|
||||
assert_eq!(
|
||||
get_unique_path(PathBuf::from("/test.txt"), &mut paths),
|
||||
PathBuf::from("/test.txt")
|
||||
);
|
||||
assert_eq!(
|
||||
get_unique_path(PathBuf::from("/test.txt"), &mut paths),
|
||||
PathBuf::from("/test_2.txt")
|
||||
);
|
||||
assert_eq!(
|
||||
get_unique_path(PathBuf::from("/TEST.TXT"), &mut paths),
|
||||
PathBuf::from("/TEST_3.TXT")
|
||||
);
|
||||
}
|
||||
}
|
357
cli/tools/vendor/test.rs
vendored
|
@ -1,357 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_config::workspace::WorkspaceResolver;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::serde_json;
|
||||
use deno_graph::source::LoadFuture;
|
||||
use deno_graph::source::LoadResponse;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_graph::DefaultModuleAnalyzer;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::ModuleGraph;
|
||||
use import_map::ImportMap;
|
||||
|
||||
use crate::args::JsxImportSourceConfig;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::resolver::CliGraphResolverOptions;
|
||||
|
||||
use super::build::VendorEnvironment;
|
||||
|
||||
// Utilities that help `deno vendor` get tested in memory.
|
||||
|
||||
type RemoteFileText = String;
|
||||
type RemoteFileHeaders = Option<HashMap<String, String>>;
|
||||
type RemoteFileResult = Result<(RemoteFileText, RemoteFileHeaders), String>;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct TestLoader {
|
||||
files: HashMap<ModuleSpecifier, RemoteFileResult>,
|
||||
redirects: HashMap<ModuleSpecifier, ModuleSpecifier>,
|
||||
}
|
||||
|
||||
impl TestLoader {
|
||||
pub fn add(
|
||||
&mut self,
|
||||
path_or_specifier: impl AsRef<str>,
|
||||
text: impl AsRef<str>,
|
||||
) -> &mut Self {
|
||||
self.add_result(path_or_specifier, Ok((text.as_ref().to_string(), None)))
|
||||
}
|
||||
|
||||
pub fn add_failure(
|
||||
&mut self,
|
||||
path_or_specifier: impl AsRef<str>,
|
||||
message: impl AsRef<str>,
|
||||
) -> &mut Self {
|
||||
self.add_result(path_or_specifier, Err(message.as_ref().to_string()))
|
||||
}
|
||||
|
||||
fn add_result(
|
||||
&mut self,
|
||||
path_or_specifier: impl AsRef<str>,
|
||||
result: RemoteFileResult,
|
||||
) -> &mut Self {
|
||||
if path_or_specifier
|
||||
.as_ref()
|
||||
.to_lowercase()
|
||||
.starts_with("http")
|
||||
{
|
||||
self.files.insert(
|
||||
ModuleSpecifier::parse(path_or_specifier.as_ref()).unwrap(),
|
||||
result,
|
||||
);
|
||||
} else {
|
||||
let path = make_path(path_or_specifier.as_ref());
|
||||
let specifier = ModuleSpecifier::from_file_path(path).unwrap();
|
||||
self.files.insert(specifier, result);
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_with_headers(
|
||||
&mut self,
|
||||
specifier: impl AsRef<str>,
|
||||
text: impl AsRef<str>,
|
||||
headers: &[(&str, &str)],
|
||||
) -> &mut Self {
|
||||
let headers = headers
|
||||
.iter()
|
||||
.map(|(key, value)| (key.to_string(), value.to_string()))
|
||||
.collect();
|
||||
self.files.insert(
|
||||
ModuleSpecifier::parse(specifier.as_ref()).unwrap(),
|
||||
Ok((text.as_ref().to_string(), Some(headers))),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_redirect(
|
||||
&mut self,
|
||||
from: impl AsRef<str>,
|
||||
to: impl AsRef<str>,
|
||||
) -> &mut Self {
|
||||
self.redirects.insert(
|
||||
ModuleSpecifier::parse(from.as_ref()).unwrap(),
|
||||
ModuleSpecifier::parse(to.as_ref()).unwrap(),
|
||||
);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Loader for TestLoader {
|
||||
fn load(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
_options: deno_graph::source::LoadOptions,
|
||||
) -> LoadFuture {
|
||||
if let Some(redirect) = self.redirects.get(specifier) {
|
||||
return Box::pin(futures::future::ready(Ok(Some(
|
||||
LoadResponse::Redirect {
|
||||
specifier: redirect.clone(),
|
||||
},
|
||||
))));
|
||||
}
|
||||
let result = self.files.get(specifier).map(|result| match result {
|
||||
Ok(result) => Ok(LoadResponse::Module {
|
||||
specifier: specifier.clone(),
|
||||
content: result.0.clone().into_bytes().into(),
|
||||
maybe_headers: result.1.clone(),
|
||||
}),
|
||||
Err(err) => Err(err),
|
||||
});
|
||||
let result = match result {
|
||||
Some(Ok(result)) => Ok(Some(result)),
|
||||
Some(Err(err)) => Err(anyhow!("{}", err)),
|
||||
None if specifier.scheme() == "data" => {
|
||||
deno_graph::source::load_data_url(specifier)
|
||||
}
|
||||
None => Ok(None),
|
||||
};
|
||||
Box::pin(futures::future::ready(result))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct TestVendorEnvironment {
|
||||
directories: RefCell<HashSet<PathBuf>>,
|
||||
files: RefCell<HashMap<PathBuf, String>>,
|
||||
}
|
||||
|
||||
impl VendorEnvironment for TestVendorEnvironment {
|
||||
fn create_dir_all(&self, dir_path: &Path) -> Result<(), AnyError> {
|
||||
let mut directories = self.directories.borrow_mut();
|
||||
for path in dir_path.ancestors() {
|
||||
if !directories.insert(path.to_path_buf()) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_file(&self, file_path: &Path, text: &[u8]) -> Result<(), AnyError> {
|
||||
let parent = file_path.parent().unwrap();
|
||||
if !self.directories.borrow().contains(parent) {
|
||||
bail!("Directory not found: {}", parent.display());
|
||||
}
|
||||
self.files.borrow_mut().insert(
|
||||
file_path.to_path_buf(),
|
||||
String::from_utf8(text.to_vec()).unwrap(),
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VendorOutput {
|
||||
pub files: Vec<(String, String)>,
|
||||
pub import_map: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct VendorTestBuilder {
|
||||
entry_points: Vec<ModuleSpecifier>,
|
||||
loader: TestLoader,
|
||||
maybe_original_import_map: Option<ImportMap>,
|
||||
environment: TestVendorEnvironment,
|
||||
jsx_import_source_config: Option<JsxImportSourceConfig>,
|
||||
}
|
||||
|
||||
impl VendorTestBuilder {
|
||||
pub fn with_default_setup() -> Self {
|
||||
let mut builder = VendorTestBuilder::default();
|
||||
builder.add_entry_point("/mod.ts");
|
||||
builder
|
||||
}
|
||||
|
||||
pub fn resolve_to_url(&self, path: &str) -> ModuleSpecifier {
|
||||
ModuleSpecifier::from_file_path(make_path(path)).unwrap()
|
||||
}
|
||||
|
||||
pub fn new_import_map(&self, base_path: &str) -> ImportMap {
|
||||
let base = self.resolve_to_url(base_path);
|
||||
ImportMap::new(base)
|
||||
}
|
||||
|
||||
pub fn set_original_import_map(
|
||||
&mut self,
|
||||
import_map: ImportMap,
|
||||
) -> &mut Self {
|
||||
self.maybe_original_import_map = Some(import_map);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_entry_point(&mut self, entry_point: impl AsRef<str>) -> &mut Self {
|
||||
let entry_point = make_path(entry_point.as_ref());
|
||||
self
|
||||
.entry_points
|
||||
.push(ModuleSpecifier::from_file_path(entry_point).unwrap());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_jsx_import_source_config(
|
||||
&mut self,
|
||||
jsx_import_source_config: JsxImportSourceConfig,
|
||||
) -> &mut Self {
|
||||
self.jsx_import_source_config = Some(jsx_import_source_config);
|
||||
self
|
||||
}
|
||||
|
||||
pub async fn build(&mut self) -> Result<VendorOutput, AnyError> {
|
||||
let output_dir = make_path("/vendor");
|
||||
let entry_points = self.entry_points.clone();
|
||||
let loader = self.loader.clone();
|
||||
let parsed_source_cache = ParsedSourceCache::default();
|
||||
let resolver = Arc::new(build_resolver(
|
||||
output_dir.parent().unwrap(),
|
||||
self.jsx_import_source_config.clone(),
|
||||
self.maybe_original_import_map.clone(),
|
||||
));
|
||||
super::build::build(super::build::BuildInput {
|
||||
entry_points,
|
||||
build_graph: {
|
||||
let resolver = resolver.clone();
|
||||
move |entry_points| {
|
||||
async move {
|
||||
Ok(
|
||||
build_test_graph(
|
||||
entry_points,
|
||||
loader,
|
||||
resolver.as_graph_resolver(),
|
||||
&DefaultModuleAnalyzer,
|
||||
)
|
||||
.await,
|
||||
)
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
},
|
||||
parsed_source_cache: &parsed_source_cache,
|
||||
output_dir: &output_dir,
|
||||
maybe_original_import_map: self.maybe_original_import_map.as_ref(),
|
||||
maybe_jsx_import_source: self.jsx_import_source_config.as_ref(),
|
||||
resolver: resolver.as_graph_resolver(),
|
||||
environment: &self.environment,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut files = self.environment.files.borrow_mut();
|
||||
let import_map = files.remove(&output_dir.join("import_map.json"));
|
||||
let mut files = files
|
||||
.iter()
|
||||
.map(|(path, text)| (path_to_string(path), text.to_string()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
files.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
|
||||
Ok(VendorOutput {
|
||||
import_map: import_map.map(|text| serde_json::from_str(&text).unwrap()),
|
||||
files,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn with_loader(&mut self, action: impl Fn(&mut TestLoader)) -> &mut Self {
|
||||
action(&mut self.loader);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
fn build_resolver(
|
||||
root_dir: &Path,
|
||||
maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
|
||||
maybe_original_import_map: Option<ImportMap>,
|
||||
) -> CliGraphResolver {
|
||||
CliGraphResolver::new(CliGraphResolverOptions {
|
||||
node_resolver: None,
|
||||
npm_resolver: None,
|
||||
sloppy_imports_resolver: None,
|
||||
workspace_resolver: Arc::new(WorkspaceResolver::new_raw(
|
||||
Arc::new(ModuleSpecifier::from_directory_path(root_dir).unwrap()),
|
||||
maybe_original_import_map,
|
||||
Vec::new(),
|
||||
Vec::new(),
|
||||
deno_config::workspace::PackageJsonDepResolution::Enabled,
|
||||
)),
|
||||
maybe_jsx_import_source_config,
|
||||
maybe_vendor_dir: None,
|
||||
bare_node_builtins_enabled: false,
|
||||
})
|
||||
}
|
||||
|
||||
async fn build_test_graph(
|
||||
roots: Vec<ModuleSpecifier>,
|
||||
loader: TestLoader,
|
||||
resolver: &dyn deno_graph::source::Resolver,
|
||||
analyzer: &dyn deno_graph::ModuleAnalyzer,
|
||||
) -> ModuleGraph {
|
||||
let mut graph = ModuleGraph::new(GraphKind::All);
|
||||
graph
|
||||
.build(
|
||||
roots,
|
||||
&loader,
|
||||
deno_graph::BuildOptions {
|
||||
resolver: Some(resolver),
|
||||
module_analyzer: analyzer,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await;
|
||||
graph
|
||||
}
|
||||
|
||||
fn make_path(text: &str) -> PathBuf {
|
||||
// This should work all in memory. We're waiting on
|
||||
// https://github.com/servo/rust-url/issues/730 to provide
|
||||
// a cross platform path here
|
||||
assert!(text.starts_with('/'));
|
||||
if cfg!(windows) {
|
||||
PathBuf::from(format!("C:{}", text.replace('/', "\\")))
|
||||
} else {
|
||||
PathBuf::from(text)
|
||||
}
|
||||
}
|
||||
|
||||
fn path_to_string<P>(path: P) -> String
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
let path = path.as_ref();
|
||||
// inverse of the function above
|
||||
let path = path.to_string_lossy();
|
||||
if cfg!(windows) {
|
||||
path.replace("C:\\", "\\").replace('\\', "/")
|
||||
} else {
|
||||
path.to_string()
|
||||
}
|
||||
}
|
|
@ -46,8 +46,6 @@ delete Object.prototype.__proto__;
|
|||
"UnixListenOptions",
|
||||
"createHttpClient",
|
||||
"dlopen",
|
||||
"flock",
|
||||
"flockSync",
|
||||
"funlock",
|
||||
"funlockSync",
|
||||
"listen",
|
||||
|
|
481
cli/tsc/dts/lib.deno.ns.d.ts
vendored
|
@ -553,15 +553,6 @@ declare namespace Deno {
|
|||
*/
|
||||
sys?: "inherit" | boolean | string[];
|
||||
|
||||
/** Specifies if the `hrtime` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `hrtime` permission will be inherited.
|
||||
* If set to `true`, the global `hrtime` permission will be requested.
|
||||
* If set to `false`, the global `hrtime` permission will be revoked.
|
||||
*
|
||||
* @default {false}
|
||||
*/
|
||||
hrtime?: "inherit" | boolean;
|
||||
|
||||
/** Specifies if the `net` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `net` permission will be inherited.
|
||||
* If set to `true`, the global `net` permission will be requested.
|
||||
|
@ -1863,36 +1854,6 @@ declare namespace Deno {
|
|||
options?: { bufSize?: number },
|
||||
): Promise<number>;
|
||||
|
||||
/**
|
||||
* Turns a Reader, `r`, into an async iterator.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function iter(
|
||||
r: Reader,
|
||||
options?: { bufSize?: number },
|
||||
): AsyncIterableIterator<Uint8Array>;
|
||||
|
||||
/**
|
||||
* Turns a ReaderSync, `r`, into an iterator.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function iterSync(
|
||||
r: ReaderSync,
|
||||
options?: {
|
||||
bufSize?: number;
|
||||
},
|
||||
): IterableIterator<Uint8Array>;
|
||||
|
||||
/** Open a file and resolve to an instance of {@linkcode Deno.FsFile}. The
|
||||
* file does not need to previously exist if using the `create` or `createNew`
|
||||
* open options. The caller may have the resulting file automatically closed
|
||||
|
@ -2265,33 +2226,6 @@ declare namespace Deno {
|
|||
*/
|
||||
export function fdatasyncSync(rid: number): void;
|
||||
|
||||
/** Close the given resource ID (`rid`) which has been previously opened, such
|
||||
* as via opening or creating a file. Closing a file when you are finished
|
||||
* with it is important to avoid leaking resources.
|
||||
*
|
||||
* ```ts
|
||||
* const file = await Deno.open("my_file.txt");
|
||||
* // do work with "file" object
|
||||
* Deno.close(file.rid);
|
||||
* ```
|
||||
*
|
||||
* It is recommended to define the variable with the `using` keyword so the
|
||||
* runtime will automatically close the resource when it goes out of scope.
|
||||
* Doing so negates the need to manually close the resource.
|
||||
*
|
||||
* ```ts
|
||||
* using file = await Deno.open("my_file.txt");
|
||||
* // do work with "file" object
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function close(rid: number): void;
|
||||
|
||||
/** The Deno abstraction for reading and writing files.
|
||||
*
|
||||
* This is the most straight forward way of handling files within Deno and is
|
||||
|
@ -4060,73 +3994,6 @@ declare namespace Deno {
|
|||
bytesReceived: number;
|
||||
}
|
||||
|
||||
/** @category Runtime
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0.
|
||||
*/
|
||||
export interface Metrics extends OpMetrics {
|
||||
ops: Record<string, OpMetrics>;
|
||||
}
|
||||
|
||||
/** Receive metrics from the privileged side of Deno. This is primarily used
|
||||
* in the development of Deno. _Ops_, also called _bindings_, are the
|
||||
* go-between between Deno JavaScript sandbox and the rest of Deno.
|
||||
*
|
||||
* ```shell
|
||||
* > console.table(Deno.metrics())
|
||||
* ┌─────────────────────────┬────────┐
|
||||
* │ (index) │ Values │
|
||||
* ├─────────────────────────┼────────┤
|
||||
* │ opsDispatched │ 3 │
|
||||
* │ opsDispatchedSync │ 2 │
|
||||
* │ opsDispatchedAsync │ 1 │
|
||||
* │ opsDispatchedAsyncUnref │ 0 │
|
||||
* │ opsCompleted │ 3 │
|
||||
* │ opsCompletedSync │ 2 │
|
||||
* │ opsCompletedAsync │ 1 │
|
||||
* │ opsCompletedAsyncUnref │ 0 │
|
||||
* │ bytesSentControl │ 73 │
|
||||
* │ bytesSentData │ 0 │
|
||||
* │ bytesReceived │ 375 │
|
||||
* └─────────────────────────┴────────┘
|
||||
* ```
|
||||
*
|
||||
* @category Runtime
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0.
|
||||
*/
|
||||
export function metrics(): Metrics;
|
||||
|
||||
/**
|
||||
* A map of open resources that Deno is tracking. The key is the resource ID
|
||||
* (_rid_) and the value is its representation.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0.
|
||||
*
|
||||
* @category Runtime */
|
||||
export interface ResourceMap {
|
||||
[rid: number]: unknown;
|
||||
}
|
||||
|
||||
/** Returns a map of open resource IDs (_rid_) along with their string
|
||||
* representations. This is an internal API and as such resource
|
||||
* representation has `unknown` type; that means it can change any time and
|
||||
* should not be depended upon.
|
||||
*
|
||||
* ```ts
|
||||
* console.log(Deno.resources());
|
||||
* // { 0: "stdin", 1: "stdout", 2: "stderr" }
|
||||
* Deno.openSync('../test.file');
|
||||
* console.log(Deno.resources());
|
||||
* // { 0: "stdin", 1: "stdout", 2: "stderr", 3: "fsFile" }
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0.
|
||||
*
|
||||
* @category Runtime
|
||||
*/
|
||||
export function resources(): ResourceMap;
|
||||
|
||||
/**
|
||||
* Additional information for FsEvent objects with the "other" kind.
|
||||
*
|
||||
|
@ -4865,8 +4732,7 @@ declare namespace Deno {
|
|||
| "net"
|
||||
| "env"
|
||||
| "sys"
|
||||
| "ffi"
|
||||
| "hrtime";
|
||||
| "ffi";
|
||||
|
||||
/** The current status of the permission:
|
||||
*
|
||||
|
@ -4997,17 +4863,6 @@ declare namespace Deno {
|
|||
path?: string | URL;
|
||||
}
|
||||
|
||||
/** The permission descriptor for the `allow-hrtime` and `deny-hrtime` permissions, which
|
||||
* controls if the runtime code has access to high resolution time. High
|
||||
* resolution time is considered sensitive information, because it can be used
|
||||
* by malicious code to gain information about the host that it might not
|
||||
* otherwise have access to.
|
||||
*
|
||||
* @category Permissions */
|
||||
export interface HrtimePermissionDescriptor {
|
||||
name: "hrtime";
|
||||
}
|
||||
|
||||
/** Permission descriptors which define a permission and can be queried,
|
||||
* requested, or revoked.
|
||||
*
|
||||
|
@ -5023,8 +4878,7 @@ declare namespace Deno {
|
|||
| NetPermissionDescriptor
|
||||
| EnvPermissionDescriptor
|
||||
| SysPermissionDescriptor
|
||||
| FfiPermissionDescriptor
|
||||
| HrtimePermissionDescriptor;
|
||||
| FfiPermissionDescriptor;
|
||||
|
||||
/** The interface which defines what event types are supported by
|
||||
* {@linkcode PermissionStatus} instances.
|
||||
|
@ -5340,19 +5194,6 @@ declare namespace Deno {
|
|||
*/
|
||||
export const args: string[];
|
||||
|
||||
/**
|
||||
* A symbol which can be used as a key for a custom method which will be
|
||||
* called when `Deno.inspect()` is called, or when the object is logged to
|
||||
* the console.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export const customInspect: unique symbol;
|
||||
|
||||
/** The URL of the entrypoint module entered from the command-line. It
|
||||
* requires read permission to the CWD.
|
||||
*
|
||||
|
@ -5506,50 +5347,6 @@ declare namespace Deno {
|
|||
*/
|
||||
export function ftruncateSync(rid: number, len?: number): void;
|
||||
|
||||
/**
|
||||
* Synchronously changes the access (`atime`) and modification (`mtime`) times
|
||||
* of a file stream resource referenced by `rid`. Given times are either in
|
||||
* seconds (UNIX epoch time) or as `Date` objects.
|
||||
*
|
||||
* ```ts
|
||||
* const file = Deno.openSync("file.txt", { create: true, write: true });
|
||||
* Deno.futimeSync(file.rid, 1556495550, new Date());
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category File System
|
||||
*/
|
||||
export function futimeSync(
|
||||
rid: number,
|
||||
atime: number | Date,
|
||||
mtime: number | Date,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Changes the access (`atime`) and modification (`mtime`) times of a file
|
||||
* stream resource referenced by `rid`. Given times are either in seconds
|
||||
* (UNIX epoch time) or as `Date` objects.
|
||||
*
|
||||
* ```ts
|
||||
* const file = await Deno.open("file.txt", { create: true, write: true });
|
||||
* await Deno.futime(file.rid, 1556495550, new Date());
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category File System
|
||||
*/
|
||||
export function futime(
|
||||
rid: number,
|
||||
atime: number | Date,
|
||||
mtime: number | Date,
|
||||
): Promise<void>;
|
||||
|
||||
/**
|
||||
* Returns a `Deno.FileInfo` for the given file stream.
|
||||
*
|
||||
|
@ -5884,7 +5681,7 @@ declare namespace Deno {
|
|||
*
|
||||
* @category Network
|
||||
*/
|
||||
export interface CAARecord {
|
||||
export interface CaaRecord {
|
||||
/** If `true`, indicates that the corresponding property tag **must** be
|
||||
* understood if the semantics of the CAA record are to be correctly
|
||||
* interpreted by an issuer.
|
||||
|
@ -5904,7 +5701,7 @@ declare namespace Deno {
|
|||
* specified, it will return an array of objects with this interface.
|
||||
*
|
||||
* @category Network */
|
||||
export interface MXRecord {
|
||||
export interface MxRecord {
|
||||
/** A priority value, which is a relative value compared to the other
|
||||
* preferences of MX records for the domain. */
|
||||
preference: number;
|
||||
|
@ -5916,7 +5713,7 @@ declare namespace Deno {
|
|||
* specified, it will return an array of objects with this interface.
|
||||
*
|
||||
* @category Network */
|
||||
export interface NAPTRRecord {
|
||||
export interface NaptrRecord {
|
||||
order: number;
|
||||
preference: number;
|
||||
flags: string;
|
||||
|
@ -5929,7 +5726,7 @@ declare namespace Deno {
|
|||
* specified, it will return an array of objects with this interface.
|
||||
*
|
||||
* @category Network */
|
||||
export interface SOARecord {
|
||||
export interface SoaRecord {
|
||||
mname: string;
|
||||
rname: string;
|
||||
serial: number;
|
||||
|
@ -5944,7 +5741,7 @@ declare namespace Deno {
|
|||
*
|
||||
* @category Network
|
||||
*/
|
||||
export interface SRVRecord {
|
||||
export interface SrvRecord {
|
||||
priority: number;
|
||||
weight: number;
|
||||
port: number;
|
||||
|
@ -6009,7 +5806,7 @@ declare namespace Deno {
|
|||
query: string,
|
||||
recordType: "CAA",
|
||||
options?: ResolveDnsOptions,
|
||||
): Promise<CAARecord[]>;
|
||||
): Promise<CaaRecord[]>;
|
||||
|
||||
/**
|
||||
* Performs DNS resolution against the given query, returning resolved
|
||||
|
@ -6039,7 +5836,7 @@ declare namespace Deno {
|
|||
query: string,
|
||||
recordType: "MX",
|
||||
options?: ResolveDnsOptions,
|
||||
): Promise<MXRecord[]>;
|
||||
): Promise<MxRecord[]>;
|
||||
|
||||
/**
|
||||
* Performs DNS resolution against the given query, returning resolved
|
||||
|
@ -6069,7 +5866,7 @@ declare namespace Deno {
|
|||
query: string,
|
||||
recordType: "NAPTR",
|
||||
options?: ResolveDnsOptions,
|
||||
): Promise<NAPTRRecord[]>;
|
||||
): Promise<NaptrRecord[]>;
|
||||
|
||||
/**
|
||||
* Performs DNS resolution against the given query, returning resolved
|
||||
|
@ -6099,7 +5896,7 @@ declare namespace Deno {
|
|||
query: string,
|
||||
recordType: "SOA",
|
||||
options?: ResolveDnsOptions,
|
||||
): Promise<SOARecord[]>;
|
||||
): Promise<SoaRecord[]>;
|
||||
|
||||
/**
|
||||
* Performs DNS resolution against the given query, returning resolved
|
||||
|
@ -6129,7 +5926,7 @@ declare namespace Deno {
|
|||
query: string,
|
||||
recordType: "SRV",
|
||||
options?: ResolveDnsOptions,
|
||||
): Promise<SRVRecord[]>;
|
||||
): Promise<SrvRecord[]>;
|
||||
|
||||
/**
|
||||
* Performs DNS resolution against the given query, returning resolved
|
||||
|
@ -6191,11 +5988,11 @@ declare namespace Deno {
|
|||
options?: ResolveDnsOptions,
|
||||
): Promise<
|
||||
| string[]
|
||||
| CAARecord[]
|
||||
| MXRecord[]
|
||||
| NAPTRRecord[]
|
||||
| SOARecord[]
|
||||
| SRVRecord[]
|
||||
| CaaRecord[]
|
||||
| MxRecord[]
|
||||
| NaptrRecord[]
|
||||
| SoaRecord[]
|
||||
| SrvRecord[]
|
||||
| string[][]
|
||||
>;
|
||||
|
||||
|
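The hunks above rename the DNS record interfaces from upper-case acronyms (`CAARecord`, `MXRecord`, `NAPTRRecord`, `SOARecord`, `SRVRecord`) to camel case, and the `resolveDns` overloads now return the renamed types. A minimal sketch against the updated declarations; the domain is a placeholder and running it needs `--allow-net`:

```ts
// The "MX" overload now resolves to Deno.MxRecord[].
const mx: Deno.MxRecord[] = await Deno.resolveDns("example.com", "MX");
for (const record of mx) {
  console.log(record.preference, record);
}

// The "CAA" overload now resolves to Deno.CaaRecord[].
const caa: Deno.CaaRecord[] = await Deno.resolveDns("example.com", "CAA");
console.log(caa);
```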
@ -6245,9 +6042,11 @@ declare namespace Deno {
|
|||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeHandlerInfo {
|
||||
export interface ServeHandlerInfo<Addr extends Deno.Addr = Deno.Addr> {
|
||||
/** The remote address of the connection. */
|
||||
remoteAddr: Deno.NetAddr;
|
||||
remoteAddr: Addr;
|
||||
/** The completion promise */
|
||||
completed: Promise<void>;
|
||||
}
|
||||
|
||||
/** A handler for HTTP requests. Consumes a request and returns a response.
|
||||
|
@ -6258,9 +6057,9 @@ declare namespace Deno {
|
|||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export type ServeHandler = (
|
||||
export type ServeHandler<Addr extends Deno.Addr = Deno.Addr> = (
|
||||
request: Request,
|
||||
info: ServeHandlerInfo,
|
||||
info: ServeHandlerInfo<Addr>,
|
||||
) => Response | Promise<Response>;
|
||||
|
||||
/** Interface that module run with `deno serve` subcommand must conform to.
|
||||
|
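With `ServeHandlerInfo` and `ServeHandler` now generic over the address type, the handler's second argument exposes a `remoteAddr` typed to the listener it belongs to. A minimal sketch against the declarations in this hunk:

```ts
// The TCP overload of Deno.serve uses Deno.NetAddr, so remoteAddr has
// hostname/port fields without any casting.
const handler: Deno.ServeHandler<Deno.NetAddr> = (_req, info) => {
  const { hostname, port } = info.remoteAddr;
  return new Response(`hello ${hostname}:${port}`);
};

Deno.serve(handler);
```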
@ -6296,7 +6095,27 @@ declare namespace Deno {
|
|||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeOptions {
|
||||
export interface ServeOptions<Addr extends Deno.Addr = Deno.Addr> {
|
||||
/** An {@linkcode AbortSignal} to close the server and all connections. */
|
||||
signal?: AbortSignal;
|
||||
|
||||
/** The handler to invoke when route handlers throw an error. */
|
||||
onError?: (error: unknown) => Response | Promise<Response>;
|
||||
|
||||
/** The callback which is called when the server starts listening. */
|
||||
onListen?: (localAddr: Addr) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options that can be passed to `Deno.serve` to create a server listening on
|
||||
* a TCP port.
|
||||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeTcpOptions extends ServeOptions<Deno.NetAddr> {
|
||||
/** The transport to use. */
|
||||
transport?: "tcp";
|
||||
|
||||
/** The port to listen on.
|
||||
*
|
||||
* Set to `0` to listen on any available port.
|
||||
|
@ -6314,109 +6133,37 @@ declare namespace Deno {
|
|||
* @default {"0.0.0.0"} */
|
||||
hostname?: string;
|
||||
|
||||
/** An {@linkcode AbortSignal} to close the server and all connections. */
|
||||
signal?: AbortSignal;
|
||||
|
||||
/** Sets `SO_REUSEPORT` on POSIX systems. */
|
||||
reusePort?: boolean;
|
||||
|
||||
/** The handler to invoke when route handlers throw an error. */
|
||||
onError?: (error: unknown) => Response | Promise<Response>;
|
||||
|
||||
/** The callback which is called when the server starts listening. */
|
||||
onListen?: (localAddr: Deno.NetAddr) => void;
|
||||
}
|
||||
|
||||
/** Additional options which are used when opening a TLS (HTTPS) server.
|
||||
/**
|
||||
* Options that can be passed to `Deno.serve` to create a server listening on
|
||||
* a Unix domain socket.
|
||||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeTlsOptions extends ServeOptions {
|
||||
/**
|
||||
* Server private key in PEM format. Use {@linkcode TlsCertifiedKeyOptions} instead.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*/
|
||||
cert?: string;
|
||||
export interface ServeUnixOptions extends ServeOptions<Deno.UnixAddr> {
|
||||
/** The transport to use. */
|
||||
transport?: "unix";
|
||||
|
||||
/**
|
||||
* Cert chain in PEM format. Use {@linkcode TlsCertifiedKeyOptions} instead.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*/
|
||||
key?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeInit {
|
||||
/** The handler to invoke to process each incoming request. */
|
||||
handler: ServeHandler;
|
||||
}
|
||||
|
||||
/**
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeTlsInit {
|
||||
/** The handler to invoke to process each incoming request. */
|
||||
handler: ServeHandler;
|
||||
}
|
||||
|
||||
/** @category HTTP Server */
|
||||
export interface ServeUnixOptions {
|
||||
/** The unix domain socket path to listen on. */
|
||||
path: string;
|
||||
|
||||
/** An {@linkcode AbortSignal} to close the server and all connections. */
|
||||
signal?: AbortSignal;
|
||||
|
||||
/** The handler to invoke when route handlers throw an error. */
|
||||
onError?: (error: unknown) => Response | Promise<Response>;
|
||||
|
||||
/** The callback which is called when the server starts listening. */
|
||||
onListen?: (localAddr: Deno.UnixAddr) => void;
|
||||
}
|
||||
|
||||
/** Information for a unix domain socket HTTP request.
|
||||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeUnixHandlerInfo {
|
||||
/** The remote address of the connection. */
|
||||
remoteAddr: Deno.UnixAddr;
|
||||
}
|
||||
|
||||
/** A handler for unix domain socket HTTP requests. Consumes a request and returns a response.
|
||||
*
|
||||
* If a handler throws, the server calling the handler will assume the impact
|
||||
* of the error is isolated to the individual request. It will catch the error
|
||||
* and if necessary will close the underlying connection.
|
||||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export type ServeUnixHandler = (
|
||||
request: Request,
|
||||
info: ServeUnixHandlerInfo,
|
||||
) => Response | Promise<Response>;
|
||||
|
||||
/**
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface ServeUnixInit {
|
||||
export interface ServeInit<Addr extends Deno.Addr = Deno.Addr> {
|
||||
/** The handler to invoke to process each incoming request. */
|
||||
handler: ServeUnixHandler;
|
||||
handler: ServeHandler<Addr>;
|
||||
}
|
||||
|
||||
/** An instance of the server created using `Deno.serve()` API.
|
||||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export interface HttpServer<A extends Deno.Addr = Deno.Addr>
|
||||
export interface HttpServer<Addr extends Deno.Addr = Deno.Addr>
|
||||
extends AsyncDisposable {
|
||||
/** A promise that resolves once server finishes - eg. when aborted using
|
||||
* the signal passed to {@linkcode ServeOptions.signal}.
|
||||
|
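After this restructuring, `ServeTcpOptions` and `ServeUnixOptions` are separate option bags that both extend the generic `ServeOptions<Addr>`, and `ServeInit<Addr>` carries a handler typed to the same address. A rough usage sketch under these declarations; the socket path is a placeholder and the Unix listener assumes a Unix-like host:

```ts
// Options-only overload: ServeTcpOptions & ServeInit<Deno.NetAddr>.
const tcp = Deno.serve({
  port: 8000,
  onListen: (addr) => console.log(`tcp on ${addr.hostname}:${addr.port}`),
  handler: (_req) => new Response("tcp"),
});

// Options-only overload: ServeUnixOptions & ServeInit<Deno.UnixAddr>.
const unix = Deno.serve({
  path: "/tmp/app.sock", // placeholder path
  onListen: (addr) => console.log(`unix on ${addr.path}`),
  handler: (_req) => new Response("unix"),
});

await tcp.shutdown();
await unix.shutdown();
```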
@ -6424,7 +6171,7 @@ declare namespace Deno {
|
|||
finished: Promise<void>;
|
||||
|
||||
/** The local address this server is listening on. */
|
||||
addr: A;
|
||||
addr: Addr;
|
||||
|
||||
/**
|
||||
* Make the server block the event loop from finishing.
|
||||
|
@ -6443,15 +6190,6 @@ declare namespace Deno {
|
|||
shutdown(): Promise<void>;
|
||||
}
|
||||
|
||||
/**
|
||||
* @category HTTP Server
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*/
|
||||
export type Server = HttpServer;
|
||||
|
||||
/** Serves HTTP requests with the given handler.
|
||||
*
|
||||
* The below example serves with the port `8000` on hostname `"127.0.0.1"`.
|
||||
|
@ -6462,7 +6200,9 @@ declare namespace Deno {
|
|||
*
|
||||
* @category HTTP Server
|
||||
*/
|
||||
export function serve(handler: ServeHandler): HttpServer<Deno.NetAddr>;
|
||||
export function serve(
|
||||
handler: ServeHandler<Deno.NetAddr>,
|
||||
): HttpServer<Deno.NetAddr>;
|
||||
/** Serves HTTP requests with the given option bag and handler.
|
||||
*
|
||||
* You can specify the socket path with `path` option.
|
||||
|
@ -6510,68 +6250,8 @@ declare namespace Deno {
|
|||
*/
|
||||
export function serve(
|
||||
options: ServeUnixOptions,
|
||||
handler: ServeUnixHandler,
|
||||
handler: ServeHandler<Deno.UnixAddr>,
|
||||
): HttpServer<Deno.UnixAddr>;
|
||||
/** Serves HTTP requests with the given option bag and handler.
|
||||
*
|
||||
* You can specify an object with a port and hostname option, which is the
|
||||
* address to listen on. The default is port `8000` on hostname `"127.0.0.1"`.
|
||||
*
|
||||
* You can change the address to listen on using the `hostname` and `port`
|
||||
* options. The below example serves on port `3000` and hostname `"0.0.0.0"`.
|
||||
*
|
||||
* ```ts
|
||||
* Deno.serve(
|
||||
* { port: 3000, hostname: "0.0.0.0" },
|
||||
* (_req) => new Response("Hello, world")
|
||||
* );
 * ```
 *
 * You can stop the server with an {@linkcode AbortSignal}. The abort signal
 * needs to be passed as the `signal` option in the options bag. The server
 * aborts when the abort signal is aborted. To wait for the server to close,
 * await the promise returned from the `Deno.serve` API.
 *
 * ```ts
 * const ac = new AbortController();
 *
 * const server = Deno.serve(
 *   { signal: ac.signal },
 *   (_req) => new Response("Hello, world")
 * );
 * server.finished.then(() => console.log("Server closed"));
 *
 * console.log("Closing server...");
 * ac.abort();
 * ```
 *
 * By default `Deno.serve` prints the message
 * `Listening on http://<hostname>:<port>/` on listening. If you like to
 * change this behavior, you can specify a custom `onListen` callback.
 *
 * ```ts
 * Deno.serve({
 *   onListen({ port, hostname }) {
 *     console.log(`Server started at http://${hostname}:${port}`);
 *     // ... more info specific to your server ..
 *   },
 * }, (_req) => new Response("Hello, world"));
 * ```
 *
 * To enable TLS you must specify the `key` and `cert` options.
 *
 * ```ts
 * const cert = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n";
 * const key = "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n";
 * Deno.serve({ cert, key }, (_req) => new Response("Hello, world"));
 * ```
 *
 * @category HTTP Server
 */
export function serve(
  options: ServeOptions,
  handler: ServeHandler,
): HttpServer<Deno.NetAddr>;
/** Serves HTTP requests with the given option bag and handler.
 *
 * You can specify an object with a port and hostname option, which is the

@@ -6630,9 +6310,9 @@ declare namespace Deno {
 */
export function serve(
  options:
    | ServeTlsOptions
    | (ServeTlsOptions & TlsCertifiedKeyOptions),
  handler: ServeHandler,
    | ServeTcpOptions
    | (ServeTcpOptions & TlsCertifiedKeyOptions),
  handler: ServeHandler<Deno.NetAddr>,
): HttpServer<Deno.NetAddr>;
/** Serves HTTP requests with the given option bag.
 *

@@ -6659,7 +6339,7 @@ declare namespace Deno {
 * @category HTTP Server
 */
export function serve(
  options: ServeUnixInit & ServeUnixOptions,
  options: ServeUnixOptions & ServeInit<Deno.UnixAddr>,
): HttpServer<Deno.UnixAddr>;
/** Serves HTTP requests with the given option bag.
 *

@@ -6688,40 +6368,7 @@ declare namespace Deno {
 */
export function serve(
  options:
    & ServeInit
    & ServeOptions,
): HttpServer<Deno.NetAddr>;
/** Serves HTTP requests with the given option bag.
 *
 * You can specify an object with a port and hostname option, which is the
 * address to listen on. The default is port `8000` on hostname `"127.0.0.1"`.
 *
 * ```ts
 * const ac = new AbortController();
 *
 * const server = Deno.serve({
 *   port: 3000,
 *   hostname: "0.0.0.0",
 *   handler: (_req) => new Response("Hello, world"),
 *   signal: ac.signal,
 *   onListen({ port, hostname }) {
 *     console.log(`Server started at http://${hostname}:${port}`);
 *   },
 * });
 * server.finished.then(() => console.log("Server closed"));
 *
 * console.log("Closing server...");
 * ac.abort();
 * ```
 *
 * @category HTTP Server
 */
export function serve(
  options:
    & ServeTlsInit
    & (
      | ServeTlsOptions
      | (ServeTlsOptions & TlsCertifiedKeyOptions)
    ),
    & (ServeTcpOptions | (ServeTcpOptions & TlsCertifiedKeyOptions))
    & ServeInit<Deno.NetAddr>,
): HttpServer<Deno.NetAddr>;
}
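For context on the overload changes above (the old `ServeTlsOptions` shape collapses into `ServeTcpOptions` plus `TlsCertifiedKeyOptions`, and `ServeHandler` gains an address type parameter), here is a small usage sketch of what call sites look like against the new signatures. The port numbers, handler, and certificate paths are illustrative only and are not taken from this diff.

```ts
// Minimal sketch against the new overload shape: a plain TCP server and a
// TLS server share one options type, with the certified-key fields layered on.
const handler = (_req: Request): Response => new Response("Hello, world");

// Plain TCP: ServeTcpOptions
const plain = Deno.serve({ port: 8000, hostname: "127.0.0.1" }, handler);

// TCP + TLS: ServeTcpOptions & TlsCertifiedKeyOptions
const tls = Deno.serve(
  {
    port: 8443,
    cert: Deno.readTextFileSync("./cert.pem"), // illustrative paths
    key: Deno.readTextFileSync("./key.pem"),
  },
  handler,
);

// Gracefully stop both servers.
await Promise.all([plain.shutdown(), tls.shutdown()]);
```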
6 cli/tsc/dts/lib.deno.shared_globals.d.ts vendored

@@ -593,16 +593,12 @@ declare interface Performance extends EventTarget {
    endMark?: string,
  ): PerformanceMeasure;

  /** Returns a current time from Deno's start in milliseconds.
   *
   * Use the permission flag `--allow-hrtime` to return a precise value.
  /** Returns a current time from Deno's start in fractional milliseconds.
   *
   * ```ts
   * const t = performance.now();
   * console.log(`${t} ms since start!`);
   * ```
   *
   * @tags allow-hrtime
   */
  now(): number;
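The doc change above reflects that `performance.now()` now documents a fractional-millisecond return value with no mention of the `--allow-hrtime` flag. A small usage sketch of what that means for timing code; the timed work below is a placeholder.

```ts
// Sketch: measuring elapsed time with fractional-millisecond resolution.
const start = performance.now();
await new Promise((resolve) => setTimeout(resolve, 25)); // placeholder work
const elapsed = performance.now() - start;
console.log(`took ${elapsed.toFixed(3)} ms`); // e.g. "took 25.412 ms"
```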
41 cli/tsc/dts/lib.deno.unstable.d.ts vendored

@@ -10,18 +10,6 @@
declare namespace Deno {
  export {}; // stop default export type behavior

  /** Information for a HTTP request.
   *
   * @category HTTP Server
   * @experimental
   */
  export interface ServeHandlerInfo {
    /** The remote address of the connection. */
    remoteAddr: Deno.NetAddr;
    /** The completion promise */
    completed: Promise<void>;
  }

  /** **UNSTABLE**: New API, yet to be vetted.
   *
   * Retrieve the process umask. If `mask` is provided, sets the process umask.

@@ -1216,26 +1204,6 @@ declare namespace Deno {
    options: UnixListenOptions & { transport: "unixpacket" },
  ): DatagramConn;

  /** **UNSTABLE**: New API, yet to be vetted.
   *
   * Acquire an advisory file-system lock for the provided file.
   *
   * @param [exclusive=false]
   * @category File System
   * @experimental
   */
  export function flock(rid: number, exclusive?: boolean): Promise<void>;

  /** **UNSTABLE**: New API, yet to be vetted.
   *
   * Acquire an advisory file-system lock synchronously for the provided file.
   *
   * @param [exclusive=false]
   * @category File System
   * @experimental
   */
  export function flockSync(rid: number, exclusive?: boolean): void;

  /** **UNSTABLE**: New API, yet to be vetted.
   *
   * Release an advisory file-system lock for the provided file.
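The hunk above drops the rid-based `Deno.flock` / `Deno.flockSync` declarations from the unstable types. Assuming the file-handle-based locking methods on `Deno.FsFile` are the intended replacement (that mapping is my inference, not stated in this diff), a migration sketch could look like this; the file path is illustrative.

```ts
// Sketch only: advisory locking through a Deno.FsFile handle instead of a
// numeric resource id.
const file = await Deno.open("./data.db", { read: true, write: true });
await file.lock(true); // exclusive advisory lock
try {
  // ... read or modify the file while holding the lock ...
} finally {
  await file.unlock();
  file.close();
}
```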
@@ -2163,7 +2131,10 @@ declare namespace Deno {
   * @category Jupyter
   * @experimental
   */
  export function display(obj: unknown, options?: DisplayOptions): void;
  export function display(
    obj: unknown,
    options?: DisplayOptions,
  ): Promise<void>;

  /**
   * Show Markdown in Jupyter frontends with a tagged template function.

@@ -2236,12 +2207,12 @@ declare namespace Deno {
   * Format an object for displaying in Deno
   *
   * @param obj - The object to be displayed
   * @returns MediaBundle
   * @returns Promise<MediaBundle>
   *
   * @category Jupyter
   * @experimental
   */
  export function format(obj: unknown): MediaBundle;
  export function format(obj: unknown): Promise<MediaBundle>;

  /**
   * Broadcast a message on IO pub channel.
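Both `Deno.jupyter.display` and `Deno.jupyter.format` become Promise-returning in these hunks, so call sites in a notebook cell need an `await`. A hedged sketch (these APIs are only available when running under the `deno jupyter` kernel, and the displayed object here is arbitrary):

```ts
// Sketch: awaiting the now-asynchronous Jupyter helpers.
const table = { columns: ["a", "b"], rows: [[1, 2], [3, 4]] };

// Render the object in the notebook frontend.
await Deno.jupyter.display(table);

// Or inspect the MIME bundle that would be sent to the frontend.
const bundle = await Deno.jupyter.format(table);
console.log(Object.keys(bundle)); // e.g. ["text/plain", ...]
```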
6 cli/tsc/dts/lib.dom.d.ts vendored

@@ -17550,7 +17550,7 @@ declare var PerformanceServerTiming: {
};

/**
 * A legacy interface kept for backwards compatibility and contains properties that offer performance timing information for various events which occur during the loading and use of the current page. You get a PerformanceTiming object describing your page using the window.performance.timing property.
 * A legacy interface kept for backwards compatibility and contains properties that offer performance timing information for various events which occur during the loading and use of the current page. You get a PerformanceTiming object describing your page using the globalThis.performance.timing property.
 * @deprecated This interface is deprecated in the Navigation Timing Level 2 specification. Please use the PerformanceNavigationTiming interface instead.
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PerformanceTiming)

@@ -17833,7 +17833,7 @@ declare var Plugin: {
};

/**
 * Used to store a list of Plugin objects describing the available plugins; it's returned by the window.navigator.plugins property. The PluginArray is not a JavaScript array, but has the length property and supports accessing individual items using bracket notation (plugins[2]), as well as via item(index) and namedItem("name") methods.
 * Used to store a list of Plugin objects describing the available plugins; it's returned by the globalThis.navigator.plugins property. The PluginArray is not a JavaScript array, but has the length property and supports accessing individual items using bracket notation (plugins[2]), as well as via item(index) and namedItem("name") methods.
 * @deprecated
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/PluginArray)

@@ -22155,7 +22155,7 @@ declare var SubmitEvent: {
};

/**
 * This Web Crypto API interface provides a number of low-level cryptographic functions. It is accessed via the Crypto.subtle properties available in a window context (via Window.crypto).
 * This Web Crypto API interface provides a number of low-level cryptographic functions. It is accessed via the Crypto.subtle properties available in a window context (via globalThis.crypto).
 * Available only in secure contexts.
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto)

4 cli/tsc/dts/lib.webworker.d.ts vendored

@@ -5407,7 +5407,7 @@ declare var StylePropertyMapReadOnly: {
};

/**
 * This Web Crypto API interface provides a number of low-level cryptographic functions. It is accessed via the Crypto.subtle properties available in a window context (via Window.crypto).
 * This Web Crypto API interface provides a number of low-level cryptographic functions. It is accessed via the Crypto.subtle properties available in a window context (via globalThis.crypto).
 * Available only in secure contexts.
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto)

@@ -8641,7 +8641,7 @@ declare var WorkerLocation: {
};

/**
 * A subset of the Navigator interface allowed to be accessed from a Worker. Such an object is initialized for each worker and is available via the WorkerGlobalScope.navigator property obtained by calling window.self.navigator.
 * A subset of the Navigator interface allowed to be accessed from a Worker. Such an object is initialized for each worker and is available via the WorkerGlobalScope.navigator property obtained by calling globalThis.self.navigator.
 *
 * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WorkerNavigator)
 */
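These doc-comment edits replace `window.*` references with `globalThis.*`, wording that reads correctly in both the main-thread and worker type libraries. A small sketch of the pattern the updated wording describes; the hashed string is arbitrary.

```ts
// Sketch: globalThis.crypto.subtle resolves in a window, a worker, and Deno.
const data = new TextEncoder().encode("hello");
const digest = await globalThis.crypto.subtle.digest("SHA-256", data);
console.log(new Uint8Array(digest).length); // 32 bytes for SHA-256
```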
@@ -22,6 +22,7 @@ use deno_core::serde::Serialize;
use deno_core::serde::Serializer;
use deno_core::serde_json::json;
use deno_core::serde_v8;
use deno_core::url::Url;
use deno_core::JsRuntime;
use deno_core::ModuleSpecifier;
use deno_core::OpState;

@@ -32,7 +33,6 @@ use deno_graph::ModuleGraph;
use deno_graph::ResolutionResolved;
use deno_runtime::deno_node::NodeResolver;
use deno_semver::npm::NpmPackageReqReference;
use lsp_types::Url;
use node_resolver::errors::NodeJsErrorCode;
use node_resolver::errors::NodeJsErrorCoded;
use node_resolver::NodeModuleKind;

@@ -40,7 +40,7 @@ struct InternalEntry {
struct InternalState {
  // this ensures only one actual draw thread is running
  drawer_id: usize,
  hide: bool,
  hide_count: usize,
  has_draw_thread: bool,
  next_entry_id: u16,
  entries: Vec<InternalEntry>,

@@ -56,7 +56,7 @@ impl InternalState {
static INTERNAL_STATE: Lazy<Arc<Mutex<InternalState>>> = Lazy::new(|| {
  Arc::new(Mutex::new(InternalState {
    drawer_id: 0,
    hide: false,
    hide_count: 0,
    has_draw_thread: false,
    entries: Vec::new(),
    next_entry_id: 0,

@@ -113,7 +113,7 @@ impl DrawThread {
  pub fn hide() {
    let internal_state = &*INTERNAL_STATE;
    let mut internal_state = internal_state.lock();
    internal_state.hide = true;
    internal_state.hide_count += 1;

    Self::clear_and_stop_draw_thread(&mut internal_state);
  }

@@ -122,9 +122,12 @@ impl DrawThread {
  pub fn show() {
    let internal_state = &*INTERNAL_STATE;
    let mut internal_state = internal_state.lock();
    internal_state.hide = false;

    Self::maybe_start_draw_thread(&mut internal_state);
    if internal_state.hide_count > 0 {
      internal_state.hide_count -= 1;
      if internal_state.hide_count == 0 {
        Self::maybe_start_draw_thread(&mut internal_state);
      }
    }
  }

  fn finish_entry(entry_id: u16) {

@@ -153,7 +156,7 @@ impl DrawThread {

  fn maybe_start_draw_thread(internal_state: &mut InternalState) {
    if internal_state.has_draw_thread
      || internal_state.hide
      || internal_state.hide_count > 0
      || internal_state.entries.is_empty()
      || !DrawThread::is_supported()
    {
@@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::env::current_dir;
use std::fs::OpenOptions;
use std::io::Error;
use std::io::ErrorKind;

@@ -18,7 +17,6 @@ use deno_config::glob::WalkEntry;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
pub use deno_core::normalize_path;
use deno_core::unsync::spawn_blocking;
use deno_core::ModuleSpecifier;
use deno_runtime::deno_fs::FileSystem;

@@ -255,18 +253,6 @@ fn canonicalize_path_maybe_not_exists_with_custom_fn(
  }
}

pub fn resolve_from_cwd(path: &Path) -> Result<PathBuf, AnyError> {
  let resolved_path = if path.is_absolute() {
    path.to_owned()
  } else {
    let cwd =
      current_dir().context("Failed to get current working directory")?;
    cwd.join(path)
  };

  Ok(normalize_path(resolved_path))
}

/// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`.
/// Specifiers that start with http and https are left intact.
/// Note: This ignores all .git and node_modules folders.

@@ -509,7 +495,7 @@ pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
  Ok(())
}

pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), AnyError> {
pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> {
  let err_mapper = |err: Error| {
    Error::new(
      err.kind(),

@@ -715,30 +701,13 @@ pub fn specifier_from_file_path(
mod tests {
  use super::*;
  use deno_core::futures;
  use deno_core::normalize_path;
  use deno_core::parking_lot::Mutex;
  use pretty_assertions::assert_eq;
  use test_util::PathRef;
  use test_util::TempDir;
  use tokio::sync::Notify;

  #[test]
  fn resolve_from_cwd_child() {
    let cwd = current_dir().unwrap();
    assert_eq!(resolve_from_cwd(Path::new("a")).unwrap(), cwd.join("a"));
  }

  #[test]
  fn resolve_from_cwd_dot() {
    let cwd = current_dir().unwrap();
    assert_eq!(resolve_from_cwd(Path::new(".")).unwrap(), cwd);
  }

  #[test]
  fn resolve_from_cwd_parent() {
    let cwd = current_dir().unwrap();
    assert_eq!(resolve_from_cwd(Path::new("a/..")).unwrap(), cwd);
  }

  #[test]
  fn test_normalize_path() {
    assert_eq!(normalize_path(Path::new("a/../b")), PathBuf::from("b"));

@@ -756,14 +725,6 @@ mod tests {
    }
  }

  #[test]
  fn resolve_from_cwd_absolute() {
    let expected = Path::new("a");
    let cwd = current_dir().unwrap();
    let absolute_expected = cwd.join(expected);
    assert_eq!(resolve_from_cwd(expected).unwrap(), absolute_expected);
  }

  #[test]
  fn test_collect_specifiers() {
    fn create_files(dir_path: &PathRef, files: &[&str]) {
@@ -2,6 +2,8 @@

use std::io::Write;

use super::draw_thread::DrawThread;

struct CliLogger(env_logger::Logger);

impl CliLogger {

@@ -21,7 +23,13 @@ impl log::Log for CliLogger {

  fn log(&self, record: &log::Record) {
    if self.enabled(record.metadata()) {
      // it was considered to hold the draw thread's internal lock
      // across logging, but if outputting to stderr blocks then that
      // could potentially block other threads that access the draw
      // thread's state
      DrawThread::hide();
      self.0.log(record);
      DrawThread::show();
    }
  }

@@ -33,8 +41,10 @@ impl log::Log for CliLogger {
pub fn init(maybe_level: Option<log::Level>) {
  let log_level = maybe_level.unwrap_or(log::Level::Info);
  let logger = env_logger::Builder::from_env(
    env_logger::Env::default()
      .default_filter_or(log_level.to_level_filter().to_string()),
    env_logger::Env::new()
      // Use `DENO_LOG` and `DENO_LOG_STYLE` instead of `RUST_` prefix
      .filter_or("DENO_LOG", log_level.to_level_filter().to_string())
      .write_style("DENO_LOG_STYLE"),
  )
  // https://github.com/denoland/deno/issues/6641
  .filter_module("rustyline", log::LevelFilter::Off)

@@ -46,6 +56,15 @@ pub fn init(maybe_level: Option<log::Level>) {
  // in the cli logger
  .filter_module("deno::lsp::performance", log::LevelFilter::Debug)
  .filter_module("rustls", log::LevelFilter::Off)
  // swc_ecma_codegen's `srcmap!` macro emits error-level spans only on debug
  // build:
  // https://github.com/swc-project/swc/blob/74d6478be1eb8cdf1df096c360c159db64b64d8a/crates/swc_ecma_codegen/src/macros.rs#L112
  // We suppress them here to avoid flooding our CI logs in integration tests.
  .filter_module("swc_ecma_codegen", log::LevelFilter::Off)
  .filter_module("swc_ecma_transforms_optimization", log::LevelFilter::Off)
  .filter_module("swc_ecma_parser", log::LevelFilter::Error)
  // Suppress span lifecycle logs since they are too verbose
  .filter_module("tracing::span", log::LevelFilter::Off)
  .format(|buf, record| {
    let mut target = record.target().to_string();
    if let Some(line_no) = record.line() {
@@ -145,34 +145,6 @@ pub fn relative_specifier(
  Some(to_percent_decoded_str(&text))
}

/// Gets a path with the specified file stem suffix.
///
/// Ex. `file.ts` with suffix `_2` returns `file_2.ts`
pub fn path_with_stem_suffix(path: &Path, suffix: &str) -> PathBuf {
  if let Some(file_name) = path.file_name().map(|f| f.to_string_lossy()) {
    if let Some(file_stem) = path.file_stem().map(|f| f.to_string_lossy()) {
      if let Some(ext) = path.extension().map(|f| f.to_string_lossy()) {
        return if file_stem.to_lowercase().ends_with(".d") {
          path.with_file_name(format!(
            "{}{}.{}.{}",
            &file_stem[..file_stem.len() - ".d".len()],
            suffix,
            // maintain casing
            &file_stem[file_stem.len() - "d".len()..],
            ext
          ))
        } else {
          path.with_file_name(format!("{file_stem}{suffix}.{ext}"))
        };
      }
    }

    path.with_file_name(format!("{file_name}{suffix}"))
  } else {
    path.with_file_name(suffix)
  }
}

#[cfg_attr(windows, allow(dead_code))]
pub fn relative_path(from: &Path, to: &Path) -> Option<PathBuf> {
  pathdiff::diff_paths(to, from)

@@ -405,46 +377,6 @@ mod test {
    }
  }

  #[test]
  fn test_path_with_stem_suffix() {
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/"), "_2"),
      PathBuf::from("/_2")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test"), "_2"),
      PathBuf::from("/test_2")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test.txt"), "_2"),
      PathBuf::from("/test_2.txt")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test/subdir"), "_2"),
      PathBuf::from("/test/subdir_2")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test/subdir.other.txt"), "_2"),
      PathBuf::from("/test/subdir.other_2.txt")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test.d.ts"), "_2"),
      PathBuf::from("/test_2.d.ts")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test.D.TS"), "_2"),
      PathBuf::from("/test_2.D.TS")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test.d.mts"), "_2"),
      PathBuf::from("/test_2.d.mts")
    );
    assert_eq!(
      path_with_stem_suffix(&PathBuf::from("/test.d.cts"), "_2"),
      PathBuf::from("/test_2.d.cts")
    );
  }

  #[test]
  fn test_to_percent_decoded_str() {
    let str = to_percent_decoded_str("%F0%9F%A6%95");
@@ -9,6 +9,8 @@ const TYPESCRIPT: &str = env!("TS_VERSION");
const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
// TODO(bartlomieju): ideally we could remove this const.
const IS_CANARY: bool = option_env!("DENO_CANARY").is_some();
// TODO(bartlomieju): this is temporary, to allow Homebrew to cut RC releases as well
const IS_RC: bool = option_env!("DENO_RC").is_some();

pub static DENO_VERSION_INFO: Lazy<DenoVersionInfo> = Lazy::new(|| {
  let release_channel = libsui::find_section("denover")

@@ -17,6 +19,8 @@ pub static DENO_VERSION_INFO: Lazy<DenoVersionInfo> = Lazy::new(|| {
    .unwrap_or({
      if IS_CANARY {
        ReleaseChannel::Canary
      } else if IS_RC {
        ReleaseChannel::Rc
      } else {
        ReleaseChannel::Stable
      }
@@ -116,6 +116,9 @@ pub struct CliMainWorkerOptions {
  pub skip_op_registration: bool,
  pub create_hmr_runner: Option<CreateHmrRunnerCb>,
  pub create_coverage_collector: Option<CreateCoverageCollectorCb>,
  pub node_ipc: Option<i64>,
  pub serve_port: Option<u16>,
  pub serve_host: Option<String>,
}

struct SharedWorkerState {

@@ -135,13 +138,8 @@ struct SharedWorkerState {
  maybe_inspector_server: Option<Arc<InspectorServer>>,
  maybe_lockfile: Option<Arc<CliLockfile>>,
  feature_checker: Arc<FeatureChecker>,
  node_ipc: Option<i64>,
  enable_future_features: bool,
  disable_deprecated_api_warning: bool,
  verbose_deprecated_api_warning: bool,
  code_cache: Option<Arc<dyn code_cache::CodeCache>>,
  serve_port: Option<u16>,
  serve_host: Option<String>,
}

impl SharedWorkerState {

@@ -434,14 +432,8 @@ impl CliMainWorkerFactory {
    maybe_inspector_server: Option<Arc<InspectorServer>>,
    maybe_lockfile: Option<Arc<CliLockfile>>,
    feature_checker: Arc<FeatureChecker>,
    options: CliMainWorkerOptions,
    node_ipc: Option<i64>,
    serve_port: Option<u16>,
    serve_host: Option<String>,
    enable_future_features: bool,
    disable_deprecated_api_warning: bool,
    verbose_deprecated_api_warning: bool,
    code_cache: Option<Arc<dyn code_cache::CodeCache>>,
    options: CliMainWorkerOptions,
  ) -> Self {
    Self {
      shared: Arc::new(SharedWorkerState {

@@ -461,12 +453,8 @@ impl CliMainWorkerFactory {
        maybe_inspector_server,
        maybe_lockfile,
        feature_checker,
        node_ipc,
        serve_port,
        serve_host,
        enable_future_features,
        disable_deprecated_api_warning,
        verbose_deprecated_api_warning,
        // TODO(2.0): remove?
        enable_future_features: true,
        code_cache,
      }),
    }

@@ -573,9 +561,9 @@ impl CliMainWorkerFactory {
    let feature_checker = shared.feature_checker.clone();
    let mut unstable_features =
      Vec::with_capacity(crate::UNSTABLE_GRANULAR_FLAGS.len());
    for (feature_name, _, id) in crate::UNSTABLE_GRANULAR_FLAGS {
      if feature_checker.check(feature_name) {
        unstable_features.push(*id);
    for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
      if feature_checker.check(granular_flag.name) {
        unstable_features.push(granular_flag.id);
      }
    }

@@ -602,13 +590,11 @@ impl CliMainWorkerFactory {
      has_node_modules_dir: shared.options.has_node_modules_dir,
      argv0: shared.options.argv0.clone(),
      node_debug: shared.options.node_debug.clone(),
      node_ipc_fd: shared.node_ipc,
      disable_deprecated_api_warning: shared.disable_deprecated_api_warning,
      verbose_deprecated_api_warning: shared.verbose_deprecated_api_warning,
      node_ipc_fd: shared.options.node_ipc,
      future: shared.enable_future_features,
      mode,
      serve_port: shared.serve_port,
      serve_host: shared.serve_host.clone(),
      serve_port: shared.options.serve_port,
      serve_host: shared.options.serve_host.clone(),
    },
    extensions: custom_extensions,
    startup_snapshot: crate::js::deno_isolate_init(),

@@ -771,9 +757,9 @@ fn create_web_worker_callback(
    let feature_checker = shared.feature_checker.clone();
    let mut unstable_features =
      Vec::with_capacity(crate::UNSTABLE_GRANULAR_FLAGS.len());
    for (feature_name, _, id) in crate::UNSTABLE_GRANULAR_FLAGS {
      if feature_checker.check(feature_name) {
        unstable_features.push(*id);
    for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
      if feature_checker.check(granular_flag.name) {
        unstable_features.push(granular_flag.id);
      }
    }

@@ -801,12 +787,10 @@ fn create_web_worker_callback(
      argv0: shared.options.argv0.clone(),
      node_debug: shared.options.node_debug.clone(),
      node_ipc_fd: None,
      disable_deprecated_api_warning: shared.disable_deprecated_api_warning,
      verbose_deprecated_api_warning: shared.verbose_deprecated_api_warning,
      future: shared.enable_future_features,
      mode: WorkerExecutionMode::Worker,
      serve_port: shared.serve_port,
      serve_host: shared.serve_host.clone(),
      serve_port: shared.options.serve_port,
      serve_host: shared.options.serve_host.clone(),
    },
    extensions: vec![],
    startup_snapshot: crate::js::deno_isolate_init(),
@@ -2,7 +2,7 @@

[package]
name = "deno_broadcast_channel"
version = "0.158.0"
version = "0.161.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

2 ext/cache/Cargo.toml vendored

@@ -2,7 +2,7 @@

[package]
name = "deno_cache"
version = "0.96.0"
version = "0.99.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_canvas"
version = "0.33.0"
version = "0.36.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -179,7 +179,7 @@ class AssertionError extends Error {
  }
}

function assert(cond, msg = "Assertion failed.") {
function assert(cond, msg = "Assertion failed") {
  if (!cond) {
    throw new AssertionError(msg);
  }

@@ -3236,8 +3236,8 @@ class Console {
  table = (data = undefined, properties = undefined) => {
    if (properties !== undefined && !ArrayIsArray(properties)) {
      throw new Error(
        "The 'properties' argument must be of type Array. " +
          "Received type " + typeof properties,
        "The 'properties' argument must be of type Array: " +
          "received type " + typeof properties,
      );
    }
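The hunk above only rewords the error `console.table` throws when `properties` is not an array; the accepted call shapes are unchanged. A small usage sketch with made-up data:

```ts
// Sketch: selecting columns with an array of property names.
console.table(
  [{ name: "deno", lang: "rust" }, { name: "node", lang: "c++" }],
  ["name"],
);
// Passing a non-array `properties` value (e.g. the string "name") now throws:
// Error: The 'properties' argument must be of type Array: received type string
```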
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_console"
|
||||
version = "0.164.0"
|
||||
version = "0.167.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_cron"
|
||||
version = "0.44.0"
|
||||
version = "0.47.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_crypto"
|
||||
version = "0.178.0"
|
||||
version = "0.181.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@@ -689,7 +689,8 @@ fn import_key_ec(

  let rng = ring::rand::SystemRandom::new();
  // deserialize pkcs8 using ring crate, to VALIDATE public key
  let _private_key = EcdsaKeyPair::from_pkcs8(signing_alg, &data, &rng)?;
  let _private_key = EcdsaKeyPair::from_pkcs8(signing_alg, &data, &rng)
    .map_err(|_| data_error("invalid key"))?;

  // 11.
  if named_curve != pk_named_curve {
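The change above maps a PKCS#8 parse failure in `import_key_ec` to a WebCrypto data error ("invalid key") instead of bubbling up the raw ring error. From JavaScript this is the `crypto.subtle.importKey` path for EC private keys; a hedged sketch of a call that exercises it, where the key bytes are a placeholder rather than a real key:

```ts
// Sketch: importing a PKCS#8-encoded ECDSA P-256 private key.
// `pkcs8Bytes` must hold real DER data; a garbage buffer like this one is
// expected to reject with the "invalid key" data error after this change.
const pkcs8Bytes = new Uint8Array(64); // placeholder, not a valid key
try {
  const key = await crypto.subtle.importKey(
    "pkcs8",
    pkcs8Bytes,
    { name: "ECDSA", namedCurve: "P-256" },
    true,
    ["sign"],
  );
  console.log(key.type); // "private" when the input is a valid key
} catch (err) {
  console.error(err); // e.g. DataError: invalid key
}
```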
Some files were not shown because too many files have changed in this diff.