mirror of https://github.com/denoland/deno.git
synced 2025-01-21 04:52:26 -05:00

Merge branch 'main' into Fix-UNC-Path-Permissions-Issue-on-Windows

commit b9b7163c46
595 changed files with 13677 additions and 5345 deletions
@@ -31,6 +31,8 @@
   "cli/tsc/dts/lib.scripthost.d.ts",
   "cli/tsc/dts/lib.webworker*.d.ts",
   "cli/tsc/dts/typescript.d.ts",
+  "cli/tools/doc/prism.css",
+  "cli/tools/doc/prism.js",
   "ext/websocket/autobahn/reports",
   "gh-pages",
   "target",
2 changes: .github/workflows/ci.generate.ts (vendored)

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 25;
+const cacheVersion = 27;
 
 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
8 changes: .github/workflows/ci.yml (vendored)

@@ -361,8 +361,8 @@ jobs:
           path: |-
             ~/.cargo/registry/index
             ~/.cargo/registry/cache
-          key: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+          key: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
         if: '!(matrix.skip)'
       - name: Restore cache build output (PR)
         uses: actions/cache/restore@v4
@@ -375,7 +375,7 @@ jobs:
             !./target/*/*.zip
             !./target/*/*.tar.gz
           key: never_saved
-          restore-keys: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
       - name: Apply and update mtime cache
         if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
         uses: ./.github/mtime_cache
@@ -685,7 +685,7 @@ jobs:
             !./target/*/*.zip
             !./target/*/*.sha256sum
             !./target/*/*.tar.gz
-          key: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
   publish-canary:
     name: publish canary
     runs-on: ubuntu-24.04
726 changes: Cargo.lock (generated)

File diff suppressed because it is too large.
66 changes: Cargo.toml

@@ -46,18 +46,19 @@ repository = "https://github.com/denoland/deno"
 
 [workspace.dependencies]
 deno_ast = { version = "=0.43.3", features = ["transpiling"] }
-deno_core = { version = "0.319.0" }
+deno_core = { version = "0.322.0" }
 
-deno_bench_util = { version = "0.171.0", path = "./bench_util" }
+deno_bench_util = { version = "0.173.0", path = "./bench_util" }
 deno_config = { version = "=0.39.2", features = ["workspace", "sync"] }
 deno_lockfile = "=0.23.1"
 deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
 deno_npm = "=0.25.4"
 deno_path_util = "=0.2.1"
-deno_permissions = { version = "0.37.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.186.0", path = "./runtime" }
+deno_permissions = { version = "0.39.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.188.0", path = "./runtime" }
 deno_semver = "=0.5.16"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.107.0", path = "./ext/napi/sym" }
+napi_sym = { version = "0.109.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
 
 denokv_proto = "0.8.4"
@@ -66,32 +67,32 @@ denokv_remote = "0.8.4"
 denokv_sqlite = { default-features = false, version = "0.8.4" }
 
 # exts
-deno_broadcast_channel = { version = "0.171.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.109.0", path = "./ext/cache" }
-deno_canvas = { version = "0.46.0", path = "./ext/canvas" }
-deno_console = { version = "0.177.0", path = "./ext/console" }
-deno_cron = { version = "0.57.0", path = "./ext/cron" }
-deno_crypto = { version = "0.191.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.201.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.164.0", path = "./ext/ffi" }
-deno_fs = { version = "0.87.0", path = "./ext/fs" }
-deno_http = { version = "0.175.0", path = "./ext/http" }
-deno_io = { version = "0.87.0", path = "./ext/io" }
-deno_kv = { version = "0.85.0", path = "./ext/kv" }
-deno_napi = { version = "0.108.0", path = "./ext/napi" }
-deno_net = { version = "0.169.0", path = "./ext/net" }
-deno_node = { version = "0.114.0", path = "./ext/node" }
-deno_tls = { version = "0.164.0", path = "./ext/tls" }
-deno_url = { version = "0.177.0", path = "./ext/url" }
-deno_web = { version = "0.208.0", path = "./ext/web" }
-deno_webgpu = { version = "0.144.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.177.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.182.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.172.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.173.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.111.0", path = "./ext/cache" }
+deno_canvas = { version = "0.48.0", path = "./ext/canvas" }
+deno_console = { version = "0.179.0", path = "./ext/console" }
+deno_cron = { version = "0.59.0", path = "./ext/cron" }
+deno_crypto = { version = "0.193.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.203.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.166.0", path = "./ext/ffi" }
+deno_fs = { version = "0.89.0", path = "./ext/fs" }
+deno_http = { version = "0.177.0", path = "./ext/http" }
+deno_io = { version = "0.89.0", path = "./ext/io" }
+deno_kv = { version = "0.87.0", path = "./ext/kv" }
+deno_napi = { version = "0.110.0", path = "./ext/napi" }
+deno_net = { version = "0.171.0", path = "./ext/net" }
+deno_node = { version = "0.116.0", path = "./ext/node" }
+deno_tls = { version = "0.166.0", path = "./ext/tls" }
+deno_url = { version = "0.179.0", path = "./ext/url" }
+deno_web = { version = "0.210.0", path = "./ext/web" }
+deno_webgpu = { version = "0.146.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.179.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.184.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.174.0", path = "./ext/webstorage" }
 
 # resolvers
-deno_resolver = { version = "0.9.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.16.0", path = "./resolvers/node" }
+deno_resolver = { version = "0.11.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.18.0", path = "./resolvers/node" }
 
 aes = "=0.8.3"
 anyhow = "1.0.57"
@@ -99,6 +100,7 @@ async-trait = "0.1.73"
 base32 = "=0.5.1"
 base64 = "0.21.7"
 bencher = "0.1"
+boxed_error = "0.2.2"
 brotli = "6.0.0"
 bytes = "1.4.0"
 cache_control = "=0.2.0"
@@ -126,6 +128,7 @@ fs3 = "0.5.0"
 futures = "0.3.21"
 glob = "0.3.1"
 h2 = "0.4.4"
+hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
 http = "1.0"
 http-body = "1.0"
 http-body-util = "0.1.2"
@@ -141,7 +144,7 @@ jsonc-parser = { version = "=0.26.2", features = ["serde"] }
 lazy-regex = "3"
 libc = "0.2.126"
 libz-sys = { version = "1.1.20", default-features = false }
-log = "0.4.20"
+log = { version = "0.4.20", features = ["kv"] }
 lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
 memmem = "0.1.1"
 monch = "=0.5.0"
@@ -197,8 +200,7 @@ tower-http = { version = "0.6.1", features = ["decompression-br", "decompression
 tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
 tower-service = "0.3.2"
 twox-hash = "=1.6.3"
-# Upgrading past 2.4.1 may cause WPT failures
-url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
+url = { version = "2.5", features = ["serde", "expose_internals"] }
 uuid = { version = "1.3.0", features = ["v4"] }
 webpki-root-certs = "0.26.5"
 webpki-roots = "0.26"
80 changes: Releases.md

@@ -6,6 +6,86 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install
 
+### 2.1.1 / 2024.11.21
+
+- docs(add): clarification to add command (#26968)
+- docs(doc): fix typo in doc subcommand help output (#26321)
+- fix(node): regression where ts files were sometimes resolved instead of js
+  (#26971)
+- fix(task): ensure root config always looks up dependencies in root (#26959)
+- fix(watch): don't panic if there's no path provided (#26972)
+- fix: Buffer global in --unstable-node-globals (#26973)
+
+### 2.1.0 / 2024.11.21
+
+- feat(cli): add `--unstable-node-globals` flag (#26617)
+- feat(cli): support multiple env file argument (#26527)
+- feat(compile): ability to embed directory in executable (#26939)
+- feat(compile): ability to embed local data files (#26934)
+- feat(ext/fetch): Make fetch client parameters configurable (#26909)
+- feat(ext/fetch): allow embedders to use `hickory_dns_resolver` instead of
+  default `GaiResolver` (#26740)
+- feat(ext/fs): add ctime to Deno.stats and use it in node compat layer (#24801)
+- feat(ext/http): Make http server parameters configurable (#26785)
+- feat(ext/node): perf_hooks.monitorEventLoopDelay() (#26905)
+- feat(fetch): accept async iterables for body (#26882)
+- feat(fmt): support SQL (#26750)
+- feat(info): show location for Web Cache (#26205)
+- feat(init): add --npm flag to initialize npm projects (#26896)
+- feat(jupyter): Add `Deno.jupyter.image` API (#26284)
+- feat(lint): Add checked files list to the JSON output (#26936)
+- feat(lsp): auto-imports with @deno-types directives (#26821)
+- feat(node): stabilize detecting if CJS via `"type": "commonjs"` in a
+  package.json (#26439)
+- feat(permission): support suffix wildcards in `--allow-env` flag (#25255)
+- feat(publish): add `--set-version <version>` flag (#26141)
+- feat(runtime): remove public OTEL trace API (#26854)
+- feat(task): add --eval flag (#26943)
+- feat(task): dependencies (#26467)
+- feat(task): support object notation, remove support for JSDocs (#26886)
+- feat(task): workspace support with --filter and --recursive (#26949)
+- feat(watch): log which file changed on HMR or watch change (#25801)
+- feat: OpenTelemetry Tracing API and Exporting (#26710)
+- feat: Wasm module support (#26668)
+- feat: fmt and lint respect .gitignore file (#26897)
+- feat: permission stack traces in ops (#26938)
+- feat: subcommand to view and update outdated dependencies (#26942)
+- feat: upgrade V8 to 13.0 (#26851)
+- fix(cli): preserve comments in doc tests (#26828)
+- fix(cli): show prefix hint when installing a package globally (#26629)
+- fix(ext/cache): gracefully error when cache creation failed (#26895)
+- fix(ext/http): prefer brotli for `accept-encoding: gzip, deflate, br, zstd`
+  (#26814)
+- fix(ext/node): New async setInterval function to improve the nodejs
+  compatibility (#26703)
+- fix(ext/node): add autoSelectFamily option to net.createConnection (#26661)
+- fix(ext/node): handle `--allow-sys=inspector` (#26836)
+- fix(ext/node): increase tolerance for interval test (#26899)
+- fix(ext/node): process.getBuiltinModule (#26833)
+- fix(ext/node): use ERR_NOT_IMPLEMENTED for notImplemented (#26853)
+- fix(ext/node): zlib.crc32() (#26856)
+- fix(ext/webgpu): Create GPUQuerySet converter before usage (#26883)
+- fix(ext/websocket): initialize `error` attribute of WebSocket ErrorEvent
+  (#26796)
+- fix(ext/webstorage): use error class for sqlite error case (#26806)
+- fix(fmt): error instead of panic on unstable format (#26859)
+- fix(fmt): formatting of .svelte files (#26948)
+- fix(install): percent encodings in interactive progress bar (#26600)
+- fix(install): re-setup bin entries after running lifecycle scripts (#26752)
+- fix(lockfile): track dependencies specified in TypeScript compiler options
+  (#26551)
+- fix(lsp): ignore editor indent settings if deno.json is present (#26912)
+- fix(lsp): skip code action edits that can't be converted (#26831)
+- fix(node): handle resolving ".//<something>" in npm packages (#26920)
+- fix(node/crypto): support promisify on generateKeyPair (#26913)
+- fix(permissions): say to use --allow-run instead of --allow-all (#26842)
+- fix(publish): improve error message when missing exports (#26945)
+- fix: otel resiliency (#26857)
+- fix: update message for unsupported schemes with npm and jsr (#26884)
+- perf(compile): code cache (#26528)
+- perf(windows): delay load webgpu and some other dlls (#26917)
+- perf: use available system memory for v8 isolate memory limit (#26868)
+
 ### 2.0.6 / 2024.11.10
 
 - feat(ext/http): abort event when request is cancelled (#26781)
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_bench_util"
-version = "0.171.0"
+version = "0.173.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno"
-version = "2.0.6"
+version = "2.1.1"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -69,11 +69,11 @@ winres.workspace = true
 
 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
-deno_cache_dir = { workspace = true }
-deno_config = { version = "=0.38.2", features = ["workspace", "sync"] }
+deno_cache_dir.workspace = true
+deno_config.workspace = true
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "0.156.0", default-features = false, features = ["rust", "html", "syntect"] }
-deno_graph = { version = "=0.84.1" }
+deno_doc = { version = "0.160.0", features = ["rust", "comrak"] }
+deno_graph = { version = "=0.85.0" }
 deno_lint = { version = "=0.68.0", features = ["docs"] }
 deno_lockfile.workspace = true
 deno_npm.workspace = true
@@ -122,14 +122,14 @@ hyper-util.workspace = true
 import_map = { version = "=0.20.1", features = ["ext"] }
 indexmap.workspace = true
 jsonc-parser = { workspace = true, features = ["cst", "serde"] }
-jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
+jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] }
 lazy-regex.workspace = true
 libc.workspace = true
 libz-sys.workspace = true
 log = { workspace = true, features = ["serde"] }
 lsp-types.workspace = true
 malva = "=0.11.0"
-markup_fmt = "=0.15.0"
+markup_fmt = "=0.16.0"
 memmem.workspace = true
 monch.workspace = true
 notify.workspace = true
@@ -151,6 +151,8 @@ serde_repr.workspace = true
 sha2.workspace = true
 shell-escape = "=0.1.5"
 spki = { version = "0.7", features = ["pem"] }
+# NOTE(bartlomieju): using temporary fork for now, revert back to `sqlformat-rs` later
+sqlformat = { package = "deno_sqlformat", version = "0.3.2" }
 strsim = "0.11.1"
 tar.workspace = true
 tempfile.workspace = true
@@ -70,7 +70,41 @@ pub fn deno_json_deps(
   let values = imports_values(config.json.imports.as_ref())
     .into_iter()
    .chain(scope_values(config.json.scopes.as_ref()));
-  values_to_set(values)
+  let mut set = values_to_set(values);
+
+  if let Some(serde_json::Value::Object(compiler_options)) =
+    &config.json.compiler_options
+  {
+    // add jsxImportSource
+    if let Some(serde_json::Value::String(value)) =
+      compiler_options.get("jsxImportSource")
+    {
+      if let Some(dep_req) = value_to_dep_req(value) {
+        set.insert(dep_req);
+      }
+    }
+    // add jsxImportSourceTypes
+    if let Some(serde_json::Value::String(value)) =
+      compiler_options.get("jsxImportSourceTypes")
+    {
+      if let Some(dep_req) = value_to_dep_req(value) {
+        set.insert(dep_req);
+      }
+    }
+    // add the dependencies in the types array
+    if let Some(serde_json::Value::Array(types)) = compiler_options.get("types")
+    {
+      for value in types {
+        if let serde_json::Value::String(value) = value {
+          if let Some(dep_req) = value_to_dep_req(value) {
+            set.insert(dep_req);
+          }
+        }
+      }
+    }
+  }
+
+  set
 }
 
 fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> {
@@ -98,15 +132,23 @@ fn values_to_set<'a>(
 ) -> HashSet<JsrDepPackageReq> {
   let mut entries = HashSet::new();
   for value in values {
-    if let Ok(req_ref) = JsrPackageReqReference::from_str(value) {
-      entries.insert(JsrDepPackageReq::jsr(req_ref.into_inner().req));
-    } else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
-      entries.insert(JsrDepPackageReq::npm(req_ref.into_inner().req));
+    if let Some(dep_req) = value_to_dep_req(value) {
+      entries.insert(dep_req);
     }
   }
   entries
 }
 
+fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
+  if let Ok(req_ref) = JsrPackageReqReference::from_str(value) {
+    Some(JsrDepPackageReq::jsr(req_ref.into_inner().req))
+  } else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
+    Some(JsrDepPackageReq::npm(req_ref.into_inner().req))
+  } else {
+    None
+  }
+}
+
 pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) {
   if let Some(ignored_options) = &ts_config.maybe_ignored_options {
     log::warn!("{}", ignored_options);
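Note: a hedged, standalone sketch of how the extracted helper behaves (the module paths for the deno_semver types are assumptions; the function body is copied from the hunk above). Both jsr: and npm: specifiers normalize to a JsrDepPackageReq, and anything else is ignored:

use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::JsrDepPackageReq;

fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
  if let Ok(req_ref) = JsrPackageReqReference::from_str(value) {
    Some(JsrDepPackageReq::jsr(req_ref.into_inner().req))
  } else if let Ok(req_ref) = NpmPackageReqReference::from_str(value) {
    Some(JsrDepPackageReq::npm(req_ref.into_inner().req))
  } else {
    None
  }
}

fn main() {
  // compilerOptions values such as "jsxImportSource": "npm:preact" now
  // end up in the lockfile dependency set (see fix #26551 above).
  assert!(value_to_dep_req("npm:preact@^10.0.0").is_some());
  assert!(value_to_dep_req("jsr:@std/yaml@^0.221").is_some());
  assert!(value_to_dep_req("./relative/module.ts").is_none());
}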

File diff suppressed because it is too large.
@@ -126,11 +126,7 @@ impl CliLockfile {
     maybe_deno_json: Option<&ConfigFile>,
   ) -> HashSet<JsrDepPackageReq> {
     maybe_deno_json
-      .map(|c| {
-        crate::args::deno_json::deno_json_deps(c)
-          .into_iter()
-          .collect()
-      })
+      .map(crate::args::deno_json::deno_json_deps)
       .unwrap_or_default()
   }
@@ -289,6 +289,7 @@ impl BenchOptions {
 #[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
 pub struct UnstableFmtOptions {
   pub component: bool,
+  pub sql: bool,
 }
 
 #[derive(Clone, Debug)]
@@ -322,6 +323,7 @@ impl FmtOptions {
       options: resolve_fmt_options(fmt_flags, fmt_config.options),
       unstable: UnstableFmtOptions {
         component: unstable.component || fmt_flags.unstable_component,
+        sql: unstable.sql || fmt_flags.unstable_sql,
       },
       files: fmt_config.files,
     }
@@ -823,10 +825,8 @@
       };
       let msg =
         format!("DANGER: TLS certificate validation is disabled {}", domains);
-      #[allow(clippy::print_stderr)]
-      {
-        // use eprintln instead of log::warn so this always gets shown
-        eprintln!("{}", colors::yellow(msg));
-      }
+      log::error!("{}", colors::yellow(msg));
     }
 
@@ -870,12 +870,8 @@
     } else {
       &[]
     };
-    let config_parse_options = deno_config::deno_json::ConfigParseOptions {
-      include_task_comments: matches!(
-        flags.subcommand,
-        DenoSubcommand::Task(..)
-      ),
-    };
+    let config_parse_options =
+      deno_config::deno_json::ConfigParseOptions::default();
     let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled
       && !flags.no_npm
       && !has_flag_env_var("DENO_NO_PACKAGE_JSON");
@@ -1131,23 +1127,10 @@
   }
 
   pub fn otel_config(&self) -> Option<OtelConfig> {
-    if self
-      .flags
-      .unstable_config
-      .features
-      .contains(&String::from("otel"))
-    {
-      Some(OtelConfig {
-        runtime_name: Cow::Borrowed("deno"),
-        runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
-        ..Default::default()
-      })
-    } else {
-      None
-    }
+    self.flags.otel_config()
   }
 
-  pub fn env_file_name(&self) -> Option<&String> {
+  pub fn env_file_name(&self) -> Option<&Vec<String>> {
     self.flags.env_file.as_ref()
   }
 
@@ -1338,6 +1321,7 @@
     let workspace = self.workspace();
     UnstableFmtOptions {
       component: workspace.has_unstable("fmt-component"),
+      sql: workspace.has_unstable("fmt-sql"),
     }
   }
 
@@ -1564,6 +1548,10 @@
       }) => Url::parse(&flags.module_url)
         .ok()
         .map(|url| vec![Cow::Owned(url)]),
+      DenoSubcommand::Doc(DocFlags {
+        source_files: DocSourceFileFlag::Paths(paths),
+        ..
+      }) => Some(files_to_urls(paths)),
       _ => None,
     })
     .unwrap_or_default();
@@ -1640,8 +1628,10 @@
       DenoSubcommand::Install(_)
         | DenoSubcommand::Add(_)
         | DenoSubcommand::Remove(_)
+        | DenoSubcommand::Init(_)
+        | DenoSubcommand::Outdated(_)
     ) {
-      // For `deno install/add/remove` we want to force the managed resolver so it can set up `node_modules/` directory.
+      // For `deno install/add/remove/init` we want to force the managed resolver so it can set up `node_modules/` directory.
       return false;
     }
     if self.node_modules_dir().ok().flatten().is_none()
@@ -1686,6 +1676,7 @@
       "byonm",
       "bare-node-builtins",
       "fmt-component",
+      "fmt-sql",
     ])
     .collect();
 
@@ -1923,6 +1914,10 @@ pub fn resolve_no_prompt(flags: &PermissionFlags) -> bool {
   flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT")
 }
 
+pub fn has_trace_permissions_enabled() -> bool {
+  has_flag_env_var("DENO_TRACE_PERMISSIONS")
+}
+
 pub fn has_flag_env_var(name: &str) -> bool {
   let value = env::var(name);
   matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
@@ -1954,10 +1949,12 @@ pub fn config_to_deno_graph_workspace_member(
   })
 }
 
-fn load_env_variables_from_env_file(filename: Option<&String>) {
-  let Some(env_file_name) = filename else {
+fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) {
+  let Some(env_file_names) = filename else {
     return;
   };
 
+  for env_file_name in env_file_names.iter().rev() {
   match from_filename(env_file_name) {
     Ok(_) => (),
     Err(error) => {
@@ -1969,6 +1966,7 @@
       }
     }
   }
+  }
 }
 
 #[cfg(test)]
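A minimal sketch of the multi-env-file handling above, under two assumptions: that `from_filename` is a dotenv-style loader such as dotenvy::from_filename, and that such loaders do not overwrite variables already present in the environment. Iterating the list in reverse then means the last file passed on the command line is loaded first, so its values win on conflicts:

// Hypothetical standalone version; `dotenvy` is an assumption about the
// loader behind `from_filename` in the hunk above.
fn load_env_files(env_file_names: &[String]) {
  // Reverse order: the last file listed gets first claim on each variable,
  // and a non-overwriting loader then skips duplicates from earlier files.
  for env_file_name in env_file_names.iter().rev() {
    if let Err(error) = dotenvy::from_filename(env_file_name) {
      eprintln!("failed loading {env_file_name}: {error}");
    }
  }
}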
18 changes: cli/build.rs

@@ -400,6 +400,24 @@ fn main() {
   println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
   println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
 
+  if cfg!(windows) {
+    // these dlls load slowly, so delay loading them
+    let dlls = [
+      // webgpu
+      "d3dcompiler_47",
+      "OPENGL32",
+      // network related functions
+      "iphlpapi",
+    ];
+    for dll in dlls {
+      println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
+      println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
+    }
+    // enable delay loading
+    println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
+    println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
+  }
+
   let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
   let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());
 
10 changes: cli/cache/code_cache.rs

@@ -1,10 +1,14 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+use std::sync::Arc;
+
 use deno_ast::ModuleSpecifier;
 use deno_core::error::AnyError;
 use deno_runtime::code_cache;
 use deno_runtime::deno_webstorage::rusqlite::params;
 
+use crate::worker::CliCodeCache;
+
 use super::cache_db::CacheDB;
 use super::cache_db::CacheDBConfiguration;
 use super::cache_db::CacheDBHash;
@@ -82,6 +86,12 @@ impl CodeCache {
   }
 }
 
+impl CliCodeCache for CodeCache {
+  fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
+    self
+  }
+}
+
 impl code_cache::CodeCache for CodeCache {
   fn get_sync(
     &self,
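The as_code_cache method above is the usual stand-in for trait-object upcasting: a subtrait method through which an Arc of the concrete type coerces into an Arc of the supertrait object. A hedged standalone sketch (the trait and type names below are illustrative, not the real cli definitions):

use std::sync::Arc;

trait CodeCache {}

trait CliCodeCache: CodeCache {
  fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache>;
}

struct SqliteCodeCache;

impl CodeCache for SqliteCodeCache {}

impl CliCodeCache for SqliteCodeCache {
  fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache> {
    // Arc<SqliteCodeCache> coerces to Arc<dyn CodeCache> here because the
    // concrete type is known inside the impl.
    self
  }
}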
@@ -1,5 +1,6 @@
 disallowed-methods = [
   { path = "reqwest::Client::new", reason = "create an HttpClient via an HttpClientProvider instead" },
+  { path = "std::process::exit", reason = "use deno_runtime::exit instead" },
 ]
 disallowed-types = [
   { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },
@@ -38,6 +38,7 @@ fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
     ModuleGraphError::ModuleError(err) => match err {
       ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
       ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
+      ModuleError::WasmParseErr(..) => "SyntaxError",
       ModuleError::UnsupportedMediaType { .. }
       | ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
       ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
@@ -42,8 +42,9 @@ use crate::npm::CliNpmResolverCreateOptions;
 use crate::npm::CliNpmResolverManagedSnapshotOption;
 use crate::npm::CreateInNpmPkgCheckerOptions;
 use crate::resolver::CjsTracker;
+use crate::resolver::CliDenoResolver;
 use crate::resolver::CliDenoResolverFs;
 use crate::resolver::CliNodeResolver;
 use crate::resolver::CliNpmReqResolver;
 use crate::resolver::CliResolver;
 use crate::resolver::CliResolverOptions;
 use crate::resolver::CliSloppyImportsResolver;
@@ -71,6 +72,9 @@ use deno_core::error::AnyError;
 use deno_core::futures::FutureExt;
 use deno_core::FeatureChecker;
 
+use deno_resolver::npm::NpmReqResolverOptions;
+use deno_resolver::DenoResolverOptions;
+use deno_resolver::NodeAndNpmReqResolver;
 use deno_runtime::deno_fs;
 use deno_runtime::deno_node::DenoFsNodeResolverEnv;
 use deno_runtime::deno_node::NodeResolver;
@@ -126,7 +130,7 @@ impl RootCertStoreProvider for CliRootCertStoreProvider {
   }
 }
 
-struct Deferred<T>(once_cell::unsync::OnceCell<T>);
+pub struct Deferred<T>(once_cell::unsync::OnceCell<T>);
 
 impl<T> Default for Deferred<T> {
   fn default() -> Self {
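For context, a minimal sketch of the Deferred pattern this hunk makes public so other modules (such as cli/lsp/resolver.rs below) can reuse it. It assumes once_cell's unsync::OnceCell, per the definition above; the real type also exposes async initializers such as get_or_try_init_async used throughout this file:

use once_cell::unsync::OnceCell;

pub struct Deferred<T>(OnceCell<T>);

impl<T> Default for Deferred<T> {
  fn default() -> Self {
    Self(OnceCell::new())
  }
}

impl<T> Deferred<T> {
  // Build the service on first access; every later call reuses it.
  pub fn get_or_init(&self, init: impl FnOnce() -> T) -> &T {
    self.0.get_or_init(init)
  }
}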
@@ -175,9 +179,9 @@ struct CliFactoryServices {
   blob_store: Deferred<Arc<BlobStore>>,
   caches: Deferred<Arc<Caches>>,
   cjs_tracker: Deferred<Arc<CjsTracker>>,
-  cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
   cli_options: Deferred<Arc<CliOptions>>,
   code_cache: Deferred<Arc<CodeCache>>,
+  deno_resolver: Deferred<Arc<CliDenoResolver>>,
   emit_cache: Deferred<Arc<EmitCache>>,
   emitter: Deferred<Arc<Emitter>>,
   feature_checker: Deferred<Arc<FeatureChecker>>,
@@ -197,6 +201,7 @@
   node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
   node_resolver: Deferred<Arc<NodeResolver>>,
   npm_cache_dir: Deferred<Arc<NpmCacheDir>>,
+  npm_req_resolver: Deferred<Arc<CliNpmReqResolver>>,
   npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
   parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
   permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
@@ -523,6 +528,31 @@ impl CliFactory {
       .await
   }
 
+  pub async fn deno_resolver(&self) -> Result<&Arc<CliDenoResolver>, AnyError> {
+    self
+      .services
+      .deno_resolver
+      .get_or_try_init_async(async {
+        let cli_options = self.cli_options()?;
+        Ok(Arc::new(CliDenoResolver::new(DenoResolverOptions {
+          in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
+          node_and_req_resolver: if cli_options.no_npm() {
+            None
+          } else {
+            Some(NodeAndNpmReqResolver {
+              node_resolver: self.node_resolver().await?.clone(),
+              npm_req_resolver: self.npm_req_resolver().await?.clone(),
+            })
+          },
+          sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
+          workspace_resolver: self.workspace_resolver().await?.clone(),
+          is_byonm: cli_options.use_byonm(),
+          maybe_vendor_dir: cli_options.vendor_dir_path(),
+        })))
+      })
+      .await
+  }
+
   pub async fn resolver(&self) -> Result<&Arc<CliResolver>, AnyError> {
     self
       .services
@@ -531,17 +561,14 @@
       async {
         let cli_options = self.cli_options()?;
         Ok(Arc::new(CliResolver::new(CliResolverOptions {
-          sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
-          node_resolver: Some(self.cli_node_resolver().await?.clone()),
           npm_resolver: if cli_options.no_npm() {
             None
           } else {
             Some(self.npm_resolver().await?.clone())
           },
-          workspace_resolver: self.workspace_resolver().await?.clone(),
           bare_node_builtins_enabled: cli_options
             .unstable_bare_node_builtins(),
-          maybe_vendor_dir: cli_options.vendor_dir_path(),
+          deno_resolver: self.deno_resolver().await?.clone(),
         })))
       }
       .boxed_local(),
@@ -624,7 +651,11 @@
       Ok(Arc::new(NodeResolver::new(
         DenoFsNodeResolverEnv::new(self.fs().clone()),
         self.in_npm_pkg_checker()?.clone(),
-        self.npm_resolver().await?.clone().into_npm_resolver(),
+        self
+          .npm_resolver()
+          .await?
+          .clone()
+          .into_npm_pkg_folder_resolver(),
         self.pkg_json_resolver().clone(),
       )))
     }
@@ -656,13 +687,36 @@
         DenoFsNodeResolverEnv::new(self.fs().clone()),
         self.in_npm_pkg_checker()?.clone(),
         node_resolver,
-        self.npm_resolver().await?.clone().into_npm_resolver(),
+        self
+          .npm_resolver()
+          .await?
+          .clone()
+          .into_npm_pkg_folder_resolver(),
         self.pkg_json_resolver().clone(),
       )))
     })
    .await
   }
 
+  pub async fn npm_req_resolver(
+    &self,
+  ) -> Result<&Arc<CliNpmReqResolver>, AnyError> {
+    self
+      .services
+      .npm_req_resolver
+      .get_or_try_init_async(async {
+        let npm_resolver = self.npm_resolver().await?;
+        Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
+          byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
+          fs: CliDenoResolverFs(self.fs().clone()),
+          in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(),
+          node_resolver: self.node_resolver().await?.clone(),
+          npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
+        })))
+      })
+      .await
+  }
+
   pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
     self.services.pkg_json_resolver.get_or_init(|| {
       Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new(
@@ -799,23 +853,6 @@
     })
   }
 
-  pub async fn cli_node_resolver(
-    &self,
-  ) -> Result<&Arc<CliNodeResolver>, AnyError> {
-    self
-      .services
-      .cli_node_resolver
-      .get_or_try_init_async(async {
-        Ok(Arc::new(CliNodeResolver::new(
-          self.fs().clone(),
-          self.in_npm_pkg_checker()?.clone(),
-          self.node_resolver().await?.clone(),
-          self.npm_resolver().await?.clone(),
-        )))
-      })
-      .await
-  }
-
   pub fn permission_desc_parser(
     &self,
   ) -> Result<&Arc<RuntimePermissionDescriptorParser>, AnyError> {
@@ -847,6 +884,7 @@
     let cli_options = self.cli_options()?;
     Ok(DenoCompileBinaryWriter::new(
       self.cjs_tracker()?,
+      self.cli_options()?,
       self.deno_dir()?,
       self.emitter()?,
       self.file_fetcher()?,
@@ -880,7 +918,6 @@
     let fs = self.fs();
     let node_resolver = self.node_resolver().await?;
     let npm_resolver = self.npm_resolver().await?;
-    let cli_node_resolver = self.cli_node_resolver().await?;
     let cli_npm_resolver = self.npm_resolver().await?.clone();
     let in_npm_pkg_checker = self.in_npm_pkg_checker()?;
     let maybe_file_watcher_communicator = if cli_options.has_hmr() {
@@ -891,6 +928,7 @@
     let node_code_translator = self.node_code_translator().await?;
     let cjs_tracker = self.cjs_tracker()?.clone();
     let pkg_json_resolver = self.pkg_json_resolver().clone();
+    let npm_req_resolver = self.npm_req_resolver().await?;
 
     Ok(CliMainWorkerFactory::new(
       self.blob_store().clone(),
@@ -918,7 +956,8 @@
       self.main_module_graph_container().await?.clone(),
       self.module_load_preparer().await?.clone(),
       node_code_translator.clone(),
-      cli_node_resolver.clone(),
+      node_resolver.clone(),
+      npm_req_resolver.clone(),
       cli_npm_resolver.clone(),
       NpmModuleLoader::new(
         self.cjs_tracker()?.clone(),
@@ -164,8 +164,19 @@ fn get_validated_scheme(
 ) -> Result<String, AnyError> {
   let scheme = specifier.scheme();
   if !SUPPORTED_SCHEMES.contains(&scheme) {
+    // NOTE(bartlomieju): this message lists additional `npm` and `jsr` schemes, but they should actually be handled
+    // before `file_fetcher.rs` APIs are even hit.
+    let mut all_supported_schemes = SUPPORTED_SCHEMES.to_vec();
+    all_supported_schemes.extend_from_slice(&["npm", "jsr"]);
+    all_supported_schemes.sort();
+    let scheme_list = all_supported_schemes
+      .iter()
+      .map(|scheme| format!(" - \"{}\"", scheme))
+      .collect::<Vec<_>>()
+      .join("\n");
     Err(generic_error(format!(
-      "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes: {SUPPORTED_SCHEMES:#?}"
+      "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes:\n{}",
+      scheme_list
     )))
   } else {
     Ok(scheme.to_string())
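A standalone sketch of the new error text (the scheme list below is illustrative only; the real SUPPORTED_SCHEMES constant lives in cli/file_fetcher.rs and is not shown in this diff):

fn unsupported_scheme_message(scheme: &str, specifier: &str) -> String {
  // Assumed subset of the supported schemes, for demonstration only.
  const SUPPORTED_SCHEMES: [&str; 5] = ["blob", "data", "file", "http", "https"];
  let mut all_supported_schemes = SUPPORTED_SCHEMES.to_vec();
  all_supported_schemes.extend_from_slice(&["npm", "jsr"]);
  all_supported_schemes.sort();
  let scheme_list = all_supported_schemes
    .iter()
    .map(|scheme| format!(" - \"{}\"", scheme))
    .collect::<Vec<_>>()
    .join("\n");
  format!(
    "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes:\n{scheme_list}"
  )
}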
@@ -188,7 +188,7 @@ pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
 fn exit_for_integrity_error(err: &ModuleError) {
   if let Some(err_message) = enhanced_integrity_error_message(err) {
     log::error!("{} {}", colors::red("error:"), err_message);
-    std::process::exit(10);
+    deno_runtime::exit(10);
   }
 }
 
@@ -177,6 +177,52 @@ function isCanvasLike(obj) {
   return obj !== null && typeof obj === "object" && "toDataURL" in obj;
 }
 
+function isJpg(obj) {
+  // Check if obj is a Uint8Array
+  if (!(obj instanceof Uint8Array)) {
+    return false;
+  }
+
+  // JPG files start with the magic bytes FF D8
+  if (obj.length < 2 || obj[0] !== 0xFF || obj[1] !== 0xD8) {
+    return false;
+  }
+
+  // JPG files end with the magic bytes FF D9
+  if (
+    obj.length < 2 || obj[obj.length - 2] !== 0xFF ||
+    obj[obj.length - 1] !== 0xD9
+  ) {
+    return false;
+  }
+
+  return true;
+}
+
+function isPng(obj) {
+  // Check if obj is a Uint8Array
+  if (!(obj instanceof Uint8Array)) {
+    return false;
+  }
+
+  // PNG files start with a specific 8-byte signature
+  const pngSignature = [137, 80, 78, 71, 13, 10, 26, 10];
+
+  // Check if the array is at least as long as the signature
+  if (obj.length < pngSignature.length) {
+    return false;
+  }
+
+  // Check each byte of the signature
+  for (let i = 0; i < pngSignature.length; i++) {
+    if (obj[i] !== pngSignature[i]) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
 /** Possible HTML and SVG Elements */
 function isSVGElementLike(obj) {
   return obj !== null && typeof obj === "object" && "outerHTML" in obj &&
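The two helpers above are plain magic-byte sniffing, and the signatures are the standard ones for these formats. For readers following the Rust side of the codebase, an equivalent hedged sketch:

fn is_jpeg(bytes: &[u8]) -> bool {
  // JPEG data starts with FF D8 and ends with FF D9.
  bytes.len() >= 4
    && bytes.starts_with(&[0xFF, 0xD8])
    && bytes.ends_with(&[0xFF, 0xD9])
}

fn is_png(bytes: &[u8]) -> bool {
  // PNG data starts with the fixed 8-byte signature.
  bytes.starts_with(&[137, 80, 78, 71, 13, 10, 26, 10])
}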
@@ -233,6 +279,16 @@ async function format(obj) {
   if (isDataFrameLike(obj)) {
     return extractDataFrame(obj);
   }
+  if (isJpg(obj)) {
+    return {
+      "image/jpeg": core.ops.op_base64_encode(obj),
+    };
+  }
+  if (isPng(obj)) {
+    return {
+      "image/png": core.ops.op_base64_encode(obj),
+    };
+  }
   if (isSVGElementLike(obj)) {
     return {
       "image/svg+xml": obj.outerHTML,
@@ -314,6 +370,28 @@ const html = createTaggedTemplateDisplayable("text/html");
  */
 const svg = createTaggedTemplateDisplayable("image/svg+xml");
 
+function image(obj) {
+  if (typeof obj === "string") {
+    try {
+      obj = Deno.readFileSync(obj);
+    } catch {
+      // pass
+    }
+  }
+
+  if (isJpg(obj)) {
+    return makeDisplayable({ "image/jpeg": core.ops.op_base64_encode(obj) });
+  }
+
+  if (isPng(obj)) {
+    return makeDisplayable({ "image/png": core.ops.op_base64_encode(obj) });
+  }
+
+  throw new TypeError(
+    "Object is not a valid image or a path to an image. `Deno.jupyter.image` supports displaying JPG or PNG images.",
+  );
+}
+
 function isMediaBundle(obj) {
   if (obj == null || typeof obj !== "object" || Array.isArray(obj)) {
     return false;
@@ -465,6 +543,7 @@ function enableJupyter() {
     md,
     html,
     svg,
+    image,
     $display,
   };
 }
@@ -344,9 +344,8 @@ impl<'a> TsResponseImportMapper<'a> {
     {
       let in_npm_pkg = self
         .resolver
-        .maybe_node_resolver(Some(&self.file_referrer))
-        .map(|n| n.in_npm_package(specifier))
-        .unwrap_or(false);
+        .in_npm_pkg_checker(Some(&self.file_referrer))
+        .in_npm_package(specifier);
       if in_npm_pkg {
         if let Ok(Some(pkg_id)) =
           npm_resolver.resolve_pkg_id_from_specifier(specifier)
@@ -5,6 +5,7 @@ use super::cache::LspCache;
 use super::config::Config;
 use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
+use super::resolver::ScopeDepInfo;
 use super::resolver::SingleReferrerGraphResolver;
 use super::testing::TestCollector;
 use super::testing::TestModule;
@@ -38,7 +39,6 @@ use indexmap::IndexSet;
 use node_resolver::NodeModuleKind;
 use std::borrow::Cow;
 use std::collections::BTreeMap;
-use std::collections::BTreeSet;
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::fs;
@@ -883,8 +883,13 @@ impl FileSystemDocuments {
     let doc = if specifier.scheme() == "file" {
       let path = url_to_file_path(specifier).ok()?;
       let bytes = fs::read(path).ok()?;
-      let content =
-        deno_graph::source::decode_owned_source(specifier, bytes, None).ok()?;
+      let content = bytes_to_content(
+        specifier,
+        MediaType::from_specifier(specifier),
+        bytes,
+        None,
+      )
+      .ok()?;
       Document::new(
         specifier.clone(),
         content.into(),
@@ -923,19 +928,24 @@
         specifier,
         Some(&cached_file.metadata.headers),
       );
-      let content = deno_graph::source::decode_owned_source(
+      let media_type = resolve_media_type(
         specifier,
+        Some(&cached_file.metadata.headers),
+        None,
+      );
+      let content = bytes_to_content(
+        specifier,
+        media_type,
         cached_file.content,
         maybe_charset,
       )
      .ok()?;
+      let maybe_headers = Some(cached_file.metadata.headers);
       Document::new(
         specifier.clone(),
         content.into(),
         None,
         None,
-        Some(cached_file.metadata.headers),
+        maybe_headers,
         is_cjs_resolver,
         resolver.clone(),
         config.clone(),
@@ -989,12 +999,7 @@ pub struct Documents {
   open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
   /// Documents stored on the file system.
   file_system_docs: Arc<FileSystemDocuments>,
-  /// The npm package requirements found in npm specifiers.
-  npm_reqs_by_scope:
-    Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
-  /// Config scopes that contain a node: specifier such that a @types/node
-  /// package should be injected.
-  scopes_with_node_specifier: Arc<HashSet<Option<ModuleSpecifier>>>,
+  dep_info_by_scope: Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>>,
 }
 
 impl Documents {
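ScopeDepInfo itself is defined in cli/lsp/resolver.rs and does not appear in this diff; from the fields the hunks above touch, it plausibly looks like the following hedged reconstruction (field and import paths are assumptions):

use std::collections::BTreeSet;
use std::collections::HashMap;

use deno_ast::ModuleSpecifier;
use deno_semver::package::PackageReq;

// Hypothetical reconstruction: one of these per config scope.
#[derive(Debug, Default, Clone)]
pub struct ScopeDepInfo {
  pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>,
  pub npm_reqs: BTreeSet<PackageReq>,
  pub has_node_specifier: bool,
}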
@@ -1157,17 +1162,20 @@
     false
   }
 
-  pub fn npm_reqs_by_scope(
+  pub fn dep_info_by_scope(
     &mut self,
-  ) -> Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>> {
-    self.calculate_npm_reqs_if_dirty();
-    self.npm_reqs_by_scope.clone()
+  ) -> Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>> {
+    self.calculate_dep_info_if_dirty();
+    self.dep_info_by_scope.clone()
   }
 
-  pub fn scopes_with_node_specifier(
-    &self,
-  ) -> &Arc<HashSet<Option<ModuleSpecifier>>> {
-    &self.scopes_with_node_specifier
+  pub fn scopes_with_node_specifier(&self) -> HashSet<Option<ModuleSpecifier>> {
+    self
+      .dep_info_by_scope
+      .iter()
+      .filter(|(_, i)| i.has_node_specifier)
+      .map(|(s, _)| s.clone())
+      .collect::<HashSet<_>>()
   }
 
   /// Return a document for the specifier.
@@ -1410,34 +1418,46 @@
   /// Iterate through the documents, building a map where the key is a unique
   /// document and the value is a set of specifiers that depend on that
   /// document.
-  fn calculate_npm_reqs_if_dirty(&mut self) {
-    let mut npm_reqs_by_scope: BTreeMap<_, BTreeSet<_>> = Default::default();
-    let mut scopes_with_specifier = HashSet::new();
+  fn calculate_dep_info_if_dirty(&mut self) {
+    let mut dep_info_by_scope: BTreeMap<_, ScopeDepInfo> = Default::default();
     let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
     if !is_fs_docs_dirty && !self.dirty {
       return;
     }
     let mut visit_doc = |doc: &Arc<Document>| {
       let scope = doc.scope();
-      let reqs = npm_reqs_by_scope.entry(scope.cloned()).or_default();
+      let dep_info = dep_info_by_scope.entry(scope.cloned()).or_default();
       for dependency in doc.dependencies().values() {
-        if let Some(dep) = dependency.get_code() {
+        let code_specifier = dependency.get_code();
+        let type_specifier = dependency.get_type();
+        if let Some(dep) = code_specifier {
           if dep.scheme() == "node" {
-            scopes_with_specifier.insert(scope.cloned());
+            dep_info.has_node_specifier = true;
           }
           if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            reqs.insert(reference.into_inner().req);
+            dep_info.npm_reqs.insert(reference.into_inner().req);
           }
         }
-        if let Some(dep) = dependency.get_type() {
+        if let Some(dep) = type_specifier {
           if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            reqs.insert(reference.into_inner().req);
+            dep_info.npm_reqs.insert(reference.into_inner().req);
           }
         }
+        if dependency.maybe_deno_types_specifier.is_some() {
+          if let (Some(code_specifier), Some(type_specifier)) =
+            (code_specifier, type_specifier)
+          {
+            if MediaType::from_specifier(type_specifier).is_declaration() {
+              dep_info
+                .deno_types_to_code_resolutions
+                .insert(type_specifier.clone(), code_specifier.clone());
+            }
+          }
+        }
       }
       if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
         if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-          reqs.insert(reference.into_inner().req);
+          dep_info.npm_reqs.insert(reference.into_inner().req);
         }
       }
     };
@@ -1448,14 +1468,49 @@
       visit_doc(doc);
     }
 
-    // fill the reqs from the lockfile
     for (scope, config_data) in self.config.tree.data_by_scope().as_ref() {
+      let dep_info = dep_info_by_scope.entry(Some(scope.clone())).or_default();
+      (|| {
+        let config_file = config_data.maybe_deno_json()?;
+        let jsx_config =
+          config_file.to_maybe_jsx_import_source_config().ok()??;
+        let type_specifier = jsx_config.default_types_specifier.as_ref()?;
+        let code_specifier = jsx_config.default_specifier.as_ref()?;
+        let cli_resolver = self.resolver.as_cli_resolver(Some(scope));
+        let range = deno_graph::Range {
+          specifier: jsx_config.base_url.clone(),
+          start: deno_graph::Position::zeroed(),
+          end: deno_graph::Position::zeroed(),
+        };
+        let type_specifier = cli_resolver
+          .resolve(
+            type_specifier,
+            &range,
+            // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+            deno_package_json::NodeModuleKind::Esm,
+            ResolutionMode::Types,
+          )
+          .ok()?;
+        let code_specifier = cli_resolver
+          .resolve(
+            code_specifier,
+            &range,
+            // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+            deno_package_json::NodeModuleKind::Esm,
+            ResolutionMode::Execution,
+          )
+          .ok()?;
+        dep_info
+          .deno_types_to_code_resolutions
+          .insert(type_specifier, code_specifier);
+        Some(())
+      })();
+      // fill the reqs from the lockfile
       if let Some(lockfile) = config_data.lockfile.as_ref() {
-        let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
         let lockfile = lockfile.lock();
         for dep_req in lockfile.content.packages.specifiers.keys() {
           if dep_req.kind == deno_semver::package::PackageKind::Npm {
-            reqs.insert(dep_req.req.clone());
+            dep_info.npm_reqs.insert(dep_req.req.clone());
           }
         }
       }
@@ -1464,15 +1519,22 @@
     // Ensure a @types/node package exists when any module uses a node: specifier.
     // Unlike on the command line, here we just add @types/node to the npm package
     // requirements since this won't end up in the lockfile.
-    for scope in &scopes_with_specifier {
-      let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default();
-      if !reqs.iter().any(|r| r.name == "@types/node") {
-        reqs.insert(PackageReq::from_str("@types/node").unwrap());
+    for dep_info in dep_info_by_scope.values_mut() {
+      if dep_info.has_node_specifier
+        && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node")
+      {
+        dep_info
+          .npm_reqs
+          .insert(PackageReq::from_str("@types/node").unwrap());
      }
    }
 
-    self.npm_reqs_by_scope = Arc::new(npm_reqs_by_scope);
-    self.scopes_with_node_specifier = Arc::new(scopes_with_specifier);
+    self.dep_info_by_scope = Arc::new(
+      dep_info_by_scope
+        .into_iter()
+        .map(|(s, i)| (s, Arc::new(i)))
+        .collect(),
+    );
     self.dirty = false;
   }
 
@@ -1654,6 +1716,24 @@ fn analyze_module(
   }
 }
 
+fn bytes_to_content(
+  specifier: &ModuleSpecifier,
+  media_type: MediaType,
+  bytes: Vec<u8>,
+  maybe_charset: Option<&str>,
+) -> Result<String, AnyError> {
+  if media_type == MediaType::Wasm {
+    // we use the dts representation for Wasm modules
+    Ok(deno_graph::source::wasm::wasm_module_to_dts(&bytes)?)
+  } else {
+    Ok(deno_graph::source::decode_owned_source(
+      specifier,
+      bytes,
+      maybe_charset,
+    )?)
+  }
+}
+
 #[cfg(test)]
 mod tests {
   use super::*;
@@ -1036,7 +1036,7 @@ impl Inner {
 
     // refresh the npm specifiers because it might have discovered
     // a @types/node package and now's a good time to do that anyway
-    self.refresh_npm_specifiers().await;
+    self.refresh_dep_info().await;
 
     self.project_changed([], true);
   }
@@ -1082,7 +1082,7 @@
     );
     if document.is_diagnosable() {
       self.project_changed([(document.specifier(), ChangeKind::Opened)], false);
-      self.refresh_npm_specifiers().await;
+      self.refresh_dep_info().await;
       self.diagnostics_server.invalidate(&[specifier]);
       self.send_diagnostics_update();
       self.send_testing_update();
@@ -1103,8 +1103,8 @@
       Ok(document) => {
         if document.is_diagnosable() {
           let old_scopes_with_node_specifier =
-            self.documents.scopes_with_node_specifier().clone();
-          self.refresh_npm_specifiers().await;
+            self.documents.scopes_with_node_specifier();
+          self.refresh_dep_info().await;
           let mut config_changed = false;
           if !self
             .documents
@@ -1155,11 +1155,13 @@
     }));
   }
 
-  async fn refresh_npm_specifiers(&mut self) {
-    let package_reqs = self.documents.npm_reqs_by_scope();
+  async fn refresh_dep_info(&mut self) {
+    let dep_info_by_scope = self.documents.dep_info_by_scope();
     let resolver = self.resolver.clone();
     // spawn due to the lsp's `Send` requirement
-    spawn(async move { resolver.set_npm_reqs(&package_reqs).await })
+    spawn(
+      async move { resolver.set_dep_info_by_scope(&dep_info_by_scope).await },
+    )
     .await
     .ok();
   }
@@ -1180,7 +1182,7 @@
       .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
     self.diagnostics_state.clear(&specifier);
     if self.is_diagnosable(&specifier) {
-      self.refresh_npm_specifiers().await;
+      self.refresh_dep_info().await;
      self.diagnostics_server.invalidate(&[specifier.clone()]);
      self.send_diagnostics_update();
      self.send_testing_update();
@@ -1394,13 +1396,18 @@
       .fmt_config_for_specifier(&specifier)
       .options
       .clone();
+    let config_data = self.config.tree.data_for_specifier(&specifier);
+    if !config_data.is_some_and(|d| d.maybe_deno_json().is_some()) {
       fmt_options.use_tabs = Some(!params.options.insert_spaces);
       fmt_options.indent_width = Some(params.options.tab_size as u8);
-    let config_data = self.config.tree.data_for_specifier(&specifier);
+    }
     let unstable_options = UnstableFmtOptions {
       component: config_data
        .map(|d| d.unstable.contains("fmt-component"))
        .unwrap_or(false),
+      sql: config_data
+        .map(|d| d.unstable.contains("fmt-sql"))
+        .unwrap_or(false),
     };
     let document = document.clone();
     move || {
@@ -3600,15 +3607,16 @@
 
     if byonm {
       roots.retain(|s| s.scheme() != "npm");
-    } else if let Some(npm_reqs) = self
+    } else if let Some(dep_info) = self
       .documents
-      .npm_reqs_by_scope()
+      .dep_info_by_scope()
       .get(&config_data.map(|d| d.scope.as_ref().clone()))
     {
       // always include the npm packages since resolution of one npm package
       // might affect the resolution of other npm packages
       roots.extend(
-        npm_reqs
+        dep_info
+          .npm_reqs
          .iter()
          .map(|req| ModuleSpecifier::parse(&format!("npm:{}", req)).unwrap()),
       );
@@ -3629,9 +3637,8 @@
       deno_json_cache: None,
       pkg_json_cache: None,
       workspace_cache: None,
-      config_parse_options: deno_config::deno_json::ConfigParseOptions {
-        include_task_comments: false,
-      },
+      config_parse_options:
+        deno_config::deno_json::ConfigParseOptions::default(),
       additional_config_file_names: &[],
       discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"),
       maybe_vendor_override: if force_global_cache {
@@ -3686,7 +3693,7 @@
 
   async fn post_cache(&mut self) {
     self.resolver.did_cache();
-    self.refresh_npm_specifiers().await;
+    self.refresh_dep_info().await;
    self.diagnostics_server.invalidate_all();
    self.project_changed([], true);
    self.ts_server.cleanup_semantic_cache(self.snapshot()).await;
@@ -11,7 +11,7 @@ pub fn start(parent_process_id: u32) {
     std::thread::sleep(Duration::from_secs(10));
 
     if !is_process_active(parent_process_id) {
-      std::process::exit(1);
+      deno_runtime::exit(1);
     }
   });
 }
|
@@ -7,6 +7,7 @@ use deno_cache_dir::HttpCache;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_graph::source::ResolutionMode;
use deno_graph::GraphImport;

@@ -15,6 +16,9 @@ use deno_graph::Range;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_to_file_path;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;

@@ -43,6 +47,7 @@ use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::factory::Deferred;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;

@@ -57,8 +62,9 @@ use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::IsCjsResolver;

@@ -71,26 +77,33 @@ use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)]
struct LspScopeResolver {
resolver: Arc<CliResolver>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>,
pkg_json_resolver: Option<Arc<PackageJsonResolver>>,
node_resolver: Option<Arc<NodeResolver>>,
npm_pkg_req_resolver: Option<Arc<CliNpmReqResolver>>,
pkg_json_resolver: Arc<PackageJsonResolver>,
redirect_resolver: Option<Arc<RedirectResolver>>,
graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
dep_info: Arc<Mutex<Arc<ScopeDepInfo>>>,
package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
config_data: Option<Arc<ConfigData>>,
}

impl Default for LspScopeResolver {
fn default() -> Self {
let factory = ResolverFactory::new(None);
Self {
resolver: create_cli_resolver(None, None, None),
resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
jsr_resolver: None,
npm_resolver: None,
node_resolver: None,
pkg_json_resolver: None,
npm_pkg_req_resolver: None,
pkg_json_resolver: factory.pkg_json_resolver().clone(),
redirect_resolver: None,
graph_imports: Default::default(),
dep_info: Default::default(),
package_json_deps_by_resolution: Default::default(),
config_data: None,
}

@@ -103,35 +116,16 @@ impl LspScopeResolver {
cache: &LspCache,
http_client_provider: Option<&Arc<HttpClientProvider>>,
) -> Self {
let mut npm_resolver = None;
let mut node_resolver = None;
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
if let Some(http_client) = http_client_provider {
npm_resolver = create_npm_resolver(
config_data.map(|d| d.as_ref()),
cache,
http_client,
&pkg_json_resolver,
)
.await;
if let Some(npm_resolver) = &npm_resolver {
let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
node_resolver = Some(create_node_resolver(
fs.clone(),
in_npm_pkg_checker,
npm_resolver,
pkg_json_resolver.clone(),
));
let mut factory = ResolverFactory::new(config_data);
if let Some(http_client_provider) = http_client_provider {
factory.init_npm_resolver(http_client_provider, cache).await;
}
}
let cli_resolver = create_cli_resolver(
config_data.map(|d| d.as_ref()),
npm_resolver.as_ref(),
node_resolver.as_ref(),
);
let in_npm_pkg_checker = factory.in_npm_pkg_checker().clone();
let npm_resolver = factory.npm_resolver().cloned();
let node_resolver = factory.node_resolver().cloned();
let npm_pkg_req_resolver = factory.npm_pkg_req_resolver().cloned();
let cli_resolver = factory.cli_resolver().clone();
let pkg_json_resolver = factory.pkg_json_resolver().clone();
let jsr_resolver = Some(Arc::new(JsrCacheResolver::new(
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.map(|d| d.as_ref()),

@@ -171,7 +165,7 @@ impl LspScopeResolver {
})
.unwrap_or_default();
let package_json_deps_by_resolution = (|| {
let node_resolver = node_resolver.as_ref()?;
let npm_pkg_req_resolver = npm_pkg_req_resolver.as_ref()?;
let package_json = config_data?.maybe_pkg_json()?;
let referrer = package_json.specifier();
let dependencies = package_json.dependencies.as_ref()?;

@@ -181,7 +175,7 @@ impl LspScopeResolver {
let req_ref =
NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?;
let specifier = into_specifier_and_media_type(Some(
node_resolver
npm_pkg_req_resolver
.resolve_req_reference(
&req_ref,
&referrer,

@@ -189,6 +183,15 @@ impl LspScopeResolver {
NodeModuleKind::Esm,
NodeResolutionMode::Types,
)
.or_else(|_| {
npm_pkg_req_resolver.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
NodeModuleKind::Esm,
NodeResolutionMode::Execution,
)
})
.ok()?,
))
.0;

@@ -201,47 +204,38 @@ impl LspScopeResolver {
Arc::new(package_json_deps_by_resolution.unwrap_or_default());
Self {
resolver: cli_resolver,
in_npm_pkg_checker,
jsr_resolver,
npm_pkg_req_resolver,
npm_resolver,
node_resolver,
pkg_json_resolver: Some(pkg_json_resolver),
pkg_json_resolver,
redirect_resolver,
graph_imports,
dep_info: Default::default(),
package_json_deps_by_resolution,
config_data: config_data.cloned(),
}
}

fn snapshot(&self) -> Arc<Self> {
let mut factory = ResolverFactory::new(self.config_data.as_ref());
let npm_resolver =
self.npm_resolver.as_ref().map(|r| r.clone_snapshotted());
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
let mut node_resolver = None;
if let Some(npm_resolver) = &npm_resolver {
let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
node_resolver = Some(create_node_resolver(
fs,
in_npm_pkg_checker,
npm_resolver,
pkg_json_resolver.clone(),
));
factory.set_npm_resolver(npm_resolver.clone());
}
let graph_resolver = create_cli_resolver(
self.config_data.as_deref(),
npm_resolver.as_ref(),
node_resolver.as_ref(),
);
Arc::new(Self {
resolver: graph_resolver,
resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
jsr_resolver: self.jsr_resolver.clone(),
npm_resolver,
node_resolver,
npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
npm_resolver: factory.npm_resolver().cloned(),
node_resolver: factory.node_resolver().cloned(),
redirect_resolver: self.redirect_resolver.clone(),
pkg_json_resolver: Some(pkg_json_resolver),
pkg_json_resolver: factory.pkg_json_resolver().clone(),
graph_imports: self.graph_imports.clone(),
dep_info: self.dep_info.clone(),
package_json_deps_by_resolution: self
.package_json_deps_by_resolution
.clone(),

@@ -308,19 +302,24 @@ impl LspResolver {
}
}

pub async fn set_npm_reqs(
pub async fn set_dep_info_by_scope(
&self,
reqs: &BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>,
dep_info_by_scope: &Arc<
BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>,
>,
) {
for (scope, resolver) in [(None, &self.unscoped)]
.into_iter()
.chain(self.by_scope.iter().map(|(s, r)| (Some(s), r)))
{
let dep_info = dep_info_by_scope.get(&scope.cloned());
if let Some(dep_info) = dep_info {
*resolver.dep_info.lock() = dep_info.clone();
}
if let Some(npm_resolver) = resolver.npm_resolver.as_ref() {
if let Some(npm_resolver) = npm_resolver.as_managed() {
let reqs = reqs
.get(&scope.cloned())
.map(|reqs| reqs.iter().cloned().collect::<Vec<_>>())
let reqs = dep_info
.map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default();
if let Err(err) = npm_resolver.set_package_reqs(&reqs).await {
lsp_warn!("Could not set npm package requirements: {:#}", err);

@@ -354,12 +353,12 @@ impl LspResolver {
resolver.config_data.as_ref()
}

pub fn maybe_node_resolver(
pub fn in_npm_pkg_checker(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&Arc<CliNodeResolver>> {
) -> &Arc<dyn InNpmPackageChecker> {
let resolver = self.get_scope_resolver(file_referrer);
resolver.node_resolver.as_ref()
&resolver.in_npm_pkg_checker
}

pub fn maybe_managed_npm_resolver(

@@ -429,9 +428,9 @@ impl LspResolver {
file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?;
let npm_pkg_req_resolver = resolver.npm_pkg_req_resolver.as_ref()?;
Some(into_specifier_and_media_type(Some(
node_resolver
npm_pkg_req_resolver
.resolve_req_reference(
req_ref,
referrer,

@@ -454,6 +453,19 @@ impl LspResolver {
.cloned()
}

pub fn deno_types_to_code_resolution(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<ModuleSpecifier> {
let resolver = self.get_scope_resolver(file_referrer);
let dep_info = resolver.dep_info.lock().clone();
dep_info
.deno_types_to_code_resolutions
.get(specifier)
.cloned()
}

pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
// consider any /node_modules/ directory as being in the node_modules

@@ -483,10 +495,11 @@ impl LspResolver {
referrer_kind: NodeModuleKind,
) -> bool {
let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else {
let Some(npm_pkg_req_resolver) = resolver.npm_pkg_req_resolver.as_ref()
else {
return false;
};
node_resolver
npm_pkg_req_resolver
.resolve_if_for_npm_pkg(
specifier_text,
referrer,

@@ -503,10 +516,9 @@ impl LspResolver {
referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
let resolver = self.get_scope_resolver(Some(referrer));
let Some(pkg_json_resolver) = resolver.pkg_json_resolver.as_ref() else {
return Ok(None);
};
pkg_json_resolver.get_closest_package_json(referrer)
resolver
.pkg_json_resolver
.get_closest_package_json(referrer)
}

pub fn resolve_redirects(

@@ -558,18 +570,54 @@ impl LspResolver {
}
}

async fn create_npm_resolver(
config_data: Option<&ConfigData>,
cache: &LspCache,
#[derive(Debug, Default, Clone)]
pub struct ScopeDepInfo {
pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>,
pub npm_reqs: BTreeSet<PackageReq>,
pub has_node_specifier: bool,
}

#[derive(Default)]
struct ResolverFactoryServices {
cli_resolver: Deferred<Arc<CliResolver>>,
in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
node_resolver: Deferred<Option<Arc<NodeResolver>>>,
npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
}

struct ResolverFactory<'a> {
config_data: Option<&'a Arc<ConfigData>>,
fs: Arc<dyn deno_fs::FileSystem>,
pkg_json_resolver: Arc<PackageJsonResolver>,
services: ResolverFactoryServices,
}

impl<'a> ResolverFactory<'a> {
pub fn new(config_data: Option<&'a Arc<ConfigData>>) -> Self {
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
Self {
config_data,
fs,
pkg_json_resolver,
services: Default::default(),
}
}

async fn init_npm_resolver(
&mut self,
http_client_provider: &Arc<HttpClientProvider>,
pkg_json_resolver: &Arc<PackageJsonResolver>,
) -> Option<Arc<dyn CliNpmResolver>> {
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false);
cache: &LspCache,
) {
let enable_byonm = self.config_data.map(|d| d.byonm).unwrap_or(false);
let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)),
pkg_json_resolver: pkg_json_resolver.clone(),
root_node_modules_dir: config_data.and_then(|config_data| {
pkg_json_resolver: self.pkg_json_resolver.clone(),
root_node_modules_dir: self.config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| {
url_to_file_path(&config_data.scope)
.ok()

@@ -578,17 +626,18 @@ async fn create_npm_resolver(
}),
})
} else {
let npmrc = config_data
let npmrc = self
.config_data
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc);
let npm_cache_dir = Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(&deno_fs::RealFs),
&DenoCacheEnvFsAdapter(self.fs.as_ref()),
cache.deno_dir().npm_folder_path(),
npmrc.get_all_known_registries_urls(),
));
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions {
http_client_provider: http_client_provider.clone(),
snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) {
snapshot: match self.config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),

@@ -607,7 +656,8 @@ async fn create_npm_resolver(
// the user is typing.
cache_setting: CacheSetting::Only,
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly),
maybe_node_modules_path: config_data
maybe_node_modules_path: self
.config_data
.and_then(|d| d.node_modules_dir.clone()),
// only used for top level install, so we can ignore this
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()),

@@ -616,57 +666,38 @@ async fn create_npm_resolver(
lifecycle_scripts: Default::default(),
})
};
Some(create_cli_npm_resolver_for_lsp(options).await)
}
self.set_npm_resolver(create_cli_npm_resolver_for_lsp(options).await);
}

fn create_in_npm_pkg_checker(
npm_resolver: &Arc<dyn CliNpmResolver>,
) -> Arc<dyn InNpmPackageChecker> {
crate::npm::create_in_npm_pkg_checker(match npm_resolver.as_inner() {
crate::npm::InnerCliNpmResolverRef::Byonm(_) => {
CreateInNpmPkgCheckerOptions::Byonm
pub fn set_npm_resolver(&mut self, npm_resolver: Arc<dyn CliNpmResolver>) {
self.services.npm_resolver = Some(npm_resolver);
}
crate::npm::InnerCliNpmResolverRef::Managed(m) => {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.maybe_node_modules_path(),
},
)

pub fn npm_resolver(&self) -> Option<&Arc<dyn CliNpmResolver>> {
self.services.npm_resolver.as_ref()
}

pub fn cli_resolver(&self) -> &Arc<CliResolver> {
self.services.cli_resolver.get_or_init(|| {
let npm_req_resolver = self.npm_pkg_req_resolver().cloned();
let deno_resolver = Arc::new(CliDenoResolver::new(DenoResolverOptions {
in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
node_and_req_resolver: match (self.node_resolver(), npm_req_resolver) {
(Some(node_resolver), Some(npm_req_resolver)) => {
Some(NodeAndNpmReqResolver {
node_resolver: node_resolver.clone(),
npm_req_resolver,
})
}

fn create_node_resolver(
fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: &Arc<dyn CliNpmResolver>,
pkg_json_resolver: Arc<PackageJsonResolver>,
) -> Arc<CliNodeResolver> {
let node_resolver_inner = Arc::new(NodeResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
in_npm_pkg_checker.clone(),
npm_resolver.clone().into_npm_resolver(),
pkg_json_resolver.clone(),
));
Arc::new(CliNodeResolver::new(
fs,
in_npm_pkg_checker,
node_resolver_inner,
npm_resolver.clone(),
))
}

fn create_cli_resolver(
config_data: Option<&ConfigData>,
npm_resolver: Option<&Arc<dyn CliNpmResolver>>,
node_resolver: Option<&Arc<CliNodeResolver>>,
) -> Arc<CliResolver> {
Arc::new(CliResolver::new(CliResolverOptions {
node_resolver: node_resolver.cloned(),
npm_resolver: npm_resolver.cloned(),
workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else(
|| {
}
_ => None,
},
sloppy_imports_resolver: self
.config_data
.and_then(|d| d.sloppy_imports_resolver.clone()),
workspace_resolver: self
.config_data
.map(|d| d.resolver.clone())
.unwrap_or_else(|| {
Arc::new(WorkspaceResolver::new_raw(
// this is fine because this is only used before initialization
Arc::new(ModuleSpecifier::parse("file:///").unwrap()),

@@ -675,14 +706,77 @@ fn create_cli_resolver(
Vec::new(),
PackageJsonDepResolution::Disabled,
))
},
),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: config_data
}),
is_byonm: self.config_data.map(|d| d.byonm).unwrap_or(false),
maybe_vendor_dir: self.config_data.and_then(|d| d.vendor_dir.as_ref()),
}));
Arc::new(CliResolver::new(CliResolverOptions {
deno_resolver,
npm_resolver: self.npm_resolver().cloned(),
bare_node_builtins_enabled: self
.config_data
.is_some_and(|d| d.unstable.contains("bare-node-builtins")),
sloppy_imports_resolver: config_data
.and_then(|d| d.sloppy_imports_resolver.clone()),
}))
})
}

pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
&self.pkg_json_resolver
}

pub fn in_npm_pkg_checker(&self) -> &Arc<dyn InNpmPackageChecker> {
self.services.in_npm_pkg_checker.get_or_init(|| {
crate::npm::create_in_npm_pkg_checker(
match self.services.npm_resolver.as_ref().map(|r| r.as_inner()) {
Some(crate::npm::InnerCliNpmResolverRef::Byonm(_)) | None => {
CreateInNpmPkgCheckerOptions::Byonm
}
Some(crate::npm::InnerCliNpmResolverRef::Managed(m)) => {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.maybe_node_modules_path(),
},
)
}
},
)
})
}

pub fn node_resolver(&self) -> Option<&Arc<NodeResolver>> {
self
.services
.node_resolver
.get_or_init(|| {
let npm_resolver = self.services.npm_resolver.as_ref()?;
Some(Arc::new(NodeResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(self.fs.clone()),
self.in_npm_pkg_checker().clone(),
npm_resolver.clone().into_npm_pkg_folder_resolver(),
self.pkg_json_resolver.clone(),
)))
})
.as_ref()
}

pub fn npm_pkg_req_resolver(&self) -> Option<&Arc<CliNpmReqResolver>> {
self
.services
.npm_pkg_req_resolver
.get_or_init(|| {
let node_resolver = self.node_resolver()?;
let npm_resolver = self.npm_resolver()?;
Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
fs: CliDenoResolverFs(self.fs.clone()),
in_npm_pkg_checker: self.in_npm_pkg_checker().clone(),
node_resolver: node_resolver.clone(),
npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
})))
})
.as_ref()
}
}

#[derive(Debug, Eq, PartialEq)]
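The ResolverFactory introduced above builds each service at most once through Deferred<T>::get_or_init, so dependent services (cli_resolver, node_resolver, npm_pkg_req_resolver) can request one another lazily. Deferred is a CLI-internal helper; a minimal sketch of the same lazy-cell pattern using the standard library's OnceLock (not the actual implementation):

use std::sync::OnceLock;

#[derive(Default)]
struct Deferred<T>(OnceLock<T>);

impl<T> Deferred<T> {
    fn get_or_init(&self, create: impl FnOnce() -> T) -> &T {
        // Runs `create` on the first call only; later calls return the
        // already-initialized value.
        self.0.get_or_init(create)
    }
}

fn main() {
    let cell: Deferred<String> = Deferred::default();
    let a = cell.get_or_init(|| "expensive resolver".to_string());
    let b = cell.get_or_init(|| unreachable!("already initialized"));
    assert!(std::ptr::eq(a, b));
}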
@@ -34,6 +34,7 @@ use crate::util::path::relative_specifier;
use crate::util::path::to_percent_decoded_str;
use crate::util::result::InfallibleResultExt;
use crate::util::v8::convert;
use crate::worker::create_isolate_create_params;
use deno_core::convert::Smi;
use deno_core::convert::ToV8;
use deno_core::error::StdAnyError;

@@ -3416,9 +3417,18 @@ fn parse_code_actions(
additional_text_edits.extend(change.text_changes.iter().map(|tc| {
let mut text_edit = tc.as_text_edit(asset_or_doc.line_index());
if let Some(specifier_rewrite) = &data.specifier_rewrite {
text_edit.new_text = text_edit
.new_text
.replace(&specifier_rewrite.0, &specifier_rewrite.1);
text_edit.new_text = text_edit.new_text.replace(
&specifier_rewrite.old_specifier,
&specifier_rewrite.new_specifier,
);
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
text_edit.new_text = format!(
"// @deno-types=\"{}\"\n{}",
deno_types_specifier, &text_edit.new_text
);
}
}
text_edit
}));

@@ -3577,17 +3587,23 @@ impl CompletionEntryDetails {
let mut text_edit = original_item.text_edit.clone();
if let Some(specifier_rewrite) = &data.specifier_rewrite {
if let Some(text_edit) = &mut text_edit {
match text_edit {
lsp::CompletionTextEdit::Edit(text_edit) => {
text_edit.new_text = text_edit
.new_text
.replace(&specifier_rewrite.0, &specifier_rewrite.1);
}
let new_text = match text_edit {
lsp::CompletionTextEdit::Edit(text_edit) => &mut text_edit.new_text,
lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => {
insert_replace_edit.new_text = insert_replace_edit
.new_text
.replace(&specifier_rewrite.0, &specifier_rewrite.1);
&mut insert_replace_edit.new_text
}
};
*new_text = new_text.replace(
&specifier_rewrite.old_specifier,
&specifier_rewrite.new_specifier,
);
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
*new_text = format!(
"// @deno-types=\"{}\"\n{}",
deno_types_specifier, new_text
);
}
}
}

@@ -3692,6 +3708,13 @@ impl CompletionInfo {
}
}

#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CompletionSpecifierRewrite {
old_specifier: String,
new_specifier: String,
new_deno_types_specifier: Option<String>,
}

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CompletionItemData {

@@ -3704,7 +3727,7 @@ pub struct CompletionItemData {
/// be rewritten by replacing the first string with the second. Intended for
/// auto-import specifiers to be reverse-import-mapped.
#[serde(skip_serializing_if = "Option::is_none")]
pub specifier_rewrite: Option<(String, String)>,
pub specifier_rewrite: Option<CompletionSpecifierRewrite>,
#[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Value>,
pub use_code_snippet: bool,

@@ -3926,20 +3949,40 @@ impl CompletionEntry {
if let Some(source) = &self.source {
let mut display_source = source.clone();
if let Some(import_data) = &self.auto_import_data {
if let Some(new_module_specifier) = language_server
.get_ts_response_import_mapper(specifier)
let import_mapper =
language_server.get_ts_response_import_mapper(specifier);
if let Some(mut new_specifier) = import_mapper
.check_specifier(&import_data.normalized, specifier)
.or_else(|| relative_specifier(specifier, &import_data.normalized))
{
if new_module_specifier.contains("/node_modules/") {
if new_specifier.contains("/node_modules/") {
return None;
}
display_source.clone_from(&new_module_specifier);
if new_module_specifier != import_data.raw.module_specifier {
specifier_rewrite = Some((
import_data.raw.module_specifier.clone(),
new_module_specifier,
));
let mut new_deno_types_specifier = None;
if let Some(code_specifier) = language_server
.resolver
.deno_types_to_code_resolution(
&import_data.normalized,
Some(specifier),
)
.and_then(|s| {
import_mapper
.check_specifier(&s, specifier)
.or_else(|| relative_specifier(specifier, &s))
})
{
new_deno_types_specifier =
Some(std::mem::replace(&mut new_specifier, code_specifier));
}
display_source.clone_from(&new_specifier);
if new_specifier != import_data.raw.module_specifier
|| new_deno_types_specifier.is_some()
{
specifier_rewrite = Some(CompletionSpecifierRewrite {
old_specifier: import_data.raw.module_specifier.clone(),
new_specifier,
new_deno_types_specifier,
});
}
} else if source.starts_with(jsr_url().as_str()) {
return None;

@@ -4245,9 +4288,7 @@ impl TscSpecifierMap {
return specifier.to_string();
}
let mut specifier = original.to_string();
if specifier.contains("/node_modules/.deno/")
&& !specifier.contains("/node_modules/@types/node/")
{
if !specifier.contains("/node_modules/@types/node/") {
// The ts server doesn't give completions from files in
// `node_modules/.deno/`. We work around it like this.
specifier = specifier.replace("/node_modules/", "/$node_modules/");

@@ -4413,9 +4454,7 @@ fn op_load<'s>(
== NodeModuleKind::Cjs,
})
};

let serialized = serde_v8::to_v8(scope, maybe_load_response)?;

state.performance.measure(mark);
Ok(serialized)
}

@@ -4760,6 +4799,7 @@ fn run_tsc_thread(
specifier_map,
request_rx,
)],
create_params: create_isolate_create_params(),
startup_snapshot: Some(tsc::compiler_snapshot()),
inspector: has_inspector_server,
..Default::default()

@@ -5565,7 +5605,7 @@ mod tests {
let (_tx, rx) = mpsc::unbounded_channel();
let state =
State::new(state_snapshot, Default::default(), Default::default(), rx);
let mut op_state = OpState::new(None);
let mut op_state = OpState::new(None, None);
op_state.put(state);
op_state
}
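The tsc changes above replace the old (String, String) rewrite tuple with a CompletionSpecifierRewrite that can also prepend a @deno-types directive when a types import maps to a separate code specifier. A small sketch of the rewrite semantics (field names mirror the diff; the apply helper and the specifiers in main are hypothetical):

struct CompletionSpecifierRewrite {
    old_specifier: String,
    new_specifier: String,
    new_deno_types_specifier: Option<String>,
}

impl CompletionSpecifierRewrite {
    fn apply(&self, text: &str) -> String {
        // Same two steps the diff applies to completion text edits.
        let mut new_text = text.replace(&self.old_specifier, &self.new_specifier);
        if let Some(types) = &self.new_deno_types_specifier {
            new_text = format!("// @deno-types=\"{}\"\n{}", types, new_text);
        }
        new_text
    }
}

fn main() {
    let rewrite = CompletionSpecifierRewrite {
        old_specifier: "file:///node_modules/foo/index.d.ts".into(),
        new_specifier: "npm:foo".into(),
        new_deno_types_specifier: Some("npm:@types/foo".into()),
    };
    assert_eq!(
        rewrite.apply("import foo from \"file:///node_modules/foo/index.d.ts\";"),
        "// @deno-types=\"npm:@types/foo\"\nimport foo from \"npm:foo\";"
    );
}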
42
cli/main.rs
@@ -37,6 +37,7 @@ use crate::util::v8::init_v8_flags;
use args::TaskFlags;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;

@@ -50,7 +51,6 @@ use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_terminal::colors;
use factory::CliFactory;
use npm::ResolvePkgFolderFromDenoReqError;
use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;
use std::env;

@@ -144,9 +144,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
}
DenoSubcommand::Init(init_flags) => {
spawn_subcommand(async {
// make compiler happy since init_project is sync
tokio::task::yield_now().await;
tools::init::init_project(init_flags)
tools::init::init_project(init_flags).await
})
}
DenoSubcommand::Info(info_flags) => {

@@ -188,6 +186,11 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::lint::lint(flags, lint_flags).await
}
}),
DenoSubcommand::Outdated(update_flags) => {
spawn_subcommand(async move {
tools::registry::outdated(flags, update_flags).await
})
}
DenoSubcommand::Repl(repl_flags) => {
spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await })
}

@@ -238,6 +241,9 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
cwd: None,
task: Some(run_flags.script.clone()),
is_run: true,
recursive: false,
filter: None,
eval: false,
};
new_flags.subcommand = DenoSubcommand::Task(task_flags.clone());
let result = tools::task::execute_script(Arc::new(new_flags), task_flags.clone()).await;

@@ -350,18 +356,17 @@ fn setup_panic_hook() {
eprintln!("Args: {:?}", env::args().collect::<Vec<_>>());
eprintln!();
orig_hook(panic_info);
std::process::exit(1);
deno_runtime::exit(1);
}));
}

#[allow(clippy::print_stderr)]
fn exit_with_message(message: &str, code: i32) -> ! {
eprintln!(
log::error!(
"{}: {}",
colors::red_bold("error"),
message.trim_start_matches("error: ")
);
std::process::exit(code);
deno_runtime::exit(code);
}

fn exit_for_error(error: AnyError) -> ! {

@@ -380,13 +385,12 @@ fn exit_for_error(error: AnyError) -> ! {
exit_with_message(&error_string, error_code);
}

#[allow(clippy::print_stderr)]
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
eprintln!(
log::error!(
"Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.",
feature
);
std::process::exit(70);
deno_runtime::exit(70);
}

pub fn main() {

@@ -419,7 +423,7 @@ pub fn main() {
drop(profiler);

match result {
Ok(exit_code) => std::process::exit(exit_code),
Ok(exit_code) => deno_runtime::exit(exit_code),
Err(err) => exit_for_error(err),
}
}

@@ -433,12 +437,21 @@ fn resolve_flags_and_init(
if err.kind() == clap::error::ErrorKind::DisplayVersion =>
{
// Ignore results to avoid BrokenPipe errors.
util::logger::init(None);
let _ = err.print();
std::process::exit(0);
deno_runtime::exit(0);
}
Err(err) => {
util::logger::init(None);
exit_for_error(AnyError::from(err))
}
Err(err) => exit_for_error(AnyError::from(err)),
};

if let Some(otel_config) = flags.otel_config() {
deno_runtime::ops::otel::init(otel_config)?;
}
util::logger::init(flags.log_level);

// TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled {
log::warn!(

@@ -467,7 +480,6 @@ fn resolve_flags_and_init(
deno_core::JsRuntime::init_platform(
None, /* import assertions enabled */ false,
);
util::logger::init(flags.log_level);

Ok(flags)
}
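A pattern worth noting in the cli/main.rs hunks: every std::process::exit call is replaced with deno_runtime::exit, and OpenTelemetry is initialized from the flags before the logger. A plausible shape for such an exit wrapper, sketched here for illustration only (this is not deno_runtime's actual implementation), is an exit that first flushes process-wide state such as buffered telemetry:

// Stand-in for flushing OpenTelemetry/log buffers before the process dies.
fn flush_telemetry() {}

pub fn exit(code: i32) -> ! {
    flush_telemetry();
    std::process::exit(code);
}

fn main() {
    exit(0);
}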
@@ -40,23 +40,21 @@ use std::env::current_exe;
use crate::args::Flags;

#[allow(clippy::print_stderr)]
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
eprintln!(
log::error!(
"Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.",
feature
);
std::process::exit(70);
deno_runtime::exit(70);
}

#[allow(clippy::print_stderr)]
fn exit_with_message(message: &str, code: i32) -> ! {
eprintln!(
log::error!(
"{}: {}",
colors::red_bold("error"),
message.trim_start_matches("error: ")
);
std::process::exit(code);
deno_runtime::exit(code);
}

fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T {

@@ -89,13 +87,19 @@ fn main() {
let future = async move {
match standalone {
Ok(Some(data)) => {
if let Some(otel_config) = data.metadata.otel_config.clone() {
deno_runtime::ops::otel::init(otel_config)?;
}
util::logger::init(data.metadata.log_level);
load_env_vars(&data.metadata.env_vars_from_env_file);
let exit_code = standalone::run(data).await?;
std::process::exit(exit_code);
deno_runtime::exit(exit_code);
}
Ok(None) => Ok(()),
Err(err) => Err(err),
Err(err) => {
util::logger::init(None);
Err(err)
}
}
};
@@ -27,7 +27,7 @@ use crate::node;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::ModuleCodeStringSource;
use crate::resolver::NotSupportedKindInNpmError;

@@ -66,10 +66,12 @@ use deno_graph::JsonModule;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_graph::Resolution;
use deno_graph::WasmModule;
use deno_runtime::code_cache;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::ClosestPkgJsonError;

@@ -215,7 +217,8 @@ struct SharedCliModuleLoaderState {
main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
node_resolver: Arc<NodeResolver>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,

@@ -238,7 +241,8 @@ impl CliModuleLoaderFactory {
main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
node_resolver: Arc<NodeResolver>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,

@@ -264,6 +268,7 @@ impl CliModuleLoaderFactory {
module_load_preparer,
node_code_translator,
node_resolver,
npm_req_resolver,
npm_resolver,
npm_module_loader,
parsed_source_cache,

@@ -364,7 +369,9 @@ impl<TGraphContainer: ModuleGraphContainer>
requested_module_type: RequestedModuleType,
) -> Result<ModuleSource, AnyError> {
let code_source = self.load_code_source(specifier, maybe_referrer).await?;
let code = if self.shared.is_inspecting {
let code = if self.shared.is_inspecting
|| code_source.media_type == MediaType::Wasm
{
// we need the code with the source map in order for
// it to work with --inspect or --inspect-brk
code_source.code

@@ -374,6 +381,7 @@ impl<TGraphContainer: ModuleGraphContainer>
};
let module_type = match code_source.media_type {
MediaType::Json => ModuleType::Json,
MediaType::Wasm => ModuleType::Wasm,
_ => ModuleType::JavaScript,
};

@@ -425,7 +433,7 @@ impl<TGraphContainer: ModuleGraphContainer>
if let Some(code_source) = self.load_prepared_module(specifier).await? {
return Ok(code_source);
}
if self.shared.node_resolver.in_npm_package(specifier) {
if self.shared.in_npm_pkg_checker.in_npm_package(specifier) {
return self
.shared
.npm_module_loader

@@ -470,21 +478,6 @@ impl<TGraphContainer: ModuleGraphContainer>
raw_specifier: &str,
referrer: &ModuleSpecifier,
) -> Result<ModuleSpecifier, AnyError> {
if self.shared.node_resolver.in_npm_package(referrer) {
return Ok(
self
.shared
.node_resolver
.resolve(
raw_specifier,
referrer,
self.shared.cjs_tracker.get_referrer_kind(referrer),
NodeResolutionMode::Execution,
)?
.into_url(),
);
}

let graph = self.graph_container.graph();
let resolution = match graph.get(referrer) {
Some(Module::Js(module)) => module

@@ -518,12 +511,16 @@ impl<TGraphContainer: ModuleGraphContainer>
if self.shared.is_repl {
if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier)
{
return self.shared.node_resolver.resolve_req_reference(
return self
.shared
.npm_req_resolver
.resolve_req_reference(
&reference,
referrer,
self.shared.cjs_tracker.get_referrer_kind(referrer),
NodeResolutionMode::Execution,
);
)
.map_err(AnyError::from);
}
}

@@ -538,7 +535,7 @@ impl<TGraphContainer: ModuleGraphContainer>
self
.shared
.node_resolver
.resolve_package_sub_path_from_deno_module(
.resolve_package_subpath_from_deno_module(
&package_folder,
module.nv_reference.sub_path(),
Some(referrer),

@@ -552,6 +549,7 @@ impl<TGraphContainer: ModuleGraphContainer>
Some(Module::Node(module)) => module.specifier.clone(),
Some(Module::Js(module)) => module.specifier.clone(),
Some(Module::Json(module)) => module.specifier.clone(),
Some(Module::Wasm(module)) => module.specifier.clone(),
Some(Module::External(module)) => {
node::resolve_specifier_into_node_modules(
&module.specifier,

@@ -723,6 +721,13 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type: *media_type,
})))
}
Some(deno_graph::Module::Wasm(WasmModule {
source, specifier, ..
})) => Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(source.clone().into()),
found_url: specifier.clone(),
media_type: MediaType::Wasm,
}))),
Some(
deno_graph::Module::External(_)
| deno_graph::Module::Node(_)

@@ -828,7 +833,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
name: &str,
) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> {
let name = deno_core::ModuleSpecifier::parse(name).ok()?;
if self.0.shared.node_resolver.in_npm_package(&name) {
if self.0.shared.in_npm_pkg_checker.in_npm_package(&name) {
Some(create_host_defined_options(scope))
} else {
None

@@ -865,7 +870,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
_maybe_referrer: Option<String>,
is_dynamic: bool,
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
if self.0.shared.node_resolver.in_npm_package(specifier) {
if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) {
return Box::pin(deno_core::futures::future::ready(Ok(())));
}
13
cli/node.rs
@@ -7,8 +7,6 @@ use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_graph::ParsedSourceStore;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;

@@ -22,7 +20,6 @@ use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::resolver::CjsTracker;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;

pub type CliNodeCodeTranslator =
NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>;

@@ -37,13 +34,9 @@ pub fn resolve_specifier_into_node_modules(
specifier: &ModuleSpecifier,
fs: &dyn deno_fs::FileSystem,
) -> ModuleSpecifier {
url_to_file_path(specifier)
.ok()
// this path might not exist at the time the graph is being created
// because the node_modules folder might not yet exist
.and_then(|path| canonicalize_path_maybe_not_exists_with_fs(&path, fs).ok())
.and_then(|path| url_from_file_path(&path).ok())
.unwrap_or_else(|| specifier.clone())
node_resolver::resolve_specifier_into_node_modules(specifier, &|path| {
fs.realpath_sync(path).map_err(|err| err.into_io_error())
})
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
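The removed code above canonicalized a node_modules path that may not exist yet at graph-build time; the replacement delegates to node_resolver with a realpath callback. A minimal sketch of the "canonicalize a possibly missing path" idea (an illustration, not the CLI's canonicalize_path_maybe_not_exists_with_fs): canonicalize the deepest existing ancestor, then re-append the missing components.

use std::path::{Path, PathBuf};

fn canonicalize_maybe_not_exists(path: &Path) -> std::io::Result<PathBuf> {
    let mut missing = Vec::new();
    let mut current = path.to_path_buf();
    loop {
        match current.canonicalize() {
            Ok(mut base) => {
                // Re-append the components that did not exist yet,
                // shallowest first.
                for part in missing.iter().rev() {
                    base.push(part);
                }
                return Ok(base);
            }
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                match (current.file_name().map(|n| n.to_owned()), current.parent()) {
                    (Some(name), Some(parent)) => {
                        missing.push(name);
                        current = parent.to_path_buf();
                    }
                    _ => return Err(err),
                }
            }
            Err(err) => return Err(err),
        }
    }
}

fn main() {
    // /tmp exists; the child components need not.
    println!("{:?}", canonicalize_maybe_not_exists(Path::new("/tmp/a/b/c")));
}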
@@ -2,19 +2,17 @@

use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageReq;
use node_resolver::NpmResolver;
use node_resolver::NpmPackageFolderResolver;

use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;

@@ -22,7 +20,6 @@ use crate::resolver::CliDenoResolverFs;

use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError;

pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliDenoResolverFs, DenoFsNodeResolverEnv>;

@@ -47,7 +44,13 @@ impl NpmProcessStateProvider for CliByonmWrapper {
}

impl CliNpmResolver for CliByonmNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self
}

fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> {
self
}

@@ -57,6 +60,10 @@ impl CliNpmResolver for CliByonmNpmResolver {
Arc::new(CliByonmWrapper(self))
}

fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> {
Some(self)
}

fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
Arc::new(self.clone())
}

@@ -69,17 +76,6 @@ impl CliNpmResolver for CliByonmNpmResolver {
self.root_node_modules_dir()
}

fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
ByonmNpmResolver::resolve_pkg_folder_from_deno_module_req(
self, req, referrer,
)
.map_err(ResolvePkgFolderFromDenoReqError::Byonm)
}

fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
@@ -22,6 +22,7 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;

@@ -31,7 +32,7 @@ use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmResolver;
use node_resolver::NpmPackageFolderResolver;
use resolution::AddPkgReqsResult;

use crate::args::CliLockfile;

@@ -499,7 +500,7 @@ impl ManagedCliNpmResolver {
self.resolve_pkg_folder_from_pkg_id(&pkg_id)
}

fn resolve_pkg_id_from_pkg_req(
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> {

@@ -605,7 +606,7 @@ fn npm_process_state(
.unwrap()
}

impl NpmResolver for ManagedCliNpmResolver {
impl NpmPackageFolderResolver for ManagedCliNpmResolver {
fn resolve_package_folder_from_package(
&self,
name: &str,

@@ -635,8 +636,29 @@ impl NpmProcessStateProvider for ManagedCliNpmResolver {
}
}

impl CliNpmReqResolver for ManagedCliNpmResolver {
fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
_referrer: &ModuleSpecifier,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
let pkg_id = self
.resolve_pkg_id_from_pkg_req(req)
.map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?;
self
.resolve_pkg_folder_from_pkg_id(&pkg_id)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
}
}

impl CliNpmResolver for ManagedCliNpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver> {
self
}

fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> {
self
}

@@ -687,19 +709,6 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.fs_resolver.node_modules_path()
}

fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
_referrer: &ModuleSpecifier,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
let pkg_id = self
.resolve_pkg_id_from_pkg_req(req)
.map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?;
self
.resolve_pkg_folder_from_pkg_id(&pkg_id)
.map_err(ResolvePkgFolderFromDenoReqError::Managed)
}

fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
@@ -6,19 +6,18 @@ mod managed;

use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use common::maybe_auth_header_for_npm_registry;
use dashmap::DashMap;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmInNpmPackageChecker;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::CliNpmReqResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;

@@ -26,8 +25,7 @@ use deno_semver::package::PackageReq;
use managed::cache::registry_info::get_package_url;
use managed::create_managed_in_npm_pkg_checker;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmResolver;
use thiserror::Error;
use node_resolver::NpmPackageFolderResolver;

use crate::file_fetcher::FileFetcher;

@@ -38,14 +36,6 @@ pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver;

#[derive(Debug, Error)]
pub enum ResolvePkgFolderFromDenoReqError {
#[error(transparent)]
Managed(deno_core::error::AnyError),
#[error(transparent)]
Byonm(#[from] ByonmResolvePkgFolderFromDenoReqError),
}

pub enum CliNpmResolverCreateOptions {
Managed(CliManagedNpmResolverCreateOptions),
Byonm(CliByonmNpmResolverCreateOptions),

@@ -95,11 +85,17 @@ pub enum InnerCliNpmResolverRef<'a> {
Byonm(&'a CliByonmNpmResolver),
}

pub trait CliNpmResolver: NpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver>;
pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
fn into_npm_pkg_folder_resolver(
self: Arc<Self>,
) -> Arc<dyn NpmPackageFolderResolver>;
fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver>;
fn into_process_state_provider(
self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider>;
fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> {
None
}

fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>;

@@ -121,12 +117,6 @@ pub trait CliNpmResolver: NpmResolver {

fn root_node_modules_path(&self) -> Option<&Path>;

fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>;

fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
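The reworked CliNpmResolver trait above converts a shared resolver into narrower trait objects via into_npm_pkg_folder_resolver and into_npm_req_resolver, each taking self: Arc<Self>. A minimal sketch of that Arc-conversion pattern (trait and type names here are illustrative, not the CLI's):

use std::sync::Arc;

trait FolderResolver {
    fn resolve(&self, name: &str) -> String;
}

trait Resolver: FolderResolver {
    // Taking `self: Arc<Self>` lets one shared instance hand out a
    // reference-counted clone of itself as a narrower trait object.
    fn into_folder_resolver(self: Arc<Self>) -> Arc<dyn FolderResolver>;
}

struct Managed;

impl FolderResolver for Managed {
    fn resolve(&self, name: &str) -> String {
        format!("/node_modules/{name}")
    }
}

impl Resolver for Managed {
    fn into_folder_resolver(self: Arc<Self>) -> Arc<dyn FolderResolver> {
        self
    }
}

fn main() {
    let resolver = Arc::new(Managed);
    let folder: Arc<dyn FolderResolver> = resolver.into_folder_resolver();
    assert_eq!(folder.resolve("chalk"), "/node_modules/chalk");
}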
@@ -51,7 +51,7 @@ fn op_bench_get_origin(state: &mut OpState) -> String {
#[derive(Clone)]
struct PermissionsHolder(Uuid, PermissionsContainer);

#[op2]
#[op2(stack_trace)]
#[serde]
pub fn op_pledge_test_permissions(
state: &mut OpState,
@@ -46,7 +46,7 @@ deno_core::extension!(deno_test,
#[derive(Clone)]
struct PermissionsHolder(Uuid, PermissionsContainer);

#[op2]
#[op2(stack_trace)]
#[serde]
pub fn op_pledge_test_permissions(
state: &mut OpState,
765
cli/resolver.rs
@@ -4,10 +4,8 @@ use async_trait::async_trait;
use dashmap::DashMap;
use dashmap::DashSet;
use deno_ast::MediaType;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;

@@ -20,28 +18,14 @@ use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_resolver::sloppy_imports::SloppyImportsResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_semver::npm::NpmPackageReqReference;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_semver::package::PackageReq;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::errors::NodeResolveError;
use node_resolver::errors::NodeResolveErrorKind;
use node_resolver::errors::PackageFolderResolveErrorKind;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::PackageResolveErrorKind;
use node_resolver::errors::PackageSubpathResolveError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode;
use std::borrow::Cow;
use std::path::Path;

@@ -56,6 +40,20 @@ use crate::npm::InnerCliNpmResolverRef;
use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_owned;

pub type CjsTracker = deno_resolver::cjs::CjsTracker<DenoFsNodeResolverEnv>;
pub type IsCjsResolver =
deno_resolver::cjs::IsCjsResolver<DenoFsNodeResolverEnv>;
pub type IsCjsResolverOptions = deno_resolver::cjs::IsCjsResolverOptions;
pub type CliSloppyImportsResolver =
SloppyImportsResolver<SloppyImportsCachedFs>;
pub type CliDenoResolver = deno_resolver::DenoResolver<
CliDenoResolverFs,
DenoFsNodeResolverEnv,
SloppyImportsCachedFs,
>;
pub type CliNpmReqResolver =
deno_resolver::npm::NpmReqResolver<CliDenoResolverFs, DenoFsNodeResolverEnv>;

pub struct ModuleCodeStringSource {
pub code: ModuleSourceCode,
pub found_url: ModuleSpecifier,

@@ -77,6 +75,10 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
}

fn exists_sync(&self, path: &Path) -> bool {
self.0.exists_sync(path)
}

fn is_dir_sync(&self, path: &Path) -> bool {
self.0.is_dir_sync(path)
}

@@ -102,211 +104,6 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
}
}

#[derive(Debug)]
pub struct CliNodeResolver {
fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
}

impl CliNodeResolver {
pub fn new(
fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
) -> Self {
Self {
fs,
in_npm_pkg_checker,
node_resolver,
npm_resolver,
}
}

pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
self.in_npm_pkg_checker.in_npm_package(specifier)
}

pub fn resolve_if_for_npm_pkg(
&self,
specifier: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<Option<NodeResolution>, AnyError> {
let resolution_result =
self.resolve(specifier, referrer, referrer_kind, mode);
match resolution_result {
Ok(res) => Ok(Some(res)),
Err(err) => {
let err = err.into_kind();
match err {
NodeResolveErrorKind::RelativeJoin(_)
| NodeResolveErrorKind::PackageImportsResolve(_)
| NodeResolveErrorKind::UnsupportedEsmUrlScheme(_)
| NodeResolveErrorKind::DataUrlReferrer(_)
| NodeResolveErrorKind::TypesNotFound(_)
| NodeResolveErrorKind::FinalizeResolution(_) => Err(err.into()),
NodeResolveErrorKind::PackageResolve(err) => {
let err = err.into_kind();
match err {
PackageResolveErrorKind::ClosestPkgJson(_)
| PackageResolveErrorKind::InvalidModuleSpecifier(_)
| PackageResolveErrorKind::ExportsResolve(_)
| PackageResolveErrorKind::SubpathResolve(_) => Err(err.into()),
PackageResolveErrorKind::PackageFolderResolve(err) => {
match err.as_kind() {
PackageFolderResolveErrorKind::Io(
PackageFolderResolveIoError { package_name, .. },
)
| PackageFolderResolveErrorKind::PackageNotFound(
PackageNotFoundError { package_name, .. },
) => {
if self.in_npm_package(referrer) {
return Err(err.into());
}
if let Some(byonm_npm_resolver) =
self.npm_resolver.as_byonm()
{
if byonm_npm_resolver
.find_ancestor_package_json_with_dep(
package_name,
referrer,
)
.is_some()
{
return Err(anyhow!(
concat!(
"Could not resolve \"{}\", but found it in a package.json. ",
"Deno expects the node_modules/ directory to be up to date. ",
"Did you forget to run `deno install`?"
),
specifier
));
}
}
Ok(None)
}
PackageFolderResolveErrorKind::ReferrerNotFound(_) => {
if self.in_npm_package(referrer) {
return Err(err.into());
}
Ok(None)
}
}
}
}
}
}
}
}
}

pub fn resolve(
&self,
specifier: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<NodeResolution, NodeResolveError> {
self
.node_resolver
.resolve(specifier, referrer, referrer_kind, mode)
}

pub fn resolve_req_reference(
&self,
req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<ModuleSpecifier, AnyError> {
self.resolve_req_with_sub_path(
req_ref.req(),
req_ref.sub_path(),
referrer,
referrer_kind,
mode,
)
}

pub fn resolve_req_with_sub_path(
&self,
req: &PackageReq,
sub_path: Option<&str>,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<ModuleSpecifier, AnyError> {
let package_folder = self
.npm_resolver
.resolve_pkg_folder_from_deno_module_req(req, referrer)?;
let resolution_result = self.resolve_package_sub_path_from_deno_module(
&package_folder,
sub_path,
Some(referrer),
referrer_kind,
mode,
);
match resolution_result {
Ok(url) => Ok(url),
Err(err) => {
if self.npm_resolver.as_byonm().is_some() {
let package_json_path = package_folder.join("package.json");
if !self.fs.exists_sync(&package_json_path) {
return Err(anyhow!(
"Could not find '{}'. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?",
package_json_path.display(),
));
}
}
Err(err.into())
}
}
}

pub fn resolve_package_sub_path_from_deno_module(
&self,
package_folder: &Path,
sub_path: Option<&str>,
maybe_referrer: Option<&ModuleSpecifier>,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<ModuleSpecifier, PackageSubpathResolveError> {
self.node_resolver.resolve_package_subpath_from_deno_module(
package_folder,
sub_path,
maybe_referrer,
referrer_kind,
mode,
)
}

pub fn handle_if_in_node_modules(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<ModuleSpecifier>, AnyError> {
// skip canonicalizing if we definitely know it's unnecessary
if specifier.scheme() == "file"
&& specifier.path().contains("/node_modules/")
{
// Specifiers in the node_modules directory are canonicalized
// so canoncalize then check if it's in the node_modules directory.
// If so, check if we need to store this specifier as being a CJS
// If so, check if we need to store this specifier as being a CJS
|
||||
// resolution.
|
||||
let specifier = crate::node::resolve_specifier_into_node_modules(
|
||||
specifier,
|
||||
self.fs.as_ref(),
|
||||
);
|
||||
return Ok(Some(specifier));
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
#[error("{media_type} files are not supported in npm packages: {specifier}")]
|
||||
pub struct NotSupportedKindInNpmError {
|
||||
|
@ -409,305 +206,36 @@ impl NpmModuleLoader {
|
|||
}
|
||||
}
|
||||
|
||||
/// Keeps track of what module specifiers were resolved as CJS.
|
||||
///
|
||||
/// Modules that are `.js` or `.ts` are only known to be CJS or
|
||||
/// ESM after they're loaded based on their contents. So these files
|
||||
/// will be "maybe CJS" until they're loaded.
|
||||
#[derive(Debug)]
|
||||
pub struct CjsTracker {
|
||||
is_cjs_resolver: IsCjsResolver,
|
||||
known: DashMap<ModuleSpecifier, NodeModuleKind>,
|
||||
pub struct CliResolverOptions {
|
||||
pub deno_resolver: Arc<CliDenoResolver>,
|
||||
pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
|
||||
pub bare_node_builtins_enabled: bool,
|
||||
}
|
||||
|
||||
impl CjsTracker {
|
||||
pub fn new(
|
||||
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
|
||||
pkg_json_resolver: Arc<PackageJsonResolver>,
|
||||
options: IsCjsResolverOptions,
|
||||
) -> Self {
|
||||
Self {
|
||||
is_cjs_resolver: IsCjsResolver::new(
|
||||
in_npm_pkg_checker,
|
||||
pkg_json_resolver,
|
||||
options,
|
||||
),
|
||||
known: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks whether the file might be treated as CJS, but it's not for sure
|
||||
/// yet because the source hasn't been loaded to see whether it contains
|
||||
/// imports or exports.
|
||||
pub fn is_maybe_cjs(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
) -> Result<bool, ClosestPkgJsonError> {
|
||||
self.treat_as_cjs_with_is_script(specifier, media_type, None)
|
||||
}
|
||||
|
||||
/// Gets whether the file is CJS. If true, this is for sure
|
||||
/// cjs because `is_script` is provided.
|
||||
///
|
||||
/// `is_script` should be `true` when the contents of the file at the
|
||||
/// provided specifier are known to be a script and not an ES module.
|
||||
pub fn is_cjs_with_known_is_script(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
is_script: bool,
|
||||
) -> Result<bool, ClosestPkgJsonError> {
|
||||
self.treat_as_cjs_with_is_script(specifier, media_type, Some(is_script))
|
||||
}
|
||||
|
||||
fn treat_as_cjs_with_is_script(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
is_script: Option<bool>,
|
||||
) -> Result<bool, ClosestPkgJsonError> {
|
||||
let kind = match self
|
||||
.get_known_kind_with_is_script(specifier, media_type, is_script)
|
||||
{
|
||||
Some(kind) => kind,
|
||||
None => self.is_cjs_resolver.check_based_on_pkg_json(specifier)?,
|
||||
};
|
||||
Ok(kind == NodeModuleKind::Cjs)
|
||||
}
|
||||
|
||||
pub fn get_known_kind(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
) -> Option<NodeModuleKind> {
|
||||
self.get_known_kind_with_is_script(specifier, media_type, None)
|
||||
}
|
||||
|
||||
pub fn get_referrer_kind(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> NodeModuleKind {
|
||||
if specifier.scheme() != "file" {
|
||||
return NodeModuleKind::Esm;
|
||||
}
|
||||
self
|
||||
.get_known_kind(specifier, MediaType::from_specifier(specifier))
|
||||
.unwrap_or(NodeModuleKind::Esm)
|
||||
}
|
||||
|
||||
fn get_known_kind_with_is_script(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: MediaType,
|
||||
is_script: Option<bool>,
|
||||
) -> Option<NodeModuleKind> {
|
||||
self.is_cjs_resolver.get_known_kind_with_is_script(
|
||||
specifier,
|
||||
media_type,
|
||||
is_script,
|
||||
&self.known,
|
||||
)
|
||||
}
|
||||
}
|
||||
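// Illustrative usage sketch for `CjsTracker` (not part of this diff;
// `tracker`, `specifier`, and `media_type` are assumed values):
//
//   // before a .js/.ts file is loaded, only a tentative answer exists:
//   let maybe_cjs = tracker.is_maybe_cjs(&specifier, media_type)?;
//   // once parsing shows the file is a script (no imports/exports),
//   // the answer becomes definite:
//   let is_cjs =
//     tracker.is_cjs_with_known_is_script(&specifier, media_type, true)?;
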
#[derive(Debug)]
pub struct IsCjsResolverOptions {
  pub detect_cjs: bool,
  pub is_node_main: bool,
}

#[derive(Debug)]
pub struct IsCjsResolver {
  in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
  pkg_json_resolver: Arc<PackageJsonResolver>,
  options: IsCjsResolverOptions,
}

impl IsCjsResolver {
  pub fn new(
    in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
    pkg_json_resolver: Arc<PackageJsonResolver>,
    options: IsCjsResolverOptions,
  ) -> Self {
    Self {
      in_npm_pkg_checker,
      pkg_json_resolver,
      options,
    }
  }

  pub fn get_lsp_referrer_kind(
    &self,
    specifier: &ModuleSpecifier,
    is_script: Option<bool>,
  ) -> NodeModuleKind {
    if specifier.scheme() != "file" {
      return NodeModuleKind::Esm;
    }
    match MediaType::from_specifier(specifier) {
      MediaType::Mts | MediaType::Mjs | MediaType::Dmts => NodeModuleKind::Esm,
      MediaType::Cjs | MediaType::Cts | MediaType::Dcts => NodeModuleKind::Cjs,
      MediaType::Dts => {
        // dts files are always determined based on the package.json because
        // they contain imports/exports even when considered CJS
        self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm)
      }
      MediaType::Wasm |
      MediaType::Json => NodeModuleKind::Esm,
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::TypeScript
      | MediaType::Tsx
      // treat these as unknown
      | MediaType::Css
      | MediaType::SourceMap
      | MediaType::Unknown => {
        match is_script {
          Some(true) => self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm),
          Some(false) | None => NodeModuleKind::Esm,
        }
      }
    }
  }

  fn get_known_kind_with_is_script(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    is_script: Option<bool>,
    known_cache: &DashMap<ModuleSpecifier, NodeModuleKind>,
  ) -> Option<NodeModuleKind> {
    if specifier.scheme() != "file" {
      return Some(NodeModuleKind::Esm);
    }

    match media_type {
      MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(NodeModuleKind::Esm),
      MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(NodeModuleKind::Cjs),
      MediaType::Dts => {
        // dts files are always determined based on the package.json because
        // they contain imports/exports even when considered CJS
        if let Some(value) = known_cache.get(specifier).map(|v| *v) {
          Some(value)
        } else {
          let value = self.check_based_on_pkg_json(specifier).ok();
          if let Some(value) = value {
            known_cache.insert(specifier.clone(), value);
          }
          Some(value.unwrap_or(NodeModuleKind::Esm))
        }
      }
      MediaType::Wasm |
      MediaType::Json => Some(NodeModuleKind::Esm),
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::TypeScript
      | MediaType::Tsx
      // treat these as unknown
      | MediaType::Css
      | MediaType::SourceMap
      | MediaType::Unknown => {
        if let Some(value) = known_cache.get(specifier).map(|v| *v) {
          if value == NodeModuleKind::Cjs && is_script == Some(false) {
            // we now know this is actually esm
            known_cache.insert(specifier.clone(), NodeModuleKind::Esm);
            Some(NodeModuleKind::Esm)
          } else {
            Some(value)
          }
        } else if is_script == Some(false) {
          // we know this is esm
          known_cache.insert(specifier.clone(), NodeModuleKind::Esm);
          Some(NodeModuleKind::Esm)
        } else {
          None
        }
      }
    }
  }

  fn check_based_on_pkg_json(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<NodeModuleKind, ClosestPkgJsonError> {
    if self.in_npm_pkg_checker.in_npm_package(specifier) {
      if let Some(pkg_json) =
        self.pkg_json_resolver.get_closest_package_json(specifier)?
      {
        let is_file_location_cjs = pkg_json.typ != "module";
        Ok(if is_file_location_cjs {
          NodeModuleKind::Cjs
        } else {
          NodeModuleKind::Esm
        })
      } else {
        Ok(NodeModuleKind::Cjs)
      }
    } else if self.options.detect_cjs || self.options.is_node_main {
      if let Some(pkg_json) =
        self.pkg_json_resolver.get_closest_package_json(specifier)?
      {
        let is_cjs_type = pkg_json.typ == "commonjs"
          || self.options.is_node_main && pkg_json.typ == "none";
        Ok(if is_cjs_type {
          NodeModuleKind::Cjs
        } else {
          NodeModuleKind::Esm
        })
      } else if self.options.is_node_main {
        Ok(NodeModuleKind::Cjs)
      } else {
        Ok(NodeModuleKind::Esm)
      }
    } else {
      Ok(NodeModuleKind::Esm)
    }
  }
}
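// Reading aid for `check_based_on_pkg_json` above (decision table derived
// from the code itself, not stated elsewhere in this diff):
//
//   inside an npm package:
//     "type": "module"                   => Esm
//     anything else / no package.json    => Cjs
//   outside, with detect_cjs or is_node_main:
//     "type": "commonjs"                 => Cjs
//     no "type" ("none") + is_node_main  => Cjs
//     otherwise                          => Esm
//   outside, with neither option         => Esm
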
pub type CliSloppyImportsResolver =
  SloppyImportsResolver<SloppyImportsCachedFs>;

/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug)]
pub struct CliResolver {
  node_resolver: Option<Arc<CliNodeResolver>>,
  deno_resolver: Arc<CliDenoResolver>,
  npm_resolver: Option<Arc<dyn CliNpmResolver>>,
  sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
  workspace_resolver: Arc<WorkspaceResolver>,
  maybe_vendor_specifier: Option<ModuleSpecifier>,
  found_package_json_dep_flag: AtomicFlag,
  bare_node_builtins_enabled: bool,
  warned_pkgs: DashSet<PackageReq>,
}

pub struct CliResolverOptions<'a> {
  pub node_resolver: Option<Arc<CliNodeResolver>>,
  pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
  pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
  pub workspace_resolver: Arc<WorkspaceResolver>,
  pub bare_node_builtins_enabled: bool,
  pub maybe_vendor_dir: Option<&'a PathBuf>,
}

impl CliResolver {
  pub fn new(options: CliResolverOptions) -> Self {
    Self {
      node_resolver: options.node_resolver,
      deno_resolver: options.deno_resolver,
      npm_resolver: options.npm_resolver,
      sloppy_imports_resolver: options.sloppy_imports_resolver,
      workspace_resolver: options.workspace_resolver,
      maybe_vendor_specifier: options
        .maybe_vendor_dir
        .and_then(|v| ModuleSpecifier::from_directory_path(v).ok()),
      found_package_json_dep_flag: Default::default(),
      bare_node_builtins_enabled: options.bare_node_builtins_enabled,
      warned_pkgs: Default::default(),
    }
  }

  // todo(dsherret): move this off CliResolver as CliResolver is acting
  // like a factory by doing this (it's beyond its responsibility)
  pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver {
    WorkerCliNpmGraphResolver {
      npm_resolver: self.npm_resolver.as_ref(),

@@ -730,223 +258,50 @@ impl CliResolver {
      }
    }

    let referrer = &referrer_range.specifier;

    // Use node resolution if we're in an npm package
    if let Some(node_resolver) = self.node_resolver.as_ref() {
      if referrer.scheme() == "file" && node_resolver.in_npm_package(referrer) {
        return node_resolver
          .resolve(raw_specifier, referrer, referrer_kind, to_node_mode(mode))
          .map(|res| res.into_url())
          .map_err(|e| ResolveError::Other(e.into()));
      }
    }

    // Attempt to resolve with the workspace resolver
    let result: Result<_, ResolveError> = self
      .workspace_resolver
      .resolve(raw_specifier, referrer)
      .map_err(|err| match err {
        MappedResolutionError::Specifier(err) => ResolveError::Specifier(err),
        MappedResolutionError::ImportMap(err) => {
          ResolveError::Other(err.into())
        }
        MappedResolutionError::Workspace(err) => {
          ResolveError::Other(err.into())
        }
      });
    let result = match result {
      Ok(resolution) => match resolution {
        MappedResolution::Normal {
          specifier,
          maybe_diagnostic,
        }
        | MappedResolution::ImportMap {
          specifier,
          maybe_diagnostic,
        } => {
          if let Some(diagnostic) = maybe_diagnostic {
            match &*diagnostic {
              MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion { reference, .. } => {
                if self.warned_pkgs.insert(reference.req().clone()) {
                  log::warn!("{} {}\n at {}", colors::yellow("Warning"), diagnostic, referrer_range);
                }
              }
            }
          }
          // do sloppy imports resolution if enabled
          if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
            Ok(
              sloppy_imports_resolver
    let resolution = self
      .deno_resolver
      .resolve(
                  &specifier,
                  match mode {
                    ResolutionMode::Execution => {
                      SloppyImportsResolutionMode::Execution
                    }
                    ResolutionMode::Types => SloppyImportsResolutionMode::Types,
                  },
                )
                .map(|s| s.into_specifier())
                .unwrap_or(specifier),
            )
          } else {
            Ok(specifier)
          }
        }
        MappedResolution::WorkspaceJsrPackage { specifier, .. } => {
          Ok(specifier)
        }
        MappedResolution::WorkspaceNpmPackage {
          target_pkg_json: pkg_json,
          sub_path,
          ..
        } => self
          .node_resolver
          .as_ref()
          .unwrap()
          .resolve_package_sub_path_from_deno_module(
            pkg_json.dir_path(),
            sub_path.as_deref(),
            Some(referrer),
        raw_specifier,
        &referrer_range.specifier,
            referrer_kind,
            to_node_mode(mode),
          )
          .map_err(|e| ResolveError::Other(e.into())),
        MappedResolution::PackageJson {
          dep_result,
          alias,
          sub_path,
      .map_err(|err| match err.into_kind() {
        deno_resolver::DenoResolveErrorKind::MappedResolution(
          mapped_resolution_error,
        ) => match mapped_resolution_error {
          MappedResolutionError::Specifier(e) => ResolveError::Specifier(e),
          // deno_graph checks specifically for an ImportMapError
          MappedResolutionError::ImportMap(e) => ResolveError::Other(e.into()),
          err => ResolveError::Other(err.into()),
        },
        err => ResolveError::Other(err.into()),
      })?;

    if resolution.found_package_json_dep {
      // mark that we need to do an "npm install" later
      self.found_package_json_dep_flag.raise();
    }

    if let Some(diagnostic) = resolution.maybe_diagnostic {
      match &*diagnostic {
        MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion {
          reference,
          ..
        } => {
            // found a specifier in the package.json, so mark that
            // we need to do an "npm install" later
            self.found_package_json_dep_flag.raise();

            dep_result
              .as_ref()
              .map_err(|e| ResolveError::Other(e.clone().into()))
              .and_then(|dep| match dep {
                PackageJsonDepValue::Req(req) => {
                  ModuleSpecifier::parse(&format!(
                    "npm:{}{}",
                    req,
                    sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
                  ))
                  .map_err(|e| ResolveError::Other(e.into()))
                }
                PackageJsonDepValue::Workspace(version_req) => self
                  .workspace_resolver
                  .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
                    alias,
                    version_req,
                  )
                  .map_err(|e| ResolveError::Other(e.into()))
                  .and_then(|pkg_folder| {
                    self
                      .node_resolver
                      .as_ref()
                      .unwrap()
                      .resolve_package_sub_path_from_deno_module(
                        pkg_folder,
                        sub_path.as_deref(),
                        Some(referrer),
                        referrer_kind,
                        to_node_mode(mode),
                      )
                      .map_err(|e| ResolveError::Other(e.into()))
                  }),
              })
          }
        },
      Err(err) => Err(err),
    };

    // When the user is vendoring, don't allow them to import directly from the vendor/ directory
    // as it might cause them confusion or duplicate dependencies. Additionally, this folder has
    // special treatment in the language server so it will definitely cause issues/confusion there
    // if they do this.
    if let Some(vendor_specifier) = &self.maybe_vendor_specifier {
      if let Ok(specifier) = &result {
        if specifier.as_str().starts_with(vendor_specifier.as_str()) {
          return Err(ResolveError::Other(anyhow!("Importing from the vendor directory is not permitted. Use a remote specifier instead or disable vendoring.")));
        }
      }
    }

    let Some(node_resolver) = &self.node_resolver else {
      return result;
    };

    let is_byonm = self
      .npm_resolver
      .as_ref()
      .is_some_and(|r| r.as_byonm().is_some());
    match result {
      Ok(specifier) => {
        if let Ok(npm_req_ref) =
          NpmPackageReqReference::from_specifier(&specifier)
        {
          // check if the npm specifier resolves to a workspace member
          if let Some(pkg_folder) = self
            .workspace_resolver
            .resolve_workspace_pkg_json_folder_for_npm_specifier(
              npm_req_ref.req(),
            )
          {
            return node_resolver
              .resolve_package_sub_path_from_deno_module(
                pkg_folder,
                npm_req_ref.sub_path(),
                Some(referrer),
                referrer_kind,
                to_node_mode(mode),
              )
              .map_err(|e| ResolveError::Other(e.into()));
          }

          // do npm resolution for byonm
          if is_byonm {
            return node_resolver
              .resolve_req_reference(
                &npm_req_ref,
                referrer,
                referrer_kind,
                to_node_mode(mode),
              )
              .map_err(|err| err.into());
          }
        }

        Ok(match node_resolver.handle_if_in_node_modules(&specifier)? {
          Some(specifier) => specifier,
          None => specifier,
        })
      }
      Err(err) => {
        // If byonm, check if the bare specifier resolves to an npm package
        if is_byonm && referrer.scheme() == "file" {
          let maybe_resolution = node_resolver
            .resolve_if_for_npm_pkg(
              raw_specifier,
              referrer,
              referrer_kind,
              to_node_mode(mode),
            )
            .map_err(ResolveError::Other)?;
          if let Some(res) = maybe_resolution {
            match res {
              NodeResolution::Module(url) => return Ok(url),
              NodeResolution::BuiltIn(_) => {
                // don't resolve bare specifiers for built-in modules via node resolution
          if self.warned_pkgs.insert(reference.req().clone()) {
            log::warn!(
              "{} {}\n at {}",
              colors::yellow("Warning"),
              diagnostic,
              referrer_range
            );
          }
        }
      }
    }

        Err(err)
      }
    }
    Ok(resolution.url)
  }
}
@@ -431,8 +431,34 @@
      "type": "object",
      "patternProperties": {
        "^[A-Za-z][A-Za-z0-9_\\-:]*$": {
          "oneOf": [
            {
              "type": "string",
              "description": "Command to execute for this task name."
            },
            {
              "type": "object",
              "description": "A definition of a task to execute",
              "properties": {
                "description": {
                  "type": "string",
                  "description": "Description of a task that will be shown when running `deno task` without a task name"
                },
                "command": {
                  "type": "string",
                  "required": true,
                  "description": "The task to execute"
                },
                "dependencies": {
                  "type": "array",
                  "items": {
                    "type": "string"
                  },
                  "description": "Tasks that should be executed before this task"
                }
              }
            }
          ]
        }
      },
      "additionalProperties": false

@@ -531,9 +557,11 @@
        "ffi",
        "fs",
        "fmt-component",
        "fmt-sql",
        "http",
        "kv",
        "net",
        "node-globals",
        "sloppy-imports",
        "temporal",
        "unsafe-proto",
@@ -64,6 +64,7 @@ use crate::args::NpmInstallDepsProvider;
use crate::args::PermissionFlags;
use crate::args::UnstableConfig;
use crate::cache::DenoDir;
use crate::cache::FastInsecureHasher;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider;

@@ -174,6 +175,7 @@ pub struct SerializedWorkspaceResolver {
pub struct Metadata {
  pub argv: Vec<String>,
  pub seed: Option<u64>,
  pub code_cache_key: Option<u64>,
  pub permissions: PermissionFlags,
  pub location: Option<Url>,
  pub v8_flags: Vec<String>,
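// Note on the new `code_cache_key` field: it is filled in later in this file
// when code caching is enabled. A sketch of the derivation (see the
// `code_cache_key_hasher` loop in the hunks below):
//
//   let mut hasher = FastInsecureHasher::new_deno_versioned();
//   for module in graph.modules() {
//     hasher.write(module.specifier().as_str().as_bytes());
//     hasher.write(source.as_bytes());
//   }
//   code_cache_key: code_cache_key_hasher.map(|h| h.finish())
//
// so any change to a module's specifier or source busts the embedded cache.
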
@@ -199,7 +201,8 @@ fn write_binary_bytes(
  compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
  let data_section_bytes =
    serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)?;
    serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)
      .context("Serializing binary data section.")?;

  let target = compile_flags.resolve_target();
  if target.contains("linux") {

@@ -362,6 +365,7 @@ pub fn extract_standalone(

pub struct DenoCompileBinaryWriter<'a> {
  cjs_tracker: &'a CjsTracker,
  cli_options: &'a CliOptions,
  deno_dir: &'a DenoDir,
  emitter: &'a Emitter,
  file_fetcher: &'a FileFetcher,

@@ -375,6 +379,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    cjs_tracker: &'a CjsTracker,
    cli_options: &'a CliOptions,
    deno_dir: &'a DenoDir,
    emitter: &'a Emitter,
    file_fetcher: &'a FileFetcher,

@@ -385,6 +390,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
  ) -> Self {
    Self {
      cjs_tracker,
      cli_options,
      deno_dir,
      emitter,
      file_fetcher,

@@ -401,8 +407,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
    graph: &ModuleGraph,
    root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
    entrypoint: &ModuleSpecifier,
    include_files: &[ModuleSpecifier],
    compile_flags: &CompileFlags,
    cli_options: &CliOptions,
  ) -> Result<(), AnyError> {
    // Select base binary based on target
    let mut original_binary = self.get_base_binary(compile_flags).await?;

@@ -415,7 +421,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
        target,
      )
    }
      set_windows_binary_to_gui(&mut original_binary)?;
      set_windows_binary_to_gui(&mut original_binary)
        .context("Setting windows binary to GUI.")?;
    }
    if compile_flags.icon.is_some() {
      let target = compile_flags.resolve_target();

@@ -433,7 +440,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
        graph,
        root_dir_url,
        entrypoint,
        cli_options,
        include_files,
        compile_flags,
      )
      .await

@@ -476,10 +483,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
    if !binary_path.exists() {
      self
        .download_base_binary(&download_directory, &binary_path_suffix)
        .await?;
        .await
        .context("Setting up base binary.")?;
    }

    let archive_data = std::fs::read(binary_path)?;
    let read_file = |path: &Path| -> Result<Vec<u8>, AnyError> {
      std::fs::read(path).with_context(|| format!("Reading {}", path.display()))
    };
    let archive_data = read_file(&binary_path)?;
    let temp_dir = tempfile::TempDir::new()?;
    let base_binary_path = archive::unpack_into_dir(archive::UnpackArgs {
      exe_name: "denort",

@@ -488,7 +499,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
      is_windows: target.contains("windows"),
      dest_path: temp_dir.path(),
    })?;
    let base_binary = std::fs::read(base_binary_path)?;
    let base_binary = read_file(&base_binary_path)?;
    drop(temp_dir); // delete the temp dir
    Ok(base_binary)
  }

@@ -516,15 +527,19 @@ impl<'a> DenoCompileBinaryWriter<'a> {
    let bytes = match maybe_bytes {
      Some(bytes) => bytes,
      None => {
        log::info!("Download could not be found, aborting");
        std::process::exit(1)
        bail!("Download could not be found, aborting");
      }
    };

    std::fs::create_dir_all(output_directory)?;
    let create_dir_all = |dir: &Path| {
      std::fs::create_dir_all(dir)
        .with_context(|| format!("Creating {}", dir.display()))
    };
    create_dir_all(output_directory)?;
    let output_path = output_directory.join(binary_path_suffix);
    std::fs::create_dir_all(output_path.parent().unwrap())?;
    tokio::fs::write(output_path, bytes).await?;
    create_dir_all(output_path.parent().unwrap())?;
    std::fs::write(&output_path, bytes)
      .with_context(|| format!("Writing {}", output_path.display()))?;
    Ok(())
  }

@@ -538,39 +553,39 @@ impl<'a> DenoCompileBinaryWriter<'a> {
    graph: &ModuleGraph,
    root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
    entrypoint: &ModuleSpecifier,
    cli_options: &CliOptions,
    include_files: &[ModuleSpecifier],
    compile_flags: &CompileFlags,
  ) -> Result<(), AnyError> {
    let ca_data = match cli_options.ca_data() {
    let ca_data = match self.cli_options.ca_data() {
      Some(CaData::File(ca_file)) => Some(
        std::fs::read(ca_file)
          .with_context(|| format!("Reading: {ca_file}"))?,
        std::fs::read(ca_file).with_context(|| format!("Reading {ca_file}"))?,
      ),
      Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
      None => None,
    };
    let root_path = root_dir_url.inner().to_file_path().unwrap();
    let (maybe_npm_vfs, node_modules, npm_snapshot) = match self
      .npm_resolver
      .as_inner()
    {
    let (maybe_npm_vfs, node_modules, npm_snapshot) =
      match self.npm_resolver.as_inner() {
        InnerCliNpmResolverRef::Managed(managed) => {
          let snapshot =
            managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
          if !snapshot.as_serialized().packages.is_empty() {
            let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
            let npm_vfs_builder = self
              .build_npm_vfs(&root_path)
              .context("Building npm vfs.")?;
            (
              Some(npm_vfs_builder),
              Some(NodeModules::Managed {
                node_modules_dir: self.npm_resolver.root_node_modules_path().map(
                  |path| {
                node_modules_dir: self
                  .npm_resolver
                  .root_node_modules_path()
                  .map(|path| {
                    root_dir_url
                      .specifier_key(
                        &ModuleSpecifier::from_directory_path(path).unwrap(),
                      )
                      .into_owned()
                  },
                ),
                  }),
              }),
              Some(snapshot),
            )

@@ -579,7 +594,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
          }
        }
        InnerCliNpmResolverRef::Byonm(resolver) => {
          let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
          let npm_vfs_builder = self.build_npm_vfs(&root_path)?;
          (
            Some(npm_vfs_builder),
            Some(NodeModules::Byonm {

@@ -603,11 +618,36 @@ impl<'a> DenoCompileBinaryWriter<'a> {
    } else {
      VfsBuilder::new(root_path.clone())?
    };
    for include_file in include_files {
      let path = deno_path_util::url_to_file_path(include_file)?;
      if path.is_dir() {
        // TODO(#26941): we should analyze if any of these are
        // modules in order to include their dependencies
        vfs
          .add_dir_recursive(&path)
          .with_context(|| format!("Including {}", path.display()))?;
      } else {
        vfs
          .add_file_at_path(&path)
          .with_context(|| format!("Including {}", path.display()))?;
      }
    }
    let mut remote_modules_store = RemoteModulesStoreBuilder::default();
    let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() {
      Some(FastInsecureHasher::new_deno_versioned())
    } else {
      None
    };
    for module in graph.modules() {
      if module.specifier().scheme() == "data" {
        continue; // don't store data urls as an entry as they're in the code
      }
      if let Some(hasher) = &mut code_cache_key_hasher {
        if let Some(source) = module.source() {
          hasher.write(module.specifier().as_str().as_bytes());
          hasher.write(source.as_bytes());
        }
      }
      let (maybe_source, media_type) = match module {
        deno_graph::Module::Js(m) => {
          let source = if m.media_type.is_emittable() {

@@ -635,6 +675,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
        deno_graph::Module::Json(m) => {
          (Some(m.source.as_bytes().to_vec()), m.media_type)
        }
        deno_graph::Module::Wasm(m) => {
          (Some(m.source.to_vec()), MediaType::Wasm)
        }
        deno_graph::Module::Npm(_)
        | deno_graph::Module::Node(_)
        | deno_graph::Module::External(_) => (None, MediaType::Unknown),

@@ -658,25 +701,33 @@ impl<'a> DenoCompileBinaryWriter<'a> {
    }
    remote_modules_store.add_redirects(&graph.redirects);

    let env_vars_from_env_file = match cli_options.env_file_name() {
      Some(env_filename) => {
    let env_vars_from_env_file = match self.cli_options.env_file_name() {
      Some(env_filenames) => {
        let mut aggregated_env_vars = IndexMap::new();
        for env_filename in env_filenames.iter().rev() {
          log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename);
        get_file_env_vars(env_filename.to_string())?

          let env_vars = get_file_env_vars(env_filename.to_string())?;
          aggregated_env_vars.extend(env_vars);
        }
        aggregated_env_vars
      }
      None => Default::default(),
    };

    let metadata = Metadata {
      argv: compile_flags.args.clone(),
      seed: cli_options.seed(),
      location: cli_options.location_flag().clone(),
      permissions: cli_options.permission_flags().clone(),
      v8_flags: cli_options.v8_flags().clone(),
      unsafely_ignore_certificate_errors: cli_options
      seed: self.cli_options.seed(),
      code_cache_key: code_cache_key_hasher.map(|h| h.finish()),
      location: self.cli_options.location_flag().clone(),
      permissions: self.cli_options.permission_flags().clone(),
      v8_flags: self.cli_options.v8_flags().clone(),
      unsafely_ignore_certificate_errors: self
        .cli_options
        .unsafely_ignore_certificate_errors()
        .clone(),
      log_level: cli_options.log_level(),
      ca_stores: cli_options.ca_stores().clone(),
      log_level: self.cli_options.log_level(),
      ca_stores: self.cli_options.ca_stores().clone(),
      ca_data,
      env_vars_from_env_file,
      entrypoint_key: root_dir_url.specifier_key(entrypoint).into_owned(),
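// Precedence note for the env-file hunk above: the files are iterated in
// reverse (`env_filenames.iter().rev()`) and merged with `extend`, so the
// first-listed file is applied last and wins on conflicting keys. Assuming
// flags accumulate in the order given:
//
//   deno compile --env-file=.env.local --env-file=.env ...
//   // .env is read first, then .env.local overwrites shared variables
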
@@ -719,11 +770,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
      node_modules,
      unstable_config: UnstableConfig {
        legacy_flag_enabled: false,
        bare_node_builtins: cli_options.unstable_bare_node_builtins(),
        sloppy_imports: cli_options.unstable_sloppy_imports(),
        features: cli_options.unstable_features(),
        bare_node_builtins: self.cli_options.unstable_bare_node_builtins(),
        sloppy_imports: self.cli_options.unstable_sloppy_imports(),
        features: self.cli_options.unstable_features(),
      },
      otel_config: cli_options.otel_config(),
      otel_config: self.cli_options.otel_config(),
    };

    write_binary_bytes(

@@ -735,13 +786,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
      vfs,
      compile_flags,
    )
    .context("Writing binary bytes")
  }

  fn build_npm_vfs(
    &self,
    root_path: &Path,
    cli_options: &CliOptions,
  ) -> Result<VfsBuilder, AnyError> {
  fn build_npm_vfs(&self, root_path: &Path) -> Result<VfsBuilder, AnyError> {
    fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
      if system_info != &NpmSystemInfo::default() {
        log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));

@@ -818,13 +866,18 @@ impl<'a> DenoCompileBinaryWriter<'a> {
      InnerCliNpmResolverRef::Byonm(_) => {
        maybe_warn_different_system(&self.npm_system_info);
        let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
        for pkg_json in cli_options.workspace().package_jsons() {
        for pkg_json in self.cli_options.workspace().package_jsons() {
          builder.add_file_at_path(&pkg_json.path)?;
        }
        // traverse and add all the node_modules directories in the workspace
        let mut pending_dirs = VecDeque::new();
        pending_dirs.push_back(
          cli_options.workspace().root_dir().to_file_path().unwrap(),
          self
            .cli_options
            .workspace()
            .root_dir()
            .to_file_path()
            .unwrap(),
        );
        while let Some(pending_dir) = pending_dirs.pop_front() {
          let mut entries = fs::read_dir(&pending_dir)
523
cli/standalone/code_cache.rs
Normal file

@@ -0,0 +1,523 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::BufReader;
use std::io::BufWriter;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::sync::AtomicFlag;
use deno_runtime::code_cache::CodeCache;
use deno_runtime::code_cache::CodeCacheType;

use crate::cache::FastInsecureHasher;
use crate::util::path::get_atomic_file_path;
use crate::worker::CliCodeCache;

enum CodeCacheStrategy {
  FirstRun(FirstRunCodeCacheStrategy),
  SubsequentRun(SubsequentRunCodeCacheStrategy),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DenoCompileCodeCacheEntry {
  pub source_hash: u64,
  pub data: Vec<u8>,
}

pub struct DenoCompileCodeCache {
  strategy: CodeCacheStrategy,
}

impl DenoCompileCodeCache {
  pub fn new(file_path: PathBuf, cache_key: u64) -> Self {
    // attempt to deserialize the cache data
    match deserialize(&file_path, cache_key) {
      Ok(data) => {
        log::debug!(
          "Loaded {} code cache entries from {}",
          data.len(),
          file_path.display()
        );
        Self {
          strategy: CodeCacheStrategy::SubsequentRun(
            SubsequentRunCodeCacheStrategy {
              is_finished: AtomicFlag::lowered(),
              data: Mutex::new(data),
            },
          ),
        }
      }
      Err(err) => {
        log::debug!(
          "Failed to deserialize code cache from {}: {:#}",
          file_path.display(),
          err
        );
        Self {
          strategy: CodeCacheStrategy::FirstRun(FirstRunCodeCacheStrategy {
            cache_key,
            file_path,
            is_finished: AtomicFlag::lowered(),
            data: Mutex::new(FirstRunCodeCacheData {
              cache: HashMap::new(),
              add_count: 0,
            }),
          }),
        }
      }
    }
  }
}
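// Lifecycle summary for the constructor above: a readable cache file whose
// key matches selects the `SubsequentRun` strategy (serve entries and drain
// the map), while any deserialize failure (missing file, key mismatch,
// corruption) selects `FirstRun`, which counts `get_sync` misses and writes
// the file once the same number of `set_sync` calls has arrived:
//
//   first run:  get_sync -> None (add_count += 1) ... set_sync -> serialize
//               to disk when add_count reaches 0
//   later runs: get_sync -> take the entry from the deserialized map
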
impl CodeCache for DenoCompileCodeCache {
  fn get_sync(
    &self,
    specifier: &ModuleSpecifier,
    code_cache_type: CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        if !strategy.is_finished.is_raised() {
          // we keep track of how many times the cache is requested
          // then serialize the cache when we get that number of
          // "set" calls
          strategy.data.lock().add_count += 1;
        }
        None
      }
      CodeCacheStrategy::SubsequentRun(strategy) => {
        if strategy.is_finished.is_raised() {
          return None;
        }
        strategy.take_from_cache(specifier, code_cache_type, source_hash)
      }
    }
  }

  fn set_sync(
    &self,
    specifier: ModuleSpecifier,
    code_cache_type: CodeCacheType,
    source_hash: u64,
    bytes: &[u8],
  ) {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        if strategy.is_finished.is_raised() {
          return;
        }

        let data_to_serialize = {
          let mut data = strategy.data.lock();
          data.cache.insert(
            (specifier.to_string(), code_cache_type),
            DenoCompileCodeCacheEntry {
              source_hash,
              data: bytes.to_vec(),
            },
          );
          if data.add_count != 0 {
            data.add_count -= 1;
          }
          if data.add_count == 0 {
            // don't allow using the cache anymore
            strategy.is_finished.raise();
            if data.cache.is_empty() {
              None
            } else {
              Some(std::mem::take(&mut data.cache))
            }
          } else {
            None
          }
        };
        if let Some(cache_data) = &data_to_serialize {
          strategy.write_cache_data(cache_data);
        }
      }
      CodeCacheStrategy::SubsequentRun(_) => {
        // do nothing
      }
    }
  }
}

impl CliCodeCache for DenoCompileCodeCache {
  fn enabled(&self) -> bool {
    match &self.strategy {
      CodeCacheStrategy::FirstRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
      CodeCacheStrategy::SubsequentRun(strategy) => {
        !strategy.is_finished.is_raised()
      }
    }
  }

  fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache> {
    self
  }
}

type CodeCacheKey = (String, CodeCacheType);

struct FirstRunCodeCacheData {
  cache: HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
  add_count: usize,
}

struct FirstRunCodeCacheStrategy {
  cache_key: u64,
  file_path: PathBuf,
  is_finished: AtomicFlag,
  data: Mutex<FirstRunCodeCacheData>,
}

impl FirstRunCodeCacheStrategy {
  fn write_cache_data(
    &self,
    cache_data: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
  ) {
    let count = cache_data.len();
    let temp_file = get_atomic_file_path(&self.file_path);
    match serialize(&temp_file, self.cache_key, cache_data) {
      Ok(()) => {
        if let Err(err) = std::fs::rename(&temp_file, &self.file_path) {
          log::debug!("Failed to rename code cache: {}", err);
          let _ = std::fs::remove_file(&temp_file);
        } else {
          log::debug!("Serialized {} code cache entries", count);
        }
      }
      Err(err) => {
        let _ = std::fs::remove_file(&temp_file);
        log::debug!("Failed to serialize code cache: {}", err);
      }
    }
  }
}

struct SubsequentRunCodeCacheStrategy {
  is_finished: AtomicFlag,
  data: Mutex<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>>,
}

impl SubsequentRunCodeCacheStrategy {
  fn take_from_cache(
    &self,
    specifier: &ModuleSpecifier,
    code_cache_type: CodeCacheType,
    source_hash: u64,
  ) -> Option<Vec<u8>> {
    let mut data = self.data.lock();
    // todo(dsherret): how to avoid the clone here?
    let entry = data.remove(&(specifier.to_string(), code_cache_type))?;
    if entry.source_hash != source_hash {
      return None;
    }
    if data.is_empty() {
      self.is_finished.raise();
    }
    Some(entry.data)
  }
}

/// File format:
/// - <header>
///   - <cache key>
///   - <u32: number of entries>
/// - <[entry length]> - u64 * number of entries
/// - <[entry]>
///   - <[u8]: entry data>
///   - <String: specifier>
///   - <u8>: code cache type
///   - <u32: specifier length>
///   - <u64: source hash>
///   - <u64: entry data hash>
fn serialize(
  file_path: &Path,
  cache_key: u64,
  cache: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
) -> Result<(), AnyError> {
  let cache_file = std::fs::OpenOptions::new()
    .create(true)
    .truncate(true)
    .write(true)
    .open(file_path)?;
  let mut writer = BufWriter::new(cache_file);
  serialize_with_writer(&mut writer, cache_key, cache)
}

fn serialize_with_writer<T: Write>(
  writer: &mut BufWriter<T>,
  cache_key: u64,
  cache: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>,
) -> Result<(), AnyError> {
  // header
  writer.write_all(&cache_key.to_le_bytes())?;
  writer.write_all(&(cache.len() as u32).to_le_bytes())?;
  // lengths of each entry
  for ((specifier, _), entry) in cache {
    let len: u64 =
      entry.data.len() as u64 + specifier.len() as u64 + 1 + 4 + 8 + 8;
    writer.write_all(&len.to_le_bytes())?;
  }
  // entries
  for ((specifier, code_cache_type), entry) in cache {
    writer.write_all(&entry.data)?;
    writer.write_all(&[match code_cache_type {
      CodeCacheType::EsModule => 0,
      CodeCacheType::Script => 1,
    }])?;
    writer.write_all(specifier.as_bytes())?;
    writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
    writer.write_all(&entry.source_hash.to_le_bytes())?;
    let hash: u64 = FastInsecureHasher::new_without_deno_version()
      .write(&entry.data)
      .finish();
    writer.write_all(&hash.to_le_bytes())?;
  }

  writer.flush()?;

  Ok(())
}
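// Worked example of the entry length computed in `serialize_with_writer`
// above (hypothetical values, matching
// `data.len() + specifier.len() + 1 + 4 + 8 + 8`):
//
//   entry ("file:///a.ts", EsModule) with 3 bytes of cache data:
//     3 (data) + 12 (specifier) + 1 (type byte) + 4 (specifier length)
//       + 8 (source hash) + 8 (entry data hash) = 36 bytes
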
fn deserialize(
  file_path: &Path,
  expected_cache_key: u64,
) -> Result<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, AnyError> {
  let cache_file = std::fs::File::open(file_path)?;
  let mut reader = BufReader::new(cache_file);
  deserialize_with_reader(&mut reader, expected_cache_key)
}

fn deserialize_with_reader<T: Read>(
  reader: &mut BufReader<T>,
  expected_cache_key: u64,
) -> Result<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, AnyError> {
  // it's very important to use this below so that a corrupt cache file
  // doesn't cause a memory allocation error
  fn new_vec_sized<T: Clone>(
    capacity: usize,
    default_value: T,
  ) -> Result<Vec<T>, AnyError> {
    let mut vec = Vec::new();
    vec.try_reserve(capacity)?;
    vec.resize(capacity, default_value);
    Ok(vec)
  }

  fn try_subtract(a: usize, b: usize) -> Result<usize, AnyError> {
    if a < b {
      bail!("Integer underflow");
    }
    Ok(a - b)
  }

  let mut header_bytes = vec![0; 8 + 4];
  reader.read_exact(&mut header_bytes)?;
  let actual_cache_key = u64::from_le_bytes(header_bytes[..8].try_into()?);
  if actual_cache_key != expected_cache_key {
    // cache bust
    bail!("Cache key mismatch");
  }
  let len = u32::from_le_bytes(header_bytes[8..].try_into()?) as usize;
  // read the lengths for each entry found in the file
  let entry_len_bytes_capacity = len * 8;
  let mut entry_len_bytes = new_vec_sized(entry_len_bytes_capacity, 0)?;
  reader.read_exact(&mut entry_len_bytes)?;
  let mut lengths = Vec::new();
  lengths.try_reserve(len)?;
  for i in 0..len {
    let pos = i * 8;
    lengths.push(
      u64::from_le_bytes(entry_len_bytes[pos..pos + 8].try_into()?) as usize,
    );
  }

  let mut map = HashMap::new();
  map.try_reserve(len)?;
  for len in lengths {
    let mut buffer = new_vec_sized(len, 0)?;
    reader.read_exact(&mut buffer)?;
    let entry_data_hash_start_pos = try_subtract(buffer.len(), 8)?;
    let expected_entry_data_hash =
      u64::from_le_bytes(buffer[entry_data_hash_start_pos..].try_into()?);
    let source_hash_start_pos = try_subtract(entry_data_hash_start_pos, 8)?;
    let source_hash = u64::from_le_bytes(
      buffer[source_hash_start_pos..entry_data_hash_start_pos].try_into()?,
    );
    let specifier_end_pos = try_subtract(source_hash_start_pos, 4)?;
    let specifier_len = u32::from_le_bytes(
      buffer[specifier_end_pos..source_hash_start_pos].try_into()?,
    ) as usize;
    let specifier_start_pos = try_subtract(specifier_end_pos, specifier_len)?;
    let specifier = String::from_utf8(
      buffer[specifier_start_pos..specifier_end_pos].to_vec(),
    )?;
    let code_cache_type_pos = try_subtract(specifier_start_pos, 1)?;
    let code_cache_type = match buffer[code_cache_type_pos] {
      0 => CodeCacheType::EsModule,
      1 => CodeCacheType::Script,
      _ => bail!("Invalid code cache type"),
    };
    buffer.truncate(code_cache_type_pos);
    let actual_entry_data_hash: u64 =
      FastInsecureHasher::new_without_deno_version()
        .write(&buffer)
        .finish();
    if expected_entry_data_hash != actual_entry_data_hash {
      bail!("Hash mismatch.")
    }
    map.insert(
      (specifier, code_cache_type),
      DenoCompileCodeCacheEntry {
        source_hash,
        data: buffer,
      },
    );
  }

  Ok(map)
}
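// Why `deserialize_with_reader` above walks each entry backwards: the
// variable-length fields sit at the front of an entry and all fixed-width
// fields at the back, so the parser anchors at the end of the buffer and
// subtracts known widths (`try_subtract`) until only the data bytes remain:
//
//   [ data .. | type:1 | specifier .. | spec_len:4 | src_hash:8 | data_hash:8 ]
//                                       ^ parsed right to left
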
#[cfg(test)]
mod test {
  use test_util::TempDir;

  use super::*;
  use std::fs::File;

  #[test]
  fn serialize_deserialize() {
    let cache_key = 123456;
    let cache = {
      let mut cache = HashMap::new();
      cache.insert(
        ("specifier1".to_string(), CodeCacheType::EsModule),
        DenoCompileCodeCacheEntry {
          source_hash: 1,
          data: vec![1, 2, 3],
        },
      );
      cache.insert(
        ("specifier2".to_string(), CodeCacheType::EsModule),
        DenoCompileCodeCacheEntry {
          source_hash: 2,
          data: vec![4, 5, 6],
        },
      );
      cache.insert(
        ("specifier2".to_string(), CodeCacheType::Script),
        DenoCompileCodeCacheEntry {
          source_hash: 2,
          data: vec![6, 5, 1],
        },
      );
      cache
    };
    let mut buffer = Vec::new();
    serialize_with_writer(&mut BufWriter::new(&mut buffer), cache_key, &cache)
      .unwrap();
    let deserialized =
      deserialize_with_reader(&mut BufReader::new(&buffer[..]), cache_key)
        .unwrap();
    assert_eq!(cache, deserialized);
  }

  #[test]
  fn serialize_deserialize_empty() {
    let cache_key = 1234;
    let cache = HashMap::new();
    let mut buffer = Vec::new();
    serialize_with_writer(&mut BufWriter::new(&mut buffer), cache_key, &cache)
      .unwrap();
    let deserialized =
      deserialize_with_reader(&mut BufReader::new(&buffer[..]), cache_key)
        .unwrap();
    assert_eq!(cache, deserialized);
  }

  #[test]
  fn serialize_deserialize_corrupt() {
    let buffer = "corrupttestingtestingtesting".as_bytes().to_vec();
    let err = deserialize_with_reader(&mut BufReader::new(&buffer[..]), 1234)
      .unwrap_err();
    assert_eq!(err.to_string(), "Cache key mismatch");
  }

  #[test]
  fn code_cache() {
    let temp_dir = TempDir::new();
    let file_path = temp_dir.path().join("cache.bin").to_path_buf();
    let url1 = ModuleSpecifier::parse("https://deno.land/example1.js").unwrap();
    let url2 = ModuleSpecifier::parse("https://deno.land/example2.js").unwrap();
    // first run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234);
      assert!(code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .is_none());
      assert!(code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 1)
        .is_none());
      assert!(code_cache.enabled());
      code_cache.set_sync(url1.clone(), CodeCacheType::EsModule, 0, &[1, 2, 3]);
      assert!(code_cache.enabled());
      assert!(!file_path.exists());
      code_cache.set_sync(url2.clone(), CodeCacheType::EsModule, 1, &[2, 1, 3]);
      assert!(file_path.exists()); // now the new code cache exists
      assert!(!code_cache.enabled()); // no longer enabled
    }
    // second run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234);
      assert!(code_cache.enabled());
      let result1 = code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .unwrap();
      assert!(code_cache.enabled());
      let result2 = code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 1)
        .unwrap();
      assert!(!code_cache.enabled()); // no longer enabled
      assert_eq!(result1, vec![1, 2, 3]);
      assert_eq!(result2, vec![2, 1, 3]);
    }

    // new cache key first run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 54321);
      assert!(code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .is_none());
      assert!(code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 1)
        .is_none());
      code_cache.set_sync(url1.clone(), CodeCacheType::EsModule, 0, &[2, 2, 3]);
      code_cache.set_sync(url2.clone(), CodeCacheType::EsModule, 1, &[3, 2, 3]);
    }
    // new cache key second run
    {
      let code_cache = DenoCompileCodeCache::new(file_path.clone(), 54321);
      let result1 = code_cache
        .get_sync(&url1, CodeCacheType::EsModule, 0)
        .unwrap();
      assert_eq!(result1, vec![2, 2, 3]);
      assert!(code_cache
        .get_sync(&url2, CodeCacheType::EsModule, 5) // different hash will cause none
        .is_none());
    }
  }
}
@ -7,6 +7,7 @@
|
|||
|
||||
use binary::StandaloneData;
|
||||
use binary::StandaloneModules;
|
||||
use code_cache::DenoCompileCodeCache;
|
||||
use deno_ast::MediaType;
|
||||
use deno_cache_dir::npm::NpmCacheDir;
|
||||
use deno_config::workspace::MappedResolution;
|
||||
|
@ -17,6 +18,7 @@ use deno_core::anyhow::Context;
|
|||
use deno_core::error::generic_error;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::future::LocalBoxFuture;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::v8_set_flags;
|
||||
use deno_core::FastString;
|
||||
|
@ -27,8 +29,10 @@ use deno_core::ModuleSpecifier;
|
|||
use deno_core::ModuleType;
|
||||
use deno_core::RequestedModuleType;
|
||||
use deno_core::ResolutionKind;
|
||||
use deno_core::SourceCodeCacheInfo;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_resolver::npm::NpmReqResolverOptions;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::create_host_defined_options;
|
||||
use deno_runtime::deno_node::NodeRequireLoader;
|
||||
|
@ -63,6 +67,7 @@ use crate::args::StorageKeyResolver;
|
|||
use crate::cache::Caches;
|
||||
use crate::cache::DenoCacheEnvFsAdapter;
|
||||
use crate::cache::DenoDirProvider;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::cache::RealDenoCacheEnv;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
|
@ -79,18 +84,20 @@ use crate::npm::CliNpmResolverManagedSnapshotOption;
|
|||
use crate::npm::CreateInNpmPkgCheckerOptions;
|
||||
use crate::resolver::CjsTracker;
|
||||
use crate::resolver::CliDenoResolverFs;
|
||||
use crate::resolver::CliNodeResolver;
|
||||
use crate::resolver::CliNpmReqResolver;
|
||||
use crate::resolver::IsCjsResolverOptions;
|
||||
use crate::resolver::NpmModuleLoader;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
use crate::util::v8::construct_v8_flags;
|
||||
use crate::worker::CliCodeCache;
|
||||
use crate::worker::CliMainWorkerFactory;
|
||||
use crate::worker::CliMainWorkerOptions;
|
||||
use crate::worker::CreateModuleLoaderResult;
|
||||
use crate::worker::ModuleLoaderFactory;
|
||||
|
||||
pub mod binary;
|
||||
mod code_cache;
|
||||
mod file_system;
|
||||
mod serialization;
|
||||
mod virtual_fs;
|
||||
|
@ -107,10 +114,40 @@ struct SharedModuleLoaderState {
|
|||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
modules: StandaloneModules,
|
||||
node_code_translator: Arc<CliNodeCodeTranslator>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
node_resolver: Arc<NodeResolver>,
|
||||
npm_module_loader: Arc<NpmModuleLoader>,
|
||||
npm_req_resolver: Arc<CliNpmReqResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
workspace_resolver: WorkspaceResolver,
|
||||
code_cache: Option<Arc<dyn CliCodeCache>>,
|
||||
}
|
||||
|
||||
impl SharedModuleLoaderState {
|
||||
fn get_code_cache(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
source: &[u8],
|
||||
) -> Option<SourceCodeCacheInfo> {
|
||||
let Some(code_cache) = &self.code_cache else {
|
||||
return None;
|
||||
};
|
||||
if !code_cache.enabled() {
|
||||
return None;
|
||||
}
|
||||
// deno version is already included in the root cache key
|
||||
let hash = FastInsecureHasher::new_without_deno_version()
|
||||
.write_hashable(source)
|
||||
.finish();
|
||||
let data = code_cache.get_sync(
|
||||
specifier,
|
||||
deno_runtime::code_cache::CodeCacheType::EsModule,
|
||||
hash,
|
||||
);
|
||||
Some(SourceCodeCacheInfo {
|
||||
hash,
|
||||
data: data.map(Cow::Owned),
|
||||
})
|
||||
}
|
||||
}
|
||||
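The get_code_cache helper above is the read side of the bytecode cache: the module source is hashed, and a stored entry only counts as a hit when it was written under that same hash. A minimal standalone sketch of that rule (the CodeCache type and the FNV-1a constants here are illustrative stand-ins, not Deno's FastInsecureHasher):

use std::collections::HashMap;

/// Illustrative stand-in for a source-hash-validated code cache.
struct CodeCache {
  // module specifier -> (source hash, cached bytecode)
  entries: HashMap<String, (u64, Vec<u8>)>,
}

impl CodeCache {
  fn new() -> Self {
    Self { entries: HashMap::new() }
  }

  /// Simple FNV-1a, standing in for the "fast insecure" source hash.
  fn hash(source: &[u8]) -> u64 {
    let mut h: u64 = 0xcbf29ce484222325;
    for b in source {
      h ^= *b as u64;
      h = h.wrapping_mul(0x100000001b3);
    }
    h
  }

  fn set(&mut self, specifier: &str, source: &[u8], data: Vec<u8>) {
    self.entries.insert(specifier.to_string(), (Self::hash(source), data));
  }

  /// Returns the cached data only when the stored hash matches the
  /// current source; a stale entry behaves like a cache miss.
  fn get(&self, specifier: &str, source: &[u8]) -> Option<&[u8]> {
    let (hash, data) = self.entries.get(specifier)?;
    (*hash == Self::hash(source)).then_some(data.as_slice())
  }
}

fn main() {
  let mut cache = CodeCache::new();
  cache.set("file:///main.ts", b"console.log(1);", vec![1, 2, 3]);
  assert_eq!(
    cache.get("file:///main.ts", b"console.log(1);"),
    Some(&[1u8, 2, 3][..])
  );
  // edited source -> hash mismatch -> miss
  assert_eq!(cache.get("file:///main.ts", b"console.log(2);"), None);
}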

#[derive(Clone)]

@@ -190,7 +227,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
        self
          .shared
          .node_resolver
          .resolve_package_sub_path_from_deno_module(
          .resolve_package_subpath_from_deno_module(
            pkg_json.dir_path(),
            sub_path.as_deref(),
            Some(&referrer),

@@ -204,15 +241,17 @@ impl ModuleLoader for EmbeddedModuleLoader {
        alias,
        ..
      }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? {
        PackageJsonDepValue::Req(req) => {
          self.shared.node_resolver.resolve_req_with_sub_path(
        PackageJsonDepValue::Req(req) => self
          .shared
          .npm_req_resolver
          .resolve_req_with_sub_path(
            req,
            sub_path.as_deref(),
            &referrer,
            referrer_kind,
            NodeResolutionMode::Execution,
          )
        }
          .map_err(AnyError::from),
        PackageJsonDepValue::Workspace(version_req) => {
          let pkg_folder = self
            .shared

@@ -225,7 +264,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
          self
            .shared
            .node_resolver
            .resolve_package_sub_path_from_deno_module(
            .resolve_package_subpath_from_deno_module(
              pkg_folder,
              sub_path.as_deref(),
              Some(&referrer),

@@ -240,12 +279,12 @@ impl ModuleLoader for EmbeddedModuleLoader {
      if let Ok(reference) =
        NpmPackageReqReference::from_specifier(&specifier)
      {
        return self.shared.node_resolver.resolve_req_reference(
        return Ok(self.shared.npm_req_resolver.resolve_req_reference(
          &reference,
          &referrer,
          referrer_kind,
          NodeResolutionMode::Execution,
        );
        )?);
      }

      if specifier.scheme() == "jsr" {

@@ -260,14 +299,14 @@ impl ModuleLoader for EmbeddedModuleLoader {
        self
          .shared
          .node_resolver
          .handle_if_in_node_modules(&specifier)?
          .handle_if_in_node_modules(&specifier)
          .unwrap_or(specifier),
      )
    }
    Err(err)
      if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
    {
      let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg(
      let maybe_res = self.shared.npm_req_resolver.resolve_if_for_npm_pkg(
        raw_specifier,
        &referrer,
        referrer_kind,

@@ -325,14 +364,19 @@ impl ModuleLoader for EmbeddedModuleLoader {
    }

    if self.shared.node_resolver.in_npm_package(original_specifier) {
      let npm_module_loader = self.shared.npm_module_loader.clone();
      let shared = self.shared.clone();
      let original_specifier = original_specifier.clone();
      let maybe_referrer = maybe_referrer.cloned();
      return deno_core::ModuleLoadResponse::Async(
        async move {
          let code_source = npm_module_loader
          let code_source = shared
            .npm_module_loader
            .load(&original_specifier, maybe_referrer.as_ref())
            .await?;
          let code_cache_entry = shared.get_code_cache(
            &code_source.found_url,
            code_source.code.as_bytes(),
          );
          Ok(deno_core::ModuleSource::new_with_redirect(
            match code_source.media_type {
              MediaType::Json => ModuleType::Json,

@@ -341,7 +385,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
            code_source.code,
            &original_specifier,
            &code_source.found_url,
            None,
            code_cache_entry,
          ))
        }
        .boxed_local(),

@@ -394,25 +438,30 @@ impl ModuleLoader for EmbeddedModuleLoader {
              ModuleSourceCode::String(FastString::from_static(source))
            }
          };
          let code_cache_entry = shared
            .get_code_cache(&module_specifier, module_source.as_bytes());
          Ok(deno_core::ModuleSource::new_with_redirect(
            module_type,
            module_source,
            &original_specifier,
            &module_specifier,
            None,
            code_cache_entry,
          ))
        }
        .boxed_local(),
      )
    } else {
      let module_source = module_source.into_for_v8();
      let code_cache_entry = self
        .shared
        .get_code_cache(module_specifier, module_source.as_bytes());
      deno_core::ModuleLoadResponse::Sync(Ok(
        deno_core::ModuleSource::new_with_redirect(
          module_type,
          module_source,
          original_specifier,
          module_specifier,
          None,
          code_cache_entry,
        ),
      ))
    }

@@ -425,6 +474,23 @@ impl ModuleLoader for EmbeddedModuleLoader {
      ))),
    }
  }

  fn code_cache_ready(
    &self,
    specifier: ModuleSpecifier,
    source_hash: u64,
    code_cache_data: &[u8],
  ) -> LocalBoxFuture<'static, ()> {
    if let Some(code_cache) = &self.shared.code_cache {
      code_cache.set_sync(
        specifier,
        deno_runtime::code_cache::CodeCacheType::EsModule,
        source_hash,
        code_cache_data,
      );
    }
    std::future::ready(()).boxed_local()
  }
}
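code_cache_ready has an async signature but does its work synchronously: the write happens eagerly and an already-completed future is handed back. A small sketch of that shape, assuming the futures crate for LocalBoxFuture and boxed_local:

use futures::future::LocalBoxFuture;
use futures::FutureExt;

/// The caller expects a future, but the cache write is synchronous, so the
/// work happens eagerly and a ready future is returned (sink stands in for
/// the real cache here).
fn code_cache_ready(data: Vec<u8>, sink: &mut Vec<Vec<u8>>) -> LocalBoxFuture<'static, ()> {
  sink.push(data); // synchronous "set_sync" stand-in
  std::future::ready(()).boxed_local()
}

fn main() {
  let mut sink = Vec::new();
  let fut = code_cache_ready(vec![1, 2, 3], &mut sink);
  // awaiting completes immediately; the side effect already happened
  futures::executor::block_on(fut);
  assert_eq!(sink.len(), 1);
}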

impl NodeRequireLoader for EmbeddedModuleLoader {

@@ -651,7 +717,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
  let node_resolver = Arc::new(NodeResolver::new(
    deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
    in_npm_pkg_checker.clone(),
    npm_resolver.clone().into_npm_resolver(),
    npm_resolver.clone().into_npm_pkg_folder_resolver(),
    pkg_json_resolver.clone(),
  ));
  let cjs_tracker = Arc::new(CjsTracker::new(

@@ -664,12 +730,14 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
  ));
  let cache_db = Caches::new(deno_dir_provider.clone());
  let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
  let cli_node_resolver = Arc::new(CliNodeResolver::new(
    fs.clone(),
    in_npm_pkg_checker.clone(),
    node_resolver.clone(),
    npm_resolver.clone(),
  ));
  let npm_req_resolver =
    Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions {
      byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(),
      fs: CliDenoResolverFs(fs.clone()),
      in_npm_pkg_checker: in_npm_pkg_checker.clone(),
      node_resolver: node_resolver.clone(),
      npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(),
    }));
  let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new(
    node_analysis_cache,
    cjs_tracker.clone(),

@@ -681,7 +749,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
    deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
    in_npm_pkg_checker,
    node_resolver.clone(),
    npm_resolver.clone().into_npm_resolver(),
    npm_resolver.clone().into_npm_pkg_folder_resolver(),
    pkg_json_resolver.clone(),
  ));
  let workspace_resolver = {

@@ -733,20 +801,35 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
      metadata.workspace_resolver.pkg_json_resolution,
    )
  };
  let code_cache = match metadata.code_cache_key {
    Some(code_cache_key) => Some(Arc::new(DenoCompileCodeCache::new(
      root_path.with_file_name(format!(
        "{}.cache",
        root_path.file_name().unwrap().to_string_lossy()
      )),
      code_cache_key,
    )) as Arc<dyn CliCodeCache>),
    None => {
      log::debug!("Code cache disabled.");
      None
    }
  };
  let module_loader_factory = StandaloneModuleLoaderFactory {
    shared: Arc::new(SharedModuleLoaderState {
      cjs_tracker: cjs_tracker.clone(),
      fs: fs.clone(),
      modules,
      node_code_translator: node_code_translator.clone(),
      node_resolver: cli_node_resolver.clone(),
      node_resolver: node_resolver.clone(),
      npm_module_loader: Arc::new(NpmModuleLoader::new(
        cjs_tracker.clone(),
        fs.clone(),
        node_code_translator,
      )),
      code_cache: code_cache.clone(),
      npm_resolver: npm_resolver.clone(),
      workspace_resolver,
      npm_req_resolver,
    }),
  };

@@ -785,8 +868,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
  });
  let worker_factory = CliMainWorkerFactory::new(
    Arc::new(BlobStore::default()),
    // Code cache is not supported for standalone binary yet.
    None,
    code_cache,
    feature_checker,
    fs,
    None,

@@ -51,7 +51,8 @@ pub struct VfsBuilder {

impl VfsBuilder {
  pub fn new(root_path: PathBuf) -> Result<Self, AnyError> {
    let root_path = canonicalize_path(&root_path)?;
    let root_path = canonicalize_path(&root_path)
      .with_context(|| format!("Canonicalizing {}", root_path.display()))?;
    log::debug!("Building vfs with root '{}'", root_path.display());
    Ok(Self {
      root_dir: VirtualDirectory {

@@ -633,7 +634,7 @@ impl FileBackedVfsFile {
  }

  fn read_to_buf(&self, buf: &mut [u8]) -> FsResult<usize> {
    let pos = {
    let read_pos = {
      let mut pos = self.pos.lock();
      let read_pos = *pos;
      // advance the position due to the read

@@ -642,12 +643,12 @@ impl FileBackedVfsFile {
    };
    self
      .vfs
      .read_file(&self.file, pos, buf)
      .read_file(&self.file, read_pos, buf)
      .map_err(|err| err.into())
  }

  fn read_to_end(&self) -> FsResult<Vec<u8>> {
    let pos = {
    let read_pos = {
      let mut pos = self.pos.lock();
      let read_pos = *pos;
      // todo(dsherret): should this always set it to the end of the file?

@@ -657,12 +658,12 @@ impl FileBackedVfsFile {
      }
      read_pos
    };
    if pos > self.file.len {
    if read_pos > self.file.len {
      return Ok(Vec::new());
    }
    let size = (self.file.len - pos) as usize;
    let size = (self.file.len - read_pos) as usize;
    let mut buf = vec![0; size];
    self.vfs.read_file(&self.file, pos, &mut buf)?;
    self.vfs.read_file(&self.file, read_pos, &mut buf)?;
    Ok(buf)
  }
}

@@ -892,8 +893,9 @@ impl FileBackedVfs {
    buf: &mut [u8],
  ) -> std::io::Result<usize> {
    let read_range = self.get_read_range(file, pos, buf.len() as u64)?;
    buf.copy_from_slice(&self.vfs_data[read_range]);
    Ok(buf.len())
    let read_len = read_range.len();
    buf[..read_len].copy_from_slice(&self.vfs_data[read_range]);
    Ok(read_len)
  }

  fn get_read_range(

@@ -902,15 +904,15 @@ impl FileBackedVfs {
    pos: u64,
    len: u64,
  ) -> std::io::Result<Range<usize>> {
    let data = &self.vfs_data;
    let start = self.fs_root.start_file_offset + file.offset + pos;
    let end = start + len;
    if end > data.len() as u64 {
    if pos > file.len {
      return Err(std::io::Error::new(
        std::io::ErrorKind::UnexpectedEof,
        "unexpected EOF",
      ));
    }
    let file_offset = self.fs_root.start_file_offset + file.offset;
    let start = file_offset + pos;
    let end = file_offset + std::cmp::min(pos + len, file.len);
    Ok(start as usize..end as usize)
  }
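The rewritten get_read_range treats a read that starts past the end of the file as an error and clamps a read that merely extends past the end to the bytes that remain, instead of slicing out of bounds. The same arithmetic as a standalone function (the offsets and lengths in main are invented for the example):

use std::ops::Range;

/// Clamped read-range computation in the spirit of the fix above.
fn get_read_range(
  file_offset: u64,
  file_len: u64,
  pos: u64,
  len: u64,
) -> std::io::Result<Range<usize>> {
  if pos > file_len {
    // starting past EOF is a real error
    return Err(std::io::Error::new(
      std::io::ErrorKind::UnexpectedEof,
      "unexpected EOF",
    ));
  }
  let start = file_offset + pos;
  // a read extending past EOF is truncated to the remaining bytes
  let end = file_offset + std::cmp::min(pos + len, file_len);
  Ok(start as usize..end as usize)
}

fn main() {
  // 10-byte file stored at offset 100: a 64-byte read at pos 4 yields 6 bytes
  let range = get_read_range(100, 10, 4, 64).unwrap();
  assert_eq!(range, 104..110);
  assert_eq!(range.len(), 6);
  // reading at pos 11 (> file_len) errors instead of panicking
  assert!(get_read_range(100, 10, 11, 1).is_err());
}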

@@ -483,10 +483,23 @@ fn resolve_execution_path_from_npx_shim(
  static SCRIPT_PATH_RE: Lazy<Regex> =
    lazy_regex::lazy_regex!(r#""\$basedir\/([^"]+)" "\$@""#);

  if text.starts_with("#!/usr/bin/env node") {
  let maybe_first_line = {
    let index = text.find("\n")?;
    Some(&text[0..index])
  };

  if let Some(first_line) = maybe_first_line {
    // NOTE(bartlomieju): this is not perfect, but handles the two most common scenarios
    // where Node is run without any args. If there are args then we use `NodeCommand`
    // struct.
    if first_line == "#!/usr/bin/env node"
      || first_line == "#!/usr/bin/env -S node"
    {
      // launch this file itself because it's a JS file
      Some(file_path)
    } else {
      return Some(file_path);
    }
  }

  // Search for...
  // > "$basedir/../next/dist/bin/next" "$@"
  // ...which is what it will look like on Windows

@@ -496,7 +509,6 @@ fn resolve_execution_path_from_npx_shim(
    .map(|relative_path| {
      file_path.parent().unwrap().join(relative_path.as_str())
    })
}
}
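The shim resolution above only short-circuits when the first line of the script is a bare node shebang; anything carrying extra arguments falls through to the regex path. A reduced sketch of that first-line check (is_plain_node_shebang is a hypothetical helper, not the CLI's API):

use std::path::PathBuf;

/// Only a bare `node` shebang (with or without `-S`) means the shim file
/// itself is the script to execute.
fn is_plain_node_shebang(text: &str) -> bool {
  let first_line = text.lines().next().unwrap_or("");
  first_line == "#!/usr/bin/env node" || first_line == "#!/usr/bin/env -S node"
}

fn main() {
  let path = PathBuf::from("/node_modules/.bin/example");
  assert!(is_plain_node_shebang("#!/usr/bin/env node\nconsole.log('Hi!');\n"));
  assert!(is_plain_node_shebang("#!/usr/bin/env -S node\nconsole.log('Hi!');\n"));
  // extra flags mean the shim needs real argument handling instead
  assert!(!is_plain_node_shebang("#!/usr/bin/env -S node --no-warnings\n"));
  println!("{} would be launched directly", path.display());
}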

fn resolve_managed_npm_commands(

@@ -564,6 +576,16 @@ mod test {
    let unix_shim = r#"#!/usr/bin/env node
"use strict";
console.log('Hi!');
"#;
    let path = PathBuf::from("/node_modules/.bin/example");
    assert_eq!(
      resolve_execution_path_from_npx_shim(path.clone(), unix_shim).unwrap(),
      path
    );
    // example shim on unix
    let unix_shim = r#"#!/usr/bin/env -S node
"use strict";
console.log('Hi!');
"#;
    let path = PathBuf::from("/node_modules/.bin/example");
    assert_eq!(

@@ -486,6 +486,7 @@ pub async fn run_benchmarks_with_watch(
    ),
    move |flags, watcher_communicator, changed_paths| {
      let bench_flags = bench_flags.clone();
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(async move {
        let factory = CliFactory::from_flags_for_watcher(
          flags,

@@ -380,6 +380,11 @@ fn get_check_hash(
        hasher.write_str(module.specifier.as_str());
        hasher.write_str(&module.source);
      }
      Module::Wasm(module) => {
        has_file_to_type_check = true;
        hasher.write_str(module.specifier.as_str());
        hasher.write_str(&module.source_dts);
      }
      Module::External(module) => {
        hasher.write_str(module.specifier.as_str());
      }

@@ -437,6 +442,7 @@ fn get_tsc_roots(
      | MediaType::SourceMap
      | MediaType::Unknown => None,
    },
    Module::Wasm(module) => Some((module.specifier.clone(), MediaType::Dmts)),
    Module::External(_)
    | Module::Node(_)
    | Module::Npm(_)

@@ -7,6 +7,7 @@ use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
use crate::standalone::is_standalone_binary;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;

@@ -31,15 +32,12 @@ pub async fn compile(
  let module_graph_creator = factory.module_graph_creator().await?;
  let binary_writer = factory.create_compile_binary_writer().await?;
  let http_client = factory.http_client_provider();
  let module_specifier = cli_options.resolve_main_module()?;
  let module_roots = {
    let mut vec = Vec::with_capacity(compile_flags.include.len() + 1);
    vec.push(module_specifier.clone());
    for side_module in &compile_flags.include {
      vec.push(resolve_url_or_path(side_module, cli_options.initial_cwd())?);
    }
    vec
  };
  let entrypoint = cli_options.resolve_main_module()?;
  let (module_roots, include_files) = get_module_roots_and_include_files(
    entrypoint,
    &compile_flags,
    cli_options.initial_cwd(),
  )?;

  // this is not supported, so show a warning about it, but don't error in order
  // to allow someone to still run `deno compile` when this is in a deno.json

@@ -82,18 +80,22 @@ pub async fn compile(
  check_warn_tsconfig(&ts_config_for_emit);
  let root_dir_url = resolve_root_dir_from_specifiers(
    cli_options.workspace().root_dir(),
    graph.specifiers().map(|(s, _)| s).chain(
    graph
      .specifiers()
      .map(|(s, _)| s)
      .chain(
        cli_options
          .node_modules_dir_path()
          .and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
          .iter(),
      ),
      )
      .chain(include_files.iter()),
  );
  log::debug!("Binary root dir: {}", root_dir_url);
  log::info!(
    "{} {} to {}",
    colors::green("Compile"),
    module_specifier.to_string(),
    entrypoint,
    output_path.display(),
  );
  validate_output_path(&output_path)?;

@@ -118,9 +120,9 @@ pub async fn compile(
      file,
      &graph,
      StandaloneRelativeFileBaseUrl::from(&root_dir_url),
      module_specifier,
      entrypoint,
      &include_files,
      &compile_flags,
      cli_options,
    )
    .await
    .with_context(|| {

@@ -212,6 +214,48 @@ fn validate_output_path(output_path: &Path) -> Result<(), AnyError> {
  Ok(())
}

fn get_module_roots_and_include_files(
  entrypoint: &ModuleSpecifier,
  compile_flags: &CompileFlags,
  initial_cwd: &Path,
) -> Result<(Vec<ModuleSpecifier>, Vec<ModuleSpecifier>), AnyError> {
  fn is_module_graph_module(url: &ModuleSpecifier) -> bool {
    if url.scheme() != "file" {
      return true;
    }
    let media_type = MediaType::from_specifier(url);
    match media_type {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx
      | MediaType::Json
      | MediaType::Wasm => true,
      MediaType::Css | MediaType::SourceMap | MediaType::Unknown => false,
    }
  }

  let mut module_roots = Vec::with_capacity(compile_flags.include.len() + 1);
  let mut include_files = Vec::with_capacity(compile_flags.include.len());
  module_roots.push(entrypoint.clone());
  for side_module in &compile_flags.include {
    let url = resolve_url_or_path(side_module, initial_cwd)?;
    if is_module_graph_module(&url) {
      module_roots.push(url);
    } else {
      include_files.push(url);
    }
  }
  Ok((module_roots, include_files))
}
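get_module_roots_and_include_files splits the --include entries into module-graph roots (type-checked and graphed) and verbatim include files (embedded as-is). A simplified mirror of that fan-out, keyed on file extension instead of deno_ast's MediaType (names and inputs here are invented for the example):

// Hypothetical stand-in for the media-type classification above.
fn is_module_graph_module(path: &str) -> bool {
  matches!(
    std::path::Path::new(path).extension().and_then(|e| e.to_str()),
    Some("ts" | "tsx" | "js" | "jsx" | "mjs" | "cjs" | "json" | "wasm")
  )
}

fn main() {
  let entrypoint = "main.ts";
  let include = ["worker.ts", "data.bin", "assets/logo.svg"];
  let mut module_roots = vec![entrypoint.to_string()];
  let mut include_files = Vec::new();
  for item in include {
    if is_module_graph_module(item) {
      module_roots.push(item.to_string()); // goes through the graph
    } else {
      include_files.push(item.to_string()); // embedded verbatim
    }
  }
  assert_eq!(module_roots, ["main.ts", "worker.ts"]);
  assert_eq!(include_files, ["data.bin", "assets/logo.svg"]);
}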

async fn resolve_compile_executable_output_path(
  http_client_provider: &HttpClientProvider,
  compile_flags: &CompileFlags,

200 cli/tools/doc.rs

@@ -21,6 +21,8 @@ use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_doc as doc;
use deno_doc::html::UrlResolveKind;
use deno_doc::html::UsageComposer;
use deno_doc::html::UsageComposerEntry;
use deno_graph::source::NullFileSystem;
use deno_graph::EsParser;
use deno_graph::GraphKind;

@@ -35,6 +37,9 @@ use std::sync::Arc;

const JSON_SCHEMA_VERSION: u8 = 1;

const PRISM_CSS: &str = include_str!("./doc/prism.css");
const PRISM_JS: &str = include_str!("./doc/prism.js");

async fn generate_doc_nodes_for_builtin_types(
  doc_flags: DocFlags,
  parser: &dyn EsParser,

@@ -312,10 +317,6 @@ impl deno_doc::html::HrefResolver for DocResolver {
    None
  }

  fn resolve_usage(&self, current_resolve: UrlResolveKind) -> Option<String> {
    current_resolve.get_file().map(|file| file.path.to_string())
  }

  fn resolve_source(&self, location: &deno_doc::Location) -> Option<String> {
    Some(location.filename.to_string())
  }

@@ -350,105 +351,30 @@ impl deno_doc::html::HrefResolver for DocResolver {
  }
}

struct DenoDocResolver(bool);
struct DocComposer;

impl deno_doc::html::HrefResolver for DenoDocResolver {
  fn resolve_path(
impl UsageComposer for DocComposer {
  fn is_single_mode(&self) -> bool {
    true
  }

  fn compose(
    &self,
    current: UrlResolveKind,
    target: UrlResolveKind,
  ) -> String {
    let path = deno_doc::html::href_path_resolve(current, target);
    if self.0 {
      if let Some(path) = path
        .strip_suffix("index.html")
        .or_else(|| path.strip_suffix(".html"))
      {
        return path.to_owned();
      }
    }

    path
  }

  fn resolve_global_symbol(&self, _symbol: &[String]) -> Option<String> {
    None
  }

  fn resolve_import_href(
    &self,
    _symbol: &[String],
    _src: &str,
  ) -> Option<String> {
    None
  }

  fn resolve_usage(&self, _current_resolve: UrlResolveKind) -> Option<String> {
    None
  }

  fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> {
    None
  }

  fn resolve_external_jsdoc_module(
    &self,
    _module: &str,
    _symbol: Option<&str>,
  ) -> Option<(String, String)> {
    None
  }
}

struct NodeDocResolver(bool);

impl deno_doc::html::HrefResolver for NodeDocResolver {
  fn resolve_path(
    &self,
    current: UrlResolveKind,
    target: UrlResolveKind,
  ) -> String {
    let path = deno_doc::html::href_path_resolve(current, target);
    if self.0 {
      if let Some(path) = path
        .strip_suffix("index.html")
        .or_else(|| path.strip_suffix(".html"))
      {
        return path.to_owned();
      }
    }

    path
  }

  fn resolve_global_symbol(&self, _symbol: &[String]) -> Option<String> {
    None
  }

  fn resolve_import_href(
    &self,
    _symbol: &[String],
    _src: &str,
  ) -> Option<String> {
    None
  }

  fn resolve_usage(&self, current_resolve: UrlResolveKind) -> Option<String> {
    current_resolve: UrlResolveKind,
    usage_to_md: deno_doc::html::UsageToMd,
  ) -> IndexMap<UsageComposerEntry, String> {
    current_resolve
      .get_file()
      .map(|file| format!("node:{}", file.path))
  }

  fn resolve_source(&self, _location: &deno_doc::Location) -> Option<String> {
    None
  }

  fn resolve_external_jsdoc_module(
    &self,
    _module: &str,
    _symbol: Option<&str>,
  ) -> Option<(String, String)> {
    None
      .map(|current_file| {
        IndexMap::from([(
          UsageComposerEntry {
            name: "".to_string(),
            icon: None,
          },
          usage_to_md(current_file.path.as_str(), None),
        )])
      })
      .unwrap_or_default()
  }
}

@@ -461,30 +387,10 @@ fn generate_docs_directory(
  let cwd = std::env::current_dir().context("Failed to get CWD")?;
  let output_dir_resolved = cwd.join(&html_options.output);

  let internal_env = std::env::var("DENO_INTERNAL_HTML_DOCS").ok();

  let href_resolver: Rc<dyn deno_doc::html::HrefResolver> = if internal_env
    .as_ref()
    .is_some_and(|internal_html_docs| internal_html_docs == "node")
  {
    Rc::new(NodeDocResolver(html_options.strip_trailing_html))
  } else if internal_env
    .as_ref()
    .is_some_and(|internal_html_docs| internal_html_docs == "deno")
    || deno_ns.is_empty()
  {
    Rc::new(DenoDocResolver(html_options.strip_trailing_html))
  } else {
    Rc::new(DocResolver {
      deno_ns,
      strip_trailing_html: html_options.strip_trailing_html,
    })
  };

  let category_docs =
    if let Some(category_docs_path) = &html_options.category_docs_path {
      let content = std::fs::read(category_docs_path)?;
      Some(deno_core::serde_json::from_slice(&content)?)
      Some(serde_json::from_slice(&content)?)
    } else {
      None
    };

@@ -493,7 +399,7 @@ fn generate_docs_directory(
    &html_options.symbol_redirect_map_path
  {
    let content = std::fs::read(symbol_redirect_map_path)?;
    Some(deno_core::serde_json::from_slice(&content)?)
    Some(serde_json::from_slice(&content)?)
  } else {
    None
  };

@@ -502,7 +408,7 @@ fn generate_docs_directory(
    &html_options.default_symbol_map_path
  {
    let content = std::fs::read(default_symbol_map_path)?;
    Some(deno_core::serde_json::from_slice(&content)?)
    Some(serde_json::from_slice(&content)?)
  } else {
    None
  };

@@ -511,17 +417,59 @@ fn generate_docs_directory(
    package_name: html_options.name.clone(),
    main_entrypoint: None,
    rewrite_map,
    href_resolver,
    usage_composer: None,
    href_resolver: Rc::new(DocResolver {
      deno_ns,
      strip_trailing_html: html_options.strip_trailing_html,
    }),
    usage_composer: Rc::new(DocComposer),
    category_docs,
    disable_search: internal_env.is_some(),
    disable_search: false,
    symbol_redirect_map,
    default_symbol_map,
    markdown_renderer: deno_doc::html::comrak::create_renderer(
      None,
      Some(Box::new(|ammonia| {
        ammonia.add_allowed_classes(
          "code",
          &[
            "language-ts",
            "language-tsx",
            "language-typescript",
            "language-js",
            "language-jsx",
            "language-javascript",
            "language-bash",
            "language-shell",
            "language-md",
            "language-markdown",
            "language-rs",
            "language-rust",
            "language-html",
            "language-xml",
            "language-css",
            "language-json",
            "language-regex",
            "language-svg",
          ],
        );
      })),
      None,
    ),
    markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
    head_inject: Some(Rc::new(|root| {
      format!(
        r#"<link href="{root}{}" rel="stylesheet" /><link href="{root}prism.css" rel="stylesheet" /><script src="{root}prism.js"></script>"#,
        deno_doc::html::comrak::COMRAK_STYLESHEET_FILENAME
      )
    })),
  };

  let files = deno_doc::html::generate(options, doc_nodes_by_url)
  let mut files = deno_doc::html::generate(options, doc_nodes_by_url)
    .context("Failed to generate HTML documentation")?;

  files.insert("prism.js".to_string(), PRISM_JS.to_string());
  files.insert("prism.css".to_string(), PRISM_CSS.to_string());

  let path = &output_dir_resolved;
  let _ = std::fs::remove_dir_all(path);
  std::fs::create_dir(path)

3 cli/tools/doc/prism.css (new file)

@@ -0,0 +1,3 @@
/* PrismJS 1.29.0
https://prismjs.com/download.html#themes=prism&languages=markup+css+clike+javascript+bash+json+markdown+regex+rust+typescript */
code[class*=language-],pre[class*=language-]{color:#000;background:0 0;text-shadow:0 1px #fff;font-family:Consolas,Monaco,'Andale Mono','Ubuntu Mono',monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-moz-hyphens:none;-ms-hyphens:none;hyphens:none}code[class*=language-] ::-moz-selection,code[class*=language-]::-moz-selection,pre[class*=language-] ::-moz-selection,pre[class*=language-]::-moz-selection{text-shadow:none;background:#b3d4fc}code[class*=language-] ::selection,code[class*=language-]::selection,pre[class*=language-] ::selection,pre[class*=language-]::selection{text-shadow:none;background:#b3d4fc}@media print{code[class*=language-],pre[class*=language-]{text-shadow:none}}pre[class*=language-]{overflow:auto}:not(pre)>code[class*=language-],pre[class*=language-]{background:#f5f2f0}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#708090}.token.punctuation{color:#999}.token.namespace{opacity:.7}.token.boolean,.token.constant,.token.deleted,.token.number,.token.property,.token.symbol,.token.tag{color:#905}.token.attr-name,.token.builtin,.token.char,.token.inserted,.token.selector,.token.string{color:#690}.language-css .token.string,.style .token.string,.token.entity,.token.operator,.token.url{color:#9a6e3a;background:hsla(0,0%,100%,.5)}.token.atrule,.token.attr-value,.token.keyword{color:#07a}.token.class-name,.token.function{color:#dd4a68}.token.important,.token.regex,.token.variable{color:#e90}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}

15 cli/tools/doc/prism.js (new file)
File diff suppressed because one or more lines are too long
@@ -83,6 +83,7 @@ pub async fn format(
    file_watcher::PrintConfig::new("Fmt", !watch_flags.no_clear_screen),
    move |flags, watcher_communicator, changed_paths| {
      let fmt_flags = fmt_flags.clone();
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(async move {
        let factory = CliFactory::from_flags(flags);
        let cli_options = factory.cli_options()?;

@@ -227,6 +228,7 @@ fn collect_fmt_files(
    })
    .ignore_git_folder()
    .ignore_node_modules()
    .use_gitignore()
    .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
    .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
}

@@ -270,6 +272,7 @@ fn format_markdown(
        | "njk"
        | "yml"
        | "yaml"
        | "sql"
      ) {
        // It's important to tell dprint proper file extension, otherwise
        // it might parse the file twice.

@@ -299,6 +302,13 @@ fn format_markdown(
          }
        }
        "yml" | "yaml" => format_yaml(text, fmt_options),
        "sql" => {
          if unstable_options.sql {
            format_sql(text, fmt_options)
          } else {
            Ok(None)
          }
        }
        _ => {
          let mut codeblock_config =
            get_resolved_typescript_config(fmt_options);

@@ -501,7 +511,52 @@ pub fn format_html(
  })
}

/// Formats a single TS, TSX, JS, JSX, JSONC, JSON, MD, or IPYNB file.
pub fn format_sql(
  file_text: &str,
  fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
  let ignore_file = file_text
    .lines()
    .take_while(|line| line.starts_with("--"))
    .any(|line| {
      line
        .strip_prefix("--")
        .unwrap()
        .trim()
        .starts_with("deno-fmt-ignore-file")
    });

  if ignore_file {
    return Ok(None);
  }

  let mut formatted_str = sqlformat::format(
    file_text,
    &sqlformat::QueryParams::None,
    &sqlformat::FormatOptions {
      ignore_case_convert: None,
      indent: if fmt_options.use_tabs.unwrap_or_default() {
        sqlformat::Indent::Tabs
      } else {
        sqlformat::Indent::Spaces(fmt_options.indent_width.unwrap_or(2))
      },
      // leave one blank line between queries.
      lines_between_queries: 2,
      uppercase: Some(true),
    },
  );

  // Add single new line to the end of file.
  formatted_str.push('\n');

  Ok(if formatted_str == file_text {
    None
  } else {
    Some(formatted_str)
  })
}
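format_sql honors a deno-fmt-ignore-file directive, but only when it appears in the leading block of -- comments; the same directive later in the file does not count. The scan in isolation, directly reusing the iterator logic from the function above:

/// Only `--` comment lines at the very top of the file are considered,
/// and any one of them may carry the ignore directive.
fn has_ignore_file_directive(file_text: &str) -> bool {
  file_text
    .lines()
    .take_while(|line| line.starts_with("--"))
    .any(|line| {
      line
        .strip_prefix("--")
        .unwrap()
        .trim()
        .starts_with("deno-fmt-ignore-file")
    })
}

fn main() {
  assert!(has_ignore_file_directive("-- deno-fmt-ignore-file\nSELECT 1;"));
  // the directive must sit in the leading comment block to count
  assert!(!has_ignore_file_directive("SELECT 1;\n-- deno-fmt-ignore-file"));
}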

/// Formats a single TS, TSX, JS, JSX, JSONC, JSON, MD, IPYNB or SQL file.
pub fn format_file(
  file_path: &Path,
  file_text: &str,

@@ -536,6 +591,13 @@ pub fn format_file(
        format_file(file_path, &file_text, fmt_options, unstable_options, None)
      },
    ),
    "sql" => {
      if unstable_options.sql {
        format_sql(file_text, fmt_options)
      } else {
        Ok(None)
      }
    }
    _ => {
      let config = get_resolved_typescript_config(fmt_options);
      dprint_plugin_typescript::format_text(

@@ -1207,6 +1269,7 @@ fn is_supported_ext_fmt(path: &Path) -> bool {
        | "yml"
        | "yaml"
        | "ipynb"
        | "sql"
    )
  })
}

@@ -1267,6 +1330,11 @@ mod test {
    assert!(is_supported_ext_fmt(Path::new("foo.yaml")));
    assert!(is_supported_ext_fmt(Path::new("foo.YaML")));
    assert!(is_supported_ext_fmt(Path::new("foo.ipynb")));
    assert!(is_supported_ext_fmt(Path::new("foo.sql")));
    assert!(is_supported_ext_fmt(Path::new("foo.Sql")));
    assert!(is_supported_ext_fmt(Path::new("foo.sQl")));
    assert!(is_supported_ext_fmt(Path::new("foo.sqL")));
    assert!(is_supported_ext_fmt(Path::new("foo.SQL")));
  }

  #[test]

@@ -126,6 +126,7 @@ fn print_cache_info(
  let registry_cache = dir.registries_folder_path();
  let mut origin_dir = dir.origin_data_folder_path();
  let deno_dir = dir.root_path_for_display().to_string();
  let web_cache_dir = crate::worker::get_cache_storage_dir();

  if let Some(location) = &location {
    origin_dir =

@@ -143,6 +144,7 @@ fn print_cache_info(
      "typescriptCache": typescript_cache,
      "registryCache": registry_cache,
      "originStorage": origin_dir,
      "webCacheStorage": web_cache_dir,
    });

    if location.is_some() {

@@ -177,6 +179,11 @@ fn print_cache_info(
      colors::bold("Origin storage:"),
      origin_dir.display()
    );
    println!(
      "{} {}",
      colors::bold("Web cache storage:"),
      web_cache_dir.display()
    );
    if location.is_some() {
      println!(
        "{} {}",

@@ -446,6 +453,7 @@ impl<'a> GraphDisplayContext<'a> {
      let maybe_cache_info = match root {
        Module::Js(module) => module.maybe_cache_info.as_ref(),
        Module::Json(module) => module.maybe_cache_info.as_ref(),
        Module::Wasm(module) => module.maybe_cache_info.as_ref(),
        Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
      };
      if let Some(cache_info) = maybe_cache_info {

@@ -468,6 +476,7 @@ impl<'a> GraphDisplayContext<'a> {
        let size = match m {
          Module::Js(module) => module.size(),
          Module::Json(module) => module.size(),
          Module::Wasm(module) => module.size(),
          Module::Node(_) | Module::Npm(_) | Module::External(_) => 0,
        };
        size as f64

@@ -567,6 +576,7 @@ impl<'a> GraphDisplayContext<'a> {
      Specifier(_) => match module {
        Module::Js(module) => Some(module.size() as u64),
        Module::Json(module) => Some(module.size() as u64),
        Module::Wasm(module) => Some(module.size() as u64),
        Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
      },
    };

@@ -580,8 +590,8 @@ impl<'a> GraphDisplayContext<'a> {
        Package(package) => {
          tree_node.children.extend(self.build_npm_deps(package));
        }
        Specifier(_) => {
          if let Some(module) = module.js() {
        Specifier(_) => match module {
          Module::Js(module) => {
            if let Some(types_dep) = &module.maybe_types_dependency {
              if let Some(child) =
                self.build_resolved_info(&types_dep.dependency, true)

@@ -593,8 +603,17 @@ impl<'a> GraphDisplayContext<'a> {
              tree_node.children.extend(self.build_dep_info(dep));
            }
          }
          Module::Wasm(module) => {
            for dep in module.dependencies.values() {
              tree_node.children.extend(self.build_dep_info(dep));
            }
          }
          Module::Json(_)
          | Module::Npm(_)
          | Module::Node(_)
          | Module::External(_) => {}
        },
      }
    }
    tree_node
  }

@@ -658,7 +677,7 @@ impl<'a> GraphDisplayContext<'a> {
        };
        self.build_error_msg(specifier, message.as_ref())
      }
      ModuleError::ParseErr(_, _) => {
      ModuleError::ParseErr(_, _) | ModuleError::WasmParseErr(_, _) => {
        self.build_error_msg(specifier, "(parsing error)")
      }
      ModuleError::UnsupportedImportAttributeType { .. } => {

@@ -1,15 +1,28 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::InitFlags;
use crate::args::PackagesAllowedScripts;
use crate::args::PermissionFlags;
use crate::args::RunFlags;
use crate::colors;
use color_print::cformat;
use color_print::cstr;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json::json;
use deno_runtime::WorkerExecutionMode;
use log::info;
use std::io::IsTerminal;
use std::io::Write;
use std::path::Path;

pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
pub async fn init_project(init_flags: InitFlags) -> Result<i32, AnyError> {
  if let Some(package) = &init_flags.package {
    return init_npm(package, init_flags.package_args).await;
  }

  let cwd =
    std::env::current_dir().context("Can't read current working directory.")?;
  let dir = if let Some(dir) = &init_flags.dir {

@@ -235,7 +248,58 @@ Deno.test(function addTest() {
    info!("  {}", colors::gray("# Run the tests"));
    info!("  deno test");
  }
  Ok(())
  Ok(0)
}

async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
  let script_name = format!("npm:create-{}", name);

  fn print_manual_usage(script_name: &str, args: &[String]) -> i32 {
    log::info!("{}", cformat!("You can initialize project manually by running <u>deno run {} {}</> and applying desired permissions.", script_name, args.join(" ")));
    1
  }

  if std::io::stdin().is_terminal() {
    log::info!(
      cstr!("⚠️ Do you fully trust <y>{}</> package? Deno will invoke code from it with all permissions. Do you want to continue? <p(245)>[y/n]</>"),
      script_name
    );
    loop {
      let _ = std::io::stdout().write(b"> ")?;
      std::io::stdout().flush()?;
      let mut answer = String::new();
      if std::io::stdin().read_line(&mut answer).is_ok() {
        let answer = answer.trim().to_ascii_lowercase();
        if answer != "y" {
          return Ok(print_manual_usage(&script_name, &args));
        } else {
          break;
        }
      }
    }
  } else {
    return Ok(print_manual_usage(&script_name, &args));
  }

  let new_flags = Flags {
    permissions: PermissionFlags {
      allow_all: true,
      ..Default::default()
    },
    allow_scripts: PackagesAllowedScripts::All,
    argv: args,
    subcommand: DenoSubcommand::Run(RunFlags {
      script: script_name,
      ..Default::default()
    }),
    ..Default::default()
  };
  crate::tools::run::run_script(
    WorkerExecutionMode::Run,
    new_flags.into(),
    None,
  )
  .await
}
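init_npm only proceeds after an explicit y on an interactive stdin, and a non-terminal stdin counts as a refusal. A testable sketch of that prompt, written against abstract reader and writer instead of the real terminal (confirm is a hypothetical helper, not the CLI's API):

use std::io::{BufRead, Write};

/// Prompt once and treat anything other than "y"/"Y" (including a closed
/// stdin) as a refusal, mirroring the shape of the loop above.
fn confirm(mut input: impl BufRead, mut output: impl Write) -> std::io::Result<bool> {
  output.write_all(b"> ")?;
  output.flush()?;
  let mut answer = String::new();
  if input.read_line(&mut answer)? == 0 {
    return Ok(false); // closed stdin counts as a refusal
  }
  Ok(answer.trim().eq_ignore_ascii_case("y"))
}

fn main() -> std::io::Result<()> {
  assert!(confirm("y\n".as_bytes(), Vec::new())?);
  assert!(!confirm("n\n".as_bytes(), Vec::new())?);
  assert!(!confirm("".as_bytes(), Vec::new())?);
  Ok(())
}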

fn create_json_file(

@@ -3,6 +3,7 @@
use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;

@@ -13,8 +14,11 @@ use crate::args::TypeCheckMode;
use crate::args::UninstallFlags;
use crate::args::UninstallKind;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::graph_container::ModuleGraphContainer;
use crate::http_util::HttpClientProvider;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists;

use deno_core::anyhow::bail;

@@ -354,12 +358,54 @@ async fn install_global(
) -> Result<(), AnyError> {
  // ensure the module is cached
  let factory = CliFactory::from_flags(flags.clone());

  let cli_options = factory.cli_options()?;
  let http_client = factory.http_client_provider();
  let deps_http_cache = factory.global_http_cache()?;
  let mut deps_file_fetcher = FileFetcher::new(
    deps_http_cache.clone(),
    CacheSetting::ReloadAll,
    true,
    http_client.clone(),
    Default::default(),
    None,
  );

  let npmrc = factory.cli_options().unwrap().npmrc();

  deps_file_fetcher.set_download_log_level(log::Level::Trace);
  let deps_file_fetcher = Arc::new(deps_file_fetcher);
  let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
  let npm_resolver = Arc::new(NpmFetchResolver::new(
    deps_file_fetcher.clone(),
    npmrc.clone(),
  ));

  let entry_text = install_flags_global.module_url.as_str();
  if !cli_options.initial_cwd().join(entry_text).exists() {
    // check for package requirement missing prefix
    if let Ok(Err(package_req)) =
      super::registry::AddRmPackageReq::parse(entry_text)
    {
      if jsr_resolver.req_to_nv(&package_req).await.is_some() {
        bail!(
          "{entry_text} is missing a prefix. Did you mean `{}`?",
          crate::colors::yellow(format!("deno install -g jsr:{package_req}"))
        );
      } else if npm_resolver.req_to_nv(&package_req).await.is_some() {
        bail!(
          "{entry_text} is missing a prefix. Did you mean `{}`?",
          crate::colors::yellow(format!("deno install -g npm:{package_req}"))
        );
      }
    }
  }
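The block above probes the JSR registry first and npm second before suggesting a prefixed install command, and stays silent when neither registry knows the package. The decision logic on its own, with the registry probes reduced to booleans (the package names here are invented for the example):

/// Pick the suggested prefixed command, JSR taking precedence over npm.
fn suggest_prefix(exists_on_jsr: bool, exists_on_npm: bool, req: &str) -> Option<String> {
  if exists_on_jsr {
    Some(format!("deno install -g jsr:{req}"))
  } else if exists_on_npm {
    Some(format!("deno install -g npm:{req}"))
  } else {
    None // not a known package; maybe it really was a local path
  }
}

fn main() {
  assert_eq!(
    suggest_prefix(true, true, "@std/fmt").as_deref(),
    Some("deno install -g jsr:@std/fmt")
  );
  assert_eq!(
    suggest_prefix(false, true, "cowsay").as_deref(),
    Some("deno install -g npm:cowsay")
  );
  assert_eq!(suggest_prefix(false, false, "./cli.ts"), None);
}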

  factory
    .main_module_graph_container()
    .await?
    .load_and_type_check_files(&[install_flags_global.module_url.clone()])
    .await?;
  let http_client = factory.http_client_provider();

  // create the install shim
  create_install_shim(http_client, &flags, install_flags_global).await

@@ -80,6 +80,7 @@ pub async fn lint(
    file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
    move |flags, watcher_communicator, changed_paths| {
      let lint_flags = lint_flags.clone();
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(async move {
        let factory = CliFactory::from_flags(flags);
        let cli_options = factory.cli_options()?;

@@ -191,7 +192,7 @@ pub async fn lint(
      linter.finish()
    };
    if !success {
      std::process::exit(1);
      deno_runtime::exit(1);
    }
  }

@@ -435,6 +436,7 @@ fn collect_lint_files(
    })
    .ignore_git_folder()
    .ignore_node_modules()
    .use_gitignore()
    .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
    .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
}

@@ -175,6 +175,7 @@ struct JsonLintReporter {
  version: u8,
  diagnostics: Vec<JsonLintDiagnostic>,
  errors: Vec<LintError>,
  checked_files: Vec<String>,
}

impl JsonLintReporter {

@@ -183,6 +184,7 @@ impl JsonLintReporter {
      version: JSON_SCHEMA_VERSION,
      diagnostics: Vec::new(),
      errors: Vec::new(),
      checked_files: Vec::new(),
    }
  }
}

@@ -209,6 +211,17 @@ impl LintReporter for JsonLintReporter {
      code: d.code().to_string(),
      hint: d.hint().map(|h| h.to_string()),
    });

    let file_path = d
      .specifier
      .to_file_path()
      .unwrap()
      .to_string_lossy()
      .to_string();

    if !self.checked_files.contains(&file_path) {
      self.checked_files.push(file_path);
    }
  }

  fn visit_error(&mut self, file_path: &str, err: &AnyError) {

@@ -216,10 +229,15 @@ impl LintReporter for JsonLintReporter {
      file_path: file_path.to_string(),
      message: err.to_string(),
    });

    if !self.checked_files.contains(&file_path.to_string()) {
      self.checked_files.push(file_path.to_string());
    }
  }

  fn close(&mut self, _check_count: usize) {
    sort_diagnostics(&mut self.diagnostics);
    self.checked_files.sort();
    let json = serde_json::to_string_pretty(&self);
    #[allow(clippy::print_stdout)]
    {

@@ -12,6 +12,7 @@ use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_ast::ModuleSpecifier;
use deno_config::deno_json::ConfigFile;
use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::Workspace;

@@ -67,8 +68,10 @@ use auth::get_auth_method;
use auth::AuthMethod;
pub use pm::add;
pub use pm::cache_top_level_deps;
pub use pm::outdated;
pub use pm::remove;
pub use pm::AddCommandName;
pub use pm::AddRmPackageReq;
use publish_order::PublishOrderGraph;
use unfurl::SpecifierUnfurler;

@@ -89,13 +92,14 @@ pub async fn publish(

  let cli_options = cli_factory.cli_options()?;
  let directory_path = cli_options.initial_cwd();
  let publish_configs = cli_options.start_dir.jsr_packages_for_publish();
  let mut publish_configs = cli_options.start_dir.jsr_packages_for_publish();
  if publish_configs.is_empty() {
    match cli_options.start_dir.maybe_deno_json() {
      Some(deno_json) => {
        debug_assert!(!deno_json.is_package());
        error_missing_exports_field(deno_json)?;
        bail!(
          "Missing 'name', 'version' and 'exports' field in '{}'.",
          "Missing 'name' or 'exports' field in '{}'.",
          deno_json.specifier
        );
      }

@@ -107,6 +111,18 @@ pub async fn publish(
      }
    }
  }

  if let Some(version) = &publish_flags.set_version {
    if publish_configs.len() > 1 {
      bail!("Cannot use --set-version when publishing a workspace. Change your cwd to an individual package instead.");
    }
    if let Some(publish_config) = publish_configs.get_mut(0) {
      let mut config_file = publish_config.config_file.as_ref().clone();
      config_file.json.version = Some(version.clone());
      publish_config.config_file = Arc::new(config_file);
    }
  }
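The --set-version override never mutates the shared config in place: the Arc'd file is cloned, edited, and swapped back in, so other holders of the original Arc are unaffected. The same copy-on-write move in miniature (ConfigFile here is a stand-in struct, not the deno_config type):

use std::sync::Arc;

#[derive(Clone)]
struct ConfigFile {
  version: Option<String>,
}

fn main() {
  let original = Arc::new(ConfigFile { version: Some("1.0.0".into()) });
  let mut publish_config = original.clone();

  // clone out of the Arc, edit the clone, swap the edited copy back in
  let mut edited = publish_config.as_ref().clone();
  edited.version = Some("2.0.0".into());
  publish_config = Arc::new(edited);

  assert_eq!(original.version.as_deref(), Some("1.0.0")); // untouched
  assert_eq!(publish_config.version.as_deref(), Some("2.0.0"));
}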

  let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
    if cli_options.unstable_sloppy_imports() {
      Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(

@@ -403,43 +419,15 @@ impl PublishPreparer {
    graph: Arc<deno_graph::ModuleGraph>,
    diagnostics_collector: &PublishDiagnosticsCollector,
  ) -> Result<Rc<PreparedPublishPackage>, AnyError> {
    static SUGGESTED_ENTRYPOINTS: [&str; 4] =
      ["mod.ts", "mod.js", "index.ts", "index.js"];

    let deno_json = &package.config_file;
    let config_path = deno_json.specifier.to_file_path().unwrap();
    let root_dir = config_path.parent().unwrap().to_path_buf();
    let Some(version) = deno_json.json.version.clone() else {
      bail!("{} is missing 'version' field", deno_json.specifier);
    };
    if deno_json.json.exports.is_none() {
      let mut suggested_entrypoint = None;

      for entrypoint in SUGGESTED_ENTRYPOINTS {
        if root_dir.join(entrypoint).exists() {
          suggested_entrypoint = Some(entrypoint);
          break;
        }
      }

      let exports_content = format!(
        r#"{{
  "name": "{}",
  "version": "{}",
  "exports": "{}"
}}"#,
        package.name,
        version,
        suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
      );

      bail!(
        "You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
        package.name,
        deno_json.specifier,
        exports_content
      );
    }
    let version = deno_json.json.version.clone().ok_or_else(|| {
      deno_core::anyhow::anyhow!(
        "{} is missing 'version' field",
        deno_json.specifier
      )
    })?;
    let Some(name_no_at) = package.name.strip_prefix('@') else {
      bail!("Invalid package name, use '@<scope_name>/<package_name> format");
    };

@@ -1106,9 +1094,9 @@ fn collect_excluded_module_diagnostics(
  let graph_specifiers = graph
    .modules()
    .filter_map(|m| match m {
      deno_graph::Module::Js(_) | deno_graph::Module::Json(_) => {
        Some(m.specifier())
      }
      deno_graph::Module::Js(_)
      | deno_graph::Module::Json(_)
      | deno_graph::Module::Wasm(_) => Some(m.specifier()),
      deno_graph::Module::Npm(_)
      | deno_graph::Module::Node(_)
      | deno_graph::Module::External(_) => None,

@@ -1271,6 +1259,36 @@ fn has_license_file<'a>(
  })
}

fn error_missing_exports_field(deno_json: &ConfigFile) -> Result<(), AnyError> {
  static SUGGESTED_ENTRYPOINTS: [&str; 4] =
    ["mod.ts", "mod.js", "index.ts", "index.js"];
  let mut suggested_entrypoint = None;

  for entrypoint in SUGGESTED_ENTRYPOINTS {
    if deno_json.dir_path().join(entrypoint).exists() {
      suggested_entrypoint = Some(entrypoint);
      break;
    }
  }

  let exports_content = format!(
    r#"{{
  "name": "{}",
  "version": "{}",
  "exports": "{}"
}}"#,
    deno_json.json.name.as_deref().unwrap_or("@scope/name"),
    deno_json.json.version.as_deref().unwrap_or("0.0.0"),
    suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
  );

  bail!(
    "You did not specify an entrypoint in {}. Add `exports` mapping in the configuration file, eg:\n{}",
    deno_json.specifier,
    exports_content
  );
}

#[allow(clippy::print_stderr)]
fn ring_bell() {
  // ASCII code for the bell character.

@@ -16,6 +16,7 @@ use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use deno_semver::VersionReq;
use deps::KeyPath;
use jsonc_parser::cst::CstObject;
use jsonc_parser::cst::CstObjectProp;
use jsonc_parser::cst::CstRootNode;

@@ -32,10 +33,13 @@ use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;

mod cache_deps;
pub(crate) mod deps;
mod outdated;

pub use cache_deps::cache_top_level_deps;
pub use outdated::outdated;

#[derive(Debug, Copy, Clone)]
#[derive(Debug, Copy, Clone, Hash)]
enum ConfigKind {
  DenoJson,
  PackageJson,

@@ -86,6 +90,28 @@ impl ConfigUpdater {
    self.cst.to_string()
  }

  fn get_property_for_mutation(
    &mut self,
    key_path: &KeyPath,
  ) -> Option<CstObjectProp> {
    let mut current_node = self.root_object.clone();

    self.modified = true;

    for (i, part) in key_path.parts.iter().enumerate() {
      let s = part.as_str();
      if i < key_path.parts.len().saturating_sub(1) {
        let object = current_node.object_value(s)?;
        current_node = object;
      } else {
        // last part
        return current_node.get(s);
      }
    }

    None
  }
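get_property_for_mutation walks every key-path part but the last through nested objects and returns the final property. The same traversal over a toy JSON-ish tree standing in for jsonc-parser's CST:

use std::collections::BTreeMap;

/// Toy nested-object tree to walk.
enum Node {
  Object(BTreeMap<String, Node>),
  Value(String),
}

/// Descend through all but the last key-path part, then return the final
/// property, mirroring the shape of the method above.
fn get_property<'a>(root: &'a Node, key_path: &[&str]) -> Option<&'a Node> {
  let mut current = root;
  for (i, part) in key_path.iter().enumerate() {
    let Node::Object(map) = current else { return None };
    if i < key_path.len().saturating_sub(1) {
      current = map.get(*part)?;
    } else {
      return map.get(*part);
    }
  }
  None // empty key path
}

fn main() {
  let root = Node::Object(BTreeMap::from([(
    "imports".to_string(),
    Node::Object(BTreeMap::from([(
      "@std/path".to_string(),
      Node::Value("jsr:@std/path@^1.0.0".to_string()),
    )])),
  )]));
  let found = get_property(&root, &["imports", "@std/path"]);
  assert!(matches!(found, Some(Node::Value(v)) if v == "jsr:@std/path@^1.0.0"));
  assert!(get_property(&root, &["imports", "missing"]).is_none());
}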

  fn add(&mut self, selected: SelectedPackage, dev: bool) {
    fn insert_index(object: &CstObject, searching_name: &str) -> usize {
      object

@@ -679,7 +705,7 @@ enum AddRmPackageReqValue {
}

#[derive(Debug, PartialEq, Eq)]
struct AddRmPackageReq {
pub struct AddRmPackageReq {
  alias: String,
  value: AddRmPackageReqValue,
}

@@ -824,7 +850,7 @@ async fn npm_install_after_modification(
  flags: Arc<Flags>,
  // explicitly provided to prevent redownloading
  jsr_resolver: Option<Arc<crate::jsr::JsrFetchResolver>>,
) -> Result<(), AnyError> {
) -> Result<CliFactory, AnyError> {
  // clear the previously cached package.json from memory before reloading it
  node_resolver::PackageJsonThreadLocalCache::clear();

@@ -842,7 +868,7 @@ async fn npm_install_after_modification(
    lockfile.write_if_changed()?;
  }

  Ok(())
  Ok(cli_factory)
}

#[cfg(test)]

@@ -8,7 +8,7 @@ use crate::graph_container::ModuleGraphUpdatePermit;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_semver::package::PackageReq;
use deno_semver::jsr::JsrPackageReqReference;

pub async fn cache_top_level_deps(
  // todo(dsherret): don't pass the factory into this function. Instead use ctor deps

@@ -56,15 +56,20 @@ pub async fn cache_top_level_deps(
      match specifier.scheme() {
        "jsr" => {
          let specifier_str = specifier.as_str();
          let specifier_str =
            specifier_str.strip_prefix("jsr:").unwrap_or(specifier_str);
          if let Ok(req) = PackageReq::from_str(specifier_str) {
            if !seen_reqs.insert(req.clone()) {
          if let Ok(req) = JsrPackageReqReference::from_str(specifier_str) {
            if let Some(sub_path) = req.sub_path() {
              if sub_path.ends_with('/') {
                continue;
              }
              roots.push(specifier.clone());
              continue;
            }
            if !seen_reqs.insert(req.req().clone()) {
              continue;
            }
            let jsr_resolver = jsr_resolver.clone();
            info_futures.push(async move {
              if let Some(nv) = jsr_resolver.req_to_nv(&req).await {
              if let Some(nv) = jsr_resolver.req_to_nv(req.req()).await {
                if let Some(info) = jsr_resolver.package_version_info(&nv).await
                {
                  return Some((specifier.clone(), info));

964 cli/tools/registry/pm/deps.rs (new file)

@@ -0,0 +1,964 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;

use deno_ast::ModuleSpecifier;
use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::ConfigFileRc;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceDirectory;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::future::try_join;
use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_graph::FillFromLockfileOptions;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonRc;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference;
use deno_semver::VersionReq;
use import_map::ImportMap;
use import_map::ImportMapWithDiagnostics;
use import_map::SpecifierMapEntry;
use indexmap::IndexMap;
use tokio::sync::Semaphore;

use crate::args::CliLockfile;
use crate::graph_container::MainModuleGraphContainer;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::jsr::JsrFetchResolver;
use crate::module_loader::ModuleLoadPreparer;
use crate::npm::CliNpmResolver;
use crate::npm::NpmFetchResolver;

use super::ConfigUpdater;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ImportMapKind {
  Inline,
  Outline,
}

#[derive(Clone)]
pub enum DepLocation {
  DenoJson(ConfigFileRc, KeyPath, ImportMapKind),
  PackageJson(PackageJsonRc, KeyPath),
}

impl DepLocation {
  pub fn is_deno_json(&self) -> bool {
    matches!(self, DepLocation::DenoJson(..))
  }

  pub fn file_path(&self) -> Cow<std::path::Path> {
    match self {
      DepLocation::DenoJson(arc, _, _) => {
        Cow::Owned(arc.specifier.to_file_path().unwrap())
      }
      DepLocation::PackageJson(arc, _) => Cow::Borrowed(arc.path.as_ref()),
    }
  }
  fn config_kind(&self) -> super::ConfigKind {
    match self {
      DepLocation::DenoJson(_, _, _) => super::ConfigKind::DenoJson,
      DepLocation::PackageJson(_, _) => super::ConfigKind::PackageJson,
    }
  }
}

struct DebugAdapter<T>(T);

impl<'a> std::fmt::Debug for DebugAdapter<&'a ConfigFileRc> {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("ConfigFile")
      .field("specifier", &self.0.specifier)
      .finish()
  }
}
impl<'a> std::fmt::Debug for DebugAdapter<&'a PackageJsonRc> {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("PackageJson")
      .field("path", &self.0.path)
      .finish()
  }
}

impl std::fmt::Debug for DepLocation {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      DepLocation::DenoJson(arc, key_path, kind) => {
        let mut debug = f.debug_tuple("DenoJson");
        debug
          .field(&DebugAdapter(arc))
          .field(key_path)
          .field(kind)
          .finish()
      }
      DepLocation::PackageJson(arc, key_path) => {
        let mut debug = f.debug_tuple("PackageJson");
        debug.field(&DebugAdapter(arc)).field(key_path).finish()
      }
    }
  }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum DepKind {
  Jsr,
  Npm,
}

impl DepKind {
  pub fn scheme(&self) -> &'static str {
    match self {
      DepKind::Npm => "npm",
      DepKind::Jsr => "jsr",
    }
  }
}

#[derive(Clone, Debug)]
pub enum KeyPart {
  Imports,
  Scopes,
  Dependencies,
  DevDependencies,
  String(String),
}

impl From<String> for KeyPart {
  fn from(value: String) -> Self {
    KeyPart::String(value)
  }
}

impl From<PackageJsonDepKind> for KeyPart {
  fn from(value: PackageJsonDepKind) -> Self {
    match value {
      PackageJsonDepKind::Normal => Self::Dependencies,
      PackageJsonDepKind::Dev => Self::DevDependencies,
    }
  }
}

impl KeyPart {
  pub fn as_str(&self) -> &str {
    match self {
      KeyPart::Imports => "imports",
      KeyPart::Scopes => "scopes",
      KeyPart::Dependencies => "dependencies",
      KeyPart::DevDependencies => "devDependencies",
      KeyPart::String(s) => s,
    }
  }
}

#[derive(Clone, Debug)]
pub struct KeyPath {
  pub parts: Vec<KeyPart>,
}

impl KeyPath {
  fn from_parts(parts: impl IntoIterator<Item = KeyPart>) -> Self {
    Self {
      parts: parts.into_iter().collect(),
    }
  }
  fn last(&self) -> Option<&KeyPart> {
    self.parts.last()
  }
  fn push(&mut self, part: KeyPart) {
    self.parts.push(part)
  }
}
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Dep {
|
||||
pub req: PackageReq,
|
||||
pub kind: DepKind,
|
||||
pub location: DepLocation,
|
||||
#[allow(dead_code)]
|
||||
pub id: DepId,
|
||||
#[allow(dead_code)]
|
||||
pub alias: Option<String>,
|
||||
}
|
||||
|
||||
fn import_map_entries(
|
||||
import_map: &ImportMap,
|
||||
) -> impl Iterator<Item = (KeyPath, SpecifierMapEntry<'_>)> {
|
||||
import_map
|
||||
.imports()
|
||||
.entries()
|
||||
.map(|entry| {
|
||||
(
|
||||
KeyPath::from_parts([
|
||||
KeyPart::Imports,
|
||||
KeyPart::String(entry.raw_key.into()),
|
||||
]),
|
||||
entry,
|
||||
)
|
||||
})
|
||||
.chain(import_map.scopes().flat_map(|scope| {
|
||||
let path = KeyPath::from_parts([
|
||||
KeyPart::Scopes,
|
||||
scope.raw_key.to_string().into(),
|
||||
]);
|
||||
|
||||
scope.imports.entries().map(move |entry| {
|
||||
let mut full_path = path.clone();
|
||||
full_path.push(KeyPart::String(entry.raw_key.to_string()));
|
||||
(full_path, entry)
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
fn to_import_map_value_from_imports(
|
||||
deno_json: &ConfigFile,
|
||||
) -> serde_json::Value {
|
||||
let mut value = serde_json::Map::with_capacity(2);
|
||||
if let Some(imports) = &deno_json.json.imports {
|
||||
value.insert("imports".to_string(), imports.clone());
|
||||
}
|
||||
if let Some(scopes) = &deno_json.json.scopes {
|
||||
value.insert("scopes".to_string(), scopes.clone());
|
||||
}
|
||||
serde_json::Value::Object(value)
|
||||
}
|
||||
|
||||
fn deno_json_import_map(
|
||||
deno_json: &ConfigFile,
|
||||
) -> Result<Option<(ImportMapWithDiagnostics, ImportMapKind)>, AnyError> {
|
||||
let (value, kind) =
|
||||
if deno_json.json.imports.is_some() || deno_json.json.scopes.is_some() {
|
||||
(
|
||||
to_import_map_value_from_imports(deno_json),
|
||||
ImportMapKind::Inline,
|
||||
)
|
||||
} else {
|
||||
match deno_json.to_import_map_path()? {
|
||||
Some(path) => {
|
||||
let text = std::fs::read_to_string(&path)?;
|
||||
let value = serde_json::from_str(&text)?;
|
||||
(value, ImportMapKind::Outline)
|
||||
}
|
||||
None => return Ok(None),
|
||||
}
|
||||
};
|
||||
|
||||
import_map::parse_from_value(deno_json.specifier.clone(), value)
|
||||
.map_err(Into::into)
|
||||
.map(|import_map| Some((import_map, kind)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum PackageJsonDepKind {
|
||||
Normal,
|
||||
Dev,
|
||||
}
|
||||
|
||||
type PackageJsonDeps = IndexMap<
|
||||
String,
|
||||
Result<
|
||||
(PackageJsonDepKind, PackageJsonDepValue),
|
||||
PackageJsonDepValueParseError,
|
||||
>,
|
||||
>;
|
||||
|
||||
/// Resolve the package.json's dependencies.
|
||||
// TODO(nathanwhit): Remove once we update deno_package_json with dev deps split out
|
||||
fn resolve_local_package_json_deps(
|
||||
package_json: &PackageJsonRc,
|
||||
) -> PackageJsonDeps {
|
||||
/// Gets the name and raw version constraint for a registry info or
|
||||
/// package.json dependency entry taking into account npm package aliases.
|
||||
fn parse_dep_entry_name_and_raw_version<'a>(
|
||||
key: &'a str,
|
||||
value: &'a str,
|
||||
) -> (&'a str, &'a str) {
|
||||
if let Some(package_and_version) = value.strip_prefix("npm:") {
|
||||
if let Some((name, version)) = package_and_version.rsplit_once('@') {
|
||||
// if empty, then the name was scoped and there's no version
|
||||
if name.is_empty() {
|
||||
(package_and_version, "*")
|
||||
} else {
|
||||
(name, version)
|
||||
}
|
||||
} else {
|
||||
(package_and_version, "*")
|
||||
}
|
||||
} else {
|
||||
(key, value)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_entry(
|
||||
key: &str,
|
||||
value: &str,
|
||||
) -> Result<PackageJsonDepValue, PackageJsonDepValueParseError> {
|
||||
if let Some(workspace_key) = value.strip_prefix("workspace:") {
|
||||
let version_req = VersionReq::parse_from_npm(workspace_key)?;
|
||||
return Ok(PackageJsonDepValue::Workspace(version_req));
|
||||
}
|
||||
if value.starts_with("file:")
|
||||
|| value.starts_with("git:")
|
||||
|| value.starts_with("http:")
|
||||
|| value.starts_with("https:")
|
||||
{
|
||||
return Err(PackageJsonDepValueParseError::Unsupported {
|
||||
scheme: value.split(':').next().unwrap().to_string(),
|
||||
});
|
||||
}
|
||||
let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value);
|
||||
let result = VersionReq::parse_from_npm(version_req);
|
||||
match result {
|
||||
Ok(version_req) => Ok(PackageJsonDepValue::Req(PackageReq {
|
||||
name: name.to_string(),
|
||||
version_req,
|
||||
})),
|
||||
Err(err) => Err(PackageJsonDepValueParseError::VersionReq(err)),
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_deps(
|
||||
deps: Option<&IndexMap<String, String>>,
|
||||
result: &mut PackageJsonDeps,
|
||||
kind: PackageJsonDepKind,
|
||||
) {
|
||||
if let Some(deps) = deps {
|
||||
for (key, value) in deps {
|
||||
result.entry(key.to_string()).or_insert_with(|| {
|
||||
parse_entry(key, value).map(|entry| (kind, entry))
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let deps = package_json.dependencies.as_ref();
|
||||
let dev_deps = package_json.dev_dependencies.as_ref();
|
||||
let mut result = IndexMap::new();
|
||||
|
||||
// favors the deps over dev_deps
|
||||
insert_deps(deps, &mut result, PackageJsonDepKind::Normal);
|
||||
insert_deps(dev_deps, &mut result, PackageJsonDepKind::Dev);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn add_deps_from_deno_json(
|
||||
deno_json: &Arc<ConfigFile>,
|
||||
mut filter: impl DepFilter,
|
||||
deps: &mut Vec<Dep>,
|
||||
) {
|
||||
let (import_map, import_map_kind) = match deno_json_import_map(deno_json) {
|
||||
Ok(Some((import_map, import_map_kind))) => (import_map, import_map_kind),
|
||||
Ok(None) => return,
|
||||
Err(e) => {
|
||||
log::warn!("failed to parse imports from {}: {e}", &deno_json.specifier);
|
||||
return;
|
||||
}
|
||||
};
|
||||
for (key_path, entry) in import_map_entries(&import_map.import_map) {
|
||||
let Some(value) = entry.value else { continue };
|
||||
let kind = match value.scheme() {
|
||||
"npm" => DepKind::Npm,
|
||||
"jsr" => DepKind::Jsr,
|
||||
_ => continue,
|
||||
};
|
||||
let req = match parse_req_reference(value.as_str(), kind) {
|
||||
Ok(req) => req.req.clone(),
|
||||
Err(err) => {
|
||||
log::warn!("failed to parse package req \"{}\": {err}", value.as_str());
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let alias: &str = key_path.last().unwrap().as_str().trim_end_matches('/');
|
||||
let alias = (alias != req.name).then(|| alias.to_string());
|
||||
if !filter.should_include(alias.as_deref(), &req, kind) {
|
||||
continue;
|
||||
}
|
||||
let id = DepId(deps.len());
|
||||
deps.push(Dep {
|
||||
location: DepLocation::DenoJson(
|
||||
deno_json.clone(),
|
||||
key_path,
|
||||
import_map_kind,
|
||||
),
|
||||
kind,
|
||||
req,
|
||||
id,
|
||||
alias,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn add_deps_from_package_json(
|
||||
package_json: &PackageJsonRc,
|
||||
mut filter: impl DepFilter,
|
||||
deps: &mut Vec<Dep>,
|
||||
) {
|
||||
let package_json_deps = resolve_local_package_json_deps(package_json);
|
||||
for (k, v) in package_json_deps {
|
||||
let (package_dep_kind, v) = match v {
|
||||
Ok((k, v)) => (k, v),
|
||||
Err(e) => {
|
||||
log::warn!("bad package json dep value: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
match v {
|
||||
deno_package_json::PackageJsonDepValue::Req(req) => {
|
||||
let alias = k.as_str();
|
||||
let alias = (alias != req.name).then(|| alias.to_string());
|
||||
if !filter.should_include(alias.as_deref(), &req, DepKind::Npm) {
|
||||
continue;
|
||||
}
|
||||
let id = DepId(deps.len());
|
||||
deps.push(Dep {
|
||||
id,
|
||||
kind: DepKind::Npm,
|
||||
location: DepLocation::PackageJson(
|
||||
package_json.clone(),
|
||||
KeyPath::from_parts([package_dep_kind.into(), k.into()]),
|
||||
),
|
||||
req,
|
||||
alias,
|
||||
})
|
||||
}
|
||||
deno_package_json::PackageJsonDepValue::Workspace(_) => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn deps_from_workspace(
|
||||
workspace: &Arc<Workspace>,
|
||||
dep_filter: impl DepFilter,
|
||||
) -> Result<Vec<Dep>, AnyError> {
|
||||
let mut deps = Vec::with_capacity(256);
|
||||
for deno_json in workspace.deno_jsons() {
|
||||
add_deps_from_deno_json(deno_json, dep_filter, &mut deps);
|
||||
}
|
||||
for package_json in workspace.package_jsons() {
|
||||
add_deps_from_package_json(package_json, dep_filter, &mut deps);
|
||||
}
|
||||
|
||||
Ok(deps)
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct DepId(usize);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Change {
|
||||
Update(DepId, VersionReq),
|
||||
}
|
||||
|
||||
pub trait DepFilter: Copy {
|
||||
fn should_include(
|
||||
&mut self,
|
||||
alias: Option<&str>,
|
||||
package_req: &PackageReq,
|
||||
dep_kind: DepKind,
|
||||
) -> bool;
|
||||
}
|
||||
|
||||
impl<T> DepFilter for T
|
||||
where
|
||||
T: FnMut(Option<&str>, &PackageReq, DepKind) -> bool + Copy,
|
||||
{
|
||||
fn should_include<'a>(
|
||||
&mut self,
|
||||
alias: Option<&'a str>,
|
||||
package_req: &'a PackageReq,
|
||||
dep_kind: DepKind,
|
||||
) -> bool {
|
||||
(*self)(alias, package_req, dep_kind)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PackageLatestVersion {
|
||||
pub semver_compatible: Option<PackageNv>,
|
||||
pub latest: Option<PackageNv>,
|
||||
}
|
||||
|
||||
pub struct DepManager {
|
||||
deps: Vec<Dep>,
|
||||
resolved_versions: Vec<Option<PackageNv>>,
|
||||
latest_versions: Vec<PackageLatestVersion>,
|
||||
|
||||
pending_changes: Vec<Change>,
|
||||
|
||||
dependencies_resolved: AtomicBool,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
// TODO(nathanwhit): probably shouldn't be pub
|
||||
pub(crate) jsr_fetch_resolver: Arc<JsrFetchResolver>,
|
||||
pub(crate) npm_fetch_resolver: Arc<NpmFetchResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
permissions_container: PermissionsContainer,
|
||||
main_module_graph_container: Arc<MainModuleGraphContainer>,
|
||||
lockfile: Option<Arc<CliLockfile>>,
|
||||
}
|
||||
|
||||
pub struct DepManagerArgs {
|
||||
pub module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
pub jsr_fetch_resolver: Arc<JsrFetchResolver>,
|
||||
pub npm_fetch_resolver: Arc<NpmFetchResolver>,
|
||||
pub npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
pub permissions_container: PermissionsContainer,
|
||||
pub main_module_graph_container: Arc<MainModuleGraphContainer>,
|
||||
pub lockfile: Option<Arc<CliLockfile>>,
|
||||
}
|
||||
|
||||
impl DepManager {
|
||||
pub fn reloaded_after_modification(self, args: DepManagerArgs) -> Self {
|
||||
let mut new = Self::with_deps_args(self.deps, args);
|
||||
new.latest_versions = self.latest_versions;
|
||||
new
|
||||
}
|
||||
fn with_deps_args(deps: Vec<Dep>, args: DepManagerArgs) -> Self {
|
||||
let DepManagerArgs {
|
||||
module_load_preparer,
|
||||
jsr_fetch_resolver,
|
||||
npm_fetch_resolver,
|
||||
npm_resolver,
|
||||
permissions_container,
|
||||
main_module_graph_container,
|
||||
lockfile,
|
||||
} = args;
|
||||
Self {
|
||||
deps,
|
||||
resolved_versions: Vec::new(),
|
||||
latest_versions: Vec::new(),
|
||||
jsr_fetch_resolver,
|
||||
dependencies_resolved: AtomicBool::new(false),
|
||||
module_load_preparer,
|
||||
npm_fetch_resolver,
|
||||
npm_resolver,
|
||||
permissions_container,
|
||||
main_module_graph_container,
|
||||
lockfile,
|
||||
pending_changes: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub fn from_workspace_dir(
|
||||
workspace_dir: &Arc<WorkspaceDirectory>,
|
||||
dep_filter: impl DepFilter,
|
||||
args: DepManagerArgs,
|
||||
) -> Result<Self, AnyError> {
|
||||
let mut deps = Vec::with_capacity(256);
|
||||
if let Some(deno_json) = workspace_dir.maybe_deno_json() {
|
||||
if deno_json.specifier.scheme() != "file" {
|
||||
bail!("remote deno.json files are not supported");
|
||||
}
|
||||
let path = deno_json.specifier.to_file_path().unwrap();
|
||||
if path.parent().unwrap() == workspace_dir.dir_path() {
|
||||
add_deps_from_deno_json(deno_json, dep_filter, &mut deps);
|
||||
}
|
||||
}
|
||||
if let Some(package_json) = workspace_dir.maybe_pkg_json() {
|
||||
add_deps_from_package_json(package_json, dep_filter, &mut deps);
|
||||
}
|
||||
|
||||
Ok(Self::with_deps_args(deps, args))
|
||||
}
|
||||
pub fn from_workspace(
|
||||
workspace: &Arc<Workspace>,
|
||||
dep_filter: impl DepFilter,
|
||||
args: DepManagerArgs,
|
||||
) -> Result<Self, AnyError> {
|
||||
let deps = deps_from_workspace(workspace, dep_filter)?;
|
||||
Ok(Self::with_deps_args(deps, args))
|
||||
}
|
||||
|
||||
async fn run_dependency_resolution(&self) -> Result<(), AnyError> {
|
||||
if self
|
||||
.dependencies_resolved
|
||||
.load(std::sync::atomic::Ordering::Relaxed)
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut graph_permit = self
|
||||
.main_module_graph_container
|
||||
.acquire_update_permit()
|
||||
.await;
|
||||
let graph = graph_permit.graph_mut();
|
||||
// populate the information from the lockfile
|
||||
if let Some(lockfile) = &self.lockfile {
|
||||
let lockfile = lockfile.lock();
|
||||
graph.fill_from_lockfile(FillFromLockfileOptions {
|
||||
redirects: lockfile
|
||||
.content
|
||||
.redirects
|
||||
.iter()
|
||||
.map(|(from, to)| (from.as_str(), to.as_str())),
|
||||
package_specifiers: lockfile
|
||||
.content
|
||||
.packages
|
||||
.specifiers
|
||||
.iter()
|
||||
.map(|(dep, id)| (dep, id.as_str())),
|
||||
});
|
||||
}
|
||||
|
||||
let npm_resolver = self.npm_resolver.as_managed().unwrap();
|
||||
if self.deps.iter().all(|dep| match dep.kind {
|
||||
DepKind::Npm => {
|
||||
npm_resolver.resolve_pkg_id_from_pkg_req(&dep.req).is_ok()
|
||||
}
|
||||
DepKind::Jsr => graph.packages.mappings().contains_key(&dep.req),
|
||||
}) {
|
||||
self
|
||||
.dependencies_resolved
|
||||
.store(true, std::sync::atomic::Ordering::Relaxed);
|
||||
graph_permit.commit();
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||
let mut roots = Vec::new();
|
||||
let mut info_futures = FuturesUnordered::new();
|
||||
for dep in &self.deps {
|
||||
if dep.location.is_deno_json() {
|
||||
match dep.kind {
|
||||
DepKind::Npm => roots.push(
|
||||
ModuleSpecifier::parse(&format!("npm:/{}/", dep.req)).unwrap(),
|
||||
),
|
||||
DepKind::Jsr => info_futures.push(async {
|
||||
if let Some(nv) = self.jsr_fetch_resolver.req_to_nv(&dep.req).await
|
||||
{
|
||||
if let Some(info) =
|
||||
self.jsr_fetch_resolver.package_version_info(&nv).await
|
||||
{
|
||||
let specifier =
|
||||
ModuleSpecifier::parse(&format!("jsr:/{}/", dep.req))
|
||||
.unwrap();
|
||||
return Some((specifier, info));
|
||||
}
|
||||
}
|
||||
None
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(info_future) = info_futures.next().await {
|
||||
if let Some((specifier, info)) = info_future {
|
||||
let exports = info.exports();
|
||||
for (k, _) in exports {
|
||||
if let Ok(spec) = specifier.join(k) {
|
||||
roots.push(spec);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
.module_load_preparer
|
||||
.prepare_module_load(
|
||||
graph,
|
||||
&roots,
|
||||
false,
|
||||
deno_config::deno_json::TsTypeLib::DenoWindow,
|
||||
self.permissions_container.clone(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
graph_permit.commit();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn resolved_version(&self, id: DepId) -> Option<&PackageNv> {
|
||||
self.resolved_versions[id.0].as_ref()
|
||||
}
|
||||
|
||||
pub async fn resolve_current_versions(&mut self) -> Result<(), AnyError> {
|
||||
self.run_dependency_resolution().await?;
|
||||
|
||||
let graph = self.main_module_graph_container.graph();
|
||||
|
||||
let mut resolved = Vec::with_capacity(self.deps.len());
|
||||
let snapshot = self.npm_resolver.as_managed().unwrap().snapshot();
|
||||
let resolved_npm = snapshot.package_reqs();
|
||||
let resolved_jsr = graph.packages.mappings();
|
||||
for dep in &self.deps {
|
||||
match dep.kind {
|
||||
DepKind::Npm => {
|
||||
let resolved_version = resolved_npm.get(&dep.req).cloned();
|
||||
resolved.push(resolved_version);
|
||||
}
|
||||
DepKind::Jsr => {
|
||||
let resolved_version = resolved_jsr.get(&dep.req).cloned();
|
||||
resolved.push(resolved_version)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.resolved_versions = resolved;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn load_latest_versions(
|
||||
&self,
|
||||
) -> Result<Vec<PackageLatestVersion>, AnyError> {
|
||||
if self.latest_versions.len() == self.deps.len() {
|
||||
return Ok(self.latest_versions.clone());
|
||||
}
|
||||
let latest_tag_req = deno_semver::VersionReq::from_raw_text_and_inner(
|
||||
"latest".into(),
|
||||
deno_semver::RangeSetOrTag::Tag("latest".into()),
|
||||
);
|
||||
let mut latest_versions = Vec::with_capacity(self.deps.len());
|
||||
|
||||
let npm_sema = Semaphore::new(32);
|
||||
let jsr_sema = Semaphore::new(32);
|
||||
let mut futs = FuturesOrdered::new();
|
||||
|
||||
for dep in &self.deps {
|
||||
match dep.kind {
|
||||
DepKind::Npm => futs.push_back(
|
||||
async {
|
||||
let semver_req = &dep.req;
|
||||
let latest_req = PackageReq {
|
||||
name: dep.req.name.clone(),
|
||||
version_req: latest_tag_req.clone(),
|
||||
};
|
||||
let _permit = npm_sema.acquire().await;
|
||||
let semver_compatible =
|
||||
self.npm_fetch_resolver.req_to_nv(semver_req).await;
|
||||
let latest = self.npm_fetch_resolver.req_to_nv(&latest_req).await;
|
||||
PackageLatestVersion {
|
||||
latest,
|
||||
semver_compatible,
|
||||
}
|
||||
}
|
||||
.boxed_local(),
|
||||
),
|
||||
DepKind::Jsr => futs.push_back(
|
||||
async {
|
||||
let semver_req = &dep.req;
|
||||
let latest_req = PackageReq {
|
||||
name: dep.req.name.clone(),
|
||||
version_req: deno_semver::WILDCARD_VERSION_REQ.clone(),
|
||||
};
|
||||
let _permit = jsr_sema.acquire().await;
|
||||
let semver_compatible =
|
||||
self.jsr_fetch_resolver.req_to_nv(semver_req).await;
|
||||
let latest = self.jsr_fetch_resolver.req_to_nv(&latest_req).await;
|
||||
PackageLatestVersion {
|
||||
latest,
|
||||
semver_compatible,
|
||||
}
|
||||
}
|
||||
.boxed_local(),
|
||||
),
|
||||
}
|
||||
}
|
||||
while let Some(nv) = futs.next().await {
|
||||
latest_versions.push(nv);
|
||||
}
|
||||
|
||||
Ok(latest_versions)
|
||||
}
|
||||
|
||||
pub async fn resolve_versions(&mut self) -> Result<(), AnyError> {
|
||||
let (_, latest_versions) = try_join(
|
||||
self.run_dependency_resolution(),
|
||||
self.load_latest_versions(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
self.latest_versions = latest_versions;
|
||||
|
||||
self.resolve_current_versions().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn deps_with_resolved_latest_versions(
|
||||
&self,
|
||||
) -> impl IntoIterator<Item = (DepId, Option<PackageNv>, PackageLatestVersion)> + '_
|
||||
{
|
||||
self
|
||||
.resolved_versions
|
||||
.iter()
|
||||
.zip(self.latest_versions.iter())
|
||||
.enumerate()
|
||||
.map(|(i, (resolved, latest))| {
|
||||
(DepId(i), resolved.clone(), latest.clone())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_dep(&self, id: DepId) -> &Dep {
|
||||
&self.deps[id.0]
|
||||
}
|
||||
|
||||
pub fn update_dep(&mut self, dep_id: DepId, new_version_req: VersionReq) {
|
||||
self
|
||||
.pending_changes
|
||||
.push(Change::Update(dep_id, new_version_req));
|
||||
}
|
||||
|
||||
pub fn commit_changes(&mut self) -> Result<(), AnyError> {
|
||||
let changes = std::mem::take(&mut self.pending_changes);
|
||||
let mut config_updaters = HashMap::new();
|
||||
for change in changes {
|
||||
match change {
|
||||
Change::Update(dep_id, version_req) => {
|
||||
// TODO: move most of this to ConfigUpdater
|
||||
let dep = &mut self.deps[dep_id.0];
|
||||
dep.req.version_req = version_req.clone();
|
||||
match &dep.location {
|
||||
DepLocation::DenoJson(arc, key_path, import_map_kind) => {
|
||||
if matches!(import_map_kind, ImportMapKind::Outline) {
|
||||
// not supported
|
||||
continue;
|
||||
}
|
||||
let updater =
|
||||
get_or_create_updater(&mut config_updaters, &dep.location)?;
|
||||
|
||||
let Some(property) = updater.get_property_for_mutation(key_path)
|
||||
else {
|
||||
log::warn!(
|
||||
"failed to find property at path {key_path:?} for file {}",
|
||||
arc.specifier
|
||||
);
|
||||
continue;
|
||||
};
|
||||
let Some(string_value) = cst_string_literal(&property) else {
|
||||
continue;
|
||||
};
|
||||
let mut req_reference = match dep.kind {
|
||||
DepKind::Npm => NpmPackageReqReference::from_str(&string_value)
|
||||
.unwrap()
|
||||
.into_inner(),
|
||||
DepKind::Jsr => JsrPackageReqReference::from_str(&string_value)
|
||||
.unwrap()
|
||||
.into_inner(),
|
||||
};
|
||||
req_reference.req.version_req = version_req;
|
||||
let mut new_value =
|
||||
format!("{}:{}", dep.kind.scheme(), req_reference);
|
||||
if string_value.ends_with('/') && !new_value.ends_with('/') {
|
||||
// the display impl for PackageReqReference maps `/` to the root
|
||||
// subpath, but for the import map the trailing `/` is significant
|
||||
new_value.push('/');
|
||||
}
|
||||
if string_value
|
||||
.trim_start_matches(format!("{}:", dep.kind.scheme()).as_str())
|
||||
.starts_with('/')
|
||||
{
|
||||
// this is gross
|
||||
new_value = new_value.replace(':', ":/");
|
||||
}
|
||||
property
|
||||
.set_value(jsonc_parser::cst::CstInputValue::String(new_value));
|
||||
}
|
||||
DepLocation::PackageJson(arc, key_path) => {
|
||||
let updater =
|
||||
get_or_create_updater(&mut config_updaters, &dep.location)?;
|
||||
let Some(property) = updater.get_property_for_mutation(key_path)
|
||||
else {
|
||||
log::warn!(
|
||||
"failed to find property at path {key_path:?} for file {}",
|
||||
arc.path.display()
|
||||
);
|
||||
continue;
|
||||
};
|
||||
let Some(string_value) = cst_string_literal(&property) else {
|
||||
continue;
|
||||
};
|
||||
let new_value = if string_value.starts_with("npm:") {
|
||||
// aliased
|
||||
let rest = string_value.trim_start_matches("npm:");
|
||||
let mut parts = rest.split('@');
|
||||
let first = parts.next().unwrap();
|
||||
if first.is_empty() {
|
||||
let scope_and_name = parts.next().unwrap();
|
||||
format!("npm:@{scope_and_name}@{version_req}")
|
||||
} else {
|
||||
format!("npm:{first}@{version_req}")
|
||||
}
|
||||
} else if string_value.contains(":") {
|
||||
bail!("Unexpected package json dependency string: \"{string_value}\" in {}", arc.path.display());
|
||||
} else {
|
||||
version_req.to_string()
|
||||
};
|
||||
property
|
||||
.set_value(jsonc_parser::cst::CstInputValue::String(new_value));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (_, updater) in config_updaters {
|
||||
updater.commit()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn get_or_create_updater<'a>(
|
||||
config_updaters: &'a mut HashMap<std::path::PathBuf, ConfigUpdater>,
|
||||
location: &DepLocation,
|
||||
) -> Result<&'a mut ConfigUpdater, AnyError> {
|
||||
match config_updaters.entry(location.file_path().into_owned()) {
|
||||
std::collections::hash_map::Entry::Occupied(occupied_entry) => {
|
||||
Ok(occupied_entry.into_mut())
|
||||
}
|
||||
std::collections::hash_map::Entry::Vacant(vacant_entry) => {
|
||||
let updater = ConfigUpdater::new(
|
||||
location.config_kind(),
|
||||
location.file_path().into_owned(),
|
||||
)?;
|
||||
Ok(vacant_entry.insert(updater))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cst_string_literal(
|
||||
property: &jsonc_parser::cst::CstObjectProp,
|
||||
) -> Option<String> {
|
||||
// TODO(nathanwhit): ensure this unwrap is safe
|
||||
let value = property.value().unwrap();
|
||||
let Some(string) = value.as_string_lit() else {
|
||||
log::warn!("malformed entry");
|
||||
return None;
|
||||
};
|
||||
let Ok(string_value) = string.decoded_value() else {
|
||||
log::warn!("malformed string: {string:?}");
|
||||
return None;
|
||||
};
|
||||
Some(string_value)
|
||||
}
|
||||
|
||||
fn parse_req_reference(
|
||||
input: &str,
|
||||
kind: DepKind,
|
||||
) -> Result<
|
||||
PackageReqReference,
|
||||
deno_semver::package::PackageReqReferenceParseError,
|
||||
> {
|
||||
Ok(match kind {
|
||||
DepKind::Npm => NpmPackageReqReference::from_str(input)?.into_inner(),
|
||||
DepKind::Jsr => JsrPackageReqReference::from_str(input)?.into_inner(),
|
||||
})
|
||||
}
|
661 cli/tools/registry/pm/outdated.rs Normal file
@@ -0,0 +1,661 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::HashSet;
use std::sync::Arc;

use deno_core::error::AnyError;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::VersionReq;
use deno_terminal::colors;

use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::OutdatedFlags;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
use crate::tools::registry::pm::deps::DepKind;

use super::deps::Dep;
use super::deps::DepManager;
use super::deps::DepManagerArgs;
use super::deps::PackageLatestVersion;

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
struct OutdatedPackage {
  kind: DepKind,
  latest: String,
  semver_compatible: String,
  current: String,
  name: String,
}

#[allow(clippy::print_stdout)]
fn print_outdated_table(packages: &[OutdatedPackage]) {
  const HEADINGS: &[&str] = &["Package", "Current", "Update", "Latest"];

  let mut longest_package = 0;
  let mut longest_current = 0;
  let mut longest_update = 0;
  let mut longest_latest = 0;

  for package in packages {
    let name_len = package.kind.scheme().len() + 1 + package.name.len();
    longest_package = longest_package.max(name_len);
    longest_current = longest_current.max(package.current.len());
    longest_update = longest_update.max(package.semver_compatible.len());
    longest_latest = longest_latest.max(package.latest.len());
  }

  let package_column_width = longest_package.max(HEADINGS[0].len()) + 2;
  let current_column_width = longest_current.max(HEADINGS[1].len()) + 2;
  let update_column_width = longest_update.max(HEADINGS[2].len()) + 2;
  let latest_column_width = longest_latest.max(HEADINGS[3].len()) + 2;

  let package_fill = "─".repeat(package_column_width);
  let current_fill = "─".repeat(current_column_width);
  let update_fill = "─".repeat(update_column_width);
  let latest_fill = "─".repeat(latest_column_width);

  println!("┌{package_fill}┬{current_fill}┬{update_fill}┬{latest_fill}┐");
  println!(
    "│ {}{} │ {}{} │ {}{} │ {}{} │",
    colors::intense_blue(HEADINGS[0]),
    " ".repeat(package_column_width - 2 - HEADINGS[0].len()),
    colors::intense_blue(HEADINGS[1]),
    " ".repeat(current_column_width - 2 - HEADINGS[1].len()),
    colors::intense_blue(HEADINGS[2]),
    " ".repeat(update_column_width - 2 - HEADINGS[2].len()),
    colors::intense_blue(HEADINGS[3]),
    " ".repeat(latest_column_width - 2 - HEADINGS[3].len())
  );
  for package in packages {
    println!("├{package_fill}┼{current_fill}┼{update_fill}┼{latest_fill}┤",);

    print!(
      "│ {:<package_column_width$} ",
      format!("{}:{}", package.kind.scheme(), package.name),
      package_column_width = package_column_width - 2
    );
    print!(
      "│ {:<current_column_width$} ",
      package.current,
      current_column_width = current_column_width - 2
    );
    print!(
      "│ {:<update_column_width$} ",
      package.semver_compatible,
      update_column_width = update_column_width - 2
    );
    println!(
      "│ {:<latest_column_width$} │",
      package.latest,
      latest_column_width = latest_column_width - 2
    );
  }

  println!("└{package_fill}┴{current_fill}┴{update_fill}┴{latest_fill}┘",);
}

fn print_outdated(
  deps: &mut DepManager,
  compatible: bool,
) -> Result<(), AnyError> {
  let mut outdated = Vec::new();
  let mut seen = std::collections::BTreeSet::new();
  for (dep_id, resolved, latest_versions) in
    deps.deps_with_resolved_latest_versions()
  {
    let dep = deps.get_dep(dep_id);

    let Some(resolved) = resolved else { continue };

    let latest = {
      let preferred = if compatible {
        &latest_versions.semver_compatible
      } else {
        &latest_versions.latest
      };
      if let Some(v) = preferred {
        v
      } else {
        continue;
      }
    };

    if latest > &resolved
      && seen.insert((dep.kind, dep.req.name.clone(), resolved.version.clone()))
    {
      outdated.push(OutdatedPackage {
        kind: dep.kind,
        name: dep.req.name.clone(),
        current: resolved.version.to_string(),
        latest: latest_versions
          .latest
          .map(|l| l.version.to_string())
          .unwrap_or_default(),
        semver_compatible: latest_versions
          .semver_compatible
          .map(|l| l.version.to_string())
          .unwrap_or_default(),
      })
    }
  }

  if !outdated.is_empty() {
    outdated.sort();
    print_outdated_table(&outdated);
  }

  Ok(())
}

pub async fn outdated(
  flags: Arc<Flags>,
  update_flags: OutdatedFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags.clone());
  let cli_options = factory.cli_options()?;
  let workspace = cli_options.workspace();
  let http_client = factory.http_client_provider();
  let deps_http_cache = factory.global_http_cache()?;
  let mut file_fetcher = FileFetcher::new(
    deps_http_cache.clone(),
    CacheSetting::RespectHeaders,
    true,
    http_client.clone(),
    Default::default(),
    None,
  );
  file_fetcher.set_download_log_level(log::Level::Trace);
  let file_fetcher = Arc::new(file_fetcher);
  let npm_fetch_resolver = Arc::new(NpmFetchResolver::new(
    file_fetcher.clone(),
    cli_options.npmrc().clone(),
  ));
  let jsr_fetch_resolver =
    Arc::new(JsrFetchResolver::new(file_fetcher.clone()));

  let args = dep_manager_args(
    &factory,
    cli_options,
    npm_fetch_resolver.clone(),
    jsr_fetch_resolver.clone(),
  )
  .await?;

  let filter_set = filter::FilterSet::from_filter_strings(
    update_flags.filters.iter().map(|s| s.as_str()),
  )?;

  let filter_fn = |alias: Option<&str>, req: &PackageReq, _: DepKind| {
    if filter_set.is_empty() {
      return true;
    }
    let name = alias.unwrap_or(&req.name);
    filter_set.matches(name)
  };
  let mut deps = if update_flags.recursive {
    super::deps::DepManager::from_workspace(workspace, filter_fn, args)?
  } else {
    super::deps::DepManager::from_workspace_dir(
      &cli_options.start_dir,
      filter_fn,
      args,
    )?
  };

  deps.resolve_versions().await?;

  match update_flags.kind {
    crate::args::OutdatedKind::Update { latest } => {
      update(deps, latest, &filter_set, flags).await?;
    }
    crate::args::OutdatedKind::PrintOutdated { compatible } => {
      print_outdated(&mut deps, compatible)?;
    }
  }

  Ok(())
}

fn choose_new_version_req(
  dep: &Dep,
  resolved: Option<&PackageNv>,
  latest_versions: &PackageLatestVersion,
  update_to_latest: bool,
  filter_set: &filter::FilterSet,
) -> Option<VersionReq> {
  let explicit_version_req = filter_set
    .matching_filter(dep.alias.as_deref().unwrap_or(&dep.req.name))
    .version_spec()
    .cloned();

  if let Some(version_req) = explicit_version_req {
    if let Some(resolved) = resolved {
      // todo(nathanwhit): handle tag
      if version_req.tag().is_none() && version_req.matches(&resolved.version) {
        return None;
      }
    }
    Some(version_req)
  } else {
    let preferred = if update_to_latest {
      latest_versions.latest.as_ref()?
    } else {
      latest_versions.semver_compatible.as_ref()?
    };
    if preferred.version <= resolved?.version {
      return None;
    }
    Some(
      VersionReq::parse_from_specifier(
        format!("^{}", preferred.version).as_str(),
      )
      .unwrap(),
    )
  }
}

async fn update(
  mut deps: DepManager,
  update_to_latest: bool,
  filter_set: &filter::FilterSet,
  flags: Arc<Flags>,
) -> Result<(), AnyError> {
  let mut updated = Vec::new();

  for (dep_id, resolved, latest_versions) in deps
    .deps_with_resolved_latest_versions()
    .into_iter()
    .collect::<Vec<_>>()
  {
    let dep = deps.get_dep(dep_id);
    let new_version_req = choose_new_version_req(
      dep,
      resolved.as_ref(),
      &latest_versions,
      update_to_latest,
      filter_set,
    );
    let Some(new_version_req) = new_version_req else {
      continue;
    };

    updated.push((
      dep_id,
      format!("{}:{}", dep.kind.scheme(), dep.req.name),
      deps.resolved_version(dep.id).cloned(),
      new_version_req.clone(),
    ));

    deps.update_dep(dep_id, new_version_req);
  }

  deps.commit_changes()?;

  if !updated.is_empty() {
    let factory = super::npm_install_after_modification(
      flags.clone(),
      Some(deps.jsr_fetch_resolver.clone()),
    )
    .await?;

    let mut updated_to_versions = HashSet::new();
    let cli_options = factory.cli_options()?;
    let args = dep_manager_args(
      &factory,
      cli_options,
      deps.npm_fetch_resolver.clone(),
      deps.jsr_fetch_resolver.clone(),
    )
    .await?;

    let mut deps = deps.reloaded_after_modification(args);
    deps.resolve_current_versions().await?;
    for (dep_id, package_name, maybe_current_version, new_version_req) in
      updated
    {
      if let Some(nv) = deps.resolved_version(dep_id) {
        updated_to_versions.insert((
          package_name,
          maybe_current_version,
          nv.version.clone(),
        ));
      } else {
        log::warn!(
          "Failed to resolve version for new version requirement: {} -> {}",
          package_name,
          new_version_req
        );
      }
    }

    log::info!(
      "Updated {} dependenc{}:",
      updated_to_versions.len(),
      if updated_to_versions.len() == 1 {
        "y"
      } else {
        "ies"
      }
    );
    let mut updated_to_versions =
      updated_to_versions.into_iter().collect::<Vec<_>>();
    updated_to_versions.sort_by(|(k, _, _), (k2, _, _)| k.cmp(k2));
    let max_name = updated_to_versions
      .iter()
      .map(|(name, _, _)| name.len())
      .max()
      .unwrap_or(0);
    let max_old = updated_to_versions
      .iter()
      .map(|(_, maybe_current, _)| {
        maybe_current
          .as_ref()
          .map(|v| v.version.to_string().len())
          .unwrap_or(0)
      })
      .max()
      .unwrap_or(0);
    let max_new = updated_to_versions
      .iter()
      .map(|(_, _, new_version)| new_version.to_string().len())
      .max()
      .unwrap_or(0);

    for (package_name, maybe_current_version, new_version) in
      updated_to_versions
    {
      let current_version = if let Some(current_version) = maybe_current_version
      {
        current_version.version.to_string()
      } else {
        "".to_string()
      };

      log::info!(
        " - {}{} {}{} -> {}{}",
        format!(
          "{}{}",
          colors::gray(package_name[0..4].to_string()),
          package_name[4..].to_string()
        ),
        " ".repeat(max_name - package_name.len()),
        " ".repeat(max_old - current_version.len()),
        colors::gray(&current_version),
        " ".repeat(max_new - new_version.to_string().len()),
        colors::green(&new_version),
      );
    }
  } else {
    log::info!(
      "All {}dependencies are up to date.",
      if filter_set.is_empty() {
        ""
      } else {
        "matching "
      }
    );
  }

  Ok(())
}

async fn dep_manager_args(
  factory: &CliFactory,
  cli_options: &CliOptions,
  npm_fetch_resolver: Arc<NpmFetchResolver>,
  jsr_fetch_resolver: Arc<JsrFetchResolver>,
) -> Result<DepManagerArgs, AnyError> {
  Ok(DepManagerArgs {
    module_load_preparer: factory.module_load_preparer().await?.clone(),
    jsr_fetch_resolver,
    npm_fetch_resolver,
    npm_resolver: factory.npm_resolver().await?.clone(),
    permissions_container: factory.root_permissions_container()?.clone(),
    main_module_graph_container: factory
      .main_module_graph_container()
      .await?
      .clone(),
    lockfile: cli_options.maybe_lockfile().cloned(),
  })
}

mod filter {
  use deno_core::anyhow::anyhow;
  use deno_core::anyhow::Context;
  use deno_core::error::AnyError;
  use deno_semver::VersionReq;

  enum FilterKind {
    Exclude,
    Include,
  }
  pub struct Filter {
    kind: FilterKind,
    regex: regex::Regex,
    version_spec: Option<VersionReq>,
  }

  fn pattern_to_regex(pattern: &str) -> Result<regex::Regex, AnyError> {
    let escaped = regex::escape(pattern);
    let unescaped_star = escaped.replace(r"\*", ".*");
    Ok(regex::Regex::new(&format!("^{}$", unescaped_star))?)
  }

  impl Filter {
    pub fn version_spec(&self) -> Option<&VersionReq> {
      self.version_spec.as_ref()
    }
    pub fn from_str(input: &str) -> Result<Self, AnyError> {
      let (kind, first_idx) = if input.starts_with('!') {
        (FilterKind::Exclude, 1)
      } else {
        (FilterKind::Include, 0)
      };
      let s = &input[first_idx..];
      let (pattern, version_spec) =
        if let Some(scope_name) = s.strip_prefix('@') {
          if let Some(idx) = scope_name.find('@') {
            let (pattern, version_spec) = s.split_at(idx + 1);
            (
              pattern,
              Some(
                VersionReq::parse_from_specifier(
                  version_spec.trim_start_matches('@'),
                )
                .with_context(|| format!("Invalid filter \"{input}\""))?,
              ),
            )
          } else {
            (s, None)
          }
        } else {
          let mut parts = s.split('@');
          let Some(pattern) = parts.next() else {
            return Err(anyhow!("Invalid filter \"{input}\""));
          };
          (
            pattern,
            parts
              .next()
              .map(VersionReq::parse_from_specifier)
              .transpose()
              .with_context(|| format!("Invalid filter \"{input}\""))?,
          )
        };

      Ok(Filter {
        kind,
        regex: pattern_to_regex(pattern)
          .with_context(|| format!("Invalid filter \"{input}\""))?,
        version_spec,
      })
    }

    pub fn matches(&self, name: &str) -> bool {
      self.regex.is_match(name)
    }
  }

  pub struct FilterSet {
    filters: Vec<Filter>,
    has_exclude: bool,
    has_include: bool,
  }
  impl FilterSet {
    pub fn from_filter_strings<'a>(
      filter_strings: impl IntoIterator<Item = &'a str>,
    ) -> Result<Self, AnyError> {
      let filters = filter_strings
        .into_iter()
        .map(Filter::from_str)
        .collect::<Result<Vec<_>, _>>()?;
      let has_exclude = filters
        .iter()
        .any(|f| matches!(f.kind, FilterKind::Exclude));
      let has_include = filters
        .iter()
        .any(|f| matches!(f.kind, FilterKind::Include));
      Ok(FilterSet {
        filters,
        has_exclude,
        has_include,
      })
    }

    pub fn is_empty(&self) -> bool {
      self.filters.is_empty()
    }

    pub fn matches(&self, name: &str) -> bool {
      self.matching_filter(name).is_included()
    }

    pub fn matching_filter(&self, name: &str) -> MatchResult<'_> {
      if self.filters.is_empty() {
        return MatchResult::Included;
      }
      let mut matched = None;
      for filter in &self.filters {
        match filter.kind {
          FilterKind::Include => {
            if matched.is_none() && filter.matches(name) {
              matched = Some(filter);
            }
          }
          FilterKind::Exclude => {
            if filter.matches(name) {
              return MatchResult::Excluded;
            }
          }
        }
      }
      if let Some(filter) = matched {
        MatchResult::Matches(filter)
      } else if self.has_exclude && !self.has_include {
        MatchResult::Included
      } else {
        MatchResult::Excluded
      }
    }
  }

  pub enum MatchResult<'a> {
    Matches(&'a Filter),
    Included,
    Excluded,
  }

  impl MatchResult<'_> {
    pub fn version_spec(&self) -> Option<&VersionReq> {
      match self {
        MatchResult::Matches(filter) => filter.version_spec(),
        _ => None,
      }
    }
    pub fn is_included(&self) -> bool {
      matches!(self, MatchResult::Included | MatchResult::Matches(_))
    }
  }

  #[cfg(test)]
  mod test {
    fn matches_filters<'a, 'b>(
      filters: impl IntoIterator<Item = &'a str>,
      name: &str,
    ) -> bool {
      let filters = super::FilterSet::from_filter_strings(filters).unwrap();
      filters.matches(name)
    }

    fn version_spec(s: &str) -> deno_semver::VersionReq {
      deno_semver::VersionReq::parse_from_specifier(s).unwrap()
    }

    #[test]
    fn basic_glob() {
      assert!(matches_filters(["foo*"], "foo"));
      assert!(matches_filters(["foo*"], "foobar"));
      assert!(!matches_filters(["foo*"], "barfoo"));

      assert!(matches_filters(["*foo"], "foo"));
      assert!(matches_filters(["*foo"], "barfoo"));
      assert!(!matches_filters(["*foo"], "foobar"));

      assert!(matches_filters(["@scope/foo*"], "@scope/foobar"));
    }

    #[test]
    fn basic_glob_with_version() {
      assert!(matches_filters(["foo*@1"], "foo",));
      assert!(matches_filters(["foo*@1"], "foobar",));
      assert!(matches_filters(["foo*@1"], "foo-bar",));
      assert!(!matches_filters(["foo*@1"], "barfoo",));
      assert!(matches_filters(["@scope/*@1"], "@scope/foo"));
    }

    #[test]
    fn glob_exclude() {
      assert!(!matches_filters(["!foo*"], "foo"));
      assert!(!matches_filters(["!foo*"], "foobar"));
      assert!(matches_filters(["!foo*"], "barfoo"));

      assert!(!matches_filters(["!*foo"], "foo"));
      assert!(!matches_filters(["!*foo"], "barfoo"));
      assert!(matches_filters(["!*foo"], "foobar"));

      assert!(!matches_filters(["!@scope/foo*"], "@scope/foobar"));
    }

    #[test]
    fn multiple_globs() {
      assert!(matches_filters(["foo*", "bar*"], "foo"));
      assert!(matches_filters(["foo*", "bar*"], "bar"));
      assert!(!matches_filters(["foo*", "bar*"], "baz"));

      assert!(matches_filters(["foo*", "!bar*"], "foo"));
      assert!(!matches_filters(["foo*", "!bar*"], "bar"));
      assert!(matches_filters(["foo*", "!bar*"], "foobar"));
      assert!(!matches_filters(["foo*", "!*bar"], "foobar"));
      assert!(!matches_filters(["foo*", "!*bar"], "baz"));

      let filters =
        super::FilterSet::from_filter_strings(["foo*@1", "bar*@2"]).unwrap();

      assert_eq!(
        filters.matching_filter("foo").version_spec().cloned(),
        Some(version_spec("1"))
      );

      assert_eq!(
        filters.matching_filter("bar").version_spec().cloned(),
        Some(version_spec("2"))
      );
    }
  }
}
@@ -124,7 +124,8 @@ async fn run_with_watch(
      !watch_flags.no_clear_screen,
    ),
    WatcherRestartMode::Automatic,
    move |flags, watcher_communicator, _changed_paths| {
    move |flags, watcher_communicator, changed_paths| {
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(async move {
        let factory = CliFactory::from_flags_for_watcher(
          flags,
@@ -151,7 +151,8 @@ async fn serve_with_watch(
      !watch_flags.no_clear_screen,
    ),
    WatcherRestartMode::Automatic,
    move |flags, watcher_communicator, _changed_paths| {
    move |flags, watcher_communicator, changed_paths| {
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(async move {
        let factory = CliFactory::from_flags_for_watcher(
          flags,
@@ -1,23 +1,33 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
use std::num::NonZeroUsize;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;

use deno_config::deno_json::Task;
use deno_config::workspace::FolderConfigs;
use deno_config::workspace::TaskDefinition;
use deno_config::workspace::TaskOrScript;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceMemberTasksConfig;
use deno_config::workspace::WorkspaceTasksConfig;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::stream::futures_unordered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::url::Url;
use deno_path_util::normalize_path;
use deno_runtime::deno_node::NodeResolver;
use deno_task_shell::ShellCommand;
use indexmap::IndexMap;
use regex::Regex;

use crate::args::CliOptions;
use crate::args::Flags;
@@ -28,6 +38,12 @@ use crate::npm::CliNpmResolver;
use crate::task_runner;
use crate::util::fs::canonicalize_path;

#[derive(Debug)]
struct PackageTaskInfo {
  matched_tasks: Vec<String>,
  tasks_config: WorkspaceTasksConfig,
}

pub async fn execute_script(
  flags: Arc<Flags>,
  task_flags: TaskFlags,
@@ -35,7 +51,7 @@ pub async fn execute_script(
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let start_dir = &cli_options.start_dir;
  if !start_dir.has_deno_or_pkg_json() {
  if !start_dir.has_deno_or_pkg_json() && !task_flags.eval {
    bail!("deno task couldn't find deno.json(c). See https://docs.deno.com/go/config")
  }
  let force_use_pkg_json =
@ -48,60 +64,383 @@ pub async fn execute_script(
|
|||
v == "1"
|
||||
})
|
||||
.unwrap_or(false);
|
||||
let tasks_config = start_dir.to_tasks_config()?;
|
||||
let tasks_config = if force_use_pkg_json {
|
||||
tasks_config.with_only_pkg_json()
|
||||
} else {
|
||||
tasks_config
|
||||
};
|
||||
|
||||
let task_name = match &task_flags.task {
|
||||
Some(task) => task,
|
||||
None => {
|
||||
fn arg_to_regex(input: &str) -> Result<regex::Regex, regex::Error> {
|
||||
let mut regex_str = regex::escape(input);
|
||||
regex_str = regex_str.replace("\\*", ".*");
|
||||
|
||||
Regex::new(®ex_str)
|
||||
}
|
||||
|
||||
let packages_task_configs: Vec<PackageTaskInfo> = if let Some(filter) =
|
||||
&task_flags.filter
|
||||
{
|
||||
let task_name = task_flags.task.as_ref().unwrap();
|
||||
|
||||
// Filter based on package name
|
||||
let package_regex = arg_to_regex(filter)?;
|
||||
let task_regex = arg_to_regex(task_name)?;
|
||||
|
||||
let mut packages_task_info: Vec<PackageTaskInfo> = vec![];
|
||||
|
||||
fn matches_package(
|
||||
config: &FolderConfigs,
|
||||
force_use_pkg_json: bool,
|
||||
regex: &Regex,
|
||||
) -> bool {
|
||||
if !force_use_pkg_json {
|
||||
if let Some(deno_json) = &config.deno_json {
|
||||
if let Some(name) = &deno_json.json.name {
|
||||
if regex.is_match(name) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(package_json) = &config.pkg_json {
|
||||
if let Some(name) = &package_json.name {
|
||||
if regex.is_match(name) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
let workspace = cli_options.workspace();
|
||||
for folder in workspace.config_folders() {
|
||||
if !matches_package(folder.1, force_use_pkg_json, &package_regex) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let member_dir = workspace.resolve_member_dir(folder.0);
|
||||
let mut tasks_config = member_dir.to_tasks_config()?;
|
||||
if force_use_pkg_json {
|
||||
tasks_config = tasks_config.with_only_pkg_json();
|
||||
}
|
||||
|
||||
// Any of the matched tasks could be a child task of another matched
|
||||
// one. Therefore we need to filter these out to ensure that every
|
||||
// task is only run once.
|
||||
let mut matched: HashSet<String> = HashSet::new();
|
||||
let mut visited: HashSet<String> = HashSet::new();
|
||||
|
||||
fn visit_task(
|
||||
tasks_config: &WorkspaceTasksConfig,
|
||||
visited: &mut HashSet<String>,
|
||||
name: &str,
|
||||
) {
|
||||
if visited.contains(name) {
|
||||
return;
|
||||
}
|
||||
|
||||
visited.insert(name.to_string());
|
||||
|
||||
if let Some((_, TaskOrScript::Task(_, task))) = &tasks_config.task(name)
|
||||
{
|
||||
for dep in &task.dependencies {
|
||||
visit_task(tasks_config, visited, dep);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Match tasks in deno.json
|
||||
for name in tasks_config.task_names() {
|
||||
if task_regex.is_match(name) && !visited.contains(name) {
|
||||
matched.insert(name.to_string());
|
||||
visit_task(&tasks_config, &mut visited, name);
|
||||
}
|
||||
}
|
||||
|
||||
packages_task_info.push(PackageTaskInfo {
|
||||
matched_tasks: matched
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<_>>(),
|
||||
tasks_config,
|
||||
});
|
||||
}
|
||||
|
||||
// Logging every task definition would be too spammy. Pnpm only
|
||||
// logs a simple message too.
|
||||
if packages_task_info
|
||||
.iter()
|
||||
.all(|config| config.matched_tasks.is_empty())
|
||||
{
|
||||
log::warn!(
|
||||
"{}",
|
||||
colors::red(format!(
|
||||
"No matching task or script '{}' found in selected packages.",
|
||||
task_name
|
||||
))
|
||||
);
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
// FIXME: Sort packages topologically
|
||||
//
|
||||
|
||||
packages_task_info
|
||||
} else {
|
||||
let mut tasks_config = start_dir.to_tasks_config()?;
|
||||
|
||||
if force_use_pkg_json {
|
||||
tasks_config = tasks_config.with_only_pkg_json()
|
||||
}
|
||||
|
||||
let Some(task_name) = &task_flags.task else {
|
||||
print_available_tasks(
|
||||
&mut std::io::stdout(),
|
||||
&cli_options.start_dir,
|
||||
&tasks_config,
|
||||
)?;
|
||||
return Ok(0);
|
||||
}
|
||||
};
|
||||
|
||||
vec![PackageTaskInfo {
|
||||
tasks_config,
|
||||
matched_tasks: vec![task_name.to_string()],
|
||||
}]
|
||||
};
|
||||
|
||||
let npm_resolver = factory.npm_resolver().await?;
|
||||
let node_resolver = factory.node_resolver().await?;
|
||||
let env_vars = task_runner::real_env_vars();
|
||||
|
||||
match tasks_config.task(task_name) {
|
||||
Some((dir_url, task_or_script)) => match task_or_script {
|
||||
TaskOrScript::Task(_tasks, script) => {
|
||||
let cwd = match task_flags.cwd {
|
||||
let no_of_concurrent_tasks = if let Ok(value) = std::env::var("DENO_JOBS") {
|
||||
value.parse::<NonZeroUsize>().ok()
|
||||
} else {
|
||||
std::thread::available_parallelism().ok()
|
||||
}
|
||||
.unwrap_or_else(|| NonZeroUsize::new(2).unwrap());
|
||||
|
||||
let task_runner = TaskRunner {
|
||||
task_flags: &task_flags,
|
||||
npm_resolver: npm_resolver.as_ref(),
|
||||
node_resolver: node_resolver.as_ref(),
|
||||
env_vars,
|
||||
cli_options,
|
||||
concurrency: no_of_concurrent_tasks.into(),
|
||||
};
|
||||
|
||||
if task_flags.eval {
|
||||
return task_runner
|
||||
.run_deno_task(
|
||||
&Url::from_directory_path(cli_options.initial_cwd()).unwrap(),
|
||||
"",
|
||||
&TaskDefinition {
|
||||
command: task_flags.task.as_ref().unwrap().to_string(),
|
||||
dependencies: vec![],
|
||||
description: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
for task_config in &packages_task_configs {
|
||||
let exit_code = task_runner.run_tasks(task_config).await?;
|
||||
if exit_code > 0 {
|
||||
return Ok(exit_code);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(0)
|
||||
}
|
||||
|
struct RunSingleOptions<'a> {
task_name: &'a str,
script: &'a str,
cwd: &'a Path,
custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
}

struct TaskRunner<'a> {
task_flags: &'a TaskFlags,
npm_resolver: &'a dyn CliNpmResolver,
node_resolver: &'a NodeResolver,
env_vars: HashMap<String, String>,
cli_options: &'a CliOptions,
concurrency: usize,
}

impl<'a> TaskRunner<'a> {
pub async fn run_tasks(
&self,
pkg_tasks_config: &PackageTaskInfo,
) -> Result<i32, deno_core::anyhow::Error> {
match sort_tasks_topo(pkg_tasks_config) {
Ok(sorted) => self.run_tasks_in_parallel(sorted).await,
Err(err) => match err {
TaskError::NotFound(name) => {
if self.task_flags.is_run {
return Err(anyhow!("Task not found: {}", name));
}

log::error!("Task not found: {}", name);
if log::log_enabled!(log::Level::Error) {
self.print_available_tasks(&pkg_tasks_config.tasks_config)?;
}
Ok(1)
}
TaskError::TaskDepCycle { path } => {
log::error!("Task cycle detected: {}", path.join(" -> "));
Ok(1)
}
},
}
}

pub fn print_available_tasks(
&self,
tasks_config: &WorkspaceTasksConfig,
) -> Result<(), std::io::Error> {
print_available_tasks(
&mut std::io::stderr(),
&self.cli_options.start_dir,
tasks_config,
)
}

async fn run_tasks_in_parallel(
&self,
tasks: Vec<ResolvedTask<'a>>,
) -> Result<i32, deno_core::anyhow::Error> {
struct PendingTasksContext<'a> {
completed: HashSet<usize>,
running: HashSet<usize>,
tasks: &'a [ResolvedTask<'a>],
}

impl<'a> PendingTasksContext<'a> {
fn has_remaining_tasks(&self) -> bool {
self.completed.len() < self.tasks.len()
}

fn mark_complete(&mut self, task: &ResolvedTask) {
self.running.remove(&task.id);
self.completed.insert(task.id);
}

fn get_next_task<'b>(
&mut self,
runner: &'b TaskRunner<'b>,
) -> Option<
LocalBoxFuture<'b, Result<(i32, &'a ResolvedTask<'a>), AnyError>>,
>
where
'a: 'b,
{
for task in self.tasks.iter() {
if self.completed.contains(&task.id)
|| self.running.contains(&task.id)
{
continue;
}

let should_run = task
.dependencies
.iter()
.all(|dep_id| self.completed.contains(dep_id));
if !should_run {
continue;
}

self.running.insert(task.id);
return Some(
async move {
match task.task_or_script {
TaskOrScript::Task(_, def) => {
runner.run_deno_task(task.folder_url, task.name, def).await
}
TaskOrScript::Script(scripts, _) => {
runner
.run_npm_script(task.folder_url, task.name, scripts)
.await
}
}
.map(|exit_code| (exit_code, task))
}
.boxed_local(),
);
}
None
}
}

let mut context = PendingTasksContext {
completed: HashSet::with_capacity(tasks.len()),
running: HashSet::with_capacity(self.concurrency),
tasks: &tasks,
};

let mut queue = futures_unordered::FuturesUnordered::new();

while context.has_remaining_tasks() {
while queue.len() < self.concurrency {
if let Some(task) = context.get_next_task(self) {
queue.push(task);
} else {
break;
}
}

// If queue is empty at this point, then there are no more tasks in the queue.
let Some(result) = queue.next().await else {
debug_assert_eq!(context.tasks.len(), 0);
break;
};

let (exit_code, name) = result?;
if exit_code > 0 {
return Ok(exit_code);
}

context.mark_complete(name);
}

Ok(0)
}

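Note: run_tasks_in_parallel above keeps at most `concurrency` futures in flight and only schedules a task once all of its dependency ids are in the completed set. A reduced, self-contained sketch of the same bounded scheduling loop (assumes the futures and tokio crates; plain sleep durations stand in for tasks, illustrative only):

use futures::stream::{FuturesUnordered, StreamExt};
use std::time::Duration;

// Run `jobs` with at most `limit` futures in flight at any one time.
async fn run_bounded(jobs: Vec<u64>, limit: usize) {
    let mut pending = jobs.into_iter();
    let mut queue = FuturesUnordered::new();
    loop {
        // Top the queue up to the concurrency limit.
        while queue.len() < limit {
            match pending.next() {
                Some(ms) => queue.push(async move {
                    tokio::time::sleep(Duration::from_millis(ms)).await;
                    ms
                }),
                None => break,
            }
        }
        // Drain one completion; an empty queue means everything finished.
        match queue.next().await {
            Some(done) => println!("finished job that slept {done}ms"),
            None => break,
        }
    }
}

#[tokio::main]
async fn main() {
    run_bounded(vec![30, 10, 20, 5], 2).await;
}
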
pub async fn run_deno_task(
&self,
dir_url: &Url,
task_name: &str,
definition: &TaskDefinition,
) -> Result<i32, deno_core::anyhow::Error> {
let cwd = match &self.task_flags.cwd {
Some(path) => canonicalize_path(&PathBuf::from(path))
.context("failed canonicalizing --cwd")?,
None => normalize_path(dir_url.to_file_path().unwrap()),
};

let custom_commands = task_runner::resolve_custom_commands(
npm_resolver.as_ref(),
node_resolver,
self.npm_resolver,
self.node_resolver,
)?;
run_task(RunTaskOptions {
self
.run_single(RunSingleOptions {
task_name,
script,
script: &definition.command,
cwd: &cwd,
env_vars,
custom_commands,
npm_resolver: npm_resolver.as_ref(),
cli_options,
})
.await
}
TaskOrScript::Script(scripts, _script) => {

pub async fn run_npm_script(
&self,
dir_url: &Url,
task_name: &str,
scripts: &IndexMap<String, String>,
) -> Result<i32, deno_core::anyhow::Error> {
// ensure the npm packages are installed if using a managed resolver
if let Some(npm_resolver) = npm_resolver.as_managed() {
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?;
}

let cwd = match task_flags.cwd {
let cwd = match &self.task_flags.cwd {
Some(path) => canonicalize_path(&PathBuf::from(path))?,
None => normalize_path(dir_url.to_file_path().unwrap()),
};

@@ -111,23 +450,21 @@ pub async fn execute_script(
// dealing with package.json here and not deno.json
let task_names = vec![
format!("pre{}", task_name),
task_name.clone(),
task_name.to_string(),
format!("post{}", task_name),
];
let custom_commands = task_runner::resolve_custom_commands(
npm_resolver.as_ref(),
node_resolver,
self.npm_resolver,
self.node_resolver,
)?;
for task_name in &task_names {
if let Some(script) = scripts.get(task_name) {
let exit_code = run_task(RunTaskOptions {
let exit_code = self
.run_single(RunSingleOptions {
task_name,
script,
cwd: &cwd,
env_vars: env_vars.clone(),
custom_commands: custom_commands.clone(),
npm_resolver: npm_resolver.as_ref(),
cli_options,
})
.await?;
if exit_code > 0 {

@@ -138,48 +475,21 @@ pub async fn execute_script(

Ok(0)
}
},
None => {
if task_flags.is_run {
return Err(anyhow!("Task not found: {}", task_name));
}
log::error!("Task not found: {}", task_name);
if log::log_enabled!(log::Level::Error) {
print_available_tasks(
&mut std::io::stderr(),
&cli_options.start_dir,
&tasks_config,
)?;
}
Ok(1)
}
}
}

struct RunTaskOptions<'a> {
task_name: &'a str,
script: &'a str,
cwd: &'a Path,
env_vars: HashMap<String, String>,
custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
npm_resolver: &'a dyn CliNpmResolver,
cli_options: &'a CliOptions,
}

async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
let RunTaskOptions {
async fn run_single(
&self,
opts: RunSingleOptions<'_>,
) -> Result<i32, AnyError> {
let RunSingleOptions {
task_name,
script,
cwd,
env_vars,
custom_commands,
npm_resolver,
cli_options,
} = opts;

output_task(
opts.task_name,
&task_runner::get_script_with_args(script, cli_options.argv()),
&task_runner::get_script_with_args(script, self.cli_options.argv()),
);

Ok(

@@ -187,16 +497,130 @@ async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
task_name,
script,
cwd,
env_vars,
env_vars: self.env_vars.clone(),
custom_commands,
init_cwd: opts.cli_options.initial_cwd(),
argv: cli_options.argv(),
root_node_modules_dir: npm_resolver.root_node_modules_path(),
init_cwd: self.cli_options.initial_cwd(),
argv: self.cli_options.argv(),
root_node_modules_dir: self.npm_resolver.root_node_modules_path(),
stdio: None,
})
.await?
.exit_code,
)
}
}

#[derive(Debug)]
enum TaskError {
NotFound(String),
TaskDepCycle { path: Vec<String> },
}

struct ResolvedTask<'a> {
id: usize,
name: &'a str,
folder_url: &'a Url,
task_or_script: TaskOrScript<'a>,
dependencies: Vec<usize>,
}

fn sort_tasks_topo<'a>(
pkg_task_config: &'a PackageTaskInfo,
) -> Result<Vec<ResolvedTask<'a>>, TaskError> {
trait TasksConfig {
fn task(
&self,
name: &str,
) -> Option<(&Url, TaskOrScript, &dyn TasksConfig)>;
}

impl TasksConfig for WorkspaceTasksConfig {
fn task(
&self,
name: &str,
) -> Option<(&Url, TaskOrScript, &dyn TasksConfig)> {
if let Some(member) = &self.member {
if let Some((dir_url, task_or_script)) = member.task(name) {
return Some((dir_url, task_or_script, self as &dyn TasksConfig));
}
}
if let Some(root) = &self.root {
if let Some((dir_url, task_or_script)) = root.task(name) {
// switch to only using the root tasks for the dependencies
return Some((dir_url, task_or_script, root as &dyn TasksConfig));
}
}
None
}
}

impl TasksConfig for WorkspaceMemberTasksConfig {
fn task(
&self,
name: &str,
) -> Option<(&Url, TaskOrScript, &dyn TasksConfig)> {
self.task(name).map(|(dir_url, task_or_script)| {
(dir_url, task_or_script, self as &dyn TasksConfig)
})
}
}

fn sort_visit<'a>(
name: &'a str,
sorted: &mut Vec<ResolvedTask<'a>>,
mut path: Vec<(&'a Url, &'a str)>,
tasks_config: &'a dyn TasksConfig,
) -> Result<usize, TaskError> {
let Some((folder_url, task_or_script, tasks_config)) =
tasks_config.task(name)
else {
return Err(TaskError::NotFound(name.to_string()));
};

if let Some(existing_task) = sorted
.iter()
.find(|task| task.name == name && task.folder_url == folder_url)
{
// already exists
return Ok(existing_task.id);
}

if path.contains(&(folder_url, name)) {
path.push((folder_url, name));
return Err(TaskError::TaskDepCycle {
path: path.iter().map(|(_, s)| s.to_string()).collect(),
});
}

let mut dependencies: Vec<usize> = Vec::new();
if let TaskOrScript::Task(_, task) = task_or_script {
dependencies.reserve(task.dependencies.len());
for dep in &task.dependencies {
let mut path = path.clone();
path.push((folder_url, name));
dependencies.push(sort_visit(dep, sorted, path, tasks_config)?);
}
}

let id = sorted.len();
sorted.push(ResolvedTask {
id,
name,
folder_url,
task_or_script,
dependencies,
});

Ok(id)
}

let mut sorted: Vec<ResolvedTask<'a>> = vec![];

for name in &pkg_task_config.matched_tasks {
sort_visit(name, &mut sorted, Vec::new(), &pkg_task_config.tasks_config)?;
}

Ok(sorted)
}

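Note: sort_visit above is a depth-first post-order walk, so every task lands in the output after its dependencies, and the `path` vector doubles as the cycle detector. A stripped-down sketch of the same idea over a plain name-to-dependencies map (illustrative, not the commit's API):

use std::collections::HashMap;

// Post-order DFS: a name is pushed only after all of its dependencies.
// `path` holds the current chain, so revisiting a name on it means a cycle.
fn visit<'a>(
    name: &'a str,
    deps: &'a HashMap<&'a str, Vec<&'a str>>,
    path: &mut Vec<&'a str>,
    sorted: &mut Vec<&'a str>,
) -> Result<(), String> {
    if sorted.contains(&name) {
        return Ok(()); // already scheduled
    }
    if path.contains(&name) {
        path.push(name);
        return Err(format!("cycle: {}", path.join(" -> ")));
    }
    path.push(name);
    for &dep in deps.get(name).into_iter().flatten() {
        visit(dep, deps, path, sorted)?;
    }
    path.pop();
    sorted.push(name);
    Ok(())
}

fn main() -> Result<(), String> {
    let deps: HashMap<&str, Vec<&str>> =
        HashMap::from([("build", vec!["codegen"]), ("codegen", vec![])]);
    let mut sorted = Vec::new();
    visit("build", &deps, &mut Vec::new(), &mut sorted)?;
    println!("{sorted:?}"); // ["codegen", "build"]
    Ok(())
}
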
fn output_task(task_name: &str, script: &str) {

@@ -222,78 +646,95 @@ fn print_available_tasks(
" {}",
colors::red("No tasks found in configuration file")
)?;
} else {
let mut seen_task_names =
HashSet::with_capacity(tasks_config.tasks_count());
return Ok(());
}

struct AvailableTaskDescription {
is_root: bool,
is_deno: bool,
name: String,
task: TaskDefinition,
}
let mut seen_task_names = HashSet::with_capacity(tasks_config.tasks_count());
let mut task_descriptions = Vec::with_capacity(tasks_config.tasks_count());

for maybe_config in [&tasks_config.member, &tasks_config.root] {
let Some(config) = maybe_config else {
continue;
};
for (is_root, is_deno, (key, task)) in config
.deno_json
.as_ref()
.map(|config| {

if let Some(config) = config.deno_json.as_ref() {
let is_root = !is_cwd_root_dir
&& config.folder_url
== *workspace_dir.workspace.root_dir().as_ref();
config
.tasks
.iter()
.map(move |(k, t)| (is_root, true, (k, Cow::Borrowed(t))))
})
.into_iter()
.flatten()
.chain(
config
.package_json
.as_ref()
.map(|config| {
let is_root = !is_cwd_root_dir
&& config.folder_url
== *workspace_dir.workspace.root_dir().as_ref();
config.tasks.iter().map(move |(k, v)| {
(is_root, false, (k, Cow::Owned(Task::Definition(v.clone()))))
})
})
.into_iter()
.flatten(),
)
{
if !seen_task_names.insert(key) {
&& config.folder_url == *workspace_dir.workspace.root_dir().as_ref();

for (name, definition) in &config.tasks {
if !seen_task_names.insert(name) {
continue; // already seen
}
task_descriptions.push(AvailableTaskDescription {
is_root,
is_deno: true,
name: name.to_string(),
task: definition.clone(),
});
}
}

if let Some(config) = config.package_json.as_ref() {
let is_root = !is_cwd_root_dir
&& config.folder_url == *workspace_dir.workspace.root_dir().as_ref();
for (name, script) in &config.tasks {
if !seen_task_names.insert(name) {
continue; // already seen
}

task_descriptions.push(AvailableTaskDescription {
is_root,
is_deno: false,
name: name.to_string(),
task: deno_config::deno_json::TaskDefinition {
command: script.to_string(),
dependencies: vec![],
description: None,
},
});
}
}
}

for desc in task_descriptions {
writeln!(
writer,
"- {}{}",
colors::cyan(key),
if is_root {
if is_deno {
colors::cyan(desc.name),
if desc.is_root {
if desc.is_deno {
format!(" {}", colors::italic_gray("(workspace)"))
} else {
format!(" {}", colors::italic_gray("(workspace package.json)"))
}
} else if is_deno {
} else if desc.is_deno {
"".to_string()
} else {
format!(" {}", colors::italic_gray("(package.json)"))
}
)?;
let definition = match task.as_ref() {
Task::Definition(definition) => definition,
Task::Commented { definition, .. } => definition,
};
if let Task::Commented { comments, .. } = task.as_ref() {
if let Some(description) = &desc.task.description {
let slash_slash = colors::italic_gray("//");
for comment in comments {
writeln!(
writer,
" {slash_slash} {}",
colors::italic_gray(comment)
colors::italic_gray(description)
)?;
}
}
writeln!(writer, " {definition}")?;
}
writeln!(writer, " {}", desc.task.command)?;
if !desc.task.dependencies.is_empty() {
writeln!(
writer,
" {} {}",
colors::gray("depends on:"),
colors::cyan(desc.task.dependencies.join(", "))
)?;
}
}

@@ -1357,6 +1357,7 @@ pub async fn report_tests(
if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) {
eprint!("Test reporter failed to flush: {}", err)
}
#[allow(clippy::disallowed_methods)]
std::process::exit(130);
}
}

@@ -1642,6 +1643,7 @@ pub async fn run_tests_with_watch(
loop {
signal::ctrl_c().await.unwrap();
if !HAS_TEST_RUN_SIGINT_HANDLER.load(Ordering::Relaxed) {
#[allow(clippy::disallowed_methods)]
std::process::exit(130);
}
}

@@ -1659,6 +1661,7 @@ pub async fn run_tests_with_watch(
),
move |flags, watcher_communicator, changed_paths| {
let test_flags = test_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags_for_watcher(
flags,
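Note: the test runner hunks above exit with code 130 when the user interrupts a run, the conventional 128 + SIGINT value, unless a test has registered its own SIGINT handling. A minimal sketch of that ctrl-c pattern (assumes the tokio crate; names illustrative, not the commit's code):

use std::sync::atomic::{AtomicBool, Ordering};

// Stand-in for the diff's HAS_TEST_RUN_SIGINT_HANDLER flag.
static HAS_CUSTOM_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false);

#[tokio::main]
async fn main() {
    tokio::spawn(async {
        loop {
            tokio::signal::ctrl_c().await.unwrap();
            if !HAS_CUSTOM_SIGINT_HANDLER.load(Ordering::Relaxed) {
                // 130 = 128 + SIGINT, the conventional interrupted-exit code.
                std::process::exit(130);
            }
        }
    });
    // ... the actual test run would happen here ...
    tokio::time::sleep(std::time::Duration::from_secs(3600)).await;
}
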
@@ -540,7 +540,7 @@ pub async fn upgrade(
let Some(archive_data) = download_package(&client, download_url).await?
else {
log::error!("Download could not be found, aborting");
std::process::exit(1)
deno_runtime::exit(1)
};

log::info!(

@@ -450,6 +450,12 @@ delete Object.prototype.__proto__;
// We specify the resolution mode to be CommonJS for some npm files and this
// diagnostic gets generated even though we're using custom module resolution.
1452,
// Module '...' cannot be imported using this construct. The specifier only resolves to an
// ES module, which cannot be imported with 'require'.
1471,
// TS1479: The current file is a CommonJS module whose imports will produce 'require' calls;
// however, the referenced file is an ECMAScript module and cannot be imported with 'require'.
1479,
// TS2306: File '.../index.d.ts' is not a module.
// We get this for `x-typescript-types` declaration files which don't export
// anything. We prefer to treat these as modules with no exports.

6 cli/tsc/dts/lib.deno.ns.d.ts vendored

@@ -4535,7 +4535,7 @@ declare namespace Deno {
/** The object that is returned from a {@linkcode Deno.upgradeWebSocket}
* request.
*
* @category Web Sockets */
* @category WebSockets */
export interface WebSocketUpgrade {
/** The response object that represents the HTTP response to the client,
* which should be used to the {@linkcode RequestEvent} `.respondWith()` for

@@ -4549,7 +4549,7 @@ declare namespace Deno {
/** Options which can be set when performing a
* {@linkcode Deno.upgradeWebSocket} upgrade of a {@linkcode Request}
*
* @category Web Sockets */
* @category WebSockets */
export interface UpgradeWebSocketOptions {
/** Sets the `.protocol` property on the client side web socket to the
* value provided here, which should be one of the strings specified in the

@@ -4597,7 +4597,7 @@ declare namespace Deno {
* This operation does not yet consume the request or open the websocket. This
* only happens once the returned response has been passed to `respondWith()`.
*
* @category Web Sockets
* @category WebSockets
*/
export function upgradeWebSocket(
request: Request,

58 cli/tsc/dts/lib.deno.shared_globals.d.ts vendored

@@ -15,14 +15,14 @@
/// <reference lib="deno.crypto" />
/// <reference lib="deno.ns" />

/** @category WASM */
/** @category Wasm */
declare namespace WebAssembly {
/**
* The `WebAssembly.CompileError` object indicates an error during WebAssembly decoding or validation.
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/CompileError)
*
* @category WASM
* @category Wasm
*/
export class CompileError extends Error {
/** Creates a new `WebAssembly.CompileError` object. */

@@ -36,7 +36,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Global)
*
* @category WASM
* @category Wasm
*/
export class Global {
/** Creates a new `Global` object. */

@@ -59,7 +59,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Instance)
*
* @category WASM
* @category Wasm
*/
export class Instance {
/** Creates a new Instance object. */

@@ -79,7 +79,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/LinkError)
*
* @category WASM
* @category Wasm
*/
export class LinkError extends Error {
/** Creates a new WebAssembly.LinkError object. */

@@ -95,7 +95,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Memory)
*
* @category WASM
* @category Wasm
*/
export class Memory {
/** Creates a new `Memory` object. */

@@ -117,7 +117,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Module)
*
* @category WASM
* @category Wasm
*/
export class Module {
/** Creates a new `Module` object. */

@@ -145,7 +145,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/RuntimeError)
*
* @category WASM
* @category Wasm
*/
export class RuntimeError extends Error {
/** Creates a new `WebAssembly.RuntimeError` object. */

@@ -160,7 +160,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Table)
*
* @category WASM
* @category Wasm
*/
export class Table {
/** Creates a new `Table` object. */

@@ -182,7 +182,7 @@ declare namespace WebAssembly {
/** The `GlobalDescriptor` describes the options you can pass to
* `new WebAssembly.Global()`.
*
* @category WASM
* @category Wasm
*/
export interface GlobalDescriptor {
mutable?: boolean;

@@ -192,7 +192,7 @@ declare namespace WebAssembly {
/** The `MemoryDescriptor` describes the options you can pass to
* `new WebAssembly.Memory()`.
*
* @category WASM
* @category Wasm
*/
export interface MemoryDescriptor {
initial: number;

@@ -203,7 +203,7 @@ declare namespace WebAssembly {
/** A `ModuleExportDescriptor` is the description of a declared export in a
* `WebAssembly.Module`.
*
* @category WASM
* @category Wasm
*/
export interface ModuleExportDescriptor {
kind: ImportExportKind;

@@ -213,7 +213,7 @@ declare namespace WebAssembly {
/** A `ModuleImportDescriptor` is the description of a declared import in a
* `WebAssembly.Module`.
*
* @category WASM
* @category Wasm
*/
export interface ModuleImportDescriptor {
kind: ImportExportKind;

@@ -224,7 +224,7 @@ declare namespace WebAssembly {
/** The `TableDescriptor` describes the options you can pass to
* `new WebAssembly.Table()`.
*
* @category WASM
* @category Wasm
*/
export interface TableDescriptor {
element: TableKind;

@@ -234,7 +234,7 @@ declare namespace WebAssembly {

/** The value returned from `WebAssembly.instantiate`.
*
* @category WASM
* @category Wasm
*/
export interface WebAssemblyInstantiatedSource {
/* A `WebAssembly.Instance` object that contains all the exported WebAssembly functions. */

@@ -247,21 +247,21 @@ declare namespace WebAssembly {
module: Module;
}

/** @category WASM */
/** @category Wasm */
export type ImportExportKind = "function" | "global" | "memory" | "table";
/** @category WASM */
/** @category Wasm */
export type TableKind = "anyfunc";
/** @category WASM */
/** @category Wasm */
export type ValueType = "f32" | "f64" | "i32" | "i64";
/** @category WASM */
/** @category Wasm */
export type ExportValue = Function | Global | Memory | Table;
/** @category WASM */
/** @category Wasm */
export type Exports = Record<string, ExportValue>;
/** @category WASM */
/** @category Wasm */
export type ImportValue = ExportValue | number;
/** @category WASM */
/** @category Wasm */
export type ModuleImports = Record<string, ImportValue>;
/** @category WASM */
/** @category Wasm */
export type Imports = Record<string, ModuleImports>;

/**

@@ -272,7 +272,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/compile)
*
* @category WASM
* @category Wasm
*/
export function compile(bytes: BufferSource): Promise<Module>;

@@ -284,7 +284,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/compileStreaming)
*
* @category WASM
* @category Wasm
*/
export function compileStreaming(
source: Response | Promise<Response>,

@@ -301,7 +301,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/instantiate)
*
* @category WASM
* @category Wasm
*/
export function instantiate(
bytes: BufferSource,

@@ -318,7 +318,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/instantiate)
*
* @category WASM
* @category Wasm
*/
export function instantiate(
moduleObject: Module,

@@ -332,7 +332,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/instantiateStreaming)
*
* @category WASM
* @category Wasm
*/
export function instantiateStreaming(
response: Response | PromiseLike<Response>,

@@ -346,7 +346,7 @@ declare namespace WebAssembly {
*
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/validate)
*
* @category WASM
* @category Wasm
*/
export function validate(bytes: BufferSource): boolean;
}

149 cli/tsc/dts/lib.deno.unstable.d.ts vendored

@@ -1180,6 +1180,32 @@ declare namespace Deno {
...values: unknown[]
): Displayable;

/**
* Display a JPG or PNG image.
*
* ```
* Deno.jupyter.image("./cat.jpg");
* Deno.jupyter.image("./dog.png");
* ```
*
* @category Jupyter
* @experimental
*/
export function image(path: string): Displayable;

/**
* Display a JPG or PNG image.
*
* ```
* const img = Deno.readFileSync("./cat.jpg");
* Deno.jupyter.image(img);
* ```
*
* @category Jupyter
* @experimental
*/
export function image(data: Uint8Array): Displayable;

/**
* Format an object for displaying in Deno
*

@@ -1226,80 +1252,53 @@ declare namespace Deno {
}

/**
* **UNSTABLE**: New API, yet to be vetted.
*
* APIs for working with the OpenTelemetry observability framework. Deno can
* export traces, metrics, and logs to OpenTelemetry compatible backends via
* the OTLP protocol.
*
* Deno automatically instruments the runtime with OpenTelemetry traces and
* metrics. This data is exported via OTLP to OpenTelemetry compatible
* backends. User logs from the `console` API are exported as OpenTelemetry
* logs via OTLP.
*
* User code can also create custom traces, metrics, and logs using the
* OpenTelemetry API. This is done using the official OpenTelemetry package
* for JavaScript:
* [`npm:@opentelemetry/api`](https://opentelemetry.io/docs/languages/js/).
* Deno integrates with this package to provide trace context propagation
* between native Deno APIs (like `Deno.serve` or `fetch`) and custom user
* code. Deno also provides APIs that allow exporting custom telemetry data
* via the same OTLP channel used by the Deno runtime. This is done using the
* [`jsr:@deno/otel`](https://jsr.io/@deno/otel) package.
*
* @example Using OpenTelemetry API to create custom traces
* ```ts,ignore
* import { trace } from "npm:@opentelemetry/api@1";
* import "jsr:@deno/otel@0.0.2/register";
*
* const tracer = trace.getTracer("example-tracer");
*
* async function doWork() {
* return tracer.startActiveSpan("doWork", async (span) => {
* span.setAttribute("key", "value");
* await new Promise((resolve) => setTimeout(resolve, 1000));
* span.end();
* });
* }
*
* Deno.serve(async (req) => {
* await doWork();
* const resp = await fetch("https://example.com");
* return resp;
* });
* ```
*
* @category Telemetry
* @experimental
*/
export namespace tracing {
/**
* Whether tracing is enabled.
* @category Telemetry
* @experimental
*/
export const enabled: boolean;

/**
* Allowed attribute type.
* @category Telemetry
* @experimental
*/
export type AttributeValue = string | number | boolean | bigint;

/**
* A tracing span.
* @category Telemetry
* @experimental
*/
export class Span implements Disposable {
readonly traceId: string;
readonly spanId: string;
readonly parentSpanId: string;
readonly kind: string;
readonly name: string;
readonly startTime: number;
readonly endTime: number;
readonly status: null | { code: 1 } | { code: 2; message: string };
readonly attributes: Record<string, AttributeValue>;
readonly traceFlags: number;

/**
* Construct a new Span and enter it as the "current" span.
*/
constructor(
name: string,
kind?: "internal" | "server" | "client" | "producer" | "consumer",
);

/**
* Set an attribute on this span.
*/
setAttribute(
name: string,
value: AttributeValue,
): void;

/**
* Enter this span as the "current" span.
*/
enter(): void;

/**
* Exit this span as the "current" span and restore the previous one.
*/
exit(): void;

/**
* End this span, and exit it as the "current" span.
*/
end(): void;

[Symbol.dispose](): void;

/**
* Get the "current" span, if one exists.
*/
static current(): Span | undefined | null;
}

export namespace telemetry {
/**
* A SpanExporter compatible with OpenTelemetry.js
* https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_sdk_trace_base.SpanExporter.html

@@ -1319,14 +1318,6 @@ declare namespace Deno {
export {}; // only export exports
}

/**
* @category Telemetry
* @experimental
*/
export namespace metrics {
export {}; // only export exports
}

export {}; // only export exports
}

@@ -6,10 +6,10 @@ use crate::cache::FastInsecureHasher;
use crate::cache::ModuleInfoCache;
use crate::node;
use crate::npm::CliNpmResolver;
use crate::npm::ResolvePkgFolderFromDenoReqError;
use crate::resolver::CjsTracker;
use crate::util::checksum;
use crate::util::path::mapped_specifier_for_tsc;
use crate::worker::create_isolate_create_params;

use deno_ast::MediaType;
use deno_core::anyhow::anyhow;

@@ -34,6 +34,7 @@ use deno_graph::GraphKind;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_graph::ResolutionResolved;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver;
use deno_semver::npm::NpmPackageReqReference;

@@ -649,6 +650,10 @@ fn op_load_inner(
media_type = MediaType::Json;
Some(Cow::Borrowed(&*module.source))
}
Module::Wasm(module) => {
media_type = MediaType::Dts;
Some(Cow::Borrowed(&*module.source_dts))
}
Module::Npm(_) | Module::Node(_) => None,
Module::External(module) => {
// means it's Deno code importing an npm module

@@ -888,6 +893,9 @@ fn resolve_graph_specifier_types(
Some(Module::Json(module)) => {
Ok(Some((module.specifier.clone(), module.media_type)))
}
Some(Module::Wasm(module)) => {
Ok(Some((module.specifier.clone(), MediaType::Dmts)))
}
Some(Module::Npm(module)) => {
if let Some(npm) = &state.maybe_npm.as_ref() {
let package_folder = npm

@@ -1104,6 +1112,7 @@ pub fn exec(request: Request) -> Result<Response, AnyError> {
root_map,
remapped_specifiers,
)],
create_params: create_isolate_create_params(),
..Default::default()
});

@@ -1194,7 +1203,7 @@ mod tests {
.context("Unable to get CWD")
.unwrap(),
);
let mut op_state = OpState::new(None);
let mut op_state = OpState::new(None, None);
op_state.put(state);
op_state
}

@@ -73,7 +73,6 @@ impl DebouncedReceiver {
}
}

#[allow(clippy::print_stderr)]
async fn error_handler<F>(watch_future: F) -> bool
where
F: Future<Output = Result<(), AnyError>>,

@@ -84,7 +83,7 @@ where
Some(e) => format_js_error(e),
None => format!("{err:?}"),
};
eprintln!(
log::error!(
"{}: {}",
colors::red_bold("error"),
error_string.trim_start_matches("error: ")

@@ -128,19 +127,12 @@ impl PrintConfig {
}
}

fn create_print_after_restart_fn(
banner: &'static str,
clear_screen: bool,
) -> impl Fn() {
fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() {
move || {
#[allow(clippy::print_stderr)]
if clear_screen && std::io::stderr().is_terminal() {
eprint!("{}", CLEAR_SCREEN);
}
info!(
"{} File change detected! Restarting!",
colors::intense_blue(banner),
);
}
}

@@ -188,7 +180,17 @@ impl WatcherCommunicator {
}

pub fn print(&self, msg: String) {
log::info!("{} {}", self.banner, msg);
log::info!("{} {}", self.banner, colors::gray(msg));
}

pub fn show_path_changed(&self, changed_paths: Option<Vec<PathBuf>>) {
if let Some(paths) = changed_paths {
if !paths.is_empty() {
self.print(format!("Restarting! File change detected: {:?}", paths[0]))
} else {
self.print("Restarting! File change detected.".to_string())
}
}
}
}

@@ -264,7 +266,7 @@ where
clear_screen,
} = print_config;

let print_after_restart = create_print_after_restart_fn(banner, clear_screen);
let print_after_restart = create_print_after_restart_fn(clear_screen);
let watcher_communicator = Arc::new(WatcherCommunicator {
paths_to_watch_tx: paths_to_watch_tx.clone(),
changed_paths_rx: changed_paths_rx.resubscribe(),

@@ -29,6 +29,7 @@ impl log::Log for CliLogger {
// thread's state
DrawThread::hide();
self.0.log(record);
deno_runtime::ops::otel::handle_log(record);
DrawThread::show();
}
}

@@ -27,7 +27,16 @@ pub fn is_importable_ext(path: &Path) -> bool {
if let Some(ext) = get_extension(path) {
matches!(
ext.as_str(),
"ts" | "tsx" | "js" | "jsx" | "mjs" | "mts" | "cjs" | "cts" | "json"
"ts"
| "tsx"
| "js"
| "jsx"
| "mjs"
| "mts"
| "cjs"
| "cts"
| "json"
| "wasm"
)
} else {
false

@@ -222,6 +231,7 @@ mod test {
assert!(is_script_ext(Path::new("foo.cjs")));
assert!(is_script_ext(Path::new("foo.cts")));
assert!(!is_script_ext(Path::new("foo.json")));
assert!(!is_script_ext(Path::new("foo.wasm")));
assert!(!is_script_ext(Path::new("foo.mjsx")));
}

@@ -243,6 +253,7 @@ mod test {
assert!(is_importable_ext(Path::new("foo.cjs")));
assert!(is_importable_ext(Path::new("foo.cts")));
assert!(is_importable_ext(Path::new("foo.json")));
assert!(is_importable_ext(Path::new("foo.wasm")));
assert!(!is_importable_ext(Path::new("foo.mjsx")));
}

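Note: the hunk above extends the importable-extension allowlist with "wasm" and reshapes the matches! arms one per line. A tiny self-contained version of the check (std's extension() plus a lowercase stand-in for the file's get_extension helper; illustrative only):

use std::path::Path;

// Returns true for extensions Deno can import, now including .wasm.
fn is_importable_ext(path: &Path) -> bool {
    match path.extension().and_then(|e| e.to_str()) {
        Some(ext) => matches!(
            ext.to_lowercase().as_str(),
            "ts" | "tsx" | "js" | "jsx" | "mjs" | "mts" | "cjs" | "cts" | "json" | "wasm"
        ),
        None => false,
    }
}

fn main() {
    assert!(is_importable_ext(Path::new("lib.wasm")));
    assert!(!is_importable_ext(Path::new("notes.txt")));
}
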
@@ -46,15 +46,14 @@ pub fn init_v8_flags(
.skip(1)
.collect::<Vec<_>>();

#[allow(clippy::print_stderr)]
if !unrecognized_v8_flags.is_empty() {
for f in unrecognized_v8_flags {
eprintln!("error: V8 did not recognize flag '{f}'");
log::error!("error: V8 did not recognize flag '{f}'");
}
eprintln!("\nFor a list of V8 flags, use '--v8-flags=--help'");
std::process::exit(1);
log::error!("\nFor a list of V8 flags, use '--v8-flags=--help'");
deno_runtime::exit(1);
}
if v8_flags_includes_help {
std::process::exit(0);
deno_runtime::exit(0);
}
}

@@ -83,6 +83,15 @@ pub trait HmrRunner: Send + Sync {
async fn run(&mut self) -> Result<(), AnyError>;
}

pub trait CliCodeCache: code_cache::CodeCache {
/// Gets if the code cache is still enabled.
fn enabled(&self) -> bool {
true
}

fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache>;
}

#[async_trait::async_trait(?Send)]
pub trait CoverageCollector: Send + Sync {
async fn start_collecting(&mut self) -> Result<(), AnyError>;

@@ -127,7 +136,7 @@ pub struct CliMainWorkerOptions {
struct SharedWorkerState {
blob_store: Arc<BlobStore>,
broadcast_channel: InMemoryBroadcastChannel,
code_cache: Option<Arc<dyn code_cache::CodeCache>>,
code_cache: Option<Arc<dyn CliCodeCache>>,
compiled_wasm_module_store: CompiledWasmModuleStore,
feature_checker: Arc<FeatureChecker>,
fs: Arc<dyn deno_fs::FileSystem>,

@@ -155,7 +164,7 @@ impl SharedWorkerState {
NodeExtInitServices {
node_require_loader,
node_resolver: self.node_resolver.clone(),
npm_resolver: self.npm_resolver.clone().into_npm_resolver(),
npm_resolver: self.npm_resolver.clone().into_npm_pkg_folder_resolver(),
pkg_json_resolver: self.pkg_json_resolver.clone(),
}
}

@@ -384,6 +393,13 @@ impl CliMainWorker {
}
}

// TODO(bartlomieju): this should be moved to some other place, added to avoid string
// duplication between worker setups and `deno info` output.
pub fn get_cache_storage_dir() -> PathBuf {
// Note: we currently use temp_dir() to avoid managing storage size.
std::env::temp_dir().join("deno_cache")
}

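Note: get_cache_storage_dir above centralizes the temp-dir based Cache API location, which the two call sites below then suffix with a checksum of the storage key. A sketch of how the per-key directory is derived (a trivial std hasher stands in for the CLI's checksum::gen helper; illustrative only):

use std::path::PathBuf;

// Base directory for the Cache API; temp_dir() sidesteps quota management.
fn get_cache_storage_dir() -> PathBuf {
    std::env::temp_dir().join("deno_cache")
}

// Stand-in for the CLI's checksum::gen helper (illustrative only).
fn checksum(key: &str) -> String {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    let mut hasher = DefaultHasher::new();
    key.hash(&mut hasher);
    format!("{:x}", hasher.finish())
}

fn main() {
    let storage_key = "https://example.com";
    let dir = get_cache_storage_dir().join(checksum(storage_key));
    println!("cache dir for {storage_key}: {}", dir.display());
}
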
#[derive(Clone)]
|
||||
pub struct CliMainWorkerFactory {
|
||||
shared: Arc<SharedWorkerState>,
|
||||
|
@ -393,7 +409,7 @@ impl CliMainWorkerFactory {
|
|||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
blob_store: Arc<BlobStore>,
|
||||
code_cache: Option<Arc<dyn code_cache::CodeCache>>,
|
||||
code_cache: Option<Arc<dyn CliCodeCache>>,
|
||||
feature_checker: Arc<FeatureChecker>,
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
maybe_file_watcher_communicator: Option<Arc<WatcherCommunicator>>,
|
||||
|
@ -520,10 +536,7 @@ impl CliMainWorkerFactory {
|
|||
});
|
||||
let cache_storage_dir = maybe_storage_key.map(|key| {
|
||||
// TODO(@satyarohith): storage quota management
|
||||
// Note: we currently use temp_dir() to avoid managing storage size.
|
||||
std::env::temp_dir()
|
||||
.join("deno_cache")
|
||||
.join(checksum::gen(&[key.as_bytes()]))
|
||||
get_cache_storage_dir().join(checksum::gen(&[key.as_bytes()]))
|
||||
});
|
||||
|
||||
// TODO(bartlomieju): this is cruft, update FeatureChecker to spit out
|
||||
|
@ -547,14 +560,16 @@ impl CliMainWorkerFactory {
|
|||
npm_process_state_provider: Some(shared.npm_process_state_provider()),
|
||||
blob_store: shared.blob_store.clone(),
|
||||
broadcast_channel: shared.broadcast_channel.clone(),
|
||||
fetch_dns_resolver: Default::default(),
|
||||
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
|
||||
compiled_wasm_module_store: Some(
|
||||
shared.compiled_wasm_module_store.clone(),
|
||||
),
|
||||
feature_checker,
|
||||
permissions,
|
||||
v8_code_cache: shared.code_cache.clone(),
|
||||
v8_code_cache: shared.code_cache.clone().map(|c| c.as_code_cache()),
|
||||
};
|
||||
|
||||
let options = WorkerOptions {
|
||||
bootstrap: BootstrapOptions {
|
||||
deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
|
||||
|
@ -585,7 +600,7 @@ impl CliMainWorkerFactory {
|
|||
},
|
||||
extensions: custom_extensions,
|
||||
startup_snapshot: crate::js::deno_isolate_init(),
|
||||
create_params: None,
|
||||
create_params: create_isolate_create_params(),
|
||||
unsafely_ignore_certificate_errors: shared
|
||||
.options
|
||||
.unsafely_ignore_certificate_errors
|
||||
|
@ -602,6 +617,8 @@ impl CliMainWorkerFactory {
|
|||
origin_storage_dir,
|
||||
stdio,
|
||||
skip_op_registration: shared.options.skip_op_registration,
|
||||
enable_stack_trace_arg_in_ops: crate::args::has_trace_permissions_enabled(
|
||||
),
|
||||
};
|
||||
|
||||
let mut worker = MainWorker::bootstrap_from_options(
|
||||
|
@ -718,10 +735,7 @@ fn create_web_worker_callback(
|
|||
.resolve_storage_key(&args.main_module);
|
||||
let cache_storage_dir = maybe_storage_key.map(|key| {
|
||||
// TODO(@satyarohith): storage quota management
|
||||
// Note: we currently use temp_dir() to avoid managing storage size.
|
||||
std::env::temp_dir()
|
||||
.join("deno_cache")
|
||||
.join(checksum::gen(&[key.as_bytes()]))
|
||||
get_cache_storage_dir().join(checksum::gen(&[key.as_bytes()]))
|
||||
});
|
||||
|
||||
// TODO(bartlomieju): this is cruft, update FeatureChecker to spit out
|
||||
|
@ -786,6 +800,7 @@ fn create_web_worker_callback(
|
|||
},
|
||||
extensions: vec![],
|
||||
startup_snapshot: crate::js::deno_isolate_init(),
|
||||
create_params: create_isolate_create_params(),
|
||||
unsafely_ignore_certificate_errors: shared
|
||||
.options
|
||||
.unsafely_ignore_certificate_errors
|
||||
|
@ -800,12 +815,25 @@ fn create_web_worker_callback(
|
|||
strace_ops: shared.options.strace_ops.clone(),
|
||||
close_on_idle: args.close_on_idle,
|
||||
maybe_worker_metadata: args.maybe_worker_metadata,
|
||||
enable_stack_trace_arg_in_ops: crate::args::has_trace_permissions_enabled(
|
||||
),
|
||||
};
|
||||
|
||||
WebWorker::bootstrap_from_options(services, options)
|
||||
})
|
||||
}
|
||||
|
||||
/// By default V8 uses 1.4Gb heap limit which is meant for browser tabs.
|
||||
/// Instead probe for the total memory on the system and use it instead
|
||||
/// as a default.
|
||||
pub fn create_isolate_create_params() -> Option<v8::CreateParams> {
|
||||
let maybe_mem_info = deno_runtime::sys_info::mem_info();
|
||||
maybe_mem_info.map(|mem_info| {
|
||||
v8::CreateParams::default()
|
||||
.heap_limits_from_system_memory(mem_info.total, 0)
|
||||
})
|
||||
}
|
||||
|
||||
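Note: create_isolate_create_params above sizes the V8 heap from total system memory instead of the browser-oriented default, and the same helper is now passed when building the main worker, web workers, and the tsc isolate. A sketch of the pattern with the memory probe stubbed out (assumes the v8/rusty_v8 crate; illustrative only):

// Stand-in for deno_runtime::sys_info::mem_info(); a fixed value here.
fn create_params(total_memory: Option<u64>) -> Option<v8::CreateParams> {
    total_memory.map(|total| {
        // Derive heap limits from physical memory rather than V8's default.
        v8::CreateParams::default().heap_limits_from_system_memory(total, 0)
    })
}

fn main() {
    let params = create_params(Some(16 * 1024 * 1024 * 1024)); // pretend 16 GiB
    assert!(params.is_some());
}
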
#[allow(clippy::print_stdout)]
|
||||
#[allow(clippy::print_stderr)]
|
||||
#[cfg(test)]
|
||||
|
@ -842,6 +870,7 @@ mod tests {
|
|||
node_services: Default::default(),
|
||||
npm_process_state_provider: Default::default(),
|
||||
root_cert_store_provider: Default::default(),
|
||||
fetch_dns_resolver: Default::default(),
|
||||
shared_array_buffer_store: Default::default(),
|
||||
compiled_wasm_module_store: Default::default(),
|
||||
v8_code_cache: Default::default(),
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_broadcast_channel"
|
||||
version = "0.171.0"
|
||||
version = "0.173.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
2
ext/cache/Cargo.toml
vendored
2
ext/cache/Cargo.toml
vendored
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_cache"
|
||||
version = "0.109.0"
|
||||
version = "0.111.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
9
ext/cache/sqlite.rs
vendored
9
ext/cache/sqlite.rs
vendored
|
@ -8,6 +8,7 @@ use std::time::SystemTime;
|
|||
use std::time::UNIX_EPOCH;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::future::poll_fn;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
|
@ -45,7 +46,13 @@ impl SqliteBackedCache {
|
|||
pub fn new(cache_storage_dir: PathBuf) -> Result<Self, CacheError> {
|
||||
{
|
||||
std::fs::create_dir_all(&cache_storage_dir)
|
||||
.expect("failed to create cache dir");
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Failed to create cache storage directory {}",
|
||||
cache_storage_dir.display()
|
||||
)
|
||||
})
|
||||
.map_err(CacheError::Other)?;
|
||||
let path = cache_storage_dir.join("cache_metadata.db");
|
||||
let connection = rusqlite::Connection::open(&path).unwrap_or_else(|_| {
|
||||
panic!("failed to open cache db at {}", path.display())
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_canvas"
|
||||
version = "0.46.0"
|
||||
version = "0.48.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_console"
|
||||
version = "0.177.0"
|
||||
version = "0.179.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
3
ext/console/internal.d.ts
vendored
3
ext/console/internal.d.ts
vendored
|
@ -9,4 +9,7 @@ declare module "ext:deno_console/01_console.js" {
|
|||
keys: (keyof TObject)[];
|
||||
evaluate: boolean;
|
||||
}): Record<string, unknown>;
|
||||
|
||||
class Console {
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_cron"
|
||||
version = "0.57.0"
|
||||
version = "0.59.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_crypto"
|
||||
version = "0.191.0"
|
||||
version = "0.193.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -15,6 +15,7 @@ import { core, primordials } from "ext:core/mod.js";
|
|||
const {
|
||||
isAnyArrayBuffer,
|
||||
isArrayBuffer,
|
||||
isStringObject,
|
||||
} = core;
|
||||
const {
|
||||
ArrayBufferIsView,
|
||||
|
@ -466,6 +467,8 @@ function extractBody(object) {
|
|||
if (object.locked || isReadableStreamDisturbed(object)) {
|
||||
throw new TypeError("ReadableStream is locked or disturbed");
|
||||
}
|
||||
} else if (object[webidl.AsyncIterable] === webidl.AsyncIterable) {
|
||||
stream = ReadableStream.from(object.open());
|
||||
}
|
||||
if (typeof source === "string") {
|
||||
// WARNING: this deviates from spec (expects length to be set)
|
||||
|
@ -483,6 +486,9 @@ function extractBody(object) {
|
|||
return { body, contentType };
|
||||
}
|
||||
|
||||
webidl.converters["async iterable<Uint8Array>"] = webidl
|
||||
.createAsyncIterableConverter(webidl.converters.Uint8Array);
|
||||
|
||||
webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => {
|
||||
// Union for (ReadableStream or Blob or ArrayBufferView or ArrayBuffer or FormData or URLSearchParams or USVString)
|
||||
if (ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, V)) {
|
||||
|
@ -501,6 +507,14 @@ webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => {
|
|||
if (ArrayBufferIsView(V)) {
|
||||
return webidl.converters["ArrayBufferView"](V, prefix, context, opts);
|
||||
}
|
||||
if (webidl.isAsyncIterable(V) && !isStringObject(V)) {
|
||||
return webidl.converters["async iterable<Uint8Array>"](
|
||||
V,
|
||||
prefix,
|
||||
context,
|
||||
opts,
|
||||
);
|
||||
}
|
||||
}
|
||||
// BodyInit conversion is passed to extractBody(), which calls core.encode().
|
||||
// core.encode() will UTF-8 encode strings with replacement, being equivalent to the USV normalization.
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_fetch"
|
||||
version = "0.201.0"
|
||||
version = "0.203.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
@ -22,6 +22,7 @@ deno_permissions.workspace = true
|
|||
deno_tls.workspace = true
|
||||
dyn-clone = "1"
|
||||
error_reporter = "1"
|
||||
hickory-resolver.workspace = true
|
||||
http.workspace = true
|
||||
http-body-util.workspace = true
|
||||
hyper.workspace = true
|
||||
|
|
116
ext/fetch/dns.rs
Normal file
116
ext/fetch/dns.rs
Normal file
|
@ -0,0 +1,116 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
use std::future::Future;
|
||||
use std::io;
|
||||
use std::net::SocketAddr;
|
||||
use std::pin::Pin;
|
||||
use std::task::Poll;
|
||||
use std::task::{self};
|
||||
use std::vec;
|
||||
|
||||
use hickory_resolver::error::ResolveError;
|
||||
use hickory_resolver::name_server::GenericConnector;
|
||||
use hickory_resolver::name_server::TokioRuntimeProvider;
|
||||
use hickory_resolver::AsyncResolver;
|
||||
use hyper_util::client::legacy::connect::dns::GaiResolver;
|
||||
use hyper_util::client::legacy::connect::dns::Name;
|
||||
use tokio::task::JoinHandle;
|
||||
use tower::Service;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Resolver {
|
||||
/// A resolver using blocking `getaddrinfo` calls in a threadpool.
|
||||
Gai(GaiResolver),
|
||||
/// hickory-resolver's userspace resolver.
|
||||
Hickory(AsyncResolver<GenericConnector<TokioRuntimeProvider>>),
|
||||
}
|
||||
|
||||
impl Default for Resolver {
|
||||
fn default() -> Self {
|
||||
Self::gai()
|
||||
}
|
||||
}
|
||||
|
||||
impl Resolver {
|
||||
pub fn gai() -> Self {
|
||||
Self::Gai(GaiResolver::new())
|
||||
}
|
||||
|
||||
/// Create a [`AsyncResolver`] from system conf.
|
||||
pub fn hickory() -> Result<Self, ResolveError> {
|
||||
Ok(Self::Hickory(
|
||||
hickory_resolver::AsyncResolver::tokio_from_system_conf()?,
|
||||
))
|
||||
}
|
||||
|
||||
pub fn hickory_from_async_resolver(
|
||||
resolver: AsyncResolver<GenericConnector<TokioRuntimeProvider>>,
|
||||
) -> Self {
|
||||
Self::Hickory(resolver)
|
||||
}
|
||||
}
|
||||
|
||||
type SocketAddrs = vec::IntoIter<SocketAddr>;
|
||||
|
||||
pub struct ResolveFut {
|
||||
inner: JoinHandle<Result<SocketAddrs, io::Error>>,
|
||||
}
|
||||
|
||||
impl Future for ResolveFut {
|
||||
type Output = Result<SocketAddrs, io::Error>;
|
||||
|
||||
fn poll(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut task::Context<'_>,
|
||||
) -> Poll<Self::Output> {
|
||||
Pin::new(&mut self.inner).poll(cx).map(|res| match res {
|
||||
Ok(Ok(addrs)) => Ok(addrs),
|
||||
Ok(Err(e)) => Err(e),
|
||||
Err(join_err) => {
|
||||
if join_err.is_cancelled() {
|
||||
Err(io::Error::new(io::ErrorKind::Interrupted, join_err))
|
||||
} else {
|
||||
Err(io::Error::new(io::ErrorKind::Other, join_err))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Service<Name> for Resolver {
|
||||
type Response = SocketAddrs;
|
||||
type Error = io::Error;
|
||||
type Future = ResolveFut;
|
||||
|
||||
fn poll_ready(
|
||||
&mut self,
|
||||
_cx: &mut task::Context<'_>,
|
||||
) -> Poll<Result<(), io::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, name: Name) -> Self::Future {
|
||||
let task = match self {
|
||||
Resolver::Gai(gai_resolver) => {
|
||||
let mut resolver = gai_resolver.clone();
|
||||
tokio::spawn(async move {
|
||||
let result = resolver.call(name).await?;
|
||||
let x: Vec<_> = result.into_iter().collect();
|
||||
let iter: SocketAddrs = x.into_iter();
|
||||
Ok(iter)
|
||||
})
|
||||
}
|
||||
Resolver::Hickory(async_resolver) => {
|
||||
let resolver = async_resolver.clone();
|
||||
tokio::spawn(async move {
|
||||
let result = resolver.lookup_ip(name.as_str()).await?;
|
||||
|
||||
let x: Vec<_> =
|
||||
result.into_iter().map(|x| SocketAddr::new(x, 0)).collect();
|
||||
let iter: SocketAddrs = x.into_iter();
|
||||
Ok(iter)
|
||||
})
|
||||
}
|
||||
};
|
||||
ResolveFut { inner: task }
|
||||
}
|
||||
}
|
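Note: the new ext/fetch/dns.rs above puts both resolvers behind one tower `Service<Name>`, so hyper-util's connector can swap blocking getaddrinfo for hickory's userspace resolver. A sketch of driving the service directly (assumes the tower, hyper-util, and tokio crates, and that `Name` parses from a string as in hyper's dns connector; illustrative only):

use deno_fetch::dns::Resolver;
use hyper_util::client::legacy::connect::dns::Name;
use std::str::FromStr;
use tower::{Service, ServiceExt};

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Default is the getaddrinfo-backed variant; Resolver::hickory() would
    // use the userspace resolver built from system configuration.
    let mut resolver = Resolver::default();
    let name = Name::from_str("example.com").expect("valid host name");
    // Standard tower pattern: wait for readiness, then call.
    let addrs = resolver.ready().await?.call(name).await?;
    for addr in addrs {
        println!("resolved {addr}");
    }
    Ok(())
}
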
2 ext/fetch/lib.deno_fetch.d.ts vendored

@@ -163,6 +163,8 @@ type BodyInit =
| FormData
| URLSearchParams
| ReadableStream<Uint8Array>
| Iterable<Uint8Array>
| AsyncIterable<Uint8Array>
| string;
/** @category Fetch */
type RequestDestination =

@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

pub mod dns;
mod fs_fetch_handler;
mod proxy;
#[cfg(test)]

@@ -66,6 +67,7 @@ use http_body_util::BodyExt;
use hyper::body::Frame;
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::client::legacy::connect::HttpInfo;
use hyper_util::client::legacy::Builder as HyperClientBuilder;
use hyper_util::rt::TokioExecutor;
use hyper_util::rt::TokioTimer;
use serde::Deserialize;

@@ -84,6 +86,16 @@ pub struct Options {
pub user_agent: String,
pub root_cert_store_provider: Option<Arc<dyn RootCertStoreProvider>>,
pub proxy: Option<Proxy>,
/// A callback to customize HTTP client configuration.
///
/// The settings applied with this hook may be overridden by the options
/// provided through `Deno.createHttpClient()` API. For instance, if the hook
/// calls [`hyper_util::client::legacy::Builder::pool_max_idle_per_host`] with
/// a value of 99, and a user calls `Deno.createHttpClient({ poolMaxIdlePerHost: 42 })`,
/// the value that will take effect is 42.
///
/// For more info on what can be configured, see [`hyper_util::client::legacy::Builder`].
pub client_builder_hook: Option<fn(HyperClientBuilder) -> HyperClientBuilder>,
#[allow(clippy::type_complexity)]
pub request_builder_hook: Option<
fn(&mut http::Request<ReqBody>) -> Result<(), deno_core::error::AnyError>,

@@ -91,6 +103,7 @@ pub struct Options {
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub client_cert_chain_and_key: TlsKeys,
pub file_fetch_handler: Rc<dyn FetchHandler>,
pub resolver: dns::Resolver,
}

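Note: the client_builder_hook added to Options above lets an embedder pre-configure hyper-util's legacy client builder before user options are applied, so Deno.createHttpClient settings still win. A sketch of installing such a hook (assumes the deno_fetch and hyper-util crates; illustrative only):

use hyper_util::client::legacy::Builder as HyperClientBuilder;

// Runs when the HTTP client is built; options layered afterwards override it.
fn pool_tuning_hook(mut builder: HyperClientBuilder) -> HyperClientBuilder {
    builder.pool_max_idle_per_host(99); // a user's poolMaxIdlePerHost wins
    builder
}

fn options_with_hook() -> deno_fetch::Options {
    deno_fetch::Options {
        client_builder_hook: Some(pool_tuning_hook),
        ..Default::default()
    }
}

fn main() {
    let _opts = options_with_hook();
}
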
impl Options {

@@ -110,10 +123,12 @@ impl Default for Options {
user_agent: "".to_string(),
root_cert_store_provider: None,
proxy: None,
client_builder_hook: None,
request_builder_hook: None,
unsafely_ignore_certificate_errors: None,
client_cert_chain_and_key: TlsKeys::Null,
file_fetch_handler: Rc::new(DefaultFileFetchHandler),
resolver: dns::Resolver::default(),
}
}
}

@@ -255,6 +270,7 @@ pub fn create_client_from_options(
.map_err(HttpClientCreateError::RootCertStore)?,
ca_certs: vec![],
proxy: options.proxy.clone(),
dns_resolver: options.resolver.clone(),
unsafely_ignore_certificate_errors: options
.unsafely_ignore_certificate_errors
.clone(),

@@ -267,6 +283,7 @@ pub fn create_client_from_options(
pool_idle_timeout: None,
http1: true,
http2: true,
client_builder_hook: options.client_builder_hook,
},
)
}

@@ -380,7 +397,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer {
}
}

#[op2]
#[op2(stack_trace)]
#[serde]
#[allow(clippy::too_many_arguments)]
pub fn op_fetch<FP>(

@@ -835,6 +852,8 @@ pub struct CreateHttpClientArgs {
proxy: Option<Proxy>,
pool_max_idle_per_host: Option<usize>,
pool_idle_timeout: Option<serde_json::Value>,
#[serde(default)]
use_hickory_resolver: bool,
#[serde(default = "default_true")]
http1: bool,
#[serde(default = "default_true")]

@@ -847,7 +866,7 @@ fn default_true() -> bool {
true
}

#[op2]
#[op2(stack_trace)]
#[smi]
pub fn op_fetch_custom_client<FP>(
state: &mut OpState,

@@ -878,6 +897,13 @@ where
.map_err(HttpClientCreateError::RootCertStore)?,
ca_certs,
proxy: args.proxy,
dns_resolver: if args.use_hickory_resolver {
dns::Resolver::hickory()
.map_err(deno_core::error::AnyError::new)
.map_err(FetchError::Resource)?
} else {
dns::Resolver::default()
},
unsafely_ignore_certificate_errors: options
.unsafely_ignore_certificate_errors
.clone(),

@@ -895,6 +921,7 @@ where
),
http1: args.http1,
http2: args.http2,
client_builder_hook: options.client_builder_hook,
},
)?;

@@ -909,12 +936,14 @@ pub struct CreateHttpClientOptions {
pub root_cert_store: Option<RootCertStore>,
pub ca_certs: Vec<Vec<u8>>,
pub proxy: Option<Proxy>,
pub dns_resolver: dns::Resolver,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub client_cert_chain_and_key: Option<TlsKey>,
pub pool_max_idle_per_host: Option<usize>,
pub pool_idle_timeout: Option<Option<u64>>,
pub http1: bool,
pub http2: bool,
pub client_builder_hook: Option<fn(HyperClientBuilder) -> HyperClientBuilder>,
}

impl Default for CreateHttpClientOptions {

@@ -923,12 +952,14 @@ impl Default for CreateHttpClientOptions {
root_cert_store: None,
ca_certs: vec![],
proxy: None,
dns_resolver: dns::Resolver::default(),
unsafely_ignore_certificate_errors: None,
client_cert_chain_and_key: None,
pool_max_idle_per_host: None,
pool_idle_timeout: None,
http1: true,
http2: true,
client_builder_hook: None,
}
}
}

@@ -976,18 +1007,22 @@ pub fn create_http_client(
tls_config.alpn_protocols = alpn_protocols;
let tls_config = Arc::from(tls_config);

let mut http_connector = HttpConnector::new();
let mut http_connector =
HttpConnector::new_with_resolver(options.dns_resolver.clone());
|
||||
http_connector.enforce_http(false);
|
||||
|
||||
let user_agent = user_agent.parse::<HeaderValue>().map_err(|_| {
|
||||
HttpClientCreateError::InvalidUserAgent(user_agent.to_string())
|
||||
})?;
|
||||
|
||||
let mut builder =
|
||||
hyper_util::client::legacy::Builder::new(TokioExecutor::new());
|
||||
let mut builder = HyperClientBuilder::new(TokioExecutor::new());
|
||||
builder.timer(TokioTimer::new());
|
||||
builder.pool_timer(TokioTimer::new());
|
||||
|
||||
if let Some(client_builder_hook) = options.client_builder_hook {
|
||||
builder = client_builder_hook(builder);
|
||||
}
|
||||
|
||||
let mut proxies = proxy::from_env();
|
||||
if let Some(proxy) = options.proxy {
|
||||
let mut intercept = proxy::Intercept::all(&proxy.url)
|
||||
|
@ -1051,7 +1086,7 @@ pub struct Client {
|
|||
user_agent: HeaderValue,
|
||||
}
|
||||
|
||||
type Connector = proxy::ProxyConnector<HttpConnector>;
|
||||
type Connector = proxy::ProxyConnector<HttpConnector<dns::Resolver>>;
|
||||
|
||||
// clippy is wrong here
|
||||
#[allow(clippy::declare_interior_mutable_const)]
|
||||
|
|
|
@@ -1,6 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::net::SocketAddr;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
use std::sync::Arc;

use bytes::Bytes;

@@ -10,6 +12,8 @@ use http_body_util::BodyExt;
use tokio::io::AsyncReadExt;
use tokio::io::AsyncWriteExt;

use crate::dns;

use super::create_http_client;
use super::CreateHttpClientOptions;

@@ -17,6 +21,53 @@ static EXAMPLE_CRT: &[u8] = include_bytes!("../tls/testdata/example1_cert.der");
static EXAMPLE_KEY: &[u8] =
  include_bytes!("../tls/testdata/example1_prikey.der");

#[test]
fn test_userspace_resolver() {
  let thread_counter = Arc::new(AtomicUsize::new(0));

  let thread_counter_ref = thread_counter.clone();
  let rt = tokio::runtime::Builder::new_current_thread()
    .enable_all()
    .on_thread_start(move || {
      thread_counter_ref.fetch_add(1, SeqCst);
    })
    .build()
    .unwrap();

  rt.block_on(async move {
    assert_eq!(thread_counter.load(SeqCst), 0);
    let src_addr = create_https_server(true).await;
    assert_eq!(src_addr.ip().to_string(), "127.0.0.1");
    // use `localhost` to ensure the DNS step happens.
    let addr = format!("localhost:{}", src_addr.port());

    let hickory = hickory_resolver::AsyncResolver::tokio(
      Default::default(),
      Default::default(),
    );

    assert_eq!(thread_counter.load(SeqCst), 0);
    rust_test_client_with_resolver(
      None,
      addr.clone(),
      "https",
      http::Version::HTTP_2,
      dns::Resolver::hickory_from_async_resolver(hickory),
    )
    .await;
    assert_eq!(thread_counter.load(SeqCst), 0, "userspace resolver shouldn't spawn new threads.");
    rust_test_client_with_resolver(
      None,
      addr.clone(),
      "https",
      http::Version::HTTP_2,
      dns::Resolver::gai(),
    )
    .await;
    assert_eq!(thread_counter.load(SeqCst), 1, "getaddrinfo is called inside spawn_blocking, so tokio spawns a new worker thread for it.");
  });
}
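The second assertion above hinges on how the default (GAI) path works: `getaddrinfo` is a blocking libc call, so it has to run on tokio's blocking thread pool, which is what bumps the thread counter. A standalone sketch of that pattern (not the actual `Resolver::Gai` internals):

// Sketch, not part of the commit: resolve "host:port" off the async runtime.
// spawn_blocking hands the blocking getaddrinfo call to a worker thread.
use std::net::{SocketAddr, ToSocketAddrs};

async fn gai_lookup(host_and_port: String) -> std::io::Result<Vec<SocketAddr>> {
  tokio::task::spawn_blocking(move || {
    Ok(host_and_port.to_socket_addrs()?.collect())
  })
  .await
  .expect("spawn_blocking task panicked")
}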
#[tokio::test]
async fn test_https_proxy_http11() {
  let src_addr = create_https_server(false).await;

@@ -52,27 +103,30 @@ async fn test_socks_proxy_h2() {
  run_test_client(prx_addr, src_addr, "socks5", http::Version::HTTP_2).await;
}

async fn run_test_client(
  prx_addr: SocketAddr,
  src_addr: SocketAddr,
async fn rust_test_client_with_resolver(
  prx_addr: Option<SocketAddr>,
  src_addr: String,
  proto: &str,
  ver: http::Version,
  resolver: dns::Resolver,
) {
  let client = create_http_client(
    "fetch/test",
    CreateHttpClientOptions {
      root_cert_store: None,
      ca_certs: vec![],
      proxy: Some(deno_tls::Proxy {
        url: format!("{}://{}", proto, prx_addr),
      proxy: prx_addr.map(|p| deno_tls::Proxy {
        url: format!("{}://{}", proto, p),
        basic_auth: None,
      }),
      unsafely_ignore_certificate_errors: Some(vec![]),
      client_cert_chain_and_key: None,
      pool_max_idle_per_host: None,
      pool_idle_timeout: None,
      dns_resolver: resolver,
      http1: true,
      http2: true,
      client_builder_hook: None,
    },
  )
  .unwrap();

@@ -92,6 +146,22 @@ async fn run_test_client(
  assert_eq!(hello, "hello from server");
}

async fn run_test_client(
  prx_addr: SocketAddr,
  src_addr: SocketAddr,
  proto: &str,
  ver: http::Version,
) {
  rust_test_client_with_resolver(
    Some(prx_addr),
    src_addr.to_string(),
    proto,
    ver,
    Default::default(),
  )
  .await
}

async fn create_https_server(allow_h2: bool) -> SocketAddr {
  let mut tls_config = deno_tls::rustls::server::ServerConfig::builder()
    .with_no_client_auth()
@@ -2,7 +2,7 @@

[package]
name = "deno_ffi"
version = "0.164.0"
version = "0.166.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -287,7 +287,7 @@ fn ffi_call(
  }
}

#[op2(async)]
#[op2(async, stack_trace)]
#[serde]
pub fn op_ffi_call_ptr_nonblocking<FP>(
  scope: &mut v8::HandleScope,

@@ -385,7 +385,7 @@ pub fn op_ffi_call_nonblocking(
  })
}

#[op2(reentrant)]
#[op2(reentrant, stack_trace)]
#[serde]
pub fn op_ffi_call_ptr<FP>(
  scope: &mut v8::HandleScope,

@@ -561,7 +561,7 @@ pub struct RegisterCallbackArgs {
  result: NativeType,
}

#[op2]
#[op2(stack_trace)]
pub fn op_ffi_unsafe_callback_create<FP, 'scope>(
  state: &mut OpState,
  scope: &mut v8::HandleScope<'scope>,
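The attribute changes in these hunks follow one pattern repeated across the FFI and FS ops: ops that perform a permission check gain a `stack_trace` flag on `#[op2]`, presumably so deno_core can capture the JS call stack at the permission boundary (an assumption from the flag name; the diff itself does not spell this out). The whole change, per op, is:

//   -#[op2(fast)]
//   +#[op2(fast, stack_trace)]
//   pub fn op_ffi_ptr_create<FP>(state: &mut OpState, ...) -> ... { ... }
//
// The op body, generics, and the permission check itself are left intact;
// only the macro flags grow (async/fast/reentrant variants likewise).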
@@ -15,6 +15,7 @@ use dlopen2::raw::Library;
use serde::Deserialize;
use serde_value::ValueDeserializer;
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::HashMap;
use std::ffi::c_void;
use std::rc::Rc;

@@ -123,17 +124,20 @@ pub struct FfiLoadArgs {
  symbols: HashMap<String, ForeignSymbol>,
}

#[op2]
#[op2(stack_trace)]
pub fn op_ffi_load<'scope, FP>(
  scope: &mut v8::HandleScope<'scope>,
  state: &mut OpState,
  state: Rc<RefCell<OpState>>,
  #[serde] args: FfiLoadArgs,
) -> Result<v8::Local<'scope, v8::Value>, DlfcnError>
where
  FP: FfiPermissions + 'static,
{
  let path = {
    let mut state = state.borrow_mut();
    let permissions = state.borrow_mut::<FP>();
    let path = permissions.check_partial_with_path(&args.path)?;
    permissions.check_partial_with_path(&args.path)?
  };

  let lib = Library::open(&path).map_err(|e| {
    dlopen2::Error::OpeningLibraryError(std::io::Error::new(

@@ -215,6 +219,7 @@ where
  }
}

  let mut state = state.borrow_mut();
  let out = v8::Array::new(scope, 2);
  let rid = state.resource_table.add(resource);
  let rid_v8 = v8::Integer::new_from_unsigned(scope, rid);
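The `op_ffi_load` change above swaps `&mut OpState` for `Rc<RefCell<OpState>>` so the state borrow can be scoped to the permission check, released before the potentially slow `Library::open` call, and re-taken to register the resource. A self-contained sketch of that scoped-borrow pattern (names hypothetical):

// Sketch, not part of the commit: scoped RefCell borrows around slow work.
use std::cell::RefCell;
use std::rc::Rc;

fn load_with_scoped_borrow(state: Rc<RefCell<Vec<String>>>) {
  // Borrow only while we need it; the block drops the borrow.
  let path = {
    let state = state.borrow();
    state.first().cloned().unwrap_or_default()
  };

  // No live borrow here: safe to do slow work (dlopen in the real op).
  let pretend_handle = format!("opened:{path}");

  // Re-borrow mutably afterwards to record the result.
  state.borrow_mut().push(pretend_handle);
}

fn main() {
  let state = Rc::new(RefCell::new(vec!["libfoo.so".to_string()]));
  load_with_scoped_borrow(state.clone());
  assert_eq!(state.borrow().len(), 2);
}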
@@ -49,7 +49,7 @@ pub enum ReprError {
  Permission(#[from] deno_permissions::PermissionCheckError),
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_ptr_create<FP>(
  state: &mut OpState,
  #[bigint] ptr_number: usize,

@@ -63,7 +63,7 @@ where
  Ok(ptr_number as *mut c_void)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_ptr_equals<FP>(
  state: &mut OpState,
  a: *const c_void,

@@ -78,7 +78,7 @@ where
  Ok(a == b)
}

#[op2]
#[op2(stack_trace)]
pub fn op_ffi_ptr_of<FP>(
  state: &mut OpState,
  #[anybuffer] buf: *const u8,

@@ -92,7 +92,7 @@ where
  Ok(buf as *mut c_void)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_ptr_of_exact<FP>(
  state: &mut OpState,
  buf: v8::Local<v8::ArrayBufferView>,

@@ -112,7 +112,7 @@ where
  Ok(buf.as_ptr() as _)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_ptr_offset<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -142,7 +142,7 @@ unsafe extern "C" fn noop_deleter_callback(
) {
}

#[op2(fast)]
#[op2(fast, stack_trace)]
#[bigint]
pub fn op_ffi_ptr_value<FP>(
  state: &mut OpState,

@@ -157,7 +157,7 @@ where
  Ok(ptr as usize)
}

#[op2]
#[op2(stack_trace)]
pub fn op_ffi_get_buf<FP, 'scope>(
  scope: &mut v8::HandleScope<'scope>,
  state: &mut OpState,

@@ -189,7 +189,7 @@ where
  Ok(array_buffer)
}

#[op2]
#[op2(stack_trace)]
pub fn op_ffi_buf_copy_into<FP>(
  state: &mut OpState,
  src: *mut c_void,

@@ -219,7 +219,7 @@ where
  }
}

#[op2]
#[op2(stack_trace)]
pub fn op_ffi_cstr_read<FP, 'scope>(
  scope: &mut v8::HandleScope<'scope>,
  state: &mut OpState,

@@ -244,7 +244,7 @@ where
  Ok(value)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_bool<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -264,7 +264,7 @@ where
  Ok(unsafe { ptr::read_unaligned::<bool>(ptr.offset(offset) as *const bool) })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_u8<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -286,7 +286,7 @@ where
  })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_i8<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -308,7 +308,7 @@ where
  })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_u16<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -330,7 +330,7 @@ where
  })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_i16<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -352,7 +352,7 @@ where
  })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_u32<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -372,7 +372,7 @@ where
  Ok(unsafe { ptr::read_unaligned::<u32>(ptr.offset(offset) as *const u32) })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_i32<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -392,7 +392,7 @@ where
  Ok(unsafe { ptr::read_unaligned::<i32>(ptr.offset(offset) as *const i32) })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
#[bigint]
pub fn op_ffi_read_u64<FP>(
  state: &mut OpState,

@@ -418,7 +418,7 @@ where
  Ok(value)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
#[bigint]
pub fn op_ffi_read_i64<FP>(
  state: &mut OpState,

@@ -444,7 +444,7 @@ where
  Ok(value)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_f32<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -464,7 +464,7 @@ where
  Ok(unsafe { ptr::read_unaligned::<f32>(ptr.offset(offset) as *const f32) })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_f64<FP>(
  state: &mut OpState,
  ptr: *mut c_void,

@@ -484,7 +484,7 @@ where
  Ok(unsafe { ptr::read_unaligned::<f64>(ptr.offset(offset) as *const f64) })
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_ffi_read_ptr<FP>(
  state: &mut OpState,
  ptr: *mut c_void,
@@ -2,7 +2,7 @@

[package]
name = "deno_fs"
version = "0.87.0"
version = "0.89.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -19,6 +19,7 @@ sync_fs = []

[dependencies]
async-trait.workspace = true
base32.workspace = true
boxed_error.workspace = true
deno_core.workspace = true
deno_io.workspace = true
deno_path_util.workspace = true
@@ -15,6 +15,7 @@ pub use crate::interface::FsDirEntry;
pub use crate::interface::FsFileType;
pub use crate::interface::OpenOptions;
pub use crate::ops::FsOpsError;
pub use crate::ops::FsOpsErrorKind;
pub use crate::ops::OperationError;
pub use crate::std_fs::RealFs;
pub use crate::sync::MaybeSend;

153
ext/fs/ops.rs
@@ -16,6 +16,7 @@ use crate::interface::FsDirEntry;
use crate::interface::FsFileType;
use crate::FsPermissions;
use crate::OpenOptions;
use boxed_error::Boxed;
use deno_core::op2;
use deno_core::CancelFuture;
use deno_core::CancelHandle;

@@ -32,8 +33,11 @@ use rand::thread_rng;
use rand::Rng;
use serde::Serialize;

#[derive(Debug, Boxed)]
pub struct FsOpsError(pub Box<FsOpsErrorKind>);

#[derive(Debug, thiserror::Error)]
pub enum FsOpsError {
pub enum FsOpsErrorKind {
  #[error("{0}")]
  Io(#[source] std::io::Error),
  #[error("{0}")]

@@ -73,15 +77,16 @@ pub enum FsOpsError {
impl From<FsError> for FsOpsError {
  fn from(err: FsError) -> Self {
    match err {
      FsError::Io(err) => FsOpsError::Io(err),
      FsError::Io(err) => FsOpsErrorKind::Io(err),
      FsError::FileBusy => {
        FsOpsError::Other(deno_core::error::resource_unavailable())
        FsOpsErrorKind::Other(deno_core::error::resource_unavailable())
      }
      FsError::NotSupported => {
        FsOpsError::Other(deno_core::error::not_supported())
        FsOpsErrorKind::Other(deno_core::error::not_supported())
      }
      FsError::NotCapable(err) => FsOpsError::NotCapable(err),
      FsError::NotCapable(err) => FsOpsErrorKind::NotCapable(err),
    }
    .into_box()
  }
}
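The `FsOpsError`/`FsOpsErrorKind` split above is the `boxed_error` pattern: the public error becomes a one-pointer wrapper around a boxed kind enum, which keeps `Result<T, FsOpsError>` small even when the enum grows large variants, and `into_box()` (provided via the `Boxed` derive) converts a kind into the wrapper. A hand-rolled sketch of the same idea, without the derive:

// Sketch, not part of the commit: the boxed-error pattern by hand.
use std::fmt;

#[derive(Debug)]
enum MyErrorKind {
  Io(std::io::Error),
  InvalidSeekMode(i32),
}

// Public error type: a thin box around the kind, so Result<T, MyError>
// stays one pointer wide regardless of how large the enum grows.
#[derive(Debug)]
struct MyError(Box<MyErrorKind>);

impl MyErrorKind {
  fn into_box(self) -> MyError {
    MyError(Box::new(self))
  }
}

impl fmt::Display for MyError {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match &*self.0 {
      MyErrorKind::Io(e) => write!(f, "{e}"),
      MyErrorKind::InvalidSeekMode(w) => write!(f, "invalid seek mode: {w}"),
    }
  }
}

fn seek(whence: i32) -> Result<(), MyError> {
  if !(0..=2).contains(&whence) {
    return Err(MyErrorKind::InvalidSeekMode(whence).into_box());
  }
  Ok(())
}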
@@ -127,11 +132,12 @@ fn map_permission_error(
        (path.as_str(), "")
      };

      FsOpsError::NotCapableAccess {
      FsOpsErrorKind::NotCapableAccess {
        standalone: deno_permissions::is_standalone(),
        err,
        path: format!("{path}{truncated}"),
      }
      .into_box()
    }
    err => Err::<(), _>(err)
      .context_path(operation, path)

@@ -140,7 +146,7 @@ fn map_permission_error(
  }
}

#[op2]
#[op2(stack_trace)]
#[string]
pub fn op_fs_cwd<P>(state: &mut OpState) -> Result<String, FsOpsError>
where

@@ -155,7 +161,7 @@ where
  Ok(path_str)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_chdir<P>(
  state: &mut OpState,
  #[string] directory: &str,

@@ -182,7 +188,7 @@ where
  state.borrow::<FileSystemRc>().umask(mask).context("umask")
}

#[op2]
#[op2(stack_trace)]
#[smi]
pub fn op_fs_open_sync<P>(
  state: &mut OpState,

@@ -209,7 +215,7 @@ where
  Ok(rid)
}

#[op2(async)]
#[op2(async, stack_trace)]
#[smi]
pub async fn op_fs_open_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -237,7 +243,7 @@ where
  Ok(rid)
}

#[op2]
#[op2(stack_trace)]
pub fn op_fs_mkdir_sync<P>(
  state: &mut OpState,
  #[string] path: String,

@@ -260,7 +266,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_mkdir_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,

@@ -285,7 +291,7 @@ where
  Ok(())
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_chmod_sync<P>(
  state: &mut OpState,
  #[string] path: String,

@@ -302,7 +308,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_chmod_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,

@@ -322,7 +328,7 @@ where
  Ok(())
}

#[op2]
#[op2(stack_trace)]
pub fn op_fs_chown_sync<P>(
  state: &mut OpState,
  #[string] path: String,

@@ -341,7 +347,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_chown_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,

@@ -362,7 +368,7 @@ where
  Ok(())
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_remove_sync<P>(
  state: &mut OpState,
  #[string] path: &str,

@@ -382,7 +388,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_remove_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,

@@ -413,7 +419,7 @@ where
  Ok(())
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_copy_file_sync<P>(
  state: &mut OpState,
  #[string] from: &str,

@@ -433,7 +439,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_copy_file_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] from: String,

@@ -457,7 +463,7 @@ where
  Ok(())
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_stat_sync<P>(
  state: &mut OpState,
  #[string] path: String,

@@ -476,7 +482,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
#[serde]
pub async fn op_fs_stat_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -498,7 +504,7 @@ where
  Ok(SerializableStat::from(stat))
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_lstat_sync<P>(
  state: &mut OpState,
  #[string] path: String,

@@ -517,7 +523,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
#[serde]
pub async fn op_fs_lstat_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -539,7 +545,7 @@ where
  Ok(SerializableStat::from(stat))
}

#[op2]
#[op2(stack_trace)]
#[string]
pub fn op_fs_realpath_sync<P>(
  state: &mut OpState,

@@ -562,7 +568,7 @@ where
  Ok(path_string)
}

#[op2(async)]
#[op2(async, stack_trace)]
#[string]
pub async fn op_fs_realpath_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -590,7 +596,7 @@ where
  Ok(path_string)
}

#[op2]
#[op2(stack_trace)]
#[serde]
pub fn op_fs_read_dir_sync<P>(
  state: &mut OpState,

@@ -609,7 +615,7 @@ where
  Ok(entries)
}

#[op2(async)]
#[op2(async, stack_trace)]
#[serde]
pub async fn op_fs_read_dir_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -634,7 +640,7 @@ where
  Ok(entries)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_rename_sync<P>(
  state: &mut OpState,
  #[string] oldpath: String,

@@ -655,7 +661,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_rename_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] oldpath: String,

@@ -680,7 +686,7 @@ where
  Ok(())
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_link_sync<P>(
  state: &mut OpState,
  #[string] oldpath: &str,

@@ -702,7 +708,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_link_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] oldpath: String,

@@ -728,7 +734,7 @@ where
  Ok(())
}

#[op2]
#[op2(stack_trace)]
pub fn op_fs_symlink_sync<P>(
  state: &mut OpState,
  #[string] oldpath: &str,

@@ -752,7 +758,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_symlink_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] oldpath: String,

@@ -780,7 +786,7 @@ where
  Ok(())
}

#[op2]
#[op2(stack_trace)]
#[string]
pub fn op_fs_read_link_sync<P>(
  state: &mut OpState,

@@ -800,7 +806,7 @@ where
  Ok(target_string)
}

#[op2(async)]
#[op2(async, stack_trace)]
#[string]
pub async fn op_fs_read_link_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -825,7 +831,7 @@ where
  Ok(target_string)
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_truncate_sync<P>(
  state: &mut OpState,
  #[string] path: &str,

@@ -845,7 +851,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_truncate_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,

@@ -869,7 +875,7 @@ where
  Ok(())
}

#[op2(fast)]
#[op2(fast, stack_trace)]
pub fn op_fs_utime_sync<P>(
  state: &mut OpState,
  #[string] path: &str,

@@ -890,7 +896,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
pub async fn op_fs_utime_async<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,

@@ -921,7 +927,7 @@ where
  Ok(())
}

#[op2]
#[op2(stack_trace)]
#[string]
pub fn op_fs_make_temp_dir_sync<P>(
  state: &mut OpState,

@@ -963,7 +969,7 @@ where
  .context("tmpdir")
}

#[op2(async)]
#[op2(async, stack_trace)]
#[string]
pub async fn op_fs_make_temp_dir_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -1009,7 +1015,7 @@ where
  .context("tmpdir")
}

#[op2]
#[op2(stack_trace)]
#[string]
pub fn op_fs_make_temp_file_sync<P>(
  state: &mut OpState,

@@ -1057,7 +1063,7 @@ where
  .context("tmpfile")
}

#[op2(async)]
#[op2(async, stack_trace)]
#[string]
pub async fn op_fs_make_temp_file_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -1176,7 +1182,9 @@ fn validate_temporary_filename_component(
) -> Result<(), FsOpsError> {
  // Ban ASCII and Unicode control characters: these will often fail
  if let Some(c) = component.matches(|c: char| c.is_control()).next() {
    return Err(FsOpsError::InvalidControlCharacter(c.to_string()));
    return Err(
      FsOpsErrorKind::InvalidControlCharacter(c.to_string()).into_box(),
    );
  }
  // Windows has the most restrictive filenames. As temp files aren't normal files, we just
  // use this set of banned characters for all platforms because wildcard-like files can also

@@ -1192,13 +1200,13 @@ fn validate_temporary_filename_component(
    .matches(|c: char| "<>:\"/\\|?*".contains(c))
    .next()
  {
    return Err(FsOpsError::InvalidCharacter(c.to_string()));
    return Err(FsOpsErrorKind::InvalidCharacter(c.to_string()).into_box());
  }

  // This check is only for Windows
  #[cfg(windows)]
  if suffix && component.ends_with(|c: char| ". ".contains(c)) {
    return Err(FsOpsError::InvalidTrailingCharacter);
    return Err(FsOpsErrorKind::InvalidTrailingCharacter.into_box());
  }

  Ok(())

@@ -1227,7 +1235,7 @@ fn tmp_name(
  Ok(path)
}

#[op2]
#[op2(stack_trace)]
pub fn op_fs_write_file_sync<P>(
  state: &mut OpState,
  #[string] path: String,

@@ -1253,7 +1261,7 @@ where
  Ok(())
}

#[op2(async)]
#[op2(async, stack_trace)]
#[allow(clippy::too_many_arguments)]
pub async fn op_fs_write_file_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -1307,7 +1315,7 @@ where
  Ok(())
}

#[op2]
#[op2(stack_trace)]
#[serde]
pub fn op_fs_read_file_sync<P>(
  state: &mut OpState,

@@ -1328,7 +1336,7 @@ where
  Ok(buf.into())
}

#[op2(async)]
#[op2(async, stack_trace)]
#[serde]
pub async fn op_fs_read_file_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -1370,7 +1378,7 @@ where
  Ok(buf.into())
}

#[op2]
#[op2(stack_trace)]
#[string]
pub fn op_fs_read_file_text_sync<P>(
  state: &mut OpState,

@@ -1391,7 +1399,7 @@ where
  Ok(str)
}

#[op2(async)]
#[op2(async, stack_trace)]
#[string]
pub async fn op_fs_read_file_text_async<P>(
  state: Rc<RefCell<OpState>>,

@@ -1440,7 +1448,7 @@ fn to_seek_from(offset: i64, whence: i32) -> Result<SeekFrom, FsOpsError> {
    1 => SeekFrom::Current(offset),
    2 => SeekFrom::End(offset),
    _ => {
      return Err(FsOpsError::InvalidSeekMode(whence));
      return Err(FsOpsErrorKind::InvalidSeekMode(whence).into_box());
    }
  };
  Ok(seek_from)

@@ -1456,7 +1464,7 @@ pub fn op_fs_seek_sync(
) -> Result<u64, FsOpsError> {
  let pos = to_seek_from(offset, whence)?;
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  let cursor = file.seek_sync(pos)?;
  Ok(cursor)
}

@@ -1471,7 +1479,7 @@ pub async fn op_fs_seek_async(
) -> Result<u64, FsOpsError> {
  let pos = to_seek_from(offset, whence)?;
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  let cursor = file.seek_async(pos).await?;
  Ok(cursor)
}

@@ -1482,7 +1490,7 @@ pub fn op_fs_file_sync_data_sync(
  #[smi] rid: ResourceId,
) -> Result<(), FsOpsError> {
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  file.datasync_sync()?;
  Ok(())
}

@@ -1493,7 +1501,7 @@ pub async fn op_fs_file_sync_data_async(
  #[smi] rid: ResourceId,
) -> Result<(), FsOpsError> {
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  file.datasync_async().await?;
  Ok(())
}

@@ -1504,7 +1512,7 @@ pub fn op_fs_file_sync_sync(
  #[smi] rid: ResourceId,
) -> Result<(), FsOpsError> {
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  file.sync_sync()?;
  Ok(())
}

@@ -1515,7 +1523,7 @@ pub async fn op_fs_file_sync_async(
  #[smi] rid: ResourceId,
) -> Result<(), FsOpsError> {
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  file.sync_async().await?;
  Ok(())
}

@@ -1527,7 +1535,7 @@ pub fn op_fs_file_stat_sync(
  #[buffer] stat_out_buf: &mut [u32],
) -> Result<(), FsOpsError> {
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  let stat = file.stat_sync()?;
  let serializable_stat = SerializableStat::from(stat);
  serializable_stat.write(stat_out_buf);

@@ -1541,7 +1549,7 @@ pub async fn op_fs_file_stat_async(
  #[smi] rid: ResourceId,
) -> Result<SerializableStat, FsOpsError> {
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  let stat = file.stat_async().await?;
  Ok(stat.into())
}

@@ -1553,7 +1561,7 @@ pub fn op_fs_flock_sync(
  exclusive: bool,
) -> Result<(), FsOpsError> {
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  file.lock_sync(exclusive)?;
  Ok(())
}

@@ -1565,7 +1573,7 @@ pub async fn op_fs_flock_async(
  exclusive: bool,
) -> Result<(), FsOpsError> {
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  file.lock_async(exclusive).await?;
  Ok(())
}

@@ -1576,7 +1584,7 @@ pub fn op_fs_funlock_sync(
  #[smi] rid: ResourceId,
) -> Result<(), FsOpsError> {
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  file.unlock_sync()?;
  Ok(())
}

@@ -1587,7 +1595,7 @@ pub async fn op_fs_funlock_async(
  #[smi] rid: ResourceId,
) -> Result<(), FsOpsError> {
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  file.unlock_async().await?;
  Ok(())
}

@@ -1599,7 +1607,7 @@ pub fn op_fs_ftruncate_sync(
  #[number] len: u64,
) -> Result<(), FsOpsError> {
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  file.truncate_sync(len)?;
  Ok(())
}

@@ -1611,7 +1619,7 @@ pub async fn op_fs_file_truncate_async(
  #[number] len: u64,
) -> Result<(), FsOpsError> {
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  file.truncate_async(len).await?;
  Ok(())
}

@@ -1626,7 +1634,7 @@ pub fn op_fs_futime_sync(
  #[smi] mtime_nanos: u32,
) -> Result<(), FsOpsError> {
  let file =
    FileResource::get_file(state, rid).map_err(FsOpsError::Resource)?;
    FileResource::get_file(state, rid).map_err(FsOpsErrorKind::Resource)?;
  file.utime_sync(atime_secs, atime_nanos, mtime_secs, mtime_nanos)?;
  Ok(())
}

@@ -1641,7 +1649,7 @@ pub async fn op_fs_futime_async(
  #[smi] mtime_nanos: u32,
) -> Result<(), FsOpsError> {
  let file = FileResource::get_file(&state.borrow(), rid)
    .map_err(FsOpsError::Resource)?;
    .map_err(FsOpsErrorKind::Resource)?;
  file
    .utime_async(atime_secs, atime_nanos, mtime_secs, mtime_nanos)
    .await?;

@@ -1717,7 +1725,7 @@ impl<T> MapErrContext for Result<T, FsError> {
  where
    F: FnOnce(FsError) -> OperationError,
  {
    self.map_err(|err| FsOpsError::OperationError(f(err)))
    self.map_err(|err| FsOpsErrorKind::OperationError(f(err)).into_box())
  }

  fn context(self, operation: &'static str) -> Self::R {

@@ -1754,7 +1762,8 @@ impl<T> MapErrContext for Result<T, FsError> {
}

fn path_into_string(s: std::ffi::OsString) -> Result<String, FsOpsError> {
  s.into_string().map_err(FsOpsError::InvalidUtf8)
  s.into_string()
    .map_err(|e| FsOpsErrorKind::InvalidUtf8(e).into_box())
}

macro_rules! create_struct_writer {
@@ -2,7 +2,7 @@

[package]
name = "deno_http"
version = "0.175.0"
version = "0.177.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -18,6 +18,7 @@ use crate::service::HttpServerState;
use crate::service::SignallingRc;
use crate::websocket_upgrade::WebSocketUpgrade;
use crate::LocalExecutor;
use crate::Options;
use cache_control::CacheControl;
use deno_core::external;
use deno_core::futures::future::poll_fn;

@@ -821,10 +822,16 @@ fn serve_http11_unconditional(
  io: impl HttpServeStream,
  svc: impl HttpService<Incoming, ResBody = HttpRecordResponse> + 'static,
  cancel: Rc<CancelHandle>,
  http1_builder_hook: Option<fn(http1::Builder) -> http1::Builder>,
) -> impl Future<Output = Result<(), hyper::Error>> + 'static {
  let conn = http1::Builder::new()
    .keep_alive(true)
    .writev(*USE_WRITEV)
  let mut builder = http1::Builder::new();
  builder.keep_alive(true).writev(*USE_WRITEV);

  if let Some(http1_builder_hook) = http1_builder_hook {
    builder = http1_builder_hook(builder);
  }

  let conn = builder
    .serve_connection(TokioIo::new(io), svc)
    .with_upgrades();

@@ -843,9 +850,17 @@ fn serve_http2_unconditional(
  io: impl HttpServeStream,
  svc: impl HttpService<Incoming, ResBody = HttpRecordResponse> + 'static,
  cancel: Rc<CancelHandle>,
  http2_builder_hook: Option<
    fn(http2::Builder<LocalExecutor>) -> http2::Builder<LocalExecutor>,
  >,
) -> impl Future<Output = Result<(), hyper::Error>> + 'static {
  let conn =
    http2::Builder::new(LocalExecutor).serve_connection(TokioIo::new(io), svc);
  let mut builder = http2::Builder::new(LocalExecutor);

  if let Some(http2_builder_hook) = http2_builder_hook {
    builder = http2_builder_hook(builder);
  }

  let conn = builder.serve_connection(TokioIo::new(io), svc);
  async {
    match conn.or_abort(cancel).await {
      Err(mut conn) => {

@@ -861,15 +876,16 @@ async fn serve_http2_autodetect(
  io: impl HttpServeStream,
  svc: impl HttpService<Incoming, ResBody = HttpRecordResponse> + 'static,
  cancel: Rc<CancelHandle>,
  options: Options,
) -> Result<(), HttpNextError> {
  let prefix = NetworkStreamPrefixCheck::new(io, HTTP2_PREFIX);
  let (matches, io) = prefix.match_prefix().await?;
  if matches {
    serve_http2_unconditional(io, svc, cancel)
    serve_http2_unconditional(io, svc, cancel, options.http2_builder_hook)
      .await
      .map_err(HttpNextError::Hyper)
  } else {
    serve_http11_unconditional(io, svc, cancel)
    serve_http11_unconditional(io, svc, cancel, options.http1_builder_hook)
      .await
      .map_err(HttpNextError::Hyper)
  }

@@ -880,6 +896,7 @@ fn serve_https(
  request_info: HttpConnectionProperties,
  lifetime: HttpLifetime,
  tx: tokio::sync::mpsc::Sender<Rc<HttpRecord>>,
  options: Options,
) -> JoinHandle<Result<(), HttpNextError>> {
  let HttpLifetime {
    server_state,

@@ -891,21 +908,31 @@ fn serve_https(
    handle_request(req, request_info.clone(), server_state.clone(), tx.clone())
  });
  spawn(
    async {
    async move {
      let handshake = io.handshake().await?;
      // If the client specifically negotiates a protocol, we will use it. If not, we'll auto-detect
      // based on the prefix bytes
      let handshake = handshake.alpn;
      if Some(TLS_ALPN_HTTP_2) == handshake.as_deref() {
        serve_http2_unconditional(io, svc, listen_cancel_handle)
        serve_http2_unconditional(
          io,
          svc,
          listen_cancel_handle,
          options.http2_builder_hook,
        )
        .await
        .map_err(HttpNextError::Hyper)
      } else if Some(TLS_ALPN_HTTP_11) == handshake.as_deref() {
        serve_http11_unconditional(io, svc, listen_cancel_handle)
        serve_http11_unconditional(
          io,
          svc,
          listen_cancel_handle,
          options.http1_builder_hook,
        )
        .await
        .map_err(HttpNextError::Hyper)
      } else {
        serve_http2_autodetect(io, svc, listen_cancel_handle).await
        serve_http2_autodetect(io, svc, listen_cancel_handle, options).await
      }
    }
    .try_or_cancel(connection_cancel_handle),

@@ -917,6 +944,7 @@ fn serve_http(
  request_info: HttpConnectionProperties,
  lifetime: HttpLifetime,
  tx: tokio::sync::mpsc::Sender<Rc<HttpRecord>>,
  options: Options,
) -> JoinHandle<Result<(), HttpNextError>> {
  let HttpLifetime {
    server_state,

@@ -928,7 +956,7 @@ fn serve_http(
    handle_request(req, request_info.clone(), server_state.clone(), tx.clone())
  });
  spawn(
    serve_http2_autodetect(io, svc, listen_cancel_handle)
    serve_http2_autodetect(io, svc, listen_cancel_handle, options)
      .try_or_cancel(connection_cancel_handle),
  )
}

@@ -938,6 +966,7 @@ fn serve_http_on<HTTP>(
  listen_properties: &HttpListenProperties,
  lifetime: HttpLifetime,
  tx: tokio::sync::mpsc::Sender<Rc<HttpRecord>>,
  options: Options,
) -> JoinHandle<Result<(), HttpNextError>>
where
  HTTP: HttpPropertyExtractor,

@@ -949,14 +978,14 @@ where

  match network_stream {
    NetworkStream::Tcp(conn) => {
      serve_http(conn, connection_properties, lifetime, tx)
      serve_http(conn, connection_properties, lifetime, tx, options)
    }
    NetworkStream::Tls(conn) => {
      serve_https(conn, connection_properties, lifetime, tx)
      serve_https(conn, connection_properties, lifetime, tx, options)
    }
    #[cfg(unix)]
    NetworkStream::Unix(conn) => {
      serve_http(conn, connection_properties, lifetime, tx)
      serve_http(conn, connection_properties, lifetime, tx, options)
    }
  }
}

@@ -1045,6 +1074,11 @@ where

  let lifetime = resource.lifetime();

  let options = {
    let state = state.borrow();
    *state.borrow::<Options>()
  };

  let listen_properties_clone: HttpListenProperties = listen_properties.clone();
  let handle = spawn(async move {
    loop {

@@ -1057,6 +1091,7 @@ where
        &listen_properties_clone,
        lifetime.clone(),
        tx.clone(),
        options,
      );
    }
    #[allow(unreachable_code)]

@@ -1093,11 +1128,17 @@ where
  let (tx, rx) = tokio::sync::mpsc::channel(10);
  let resource: Rc<HttpJoinHandle> = Rc::new(HttpJoinHandle::new(rx));

  let options = {
    let state = state.borrow();
    *state.borrow::<Options>()
  };

  let handle = serve_http_on::<HTTP>(
    connection,
    &listen_properties,
    resource.lifetime(),
    tx,
    options,
  );

  // Set the handle after we start the future
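One detail worth noting in the hunks above: `let options = { ... *state.borrow::<Options>() };` copies the hooks out of `OpState` by plain dereference, which works because `Options` is derived `Copy` (fn pointers are `Copy`, as the struct definition below shows), so each connection task gets its own copy without reference counting. A minimal standalone sketch of that pattern:

// Sketch, not part of the commit: a Copy config of fn pointers can be
// duplicated out of shared state with `*`, ending the borrow immediately.
use std::cell::RefCell;

#[derive(Debug, Default, Clone, Copy)]
struct Hooks {
  on_conn: Option<fn(usize) -> usize>,
}

fn main() {
  let state = RefCell::new(Hooks {
    on_conn: Some(|n| n + 1),
  });
  let hooks = *state.borrow(); // copy out; borrow ends here
  assert_eq!((hooks.on_conn.unwrap())(1), 2);
}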
@@ -39,6 +39,8 @@ use deno_net::raw::NetworkStream;
use deno_websocket::ws_create_server_stream;
use flate2::write::GzEncoder;
use flate2::Compression;
use hyper::server::conn::http1;
use hyper::server::conn::http2;
use hyper_util::rt::TokioIo;
use hyper_v014::body::Bytes;
use hyper_v014::body::HttpBody;

@@ -96,6 +98,25 @@ pub use request_properties::HttpRequestProperties;
pub use service::UpgradeUnavailableError;
pub use websocket_upgrade::WebSocketUpgradeError;

#[derive(Debug, Default, Clone, Copy)]
pub struct Options {
  /// By passing a hook function, the caller can customize various configuration
  /// options for the HTTP/2 server.
  /// See [`http2::Builder`] for what parameters can be customized.
  ///
  /// If `None`, the default configuration provided by hyper will be used. Note
  /// that the default configuration is subject to change in future versions.
  pub http2_builder_hook:
    Option<fn(http2::Builder<LocalExecutor>) -> http2::Builder<LocalExecutor>>,
  /// By passing a hook function, the caller can customize various configuration
  /// options for the HTTP/1 server.
  /// See [`http1::Builder`] for what parameters can be customized.
  ///
  /// If `None`, the default configuration provided by hyper will be used. Note
  /// that the default configuration is subject to change in future versions.
  pub http1_builder_hook: Option<fn(http1::Builder) -> http1::Builder>,
}

deno_core::extension!(
  deno_http,
  deps = [deno_web, deno_net, deno_fetch, deno_websocket],

@@ -135,6 +156,12 @@ deno_core::extension!(
    http_next::op_http_cancel,
  ],
  esm = ["00_serve.ts", "01_http.js", "02_websocket.ts"],
  options = {
    options: Options,
  },
  state = |state, options| {
    state.put::<Options>(options.options);
  }
);

#[derive(Debug, thiserror::Error)]

@@ -1117,7 +1144,7 @@ async fn op_http_upgrade_websocket(

// Needed so hyper can use non Send futures
#[derive(Clone)]
struct LocalExecutor;
pub struct LocalExecutor;

impl<Fut> hyper_v014::rt::Executor<Fut> for LocalExecutor
where
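A sketch of how an embedder might use the new server hooks, based only on the `Options` struct and the `extension!` registration above; hyper's `http1::Builder::max_buf_size` is a real hyper 1.x knob, while the surrounding names are illustrative:

// Sketch, not part of the commit: cap HTTP/1 read buffers for every
// connection served by deno_http.
use hyper::server::conn::http1;

fn tune_http1(mut builder: http1::Builder) -> http1::Builder {
  builder.max_buf_size(64 * 1024);
  builder
}

fn make_options() -> deno_http::Options {
  // Handed to the extension's generated init function (exact name depends on
  // deno_core's extension! macro), which stores it in OpState via the
  // `state = |state, options|` hook above.
  deno_http::Options {
    http1_builder_hook: Some(tune_http1),
    ..Default::default()
  }
}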
@@ -2,7 +2,7 @@

[package]
name = "deno_io"
version = "0.87.0"
version = "0.89.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
Some files were not shown because too many files have changed in this diff