Mirror of https://github.com/denoland/deno.git (synced 2025-01-21 04:52:26 -05:00)

commit 4b695e2db0: Merge branch 'main' into lint_skip_minified_files

263 changed files with 6691 additions and 1883 deletions
.github/workflows/ci.generate.ts (vendored, 2 changed lines)

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 24;
+const cacheVersion = 25;
 
 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
.github/workflows/ci.yml (vendored, 8 changed lines)

@@ -361,8 +361,8 @@ jobs:
          path: |-
            ~/.cargo/registry/index
            ~/.cargo/registry/cache
-          key: '24-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '24-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+          key: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
        if: '!(matrix.skip)'
      - name: Restore cache build output (PR)
        uses: actions/cache/restore@v4

@@ -375,7 +375,7 @@ jobs:
            !./target/*/*.zip
            !./target/*/*.tar.gz
          key: never_saved
-          restore-keys: '24-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
      - name: Apply and update mtime cache
        if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
        uses: ./.github/mtime_cache

@@ -685,7 +685,7 @@ jobs:
            !./target/*/*.zip
            !./target/*/*.sha256sum
            !./target/*/*.tar.gz
-          key: '24-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
  publish-canary:
    name: publish canary
    runs-on: ubuntu-24.04
Cargo.lock (generated, 714 changed lines)
File diff suppressed because it is too large.
Cargo.toml (76 changed lines)

@@ -46,52 +46,52 @@ repository = "https://github.com/denoland/deno"
 
 [workspace.dependencies]
 deno_ast = { version = "=0.43.3", features = ["transpiling"] }
-deno_core = { version = "0.318.0" }
+deno_core = { version = "0.319.0" }
 
-deno_bench_util = { version = "0.170.0", path = "./bench_util" }
+deno_bench_util = { version = "0.171.0", path = "./bench_util" }
 deno_lockfile = "=0.23.1"
 deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
 deno_npm = "=0.25.4"
 deno_path_util = "=0.2.1"
-deno_permissions = { version = "0.36.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.185.0", path = "./runtime" }
+deno_permissions = { version = "0.37.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.186.0", path = "./runtime" }
 deno_semver = "=0.5.16"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.106.0", path = "./ext/napi/sym" }
+napi_sym = { version = "0.107.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
 
-denokv_proto = "0.8.1"
-denokv_remote = "0.8.1"
+denokv_proto = "0.8.4"
+denokv_remote = "0.8.4"
 # denokv_sqlite brings in bundled sqlite if we don't disable the default features
-denokv_sqlite = { default-features = false, version = "0.8.2" }
+denokv_sqlite = { default-features = false, version = "0.8.4" }
 
 # exts
-deno_broadcast_channel = { version = "0.170.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.108.0", path = "./ext/cache" }
-deno_canvas = { version = "0.45.0", path = "./ext/canvas" }
-deno_console = { version = "0.176.0", path = "./ext/console" }
-deno_cron = { version = "0.56.0", path = "./ext/cron" }
-deno_crypto = { version = "0.190.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.200.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.163.0", path = "./ext/ffi" }
-deno_fs = { version = "0.86.0", path = "./ext/fs" }
-deno_http = { version = "0.174.0", path = "./ext/http" }
-deno_io = { version = "0.86.0", path = "./ext/io" }
-deno_kv = { version = "0.84.0", path = "./ext/kv" }
-deno_napi = { version = "0.107.0", path = "./ext/napi" }
-deno_net = { version = "0.168.0", path = "./ext/net" }
-deno_node = { version = "0.113.0", path = "./ext/node" }
-deno_tls = { version = "0.163.0", path = "./ext/tls" }
-deno_url = { version = "0.176.0", path = "./ext/url" }
-deno_web = { version = "0.207.0", path = "./ext/web" }
-deno_webgpu = { version = "0.143.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.176.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.181.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.171.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.171.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.109.0", path = "./ext/cache" }
+deno_canvas = { version = "0.46.0", path = "./ext/canvas" }
+deno_console = { version = "0.177.0", path = "./ext/console" }
+deno_cron = { version = "0.57.0", path = "./ext/cron" }
+deno_crypto = { version = "0.191.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.201.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.164.0", path = "./ext/ffi" }
+deno_fs = { version = "0.87.0", path = "./ext/fs" }
+deno_http = { version = "0.175.0", path = "./ext/http" }
+deno_io = { version = "0.87.0", path = "./ext/io" }
+deno_kv = { version = "0.85.0", path = "./ext/kv" }
+deno_napi = { version = "0.108.0", path = "./ext/napi" }
+deno_net = { version = "0.169.0", path = "./ext/net" }
+deno_node = { version = "0.114.0", path = "./ext/node" }
+deno_tls = { version = "0.164.0", path = "./ext/tls" }
+deno_url = { version = "0.177.0", path = "./ext/url" }
+deno_web = { version = "0.208.0", path = "./ext/web" }
+deno_webgpu = { version = "0.144.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.177.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.182.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.172.0", path = "./ext/webstorage" }
 
 # resolvers
-deno_resolver = { version = "0.8.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.15.0", path = "./resolvers/node" }
+deno_resolver = { version = "0.9.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.16.0", path = "./resolvers/node" }
 
 aes = "=0.8.3"
 anyhow = "1.0.57"

@@ -157,8 +157,8 @@ percent-encoding = "2.3.0"
 phf = { version = "0.11", features = ["macros"] }
 pin-project = "1.0.11" # don't pin because they yank crates from cargo
 pretty_assertions = "=1.4.0"
-prost = "0.11"
-prost-build = "0.11"
+prost = "0.13"
+prost-build = "0.13"
 rand = "=0.8.5"
 regex = "^1.7.0"
 reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955

@@ -204,9 +204,15 @@ webpki-root-certs = "0.26.5"
 webpki-roots = "0.26"
 which = "4.2.5"
 yoke = { version = "0.7.4", features = ["derive"] }
-zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
+zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
 zstd = "=0.12.4"
 
+opentelemetry = "0.27.0"
+opentelemetry-http = "0.27.0"
+opentelemetry-otlp = { version = "0.27.0", features = ["logs", "http-proto", "http-json"] }
+opentelemetry-semantic-conventions = { version = "0.27.0", features = ["semconv_experimental"] }
+opentelemetry_sdk = "0.27.0"
+
 # crypto
 hkdf = "0.12.3"
 rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node
Releases.md (12 changed lines)

@@ -6,6 +6,18 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install
 
+### 2.0.6 / 2024.11.10
+
+- feat(ext/http): abort event when request is cancelled (#26781)
+- feat(ext/http): abort signal when request is cancelled (#26761)
+- feat(lsp): auto-import completions from byonm dependencies (#26680)
+- fix(ext/cache): don't panic when creating cache (#26780)
+- fix(ext/node): better inspector support (#26471)
+- fix(fmt): don't use self-closing tags in HTML (#26754)
+- fix(install): cache jsr deps from all workspace config files (#26779)
+- fix(node:zlib): gzip & gzipSync should accept ArrayBuffer (#26762)
+- fix: performance.timeOrigin (#26787)
+
 ### 2.0.5 / 2024.11.05
 
 - fix(add): better error message when adding package that only has pre-release
bench_util/Cargo.toml

@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_bench_util"
-version = "0.170.0"
+version = "0.171.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
cli/Cargo.toml

@@ -2,7 +2,7 @@
 
 [package]
 name = "deno"
-version = "2.0.5"
+version = "2.0.6"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true

@@ -122,7 +122,7 @@ hyper-util.workspace = true
 import_map = { version = "=0.20.1", features = ["ext"] }
 indexmap.workspace = true
 jsonc-parser = { workspace = true, features = ["cst", "serde"] }
-jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
+jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] }
 lazy-regex.workspace = true
 libc.workspace = true
 libz-sys.workspace = true
cli/args/flags.rs

@@ -576,7 +576,6 @@ pub struct UnstableConfig {
   // TODO(bartlomieju): remove in Deno 2.5
   pub legacy_flag_enabled: bool, // --unstable
   pub bare_node_builtins: bool,
-  pub detect_cjs: bool,
   pub sloppy_imports: bool,
   pub features: Vec<String>, // --unstabe-kv --unstable-cron
 }

@@ -5720,7 +5719,6 @@ fn unstable_args_parse(
 
   flags.unstable_config.bare_node_builtins =
     matches.get_flag("unstable-bare-node-builtins");
-  flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs");
   flags.unstable_config.sloppy_imports =
     matches.get_flag("unstable-sloppy-imports");
 
cli/args/mod.rs

@@ -7,6 +7,7 @@ mod import_map;
 mod lockfile;
 mod package_json;
 
+use deno_ast::MediaType;
 use deno_ast::SourceMapOption;
 use deno_config::deno_json::NodeModulesDirMode;
 use deno_config::workspace::CreateResolverOptions;

@@ -27,13 +28,13 @@ use deno_npm::npm_rc::ResolvedNpmRc;
 use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
 use deno_npm::NpmSystemInfo;
 use deno_path_util::normalize_path;
+use deno_runtime::ops::otel::OtelConfig;
 use deno_semver::npm::NpmPackageReqReference;
 use import_map::resolve_import_map_value_from_specifier;
 
 pub use deno_config::deno_json::BenchConfig;
 pub use deno_config::deno_json::ConfigFile;
 pub use deno_config::deno_json::FmtOptionsConfig;
-pub use deno_config::deno_json::JsxImportSourceConfig;
 pub use deno_config::deno_json::LintRulesConfig;
 pub use deno_config::deno_json::ProseWrap;
 pub use deno_config::deno_json::TsConfig;

@@ -1129,6 +1130,23 @@ impl CliOptions {
     }
   }
 
+  pub fn otel_config(&self) -> Option<OtelConfig> {
+    if self
+      .flags
+      .unstable_config
+      .features
+      .contains(&String::from("otel"))
+    {
+      Some(OtelConfig {
+        runtime_name: Cow::Borrowed("deno"),
+        runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
+        ..Default::default()
+      })
+    } else {
+      None
+    }
+  }
+
   pub fn env_file_name(&self) -> Option<&String> {
     self.flags.env_file.as_ref()
   }

@@ -1137,21 +1155,34 @@
     self
       .main_module_cell
       .get_or_init(|| {
-        let main_module = match &self.flags.subcommand {
+        Ok(match &self.flags.subcommand {
           DenoSubcommand::Compile(compile_flags) => {
             resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
           }
           DenoSubcommand::Eval(_) => {
-            resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())?
+            resolve_url_or_path("./$deno$eval.mts", self.initial_cwd())?
           }
           DenoSubcommand::Repl(_) => {
-            resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
+            resolve_url_or_path("./$deno$repl.mts", self.initial_cwd())?
           }
           DenoSubcommand::Run(run_flags) => {
             if run_flags.is_stdin() {
-              resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())?
+              resolve_url_or_path("./$deno$stdin.mts", self.initial_cwd())?
             } else {
-              resolve_url_or_path(&run_flags.script, self.initial_cwd())?
+              let url =
+                resolve_url_or_path(&run_flags.script, self.initial_cwd())?;
+              if self.is_node_main()
+                && url.scheme() == "file"
+                && MediaType::from_specifier(&url) == MediaType::Unknown
+              {
+                try_resolve_node_binary_main_entrypoint(
+                  &run_flags.script,
+                  self.initial_cwd(),
+                )?
+                .unwrap_or(url)
+              } else {
+                url
+              }
             }
           }
           DenoSubcommand::Serve(run_flags) => {

@@ -1160,9 +1191,7 @@
           _ => {
             bail!("No main module.")
           }
-        };
-
-        Ok(main_module)
+        })
       })
       .as_ref()
       .map_err(|err| deno_core::anyhow::anyhow!("{}", err))

@@ -1211,7 +1240,7 @@
   // This is triggered via a secret environment variable which is used
   // for functionality like child_process.fork. Users should NOT depend
   // on this functionality.
-  pub fn is_npm_main(&self) -> bool {
+  pub fn is_node_main(&self) -> bool {
     NPM_PROCESS_STATE.is_some()
   }
 

@@ -1589,9 +1618,11 @@
       || self.workspace().has_unstable("bare-node-builtins")
   }
 
-  pub fn unstable_detect_cjs(&self) -> bool {
-    self.flags.unstable_config.detect_cjs
-      || self.workspace().has_unstable("detect-cjs")
+  pub fn detect_cjs(&self) -> bool {
+    // only enabled when there's a package.json in order to not have a
+    // perf penalty for non-npm Deno projects of searching for the closest
+    // package.json beside each module
+    self.workspace().package_jsons().next().is_some() || self.is_node_main()
   }
 
   fn byonm_enabled(&self) -> bool {

@@ -1655,7 +1686,6 @@
       "byonm",
       "bare-node-builtins",
      "fmt-component",
-      "detect-cjs",
     ])
     .collect();
 

@@ -1793,6 +1823,36 @@ fn resolve_node_modules_folder(
   Ok(Some(canonicalize_path_maybe_not_exists(&path)?))
 }
 
+fn try_resolve_node_binary_main_entrypoint(
+  specifier: &str,
+  initial_cwd: &Path,
+) -> Result<Option<Url>, AnyError> {
+  // node allows running files at paths without a `.js` extension
+  // or at directories with an index.js file
+  let path = deno_core::normalize_path(initial_cwd.join(specifier));
+  if path.is_dir() {
+    let index_file = path.join("index.js");
+    Ok(if index_file.is_file() {
+      Some(deno_path_util::url_from_file_path(&index_file)?)
+    } else {
+      None
+    })
+  } else {
+    let path = path.with_extension(
+      path
+        .extension()
+        .and_then(|s| s.to_str())
+        .map(|s| format!("{}.js", s))
+        .unwrap_or("js".to_string()),
+    );
+    if path.is_file() {
+      Ok(Some(deno_path_util::url_from_file_path(&path)?))
+    } else {
+      Ok(None)
+    }
+  }
+}
+
 fn resolve_import_map_specifier(
   maybe_import_map_path: Option<&str>,
   maybe_config_file: Option<&ConfigFile>,
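Note: the new `try_resolve_node_binary_main_entrypoint` mirrors Node's lookup for extensionless entrypoints: a directory resolves to its `index.js`, and a bare file path gets a `.js` suffix probed onto it. A minimal standalone sketch of that probing order follows; the `probe` helper and the example path are illustrative, not part of the diff.

use std::path::{Path, PathBuf};

// Hypothetical standalone helper mimicking the probing order above: a
// directory resolves to its index.js; a plain file path gets ".js"
// appended (or "<ext>.js" if it already has an extension).
fn probe(initial_cwd: &Path, specifier: &str) -> Option<PathBuf> {
  let path = initial_cwd.join(specifier);
  if path.is_dir() {
    let index_file = path.join("index.js");
    index_file.is_file().then_some(index_file)
  } else {
    let ext = match path.extension().and_then(|s| s.to_str()) {
      Some(ext) => format!("{}.js", ext),
      None => "js".to_string(),
    };
    let with_js = path.with_extension(ext);
    with_js.is_file().then_some(with_js)
  }
}

fn main() {
  // With a file tools/cli.js on disk, "deno run ./tools/cli" would now
  // resolve the same way "node ./tools/cli" does.
  println!("{:?}", probe(Path::new("."), "./tools/cli"));
}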
cli/emit.rs

@@ -181,7 +181,6 @@ impl Emitter {
   pub async fn load_and_emit_for_hmr(
     &self,
     specifier: &ModuleSpecifier,
-    module_kind: deno_ast::ModuleKind,
   ) -> Result<String, AnyError> {
     let media_type = MediaType::from_specifier(specifier);
     let source_code = tokio::fs::read_to_string(

@@ -203,11 +202,16 @@
     // this statement is probably wrong)
     let mut options = self.transpile_and_emit_options.1.clone();
     options.source_map = SourceMapOption::None;
+    let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
+      specifier,
+      media_type,
+      parsed_source.compute_is_script(),
+    )?;
     let transpiled_source = parsed_source
       .transpile(
         &self.transpile_and_emit_options.0,
         &deno_ast::TranspileModuleOptions {
-          module_kind: Some(module_kind),
+          module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
         },
         &options,
       )?
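Note: the HMR emit path now decides the module kind itself, by asking the `CjsTracker` whether the file is a CJS script, instead of trusting a `module_kind` argument from the caller. A tiny sketch of that mapping with stand-in types (not the real deno_ast API surface):

#[derive(Clone, Copy, Debug, PartialEq)]
enum ModuleKind {
  Esm,
  Cjs,
}

impl ModuleKind {
  // Stand-in for deno_ast's conversion from a CJS-or-not answer.
  fn from_is_cjs(is_cjs: bool) -> Self {
    if is_cjs { ModuleKind::Cjs } else { ModuleKind::Esm }
  }
}

fn main() {
  // A file classified as a script (require()/module.exports, no ESM
  // syntax) transpiles as CJS; anything with import/export stays ESM.
  assert_eq!(ModuleKind::from_is_cjs(true), ModuleKind::Cjs);
  assert_eq!(ModuleKind::from_is_cjs(false), ModuleKind::Esm);
}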
cli/errors.rs

@@ -88,6 +88,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
   }
 }
 
+fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
+  "TypeError"
+}
+
 pub fn get_error_class_name(e: &AnyError) -> &'static str {
   deno_runtime::errors::get_error_class_name(e)
     .or_else(|| {

@@ -106,5 +110,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
       e.downcast_ref::<ResolutionError>()
         .map(get_resolution_error_class)
     })
+    .or_else(|| {
+      e.downcast_ref::<std::num::TryFromIntError>()
+        .map(get_try_from_int_error_class)
+    })
     .unwrap_or("Error")
 }
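Note: the error-class lookup works by downcast chaining: each `.or_else` probes the boxed error for a more specific concrete type, and the chain falls back to "Error". A minimal self-contained sketch of the same pattern (assuming anyhow 1.x; the `class_name` helper is illustrative):

use anyhow::Error as AnyError;

// Each or_else tries a more specific error type; unmatched errors fall
// through to the generic "Error" class at the end of the chain.
fn class_name(e: &AnyError) -> &'static str {
  e.downcast_ref::<std::num::TryFromIntError>()
    .map(|_| "TypeError")
    .or_else(|| e.downcast_ref::<std::io::Error>().map(|_| "Error"))
    .unwrap_or("Error")
}

fn main() {
  let err: AnyError = u8::try_from(300i32).unwrap_err().into();
  assert_eq!(class_name(&err), "TypeError");
  println!("{}", class_name(&err));
}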
cli/factory.rs

@@ -42,12 +42,12 @@ use crate::npm::CliNpmResolverCreateOptions;
 use crate::npm::CliNpmResolverManagedSnapshotOption;
 use crate::npm::CreateInNpmPkgCheckerOptions;
 use crate::resolver::CjsTracker;
-use crate::resolver::CjsTrackerOptions;
 use crate::resolver::CliDenoResolverFs;
-use crate::resolver::CliGraphResolver;
-use crate::resolver::CliGraphResolverOptions;
 use crate::resolver::CliNodeResolver;
+use crate::resolver::CliResolver;
+use crate::resolver::CliResolverOptions;
 use crate::resolver::CliSloppyImportsResolver;
+use crate::resolver::IsCjsResolverOptions;
 use crate::resolver::NpmModuleLoader;
 use crate::resolver::SloppyImportsCachedFs;
 use crate::standalone::DenoCompileBinaryWriter;

@@ -201,7 +201,7 @@ struct CliFactoryServices {
   parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
   permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
   pkg_json_resolver: Deferred<Arc<PackageJsonResolver>>,
-  resolver: Deferred<Arc<CliGraphResolver>>,
+  resolver: Deferred<Arc<CliResolver>>,
   root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
   root_permissions_container: Deferred<PermissionsContainer>,
   sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,

@@ -523,14 +523,14 @@ impl CliFactory {
       .await
   }
 
-  pub async fn resolver(&self) -> Result<&Arc<CliGraphResolver>, AnyError> {
+  pub async fn resolver(&self) -> Result<&Arc<CliResolver>, AnyError> {
     self
       .services
       .resolver
       .get_or_try_init_async(
         async {
           let cli_options = self.cli_options()?;
-          Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
+          Ok(Arc::new(CliResolver::new(CliResolverOptions {
             sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
             node_resolver: Some(self.cli_node_resolver().await?.clone()),
             npm_resolver: if cli_options.no_npm() {

@@ -541,9 +541,6 @@
             workspace_resolver: self.workspace_resolver().await?.clone(),
             bare_node_builtins_enabled: cli_options
               .unstable_bare_node_builtins(),
-            maybe_jsx_import_source_config: cli_options
-              .workspace()
-              .to_maybe_jsx_import_source_config()?,
             maybe_vendor_dir: cli_options.vendor_dir_path(),
           })))
         }

@@ -652,7 +649,6 @@
       self.cjs_tracker()?.clone(),
       self.fs().clone(),
       Some(self.parsed_source_cache().clone()),
-      self.cli_options()?.is_npm_main(),
     );
 
     Ok(Arc::new(NodeCodeTranslator::new(

@@ -706,6 +702,7 @@
     let cli_options = self.cli_options()?;
     Ok(Arc::new(ModuleGraphBuilder::new(
       self.caches()?.clone(),
+      self.cjs_tracker()?.clone(),
       cli_options.clone(),
       self.file_fetcher()?.clone(),
       self.fs().clone(),

@@ -794,8 +791,9 @@
       Ok(Arc::new(CjsTracker::new(
         self.in_npm_pkg_checker()?.clone(),
         self.pkg_json_resolver().clone(),
-        CjsTrackerOptions {
-          unstable_detect_cjs: options.unstable_detect_cjs(),
+        IsCjsResolverOptions {
+          detect_cjs: options.detect_cjs(),
+          is_node_main: options.is_node_main(),
         },
       )))
     })

@@ -809,7 +807,6 @@
       .cli_node_resolver
       .get_or_try_init_async(async {
         Ok(Arc::new(CliNodeResolver::new(
-          self.cjs_tracker()?.clone(),
          self.fs().clone(),
          self.in_npm_pkg_checker()?.clone(),
          self.node_resolver().await?.clone(),

@@ -939,6 +936,7 @@
       StorageKeyResolver::from_options(cli_options),
       cli_options.sub_command().clone(),
       self.create_cli_main_worker_options()?,
+      self.cli_options()?.otel_config(),
     ))
   }
 

@@ -949,10 +947,8 @@
     let create_hmr_runner = if cli_options.has_hmr() {
       let watcher_communicator = self.watcher_communicator.clone().unwrap();
       let emitter = self.emitter()?.clone();
-      let cjs_tracker = self.cjs_tracker()?.clone();
       let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| {
         Box::new(HmrRunner::new(
-          cjs_tracker.clone(),
           emitter.clone(),
           session,
           watcher_communicator.clone(),
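Note: `CliFactoryServices` keeps each service in a `Deferred<...>` slot that is built on first use, so these hunks only rewire what the `resolver` slot holds and what gets injected into it. A minimal sketch of that once-only lazy-init pattern using `std::sync::OnceLock` as an analogy (deno's `Deferred` is its own type; this is not its implementation):

use std::sync::{Arc, OnceLock};

struct CliResolver {
  name: String,
}

#[derive(Default)]
struct Factory {
  resolver: OnceLock<Arc<CliResolver>>,
}

impl Factory {
  // First call constructs the service and caches it; later calls get
  // the same Arc back, so all dependents share one instance.
  fn resolver(&self) -> &Arc<CliResolver> {
    self.resolver.get_or_init(|| {
      Arc::new(CliResolver { name: "cli".to_string() })
    })
  }
}

fn main() {
  let factory = Factory::default();
  let a = factory.resolver().clone();
  let b = factory.resolver().clone();
  assert!(Arc::ptr_eq(&a, &b)); // one shared instance
  println!("{}", a.name);
}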
cli/graph_util.rs

@@ -13,16 +13,19 @@ use crate::colors;
 use crate::errors::get_error_class_name;
 use crate::file_fetcher::FileFetcher;
 use crate::npm::CliNpmResolver;
-use crate::resolver::CliGraphResolver;
+use crate::resolver::CjsTracker;
+use crate::resolver::CliResolver;
 use crate::resolver::CliSloppyImportsResolver;
 use crate::resolver::SloppyImportsCachedFs;
 use crate::tools::check;
 use crate::tools::check::TypeChecker;
 use crate::util::file_watcher::WatcherCommunicator;
 use crate::util::fs::canonicalize_path;
+use deno_config::deno_json::JsxImportSourceConfig;
 use deno_config::workspace::JsrPackageConfig;
 use deno_core::anyhow::bail;
 use deno_graph::source::LoaderChecksum;
+use deno_graph::source::ResolutionMode;
 use deno_graph::FillFromLockfileOptions;
 use deno_graph::JsrLoadError;
 use deno_graph::ModuleLoadError;

@@ -379,6 +382,7 @@ pub struct BuildFastCheckGraphOptions<'a> {
 
 pub struct ModuleGraphBuilder {
   caches: Arc<cache::Caches>,
+  cjs_tracker: Arc<CjsTracker>,
   cli_options: Arc<CliOptions>,
   file_fetcher: Arc<FileFetcher>,
   fs: Arc<dyn FileSystem>,

@@ -389,7 +393,7 @@
   module_info_cache: Arc<ModuleInfoCache>,
   npm_resolver: Arc<dyn CliNpmResolver>,
   parsed_source_cache: Arc<ParsedSourceCache>,
-  resolver: Arc<CliGraphResolver>,
+  resolver: Arc<CliResolver>,
   root_permissions_container: PermissionsContainer,
 }
 

@@ -397,6 +401,7 @@ impl ModuleGraphBuilder {
   #[allow(clippy::too_many_arguments)]
   pub fn new(
     caches: Arc<cache::Caches>,
+    cjs_tracker: Arc<CjsTracker>,
     cli_options: Arc<CliOptions>,
     file_fetcher: Arc<FileFetcher>,
     fs: Arc<dyn FileSystem>,

@@ -407,11 +412,12 @@
     module_info_cache: Arc<ModuleInfoCache>,
     npm_resolver: Arc<dyn CliNpmResolver>,
     parsed_source_cache: Arc<ParsedSourceCache>,
-    resolver: Arc<CliGraphResolver>,
+    resolver: Arc<CliResolver>,
     root_permissions_container: PermissionsContainer,
   ) -> Self {
     Self {
       caches,
+      cjs_tracker,
       cli_options,
       file_fetcher,
       fs,

@@ -518,7 +524,7 @@
       None => MutLoaderRef::Owned(self.create_graph_loader()),
     };
     let cli_resolver = &self.resolver;
-    let graph_resolver = cli_resolver.as_graph_resolver();
+    let graph_resolver = self.create_graph_resolver()?;
     let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
     let maybe_file_watcher_reporter = self
       .maybe_file_watcher_reporter

@@ -543,7 +549,7 @@
           npm_resolver: Some(&graph_npm_resolver),
           module_analyzer: &analyzer,
           reporter: maybe_file_watcher_reporter,
-          resolver: Some(graph_resolver),
+          resolver: Some(&graph_resolver),
           locker: locker.as_mut().map(|l| l as _),
         },
       )

@@ -666,7 +672,7 @@
     };
     let parser = self.parsed_source_cache.as_capturing_parser();
     let cli_resolver = &self.resolver;
-    let graph_resolver = cli_resolver.as_graph_resolver();
+    let graph_resolver = self.create_graph_resolver()?;
     let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
 
     graph.build_fast_check_type_graph(

@@ -675,7 +681,7 @@
         fast_check_cache: fast_check_cache.as_ref().map(|c| c as _),
         fast_check_dts: false,
         jsr_url_provider: &CliJsrUrlProvider,
-        resolver: Some(graph_resolver),
+        resolver: Some(&graph_resolver),
         npm_resolver: Some(&graph_npm_resolver),
         workspace_fast_check: options.workspace_fast_check,
       },

@@ -739,6 +745,18 @@
       },
     )
   }
+
+  fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> {
+    let jsx_import_source_config = self
+      .cli_options
+      .workspace()
+      .to_maybe_jsx_import_source_config()?;
+    Ok(CliGraphResolver {
+      cjs_tracker: &self.cjs_tracker,
+      resolver: &self.resolver,
+      jsx_import_source_config,
+    })
+  }
 }
 
 /// Adds more explanatory information to a resolution error.

@@ -1143,6 +1161,53 @@ fn format_deno_graph_error(err: &dyn Error) -> String {
   message
 }
 
+#[derive(Debug)]
+struct CliGraphResolver<'a> {
+  cjs_tracker: &'a CjsTracker,
+  resolver: &'a CliResolver,
+  jsx_import_source_config: Option<JsxImportSourceConfig>,
+}
+
+impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
+  fn default_jsx_import_source(&self) -> Option<String> {
+    self
+      .jsx_import_source_config
+      .as_ref()
+      .and_then(|c| c.default_specifier.clone())
+  }
+
+  fn default_jsx_import_source_types(&self) -> Option<String> {
+    self
+      .jsx_import_source_config
+      .as_ref()
+      .and_then(|c| c.default_types_specifier.clone())
+  }
+
+  fn jsx_import_source_module(&self) -> &str {
+    self
+      .jsx_import_source_config
+      .as_ref()
+      .map(|c| c.module.as_str())
+      .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
+  }
+
+  fn resolve(
+    &self,
+    raw_specifier: &str,
+    referrer_range: &deno_graph::Range,
+    mode: ResolutionMode,
+  ) -> Result<ModuleSpecifier, ResolveError> {
+    self.resolver.resolve(
+      raw_specifier,
+      referrer_range,
+      self
+        .cjs_tracker
+        .get_referrer_kind(&referrer_range.specifier),
+      mode,
+    )
+  }
+}
+
 #[cfg(test)]
 mod test {
   use std::sync::Arc;
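Note: the shape of this refactor is that the long-lived `CliGraphResolver` service becomes a short-lived adapter built once per graph build, borrowing the shared `CjsTracker` and `CliResolver` and implementing `deno_graph`'s `Resolver` trait only for the duration of that call. A minimal sketch of the borrowed-adapter pattern with stand-in types (all names here are illustrative, not the deno_graph API):

// Stand-in for the external trait (deno_graph::source::Resolver above).
trait Resolver {
  fn resolve(&self, specifier: &str) -> String;
}

// Long-lived services owned elsewhere (e.g. Arc fields on a builder).
struct CjsTracker;
struct CliResolver;

impl CjsTracker {
  fn referrer_kind(&self) -> &'static str { "esm" }
}
impl CliResolver {
  fn resolve(&self, specifier: &str, kind: &str) -> String {
    format!("{specifier} (resolved as {kind})")
  }
}

// Short-lived adapter: it borrows the services, so it is cheap to build
// per graph build and cannot outlive them.
struct GraphResolverAdapter<'a> {
  cjs_tracker: &'a CjsTracker,
  resolver: &'a CliResolver,
}

impl<'a> Resolver for GraphResolverAdapter<'a> {
  fn resolve(&self, specifier: &str) -> String {
    self.resolver.resolve(specifier, self.cjs_tracker.referrer_kind())
  }
}

fn main() {
  let (tracker, resolver) = (CjsTracker, CliResolver);
  let adapter = GraphResolverAdapter { cjs_tracker: &tracker, resolver: &resolver };
  println!("{}", adapter.resolve("./mod.ts"));
}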
cli/lsp/analysis.rs

@@ -10,6 +10,7 @@ use super::tsc;
 use super::urls::url_to_uri;
 
 use crate::args::jsr_url;
+use crate::lsp::logging::lsp_warn;
 use crate::lsp::search::PackageSearchApi;
 use crate::tools::lint::CliLinter;
 use crate::util::path::relative_specifier;

@@ -38,6 +39,7 @@ use deno_semver::package::PackageReq;
 use deno_semver::package::PackageReqReference;
 use deno_semver::Version;
 use import_map::ImportMap;
+use node_resolver::NodeModuleKind;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use std::borrow::Cow;

@@ -466,6 +468,7 @@ impl<'a> TsResponseImportMapper<'a> {
     &self,
     specifier: &str,
     referrer: &ModuleSpecifier,
+    referrer_kind: NodeModuleKind,
   ) -> Option<String> {
     let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
     let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(

@@ -476,7 +479,7 @@
     for specifier in specifiers {
       if let Some(specifier) = self
         .resolver
-        .as_graph_resolver(Some(&self.file_referrer))
+        .as_cli_resolver(Some(&self.file_referrer))
         .resolve(
           &specifier,
           &deno_graph::Range {

@@ -484,6 +487,7 @@
             start: deno_graph::Position::zeroed(),
             end: deno_graph::Position::zeroed(),
           },
+          referrer_kind,
           ResolutionMode::Types,
         )
         .ok()

@@ -506,10 +510,11 @@
     &self,
     specifier_text: &str,
     referrer: &ModuleSpecifier,
+    referrer_kind: NodeModuleKind,
   ) -> bool {
     self
       .resolver
-      .as_graph_resolver(Some(&self.file_referrer))
+      .as_cli_resolver(Some(&self.file_referrer))
       .resolve(
         specifier_text,
         &deno_graph::Range {

@@ -517,6 +522,7 @@
           start: deno_graph::Position::zeroed(),
           end: deno_graph::Position::zeroed(),
         },
+        referrer_kind,
         deno_graph::source::ResolutionMode::Types,
       )
       .is_ok()

@@ -585,6 +591,7 @@ fn try_reverse_map_package_json_exports(
 /// like an import and rewrite the import specifier to include the extension
 pub fn fix_ts_import_changes(
   referrer: &ModuleSpecifier,
+  referrer_kind: NodeModuleKind,
   changes: &[tsc::FileTextChanges],
   language_server: &language_server::Inner,
 ) -> Result<Vec<tsc::FileTextChanges>, AnyError> {

@@ -601,8 +608,8 @@ pub fn fix_ts_import_changes(
       if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) {
         let specifier =
           captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
-        if let Some(new_specifier) =
-          import_mapper.check_unresolved_specifier(specifier, referrer)
+        if let Some(new_specifier) = import_mapper
+          .check_unresolved_specifier(specifier, referrer, referrer_kind)
         {
           line.replace(specifier, &new_specifier)
         } else {

@@ -632,6 +639,7 @@
 /// resolution by Deno (includes the extension).
 fn fix_ts_import_action<'a>(
   referrer: &ModuleSpecifier,
+  referrer_kind: NodeModuleKind,
   action: &'a tsc::CodeFixAction,
   language_server: &language_server::Inner,
 ) -> Option<Cow<'a, tsc::CodeFixAction>> {

@@ -651,7 +659,7 @@
   };
   let import_mapper = language_server.get_ts_response_import_mapper(referrer);
   if let Some(new_specifier) =
-    import_mapper.check_unresolved_specifier(specifier, referrer)
+    import_mapper.check_unresolved_specifier(specifier, referrer, referrer_kind)
   {
     let description = action.description.replace(specifier, &new_specifier);
     let changes = action

@@ -682,7 +690,7 @@
       fix_id: None,
       fix_all_description: None,
     }))
-  } else if !import_mapper.is_valid_import(specifier, referrer) {
+  } else if !import_mapper.is_valid_import(specifier, referrer, referrer_kind) {
     None
   } else {
     Some(Cow::Borrowed(action))

@@ -747,8 +755,14 @@ pub fn ts_changes_to_edit(
 ) -> Result<Option<lsp::WorkspaceEdit>, AnyError> {
   let mut text_document_edits = Vec::new();
   for change in changes {
-    let text_document_edit = change.to_text_document_edit(language_server)?;
-    text_document_edits.push(text_document_edit);
+    let edit = match change.to_text_document_edit(language_server) {
+      Ok(e) => e,
+      Err(err) => {
+        lsp_warn!("Couldn't covert text document edit: {:#}", err);
+        continue;
+      }
+    };
+    text_document_edits.push(edit);
   }
   Ok(Some(lsp::WorkspaceEdit {
     changes: None,

@@ -1010,6 +1024,7 @@ impl CodeActionCollection {
   pub fn add_ts_fix_action(
     &mut self,
     specifier: &ModuleSpecifier,
+    specifier_kind: NodeModuleKind,
     action: &tsc::CodeFixAction,
     diagnostic: &lsp::Diagnostic,
     language_server: &language_server::Inner,

@@ -1027,7 +1042,8 @@
         "The action returned from TypeScript is unsupported.",
       ));
     }
-    let Some(action) = fix_ts_import_action(specifier, action, language_server)
+    let Some(action) =
+      fix_ts_import_action(specifier, specifier_kind, action, language_server)
     else {
       return Ok(());
     };

@@ -1269,6 +1285,9 @@
       import_start_from_specifier(document, i)
     })?;
     let referrer = document.specifier();
+    let referrer_kind = language_server
+      .is_cjs_resolver
+      .get_doc_module_kind(document);
     let file_referrer = document.file_referrer();
     let config_data = language_server
       .config

@@ -1291,10 +1310,11 @@
       if !config_data.byonm {
         return None;
       }
-      if !language_server
-        .resolver
-        .is_bare_package_json_dep(&dep_key, referrer)
-      {
+      if !language_server.resolver.is_bare_package_json_dep(
+        &dep_key,
+        referrer,
+        referrer_kind,
+      ) {
         return None;
       }
       NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()?

@@ -1313,7 +1333,7 @@
     }
     if language_server
       .resolver
-      .npm_to_file_url(&npm_ref, document.specifier(), file_referrer)
+      .npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer)
       .is_some()
     {
       // The package import has types.
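Note: one behavioral change above: `ts_changes_to_edit` no longer aborts the whole batch on the first failing conversion with `?`; it warns and skips that edit. A minimal sketch of the fail-soft collect pattern (plain types, no LSP machinery):

fn convert(change: &str) -> Result<String, String> {
  if change.is_empty() {
    Err("empty change".to_string())
  } else {
    Ok(change.to_uppercase())
  }
}

fn main() {
  let changes = ["a", "", "b"];
  let mut edits = Vec::new();
  for change in changes {
    // Fail-soft: one bad change is logged and skipped instead of
    // propagating its error and dropping the rest of the batch.
    match convert(change) {
      Ok(e) => edits.push(e),
      Err(err) => {
        eprintln!("couldn't convert text document edit: {err:#}");
        continue;
      }
    }
  }
  assert_eq!(edits, vec!["A".to_string(), "B".to_string()]);
}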
cli/lsp/completions.rs

@@ -9,6 +9,7 @@ use super::jsr::CliJsrSearchApi;
 use super::lsp_custom;
 use super::npm::CliNpmSearchApi;
 use super::registries::ModuleRegistry;
+use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
 use super::search::PackageSearchApi;
 use super::tsc;

@@ -35,6 +36,7 @@ use deno_semver::package::PackageNv;
 use import_map::ImportMap;
 use indexmap::IndexSet;
 use lsp_types::CompletionList;
+use node_resolver::NodeModuleKind;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use tower_lsp::lsp_types as lsp;

@@ -159,15 +161,17 @@ pub async fn get_import_completions(
   jsr_search_api: &CliJsrSearchApi,
   npm_search_api: &CliNpmSearchApi,
   documents: &Documents,
+  is_cjs_resolver: &LspIsCjsResolver,
   resolver: &LspResolver,
   maybe_import_map: Option<&ImportMap>,
 ) -> Option<lsp::CompletionResponse> {
   let document = documents.get(specifier)?;
+  let specifier_kind = is_cjs_resolver.get_doc_module_kind(&document);
   let file_referrer = document.file_referrer();
   let (text, _, range) = document.get_maybe_dependency(position)?;
   let range = to_narrow_lsp_range(document.text_info(), &range);
   let resolved = resolver
-    .as_graph_resolver(file_referrer)
+    .as_cli_resolver(file_referrer)
     .resolve(
       &text,
       &Range {

@@ -175,6 +179,7 @@
         start: deno_graph::Position::zeroed(),
         end: deno_graph::Position::zeroed(),
       },
+      specifier_kind,
       ResolutionMode::Execution,
     )
     .ok();

@@ -201,7 +206,7 @@
     // completions for import map specifiers
     Some(lsp::CompletionResponse::List(completion_list))
   } else if let Some(completion_list) =
-    get_local_completions(specifier, &text, &range, resolver)
+    get_local_completions(specifier, specifier_kind, &text, &range, resolver)
   {
     // completions for local relative modules
     Some(lsp::CompletionResponse::List(completion_list))

@@ -355,24 +360,26 @@ fn get_import_map_completions(
 
 /// Return local completions that are relative to the base specifier.
 fn get_local_completions(
-  base: &ModuleSpecifier,
+  referrer: &ModuleSpecifier,
+  referrer_kind: NodeModuleKind,
   text: &str,
   range: &lsp::Range,
   resolver: &LspResolver,
 ) -> Option<CompletionList> {
-  if base.scheme() != "file" {
+  if referrer.scheme() != "file" {
     return None;
   }
   let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
   let resolved_parent = resolver
-    .as_graph_resolver(Some(base))
+    .as_cli_resolver(Some(referrer))
     .resolve(
       parent,
       &Range {
-        specifier: base.clone(),
+        specifier: referrer.clone(),
         start: deno_graph::Position::zeroed(),
         end: deno_graph::Position::zeroed(),
       },
+      referrer_kind,
       ResolutionMode::Execution,
     )
     .ok()?;

@@ -385,7 +392,7 @@
     let de = de.ok()?;
     let label = de.path().file_name()?.to_string_lossy().to_string();
     let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?;
-    if entry_specifier == *base {
+    if entry_specifier == *referrer {
       return None;
     }
     let full_text = format!("{parent}{label}");

@@ -905,6 +912,7 @@ mod tests {
       ModuleSpecifier::from_file_path(file_c).expect("could not create");
     let actual = get_local_completions(
       &specifier,
+      NodeModuleKind::Esm,
       "./",
       &lsp::Range {
         start: lsp::Position {
cli/lsp/config.rs

@@ -4,6 +4,7 @@ use deno_ast::MediaType;
 use deno_config::deno_json::DenoJsonCache;
 use deno_config::deno_json::FmtConfig;
 use deno_config::deno_json::FmtOptionsConfig;
+use deno_config::deno_json::JsxImportSourceConfig;
 use deno_config::deno_json::LintConfig;
 use deno_config::deno_json::NodeModulesDirMode;
 use deno_config::deno_json::TestConfig;

@@ -1654,6 +1655,17 @@ impl ConfigData {
     self.member_dir.maybe_pkg_json()
   }
 
+  pub fn maybe_jsx_import_source_config(
+    &self,
+  ) -> Option<JsxImportSourceConfig> {
+    self
+      .member_dir
+      .workspace
+      .to_maybe_jsx_import_source_config()
+      .ok()
+      .flatten()
+  }
+
   pub fn scope_contains_specifier(&self, specifier: &ModuleSpecifier) -> bool {
     specifier.as_str().starts_with(self.scope.as_str())
       || self
@@ -1707,6 +1707,7 @@ mod tests {
         documents: Arc::new(documents),
         assets: Default::default(),
         config: Arc::new(config),
+        is_cjs_resolver: Default::default(),
         resolver,
       },
     )
|
@ -3,7 +3,9 @@
|
|||
use super::cache::calculate_fs_version;
|
||||
use super::cache::LspCache;
|
||||
use super::config::Config;
|
||||
use super::resolver::LspIsCjsResolver;
|
||||
use super::resolver::LspResolver;
|
||||
use super::resolver::SingleReferrerGraphResolver;
|
||||
use super::testing::TestCollector;
|
||||
use super::testing::TestModule;
|
||||
use super::text::LineIndex;
|
||||
|
@ -33,6 +35,7 @@ use deno_semver::npm::NpmPackageReqReference;
|
|||
use deno_semver::package::PackageReq;
|
||||
use indexmap::IndexMap;
|
||||
use indexmap::IndexSet;
|
||||
use node_resolver::NodeModuleKind;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::BTreeSet;
|
||||
|
@ -293,6 +296,8 @@ pub struct Document {
|
|||
/// Contains the last-known-good set of dependencies from parsing the module.
|
||||
config: Arc<Config>,
|
||||
dependencies: Arc<IndexMap<String, deno_graph::Dependency>>,
|
||||
/// If this is maybe a CJS script and maybe not an ES module.
|
||||
is_script: Option<bool>,
|
||||
// TODO(nayeemrmn): This is unused, use it for scope attribution for remote
|
||||
// modules.
|
||||
file_referrer: Option<ModuleSpecifier>,
|
||||
|
@ -323,6 +328,7 @@ impl Document {
|
|||
maybe_lsp_version: Option<i32>,
|
||||
maybe_language_id: Option<LanguageId>,
|
||||
maybe_headers: Option<HashMap<String, String>>,
|
||||
is_cjs_resolver: &LspIsCjsResolver,
|
||||
resolver: Arc<LspResolver>,
|
||||
config: Arc<Config>,
|
||||
cache: &Arc<LspCache>,
|
||||
|
@ -342,6 +348,7 @@ impl Document {
|
|||
maybe_headers.as_ref(),
|
||||
media_type,
|
||||
file_referrer.as_ref(),
|
||||
is_cjs_resolver,
|
||||
&resolver,
|
||||
)
|
||||
} else {
|
||||
|
@ -367,6 +374,7 @@ impl Document {
|
|||
file_referrer.as_ref(),
|
||||
),
|
||||
file_referrer,
|
||||
is_script: maybe_module.as_ref().map(|m| m.is_script),
|
||||
maybe_types_dependency,
|
||||
line_index,
|
||||
maybe_language_id,
|
||||
|
@ -388,6 +396,7 @@ impl Document {
|
|||
|
||||
fn with_new_config(
|
||||
&self,
|
||||
is_cjs_resolver: &LspIsCjsResolver,
|
||||
resolver: Arc<LspResolver>,
|
||||
config: Arc<Config>,
|
||||
) -> Arc<Self> {
|
||||
|
@ -399,6 +408,7 @@ impl Document {
|
|||
let dependencies;
|
||||
let maybe_types_dependency;
|
||||
let maybe_parsed_source;
|
||||
let is_script;
|
||||
let maybe_test_module_fut;
|
||||
if media_type != self.media_type {
|
||||
let parsed_source_result =
|
||||
|
@ -408,6 +418,7 @@ impl Document {
|
|||
&parsed_source_result,
|
||||
self.maybe_headers.as_ref(),
|
||||
self.file_referrer.as_ref(),
|
||||
is_cjs_resolver,
|
||||
&resolver,
|
||||
)
|
||||
.ok();
|
||||
|
@ -415,6 +426,7 @@ impl Document {
|
|||
.as_ref()
|
||||
.map(|m| Arc::new(m.dependencies.clone()))
|
||||
.unwrap_or_default();
|
||||
is_script = maybe_module.as_ref().map(|m| m.is_script);
|
||||
maybe_types_dependency = maybe_module
|
||||
.as_ref()
|
||||
.and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?)));
|
||||
|
@ -422,10 +434,19 @@ impl Document {
|
|||
maybe_test_module_fut =
|
||||
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config);
|
||||
} else {
|
||||
let graph_resolver =
|
||||
resolver.as_graph_resolver(self.file_referrer.as_ref());
|
||||
let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref());
|
||||
let npm_resolver =
|
||||
resolver.create_graph_npm_resolver(self.file_referrer.as_ref());
|
||||
let config_data = resolver.as_config_data(self.file_referrer.as_ref());
|
||||
let jsx_import_source_config =
|
||||
config_data.and_then(|d| d.maybe_jsx_import_source_config());
|
||||
let resolver = SingleReferrerGraphResolver {
|
||||
valid_referrer: &self.specifier,
|
||||
referrer_kind: is_cjs_resolver
|
||||
.get_lsp_referrer_kind(&self.specifier, self.is_script),
|
||||
cli_resolver,
|
||||
jsx_import_source_config: jsx_import_source_config.as_ref(),
|
||||
};
|
||||
dependencies = Arc::new(
|
||||
self
|
||||
.dependencies
|
||||
|
@ -436,7 +457,7 @@ impl Document {
|
|||
d.with_new_resolver(
|
||||
s,
|
||||
&CliJsrUrlProvider,
|
||||
Some(graph_resolver),
|
||||
Some(&resolver),
|
||||
Some(&npm_resolver),
|
||||
),
|
||||
)
|
||||
|
@ -446,10 +467,11 @@ impl Document {
|
|||
maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| {
|
||||
Arc::new(d.with_new_resolver(
|
||||
&CliJsrUrlProvider,
|
||||
Some(graph_resolver),
|
||||
Some(&resolver),
|
||||
Some(&npm_resolver),
|
||||
))
|
||||
});
|
||||
is_script = self.is_script;
|
||||
maybe_parsed_source = self.maybe_parsed_source().cloned();
|
||||
maybe_test_module_fut = self
|
||||
.maybe_test_module_fut
|
||||
|
@ -461,6 +483,7 @@ impl Document {
|
|||
// updated properties
|
||||
dependencies,
|
||||
file_referrer: self.file_referrer.clone(),
|
||||
is_script,
|
||||
maybe_types_dependency,
|
||||
maybe_navigation_tree: Mutex::new(None),
|
||||
// maintain - this should all be copies/clones
|
||||
|
@ -485,6 +508,7 @@ impl Document {
|
|||
|
||||
fn with_change(
|
||||
&self,
|
||||
is_cjs_resolver: &LspIsCjsResolver,
|
||||
version: i32,
|
||||
changes: Vec<lsp::TextDocumentContentChangeEvent>,
|
||||
) -> Result<Arc<Self>, AnyError> {
|
||||
|
@ -518,6 +542,7 @@ impl Document {
|
|||
self.maybe_headers.as_ref(),
|
||||
media_type,
|
||||
self.file_referrer.as_ref(),
|
||||
is_cjs_resolver,
|
||||
self.resolver.as_ref(),
|
||||
)
|
||||
} else {
|
||||
|
@ -541,6 +566,7 @@ impl Document {
|
|||
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config);
|
||||
Ok(Arc::new(Self {
|
||||
config: self.config.clone(),
|
||||
is_script: maybe_module.as_ref().map(|m| m.is_script),
|
||||
specifier: self.specifier.clone(),
|
||||
file_referrer: self.file_referrer.clone(),
|
||||
maybe_fs_version: self.maybe_fs_version.clone(),
|
||||
|
@ -575,6 +601,7 @@ impl Document {
|
|||
),
|
||||
maybe_language_id: self.maybe_language_id,
|
||||
dependencies: self.dependencies.clone(),
|
||||
is_script: self.is_script,
|
||||
maybe_types_dependency: self.maybe_types_dependency.clone(),
|
||||
text: self.text.clone(),
|
||||
text_info_cell: once_cell::sync::OnceCell::new(),
|
||||
|
@ -602,6 +629,7 @@ impl Document {
|
|||
),
|
||||
maybe_language_id: self.maybe_language_id,
|
||||
dependencies: self.dependencies.clone(),
|
||||
is_script: self.is_script,
|
||||
maybe_types_dependency: self.maybe_types_dependency.clone(),
|
||||
text: self.text.clone(),
|
||||
text_info_cell: once_cell::sync::OnceCell::new(),
|
||||
|
@ -650,6 +678,13 @@ impl Document {
|
|||
})
|
||||
}
|
||||
|
||||
/// If this is maybe a CJS script and maybe not an ES module.
|
||||
///
|
||||
/// Use `LspIsCjsResolver` to determine for sure.
|
||||
pub fn is_script(&self) -> Option<bool> {
|
||||
self.is_script
|
||||
}
|
||||
|
||||
pub fn line_index(&self) -> Arc<LineIndex> {
|
||||
self.line_index.clone()
|
||||
}
|
||||
|
@ -797,6 +832,7 @@ impl FileSystemDocuments {
|
|||
pub fn get(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
is_cjs_resolver: &LspIsCjsResolver,
|
||||
resolver: &Arc<LspResolver>,
|
||||
config: &Arc<Config>,
|
||||
cache: &Arc<LspCache>,
|
||||
|
@ -820,7 +856,14 @@ impl FileSystemDocuments {
|
|||
};
|
||||
if dirty {
|
||||
// attempt to update the file on the file system
|
||||
self.refresh_document(specifier, resolver, config, cache, file_referrer)
|
||||
self.refresh_document(
|
||||
specifier,
|
||||
is_cjs_resolver,
|
||||
resolver,
|
||||
config,
|
||||
cache,
|
||||
file_referrer,
|
||||
)
|
||||
} else {
|
||||
old_doc
|
||||
}
|
||||
|
@ -831,6 +874,7 @@ impl FileSystemDocuments {
|
|||
fn refresh_document(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
is_cjs_resolver: &LspIsCjsResolver,
|
||||
resolver: &Arc<LspResolver>,
|
||||
config: &Arc<Config>,
|
||||
cache: &Arc<LspCache>,
|
||||
|
@ -847,6 +891,7 @@ impl FileSystemDocuments {
|
|||
None,
|
||||
None,
|
||||
None,
|
||||
is_cjs_resolver,
|
||||
resolver.clone(),
|
||||
config.clone(),
|
||||
cache,
|
||||
|
@ -863,6 +908,7 @@ impl FileSystemDocuments {
|
|||
None,
|
||||
None,
|
||||
None,
|
||||
is_cjs_resolver,
|
||||
resolver.clone(),
|
||||
config.clone(),
|
||||
cache,
|
||||
|
@ -890,6 +936,7 @@ impl FileSystemDocuments {
|
|||
None,
|
||||
None,
|
||||
maybe_headers,
|
||||
is_cjs_resolver,
|
||||
resolver.clone(),
|
||||
config.clone(),
|
||||
cache,
|
||||
|
@ -930,6 +977,11 @@ pub struct Documents {
|
|||
/// The DENO_DIR that the documents looks for non-file based modules.
|
||||
cache: Arc<LspCache>,
|
||||
config: Arc<Config>,
|
||||
/// Resolver for detecting if a document is CJS or ESM.
|
||||
is_cjs_resolver: Arc<LspIsCjsResolver>,
|
||||
/// A resolver that takes into account currently loaded import map and JSX
|
||||
/// settings.
|
||||
resolver: Arc<LspResolver>,
|
||||
/// A flag that indicates that stated data is potentially invalid and needs to
|
||||
/// be recalculated before being considered valid.
|
||||
dirty: bool,
|
||||
|
@ -937,9 +989,6 @@ pub struct Documents {
|
|||
open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
|
||||
/// Documents stored on the file system.
|
||||
file_system_docs: Arc<FileSystemDocuments>,
|
||||
/// A resolver that takes into account currently loaded import map and JSX
|
||||
/// settings.
|
||||
resolver: Arc<LspResolver>,
|
||||
/// The npm package requirements found in npm specifiers.
|
||||
npm_reqs_by_scope:
|
||||
Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
|
||||
|
@ -970,6 +1019,7 @@ impl Documents {
|
|||
// the cache for remote modules here in order to get the
|
||||
// x-typescript-types?
|
||||
None,
|
||||
&self.is_cjs_resolver,
|
||||
self.resolver.clone(),
|
||||
self.config.clone(),
|
||||
&self.cache,
|
||||
|
@ -1004,7 +1054,7 @@ impl Documents {
|
|||
))
|
||||
})?;
|
||||
self.dirty = true;
|
||||
let doc = doc.with_change(version, changes)?;
|
||||
let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?;
|
||||
self.open_docs.insert(doc.specifier().clone(), doc.clone());
|
||||
Ok(doc)
|
||||
}
|
||||
|
@ -1133,6 +1183,7 @@ impl Documents {
|
|||
if let Some(old_doc) = old_doc {
|
||||
self.file_system_docs.get(
|
||||
specifier,
|
||||
&self.is_cjs_resolver,
|
||||
&self.resolver,
|
||||
&self.config,
|
||||
&self.cache,
|
||||
|
@ -1157,6 +1208,7 @@ impl Documents {
|
|||
} else {
|
||||
self.file_system_docs.get(
|
||||
&specifier,
|
||||
&self.is_cjs_resolver,
|
||||
&self.resolver,
|
||||
&self.config,
|
||||
&self.cache,
|
||||
|
@ -1215,12 +1267,15 @@ impl Documents {
|
|||
referrer: &ModuleSpecifier,
|
||||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
|
||||
let document = self.get(referrer);
|
||||
let file_referrer = document
|
||||
let referrer_doc = self.get(referrer);
|
||||
let file_referrer = referrer_doc
|
||||
.as_ref()
|
||||
.and_then(|d| d.file_referrer())
|
||||
.or(file_referrer);
|
||||
let dependencies = document.as_ref().map(|d| d.dependencies());
|
||||
let dependencies = referrer_doc.as_ref().map(|d| d.dependencies());
|
||||
let referrer_kind = self
|
||||
.is_cjs_resolver
|
||||
.get_maybe_doc_module_kind(referrer, referrer_doc.as_deref());
|
||||
let mut results = Vec::new();
|
||||
for raw_specifier in raw_specifiers {
|
||||
if raw_specifier.starts_with("asset:") {
|
||||
|
@ -1237,31 +1292,35 @@ impl Documents {
|
|||
results.push(self.resolve_dependency(
|
||||
specifier,
|
||||
referrer,
|
||||
referrer_kind,
|
||||
file_referrer,
|
||||
));
|
||||
} else if let Some(specifier) = dep.maybe_code.maybe_specifier() {
|
||||
results.push(self.resolve_dependency(
|
||||
specifier,
|
||||
referrer,
|
||||
referrer_kind,
|
||||
file_referrer,
|
||||
));
|
||||
} else {
|
||||
results.push(None);
|
||||
}
|
||||
} else if let Ok(specifier) =
|
||||
self.resolver.as_graph_resolver(file_referrer).resolve(
|
||||
self.resolver.as_cli_resolver(file_referrer).resolve(
|
||||
raw_specifier,
|
||||
&deno_graph::Range {
|
||||
specifier: referrer.clone(),
|
||||
start: deno_graph::Position::zeroed(),
|
||||
end: deno_graph::Position::zeroed(),
|
||||
},
|
||||
referrer_kind,
|
||||
ResolutionMode::Types,
|
||||
)
|
||||
{
|
||||
results.push(self.resolve_dependency(
|
||||
&specifier,
|
||||
referrer,
|
||||
referrer_kind,
|
       file_referrer,
     ));
   } else {

@@ -1280,7 +1339,11 @@ impl Documents {
   ) {
     self.config = Arc::new(config.clone());
     self.cache = Arc::new(cache.clone());
+    self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(cache));
     self.resolver = resolver.clone();

     node_resolver::PackageJsonThreadLocalCache::clear();

     {
       let fs_docs = &self.file_system_docs;
       // Clean up non-existent documents.

@@ -1300,14 +1363,21 @@ impl Documents {
       if !config.specifier_enabled(doc.specifier()) {
         continue;
       }
-      *doc = doc.with_new_config(self.resolver.clone(), self.config.clone());
+      *doc = doc.with_new_config(
+        &self.is_cjs_resolver,
+        self.resolver.clone(),
+        self.config.clone(),
+      );
     }
     for mut doc in self.file_system_docs.docs.iter_mut() {
       if !config.specifier_enabled(doc.specifier()) {
         continue;
       }
-      *doc.value_mut() =
-        doc.with_new_config(self.resolver.clone(), self.config.clone());
+      *doc.value_mut() = doc.with_new_config(
+        &self.is_cjs_resolver,
+        self.resolver.clone(),
+        self.config.clone(),
+      );
     }
     self.open_docs = open_docs;
     let mut preload_count = 0;

@@ -1324,6 +1394,7 @@ impl Documents {
       {
         fs_docs.refresh_document(
           specifier,
+          &self.is_cjs_resolver,
           &self.resolver,
           &self.config,
           &self.cache,

@@ -1409,6 +1480,7 @@ impl Documents {
     &self,
     specifier: &ModuleSpecifier,
     referrer: &ModuleSpecifier,
+    referrer_kind: NodeModuleKind,
     file_referrer: Option<&ModuleSpecifier>,
   ) -> Option<(ModuleSpecifier, MediaType)> {
     if let Some(module_name) = specifier.as_str().strip_prefix("node:") {

@@ -1422,10 +1494,12 @@ impl Documents {
     let mut specifier = specifier.clone();
     let mut media_type = None;
     if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) {
-      let (s, mt) =
-        self
-          .resolver
-          .npm_to_file_url(&npm_ref, referrer, file_referrer)?;
+      let (s, mt) = self.resolver.npm_to_file_url(
+        &npm_ref,
+        referrer,
+        referrer_kind,
+        file_referrer,
+      )?;
       specifier = s;
       media_type = Some(mt);
     }

@@ -1435,7 +1509,8 @@ impl Documents {
       return Some((specifier, media_type));
     };
     if let Some(types) = doc.maybe_types_dependency().maybe_specifier() {
-      self.resolve_dependency(types, &specifier, file_referrer)
+      let specifier_kind = self.is_cjs_resolver.get_doc_module_kind(&doc);
+      self.resolve_dependency(types, &specifier, specifier_kind, file_referrer)
     } else {
       Some((doc.specifier().clone(), doc.media_type()))
     }

@@ -1503,6 +1578,7 @@ fn parse_and_analyze_module(
   maybe_headers: Option<&HashMap<String, String>>,
   media_type: MediaType,
   file_referrer: Option<&ModuleSpecifier>,
+  is_cjs_resolver: &LspIsCjsResolver,
   resolver: &LspResolver,
 ) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
   let parsed_source_result = parse_source(specifier.clone(), text, media_type);

@@ -1511,6 +1587,7 @@ fn parse_and_analyze_module(
     &parsed_source_result,
     maybe_headers,
     file_referrer,
+    is_cjs_resolver,
     resolver,
   );
   (Some(parsed_source_result), Some(module_result))

@@ -1536,11 +1613,26 @@ fn analyze_module(
   parsed_source_result: &ParsedSourceResult,
   maybe_headers: Option<&HashMap<String, String>>,
   file_referrer: Option<&ModuleSpecifier>,
+  is_cjs_resolver: &LspIsCjsResolver,
   resolver: &LspResolver,
 ) -> ModuleResult {
   match parsed_source_result {
     Ok(parsed_source) => {
       let npm_resolver = resolver.create_graph_npm_resolver(file_referrer);
+      let cli_resolver = resolver.as_cli_resolver(file_referrer);
+      let config_data = resolver.as_config_data(file_referrer);
+      let valid_referrer = specifier.clone();
+      let jsx_import_source_config =
+        config_data.and_then(|d| d.maybe_jsx_import_source_config());
+      let resolver = SingleReferrerGraphResolver {
+        valid_referrer: &valid_referrer,
+        referrer_kind: is_cjs_resolver.get_lsp_referrer_kind(
+          &specifier,
+          Some(parsed_source.compute_is_script()),
+        ),
+        cli_resolver,
+        jsx_import_source_config: jsx_import_source_config.as_ref(),
+      };
       Ok(deno_graph::parse_module_from_ast(
         deno_graph::ParseModuleFromAstOptions {
           graph_kind: deno_graph::GraphKind::TypesOnly,

@@ -1551,7 +1643,7 @@ fn analyze_module(
           // dynamic imports like import(`./dir/${something}`) in the LSP
           file_system: &deno_graph::source::NullFileSystem,
           jsr_url_provider: &CliJsrUrlProvider,
-          maybe_resolver: Some(resolver.as_graph_resolver(file_referrer)),
+          maybe_resolver: Some(&resolver),
           maybe_npm_resolver: Some(&npm_resolver),
         },
       ))
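A recurring pattern in the hunks above is threading a per-referrer module kind (ESM vs. CJS) through every resolution call. A minimal standalone sketch of that idea follows; the enum name mirrors the diff, but the condition lists are illustrative assumptions, not Deno's actual resolution tables.

#[derive(Clone, Copy, Debug, PartialEq)]
enum NodeModuleKind {
    Esm,
    Cjs,
}

/// Hypothetical helper: pick the package.json "exports" conditions to try,
/// based on the kind of the module doing the importing.
fn resolution_conditions(referrer_kind: NodeModuleKind) -> &'static [&'static str] {
    match referrer_kind {
        NodeModuleKind::Esm => &["deno", "node", "import"],
        NodeModuleKind::Cjs => &["deno", "node", "require"],
    }
}

fn main() {
    // a CJS referrer resolves through "require"-style conditions
    assert_eq!(resolution_conditions(NodeModuleKind::Cjs)[2], "require");
    println!("{:?}", resolution_conditions(NodeModuleKind::Esm));
}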
@@ -22,6 +22,7 @@ use deno_semver::jsr::JsrPackageReqReference;
 use indexmap::Equivalent;
 use indexmap::IndexSet;
 use log::error;
+use node_resolver::NodeModuleKind;
 use serde::Deserialize;
 use serde_json::from_value;
 use std::collections::BTreeMap;

@@ -77,6 +78,7 @@ use super::parent_process_checker;
 use super::performance::Performance;
 use super::refactor;
 use super::registries::ModuleRegistry;
+use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
 use super::testing;
 use super::text;

@@ -144,6 +146,7 @@ pub struct StateSnapshot {
   pub project_version: usize,
   pub assets: AssetsSnapshot,
   pub config: Arc<Config>,
+  pub is_cjs_resolver: Arc<LspIsCjsResolver>,
   pub documents: Arc<Documents>,
   pub resolver: Arc<LspResolver>,
 }

@@ -203,6 +206,7 @@ pub struct Inner {
   pub documents: Documents,
   http_client_provider: Arc<HttpClientProvider>,
   initial_cwd: PathBuf,
+  pub is_cjs_resolver: Arc<LspIsCjsResolver>,
   jsr_search_api: CliJsrSearchApi,
   /// Handles module registries, which allow discovery of modules
   module_registry: ModuleRegistry,

@@ -480,6 +484,7 @@ impl Inner {
     let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
       panic!("Could not resolve current working directory")
     });
+    let is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&cache));

     Self {
       assets,

@@ -491,6 +496,7 @@ impl Inner {
       documents,
       http_client_provider,
       initial_cwd: initial_cwd.clone(),
+      is_cjs_resolver,
       jsr_search_api,
       project_version: 0,
       task_queue: Default::default(),

@@ -601,6 +607,7 @@ impl Inner {
       project_version: self.project_version,
       assets: self.assets.snapshot(),
       config: Arc::new(self.config.clone()),
+      is_cjs_resolver: self.is_cjs_resolver.clone(),
       documents: Arc::new(self.documents.clone()),
       resolver: self.resolver.snapshot(),
     })

@@ -622,6 +629,7 @@ impl Inner {
       }
     });
     self.cache = LspCache::new(global_cache_url);
+    self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&self.cache));
     let deno_dir = self.cache.deno_dir();
     let workspace_settings = self.config.workspace_settings();
     let maybe_root_path = self

@@ -982,7 +990,7 @@ impl Inner {
     spawn(async move {
       let specifier = {
         let inner = ls.inner.read().await;
-        let resolver = inner.resolver.as_graph_resolver(Some(&referrer));
+        let resolver = inner.resolver.as_cli_resolver(Some(&referrer));
         let Ok(specifier) = resolver.resolve(
           &specifier,
           &deno_graph::Range {

@@ -990,6 +998,7 @@ impl Inner {
             start: deno_graph::Position::zeroed(),
             end: deno_graph::Position::zeroed(),
           },
+          NodeModuleKind::Esm,
          deno_graph::source::ResolutionMode::Types,
         ) else {
           return;

@@ -1622,6 +1631,10 @@ impl Inner {
     let file_diagnostics = self
       .diagnostics_server
       .get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version());
+    let specifier_kind = asset_or_doc
+      .document()
+      .map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
+      .unwrap_or(NodeModuleKind::Esm);
     let mut includes_no_cache = false;
     for diagnostic in &fixable_diagnostics {
       match diagnostic.source.as_deref() {

@@ -1660,7 +1673,13 @@ impl Inner {
           .await;
         for action in actions {
           code_actions
-            .add_ts_fix_action(&specifier, &action, diagnostic, self)
+            .add_ts_fix_action(
+              &specifier,
+              specifier_kind,
+              &action,
+              diagnostic,
+              self,
+            )
             .map_err(|err| {
               error!("Unable to convert fix: {:#}", err);
               LspError::internal_error()

@@ -1806,10 +1825,9 @@ impl Inner {
         error!("Unable to decode code action data: {:#}", err);
         LspError::invalid_params("The CodeAction's data is invalid.")
       })?;
-    let scope = self
-      .get_asset_or_document(&code_action_data.specifier)
-      .ok()
-      .and_then(|d| d.scope().cloned());
+    let maybe_asset_or_doc =
+      self.get_asset_or_document(&code_action_data.specifier).ok();
+    let scope = maybe_asset_or_doc.as_ref().and_then(|d| d.scope().cloned());
     let combined_code_actions = self
       .ts_server
       .get_combined_code_fix(

@@ -1836,6 +1854,11 @@ impl Inner {
     let changes = if code_action_data.fix_id == "fixMissingImport" {
       fix_ts_import_changes(
         &code_action_data.specifier,
+        maybe_asset_or_doc
+          .as_ref()
+          .and_then(|d| d.document())
+          .map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
+          .unwrap_or(NodeModuleKind::Esm),
         &combined_code_actions.changes,
         self,
       )

@@ -1889,6 +1912,10 @@ impl Inner {
       if kind_suffix == ".rewrite.function.returnType" {
         refactor_edit_info.edits = fix_ts_import_changes(
           &action_data.specifier,
+          asset_or_doc
+            .document()
+            .map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
+            .unwrap_or(NodeModuleKind::Esm),
           &refactor_edit_info.edits,
           self,
         )

@@ -2238,6 +2265,7 @@ impl Inner {
       &self.jsr_search_api,
       &self.npm_search_api,
       &self.documents,
+      &self.is_cjs_resolver,
       self.resolver.as_ref(),
       self
         .config
@@ -263,7 +263,7 @@ impl ReplLanguageServer {
   }

   fn get_document_uri(&self) -> Uri {
-    uri_parse_unencoded(self.cwd_uri.join("$deno$repl.ts").unwrap().as_str())
+    uri_parse_unencoded(self.cwd_uri.join("$deno$repl.mts").unwrap().as_str())
       .unwrap()
   }
 }
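The rename of the synthetic REPL document from ".ts" to ".mts" above matters once CJS detection is active: by TypeScript/Deno convention, an ".mts" or ".mjs" extension pins the file to ESM regardless of any nearby package.json, so the REPL document can never be misclassified as CJS. A tiny sketch of that extension rule (the mapping here is an assumed simplification, not the real media-type logic):

fn kind_from_extension(path: &str) -> &'static str {
    match path.rsplit('.').next() {
        Some("mts") | Some("mjs") => "always ESM",
        Some("cts") | Some("cjs") => "always CJS",
        _ => "depends on package.json \"type\"",
    }
}

fn main() {
    assert_eq!(kind_from_extension("$deno$repl.mts"), "always ESM");
}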
@@ -2,16 +2,18 @@

 use dashmap::DashMap;
 use deno_ast::MediaType;
-use deno_ast::ParsedSource;
 use deno_cache_dir::npm::NpmCacheDir;
 use deno_cache_dir::HttpCache;
+use deno_config::deno_json::JsxImportSourceConfig;
 use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::WorkspaceResolver;
 use deno_core::url::Url;
-use deno_graph::source::Resolver;
+use deno_graph::source::ResolutionMode;
 use deno_graph::GraphImport;
 use deno_graph::ModuleSpecifier;
+use deno_graph::Range;
 use deno_npm::NpmSystemInfo;
+use deno_path_util::url_from_directory_path;
 use deno_path_util::url_to_file_path;
 use deno_runtime::deno_fs;
 use deno_runtime::deno_node::NodeResolver;

@@ -24,6 +26,7 @@ use deno_semver::package::PackageReq;
 use indexmap::IndexMap;
 use node_resolver::errors::ClosestPkgJsonError;
 use node_resolver::InNpmPackageChecker;
+use node_resolver::NodeModuleKind;
 use node_resolver::NodeResolutionMode;
 use std::borrow::Cow;
 use std::collections::BTreeMap;

@@ -33,6 +36,7 @@ use std::collections::HashSet;
 use std::sync::Arc;

 use super::cache::LspCache;
+use super::documents::Document;
 use super::jsr::JsrCacheResolver;
 use crate::args::create_default_npmrc;
 use crate::args::CacheSetting;

@@ -53,21 +57,20 @@ use crate::npm::CliNpmResolverCreateOptions;
 use crate::npm::CliNpmResolverManagedSnapshotOption;
 use crate::npm::CreateInNpmPkgCheckerOptions;
 use crate::npm::ManagedCliNpmResolver;
-use crate::resolver::CjsTracker;
-use crate::resolver::CjsTrackerOptions;
 use crate::resolver::CliDenoResolverFs;
-use crate::resolver::CliGraphResolver;
-use crate::resolver::CliGraphResolverOptions;
 use crate::resolver::CliNodeResolver;
+use crate::resolver::CliResolver;
+use crate::resolver::CliResolverOptions;
+use crate::resolver::IsCjsResolver;
 use crate::resolver::WorkerCliNpmGraphResolver;
 use crate::tsc::into_specifier_and_media_type;
+use crate::util::fs::canonicalize_path_maybe_not_exists;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::ProgressBarStyle;

 #[derive(Debug, Clone)]
 struct LspScopeResolver {
-  cjs_tracker: Option<Arc<LspCjsTracker>>,
-  graph_resolver: Arc<CliGraphResolver>,
+  resolver: Arc<CliResolver>,
   jsr_resolver: Option<Arc<JsrCacheResolver>>,
   npm_resolver: Option<Arc<dyn CliNpmResolver>>,
   node_resolver: Option<Arc<CliNodeResolver>>,

@@ -81,8 +84,7 @@ struct LspScopeResolver {
 impl Default for LspScopeResolver {
   fn default() -> Self {
     Self {
-      cjs_tracker: None,
-      graph_resolver: create_graph_resolver(None, None, None),
+      resolver: create_cli_resolver(None, None, None),
       jsr_resolver: None,
       npm_resolver: None,
       node_resolver: None,

@@ -103,7 +105,6 @@ impl LspScopeResolver {
   ) -> Self {
     let mut npm_resolver = None;
     let mut node_resolver = None;
-    let mut lsp_cjs_tracker = None;
     let fs = Arc::new(deno_fs::RealFs);
     let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
       deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),

@@ -118,14 +119,7 @@ impl LspScopeResolver {
       .await;
     if let Some(npm_resolver) = &npm_resolver {
       let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
-      let cjs_tracker = create_cjs_tracker(
-        in_npm_pkg_checker.clone(),
-        pkg_json_resolver.clone(),
-      );
-      lsp_cjs_tracker =
-        Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone())));
       node_resolver = Some(create_node_resolver(
-        cjs_tracker,
         fs.clone(),
         in_npm_pkg_checker,
         npm_resolver,

@@ -133,7 +127,7 @@ impl LspScopeResolver {
       ));
     }
-    let graph_resolver = create_graph_resolver(
+    let cli_resolver = create_cli_resolver(
       config_data.map(|d| d.as_ref()),
       npm_resolver.as_ref(),
       node_resolver.as_ref(),

@@ -146,7 +140,9 @@ impl LspScopeResolver {
       cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
       config_data.and_then(|d| d.lockfile.clone()),
     )));
-    let npm_graph_resolver = graph_resolver.create_graph_npm_resolver();
+    let npm_graph_resolver = cli_resolver.create_graph_npm_resolver();
+    let maybe_jsx_import_source_config =
+      config_data.and_then(|d| d.maybe_jsx_import_source_config());
     let graph_imports = config_data
       .and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok())
       .map(|imports| {

@@ -154,11 +150,18 @@ impl LspScopeResolver {
         imports
           .into_iter()
           .map(|(referrer, imports)| {
+            let resolver = SingleReferrerGraphResolver {
+              valid_referrer: &referrer,
+              referrer_kind: NodeModuleKind::Esm,
+              cli_resolver: &cli_resolver,
+              jsx_import_source_config: maybe_jsx_import_source_config
+                .as_ref(),
+            };
             let graph_import = GraphImport::new(
               &referrer,
               imports,
               &CliJsrUrlProvider,
-              Some(graph_resolver.as_ref()),
+              Some(&resolver),
               Some(&npm_graph_resolver),
             );
             (referrer, graph_import)

@@ -182,6 +185,8 @@ impl LspScopeResolver {
           .resolve_req_reference(
             &req_ref,
             &referrer,
+            // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+            NodeModuleKind::Esm,
             NodeResolutionMode::Types,
           )
           .ok()?,

@@ -195,8 +200,7 @@ impl LspScopeResolver {
     let package_json_deps_by_resolution =
       Arc::new(package_json_deps_by_resolution.unwrap_or_default());
     Self {
-      cjs_tracker: lsp_cjs_tracker,
-      graph_resolver,
+      resolver: cli_resolver,
       jsr_resolver,
       npm_resolver,
       node_resolver,

@@ -216,30 +220,22 @@ impl LspScopeResolver {
       deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
     ));
     let mut node_resolver = None;
-    let mut lsp_cjs_tracker = None;
     if let Some(npm_resolver) = &npm_resolver {
       let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
-      let cjs_tracker = create_cjs_tracker(
-        in_npm_pkg_checker.clone(),
-        pkg_json_resolver.clone(),
-      );
-      lsp_cjs_tracker = Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone())));
       node_resolver = Some(create_node_resolver(
-        cjs_tracker,
         fs,
         in_npm_pkg_checker,
         npm_resolver,
         pkg_json_resolver.clone(),
       ));
     }
-    let graph_resolver = create_graph_resolver(
+    let graph_resolver = create_cli_resolver(
       self.config_data.as_deref(),
       npm_resolver.as_ref(),
       node_resolver.as_ref(),
     );
     Arc::new(Self {
-      cjs_tracker: lsp_cjs_tracker,
-      graph_resolver,
+      resolver: graph_resolver,
       jsr_resolver: self.jsr_resolver.clone(),
       npm_resolver,
       node_resolver,

@@ -334,12 +330,12 @@ impl LspResolver {
     }
   }

-  pub fn as_graph_resolver(
+  pub fn as_cli_resolver(
     &self,
     file_referrer: Option<&ModuleSpecifier>,
-  ) -> &dyn Resolver {
+  ) -> &CliResolver {
     let resolver = self.get_scope_resolver(file_referrer);
-    resolver.graph_resolver.as_ref()
+    resolver.resolver.as_ref()
   }

   pub fn create_graph_npm_resolver(

@@ -347,15 +343,15 @@ impl LspResolver {
     file_referrer: Option<&ModuleSpecifier>,
   ) -> WorkerCliNpmGraphResolver {
     let resolver = self.get_scope_resolver(file_referrer);
-    resolver.graph_resolver.create_graph_npm_resolver()
+    resolver.resolver.create_graph_npm_resolver()
   }

-  pub fn maybe_cjs_tracker(
+  pub fn as_config_data(
     &self,
     file_referrer: Option<&ModuleSpecifier>,
-  ) -> Option<&Arc<LspCjsTracker>> {
+  ) -> Option<&Arc<ConfigData>> {
     let resolver = self.get_scope_resolver(file_referrer);
-    resolver.cjs_tracker.as_ref()
+    resolver.config_data.as_ref()
   }

   pub fn maybe_node_resolver(

@@ -429,13 +425,19 @@ impl LspResolver {
     &self,
     req_ref: &NpmPackageReqReference,
     referrer: &ModuleSpecifier,
+    referrer_kind: NodeModuleKind,
     file_referrer: Option<&ModuleSpecifier>,
   ) -> Option<(ModuleSpecifier, MediaType)> {
     let resolver = self.get_scope_resolver(file_referrer);
     let node_resolver = resolver.node_resolver.as_ref()?;
     Some(into_specifier_and_media_type(Some(
       node_resolver
-        .resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types)
+        .resolve_req_reference(
+          req_ref,
+          referrer,
+          referrer_kind,
+          NodeResolutionMode::Types,
+        )
         .ok()?,
     )))
   }

@@ -478,6 +480,7 @@ impl LspResolver {
     &self,
     specifier_text: &str,
     referrer: &ModuleSpecifier,
+    referrer_kind: NodeModuleKind,
   ) -> bool {
     let resolver = self.get_scope_resolver(Some(referrer));
     let Some(node_resolver) = resolver.node_resolver.as_ref() else {

@@ -487,6 +490,7 @@ impl LspResolver {
       .resolve_if_for_npm_pkg(
         specifier_text,
         referrer,
+        referrer_kind,
         NodeResolutionMode::Types,
       )
       .ok()

@@ -615,21 +619,6 @@ async fn create_npm_resolver(
   Some(create_cli_npm_resolver_for_lsp(options).await)
 }

-fn create_cjs_tracker(
-  in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
-  pkg_json_resolver: Arc<PackageJsonResolver>,
-) -> Arc<CjsTracker> {
-  Arc::new(CjsTracker::new(
-    in_npm_pkg_checker,
-    pkg_json_resolver,
-    CjsTrackerOptions {
-      // todo(dsherret): support in the lsp by stabilizing the feature
-      // so that we don't have to pipe the config in here
-      unstable_detect_cjs: false,
-    },
-  ))
-}
-
 fn create_in_npm_pkg_checker(
   npm_resolver: &Arc<dyn CliNpmResolver>,
 ) -> Arc<dyn InNpmPackageChecker> {

@@ -649,7 +638,6 @@ fn create_in_npm_pkg_checker(
 }

 fn create_node_resolver(
-  cjs_tracker: Arc<CjsTracker>,
   fs: Arc<dyn deno_fs::FileSystem>,
   in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
   npm_resolver: &Arc<dyn CliNpmResolver>,

@@ -662,7 +650,6 @@ fn create_node_resolver(
     pkg_json_resolver.clone(),
   ));
   Arc::new(CliNodeResolver::new(
-    cjs_tracker.clone(),
     fs,
     in_npm_pkg_checker,
     node_resolver_inner,

@@ -670,13 +657,12 @@ fn create_node_resolver(
   ))
 }

-fn create_graph_resolver(
+fn create_cli_resolver(
   config_data: Option<&ConfigData>,
   npm_resolver: Option<&Arc<dyn CliNpmResolver>>,
   node_resolver: Option<&Arc<CliNodeResolver>>,
-) -> Arc<CliGraphResolver> {
-  let workspace = config_data.map(|d| &d.member_dir.workspace);
-  Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
+) -> Arc<CliResolver> {
+  Arc::new(CliResolver::new(CliResolverOptions {
     node_resolver: node_resolver.cloned(),
     npm_resolver: npm_resolver.cloned(),
     workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else(

@@ -691,9 +677,6 @@ fn create_graph_resolver(
         ))
       },
     ),
-    maybe_jsx_import_source_config: workspace.and_then(|workspace| {
-      workspace.to_maybe_jsx_import_source_config().ok().flatten()
-    }),
     maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
     bare_node_builtins_enabled: config_data
       .is_some_and(|d| d.unstable.contains("bare-node-builtins")),

@@ -726,6 +709,141 @@ impl std::fmt::Debug for RedirectResolver {
   }
 }

+#[derive(Debug)]
+pub struct LspIsCjsResolver {
+  inner: IsCjsResolver,
+}
+
+impl Default for LspIsCjsResolver {
+  fn default() -> Self {
+    LspIsCjsResolver::new(&Default::default())
+  }
+}
+
+impl LspIsCjsResolver {
+  pub fn new(cache: &LspCache) -> Self {
+    #[derive(Debug)]
+    struct LspInNpmPackageChecker {
+      global_cache_dir: ModuleSpecifier,
+    }
+
+    impl LspInNpmPackageChecker {
+      pub fn new(cache: &LspCache) -> Self {
+        let npm_folder_path = cache.deno_dir().npm_folder_path();
+        Self {
+          global_cache_dir: url_from_directory_path(
+            &canonicalize_path_maybe_not_exists(&npm_folder_path)
+              .unwrap_or(npm_folder_path),
+          )
+          .unwrap_or_else(|_| {
+            ModuleSpecifier::parse("file:///invalid/").unwrap()
+          }),
+        }
+      }
+    }
+
+    impl InNpmPackageChecker for LspInNpmPackageChecker {
+      fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
+        if specifier.scheme() != "file" {
+          return false;
+        }
+        if specifier
+          .as_str()
+          .starts_with(self.global_cache_dir.as_str())
+        {
+          return true;
+        }
+        specifier.as_str().contains("/node_modules/")
+      }
+    }
+
+    let fs = Arc::new(deno_fs::RealFs);
+    let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
+      deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
+    ));
+
+    LspIsCjsResolver {
+      inner: IsCjsResolver::new(
+        Arc::new(LspInNpmPackageChecker::new(cache)),
+        pkg_json_resolver,
+        crate::resolver::IsCjsResolverOptions {
+          detect_cjs: true,
+          is_node_main: false,
+        },
+      ),
+    }
+  }
+
+  pub fn get_maybe_doc_module_kind(
+    &self,
+    specifier: &ModuleSpecifier,
+    maybe_document: Option<&Document>,
+  ) -> NodeModuleKind {
+    self.get_lsp_referrer_kind(
+      specifier,
+      maybe_document.and_then(|d| d.is_script()),
+    )
+  }
+
+  pub fn get_doc_module_kind(&self, document: &Document) -> NodeModuleKind {
+    self.get_lsp_referrer_kind(document.specifier(), document.is_script())
+  }
+
+  pub fn get_lsp_referrer_kind(
+    &self,
+    specifier: &ModuleSpecifier,
+    is_script: Option<bool>,
+  ) -> NodeModuleKind {
+    self.inner.get_lsp_referrer_kind(specifier, is_script)
+  }
+}
+
+#[derive(Debug)]
+pub struct SingleReferrerGraphResolver<'a> {
+  pub valid_referrer: &'a ModuleSpecifier,
+  pub referrer_kind: NodeModuleKind,
+  pub cli_resolver: &'a CliResolver,
+  pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>,
+}
+
+impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> {
+  fn default_jsx_import_source(&self) -> Option<String> {
+    self
+      .jsx_import_source_config
+      .and_then(|c| c.default_specifier.clone())
+  }
+
+  fn default_jsx_import_source_types(&self) -> Option<String> {
+    self
+      .jsx_import_source_config
+      .and_then(|c| c.default_types_specifier.clone())
+  }
+
+  fn jsx_import_source_module(&self) -> &str {
+    self
+      .jsx_import_source_config
+      .map(|c| c.module.as_str())
+      .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
+  }
+
+  fn resolve(
+    &self,
+    specifier_text: &str,
+    referrer_range: &Range,
+    mode: ResolutionMode,
+  ) -> Result<ModuleSpecifier, deno_graph::source::ResolveError> {
+    // this resolver assumes it will only be used with a single referrer
+    // with the provided referrer kind
+    debug_assert_eq!(referrer_range.specifier, *self.valid_referrer);
+    self.cli_resolver.resolve(
+      specifier_text,
+      referrer_range,
+      self.referrer_kind,
+      mode,
+    )
+  }
+}
+
 impl RedirectResolver {
   fn new(
     cache: Arc<dyn HttpCache>,

@@ -842,45 +960,6 @@ impl RedirectResolver {
   }
 }

-#[derive(Debug)]
-pub struct LspCjsTracker {
-  cjs_tracker: Arc<CjsTracker>,
-}
-
-impl LspCjsTracker {
-  pub fn new(cjs_tracker: Arc<CjsTracker>) -> Self {
-    Self { cjs_tracker }
-  }
-
-  pub fn is_cjs(
-    &self,
-    specifier: &ModuleSpecifier,
-    media_type: MediaType,
-    maybe_parsed_source: Option<&ParsedSource>,
-  ) -> bool {
-    if let Some(module_kind) =
-      self.cjs_tracker.get_known_kind(specifier, media_type)
-    {
-      module_kind.is_cjs()
-    } else {
-      let maybe_is_script = maybe_parsed_source.map(|p| p.compute_is_script());
-      maybe_is_script
-        .and_then(|is_script| {
-          self
-            .cjs_tracker
-            .is_cjs_with_known_is_script(specifier, media_type, is_script)
-            .ok()
-        })
-        .unwrap_or_else(|| {
-          self
-            .cjs_tracker
-            .is_maybe_cjs(specifier, media_type)
-            .unwrap_or(false)
-        })
-    }
-  }
-}
-
 #[cfg(test)]
 mod tests {
   use super::*;
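The SingleReferrerGraphResolver introduced above is built fresh per referrer, so the referrer's module kind can be captured once up front, with a debug assertion guarding against use with any other referrer. A toy model of that pattern under hypothetical types (not the real deno_graph traits):

#[derive(Debug, PartialEq)]
struct Url(String);

struct SingleReferrerResolver<'a> {
    valid_referrer: &'a Url,
    referrer_is_cjs: bool,
}

impl<'a> SingleReferrerResolver<'a> {
    fn resolve(&self, specifier: &str, referrer: &Url) -> String {
        // this resolver assumes it is only used with the referrer it was built for
        debug_assert_eq!(referrer, self.valid_referrer);
        format!(
            "{} resolved from {} ({})",
            specifier,
            referrer.0,
            if self.referrer_is_cjs { "cjs" } else { "esm" }
        )
    }
}

fn main() {
    let referrer = Url("file:///app/mod.ts".to_string());
    let resolver = SingleReferrerResolver {
        valid_referrer: &referrer,
        referrer_is_cjs: false,
    };
    println!("{}", resolver.resolve("./util.ts", &referrer));
}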
@@ -69,6 +69,7 @@ use indexmap::IndexMap;
 use indexmap::IndexSet;
 use lazy_regex::lazy_regex;
 use log::error;
+use node_resolver::NodeModuleKind;
 use once_cell::sync::Lazy;
 use regex::Captures;
 use regex::Regex;

@@ -4401,25 +4402,15 @@ fn op_load<'s>(
     None
   } else {
     let asset_or_document = state.get_asset_or_document(&specifier);
-    asset_or_document.map(|doc| {
-      let maybe_cjs_tracker = state
-        .state_snapshot
-        .resolver
-        .maybe_cjs_tracker(Some(&specifier));
-      LoadResponse {
-        data: doc.text(),
-        script_kind: crate::tsc::as_ts_script_kind(doc.media_type()),
-        version: state.script_version(&specifier),
-        is_cjs: maybe_cjs_tracker
-          .map(|t| {
-            t.is_cjs(
-              &specifier,
-              doc.media_type(),
-              doc.maybe_parsed_source().and_then(|p| p.as_ref().ok()),
-            )
-          })
-          .unwrap_or(false),
-      }
-    })
+    asset_or_document.map(|doc| LoadResponse {
+      data: doc.text(),
+      script_kind: crate::tsc::as_ts_script_kind(doc.media_type()),
+      version: state.script_version(&specifier),
+      is_cjs: doc
+        .document()
+        .map(|d| state.state_snapshot.is_cjs_resolver.get_doc_module_kind(d))
+        .unwrap_or(NodeModuleKind::Esm)
+        == NodeModuleKind::Cjs,
+    })
   };

@@ -4662,6 +4653,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
     let (types, _) = documents.resolve_dependency(
       types,
       specifier,
+      state
+        .state_snapshot
+        .is_cjs_resolver
+        .get_doc_module_kind(doc),
       doc.file_referrer(),
     )?;
     let types_doc = documents.get_or_load(&types, doc.file_referrer())?;

@@ -5544,6 +5539,7 @@ mod tests {
       documents: Arc::new(documents),
       assets: Default::default(),
       config: Arc::new(config),
+      is_cjs_resolver: Default::default(),
       resolver,
     });
     let performance = Arc::new(Performance::default());
@@ -27,8 +27,8 @@ use crate::node;
 use crate::node::CliNodeCodeTranslator;
 use crate::npm::CliNpmResolver;
 use crate::resolver::CjsTracker;
-use crate::resolver::CliGraphResolver;
 use crate::resolver::CliNodeResolver;
+use crate::resolver::CliResolver;
 use crate::resolver::ModuleCodeStringSource;
 use crate::resolver::NotSupportedKindInNpmError;
 use crate::resolver::NpmModuleLoader;

@@ -60,7 +60,6 @@ use deno_core::RequestedModuleType;
 use deno_core::ResolutionKind;
 use deno_core::SourceCodeCacheInfo;
 use deno_graph::source::ResolutionMode;
-use deno_graph::source::Resolver;
 use deno_graph::GraphKind;
 use deno_graph::JsModule;
 use deno_graph::JsonModule;

@@ -73,6 +72,7 @@ use deno_runtime::deno_node::create_host_defined_options;
 use deno_runtime::deno_node::NodeRequireLoader;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_semver::npm::NpmPackageReqReference;
+use node_resolver::errors::ClosestPkgJsonError;
 use node_resolver::InNpmPackageChecker;
 use node_resolver::NodeResolutionMode;

@@ -206,7 +206,6 @@ struct SharedCliModuleLoaderState {
   lib_worker: TsTypeLib,
   initial_cwd: PathBuf,
   is_inspecting: bool,
-  is_npm_main: bool,
   is_repl: bool,
   cjs_tracker: Arc<CjsTracker>,
   code_cache: Option<Arc<CodeCache>>,

@@ -220,7 +219,7 @@ struct SharedCliModuleLoaderState {
   npm_resolver: Arc<dyn CliNpmResolver>,
   npm_module_loader: NpmModuleLoader,
   parsed_source_cache: Arc<ParsedSourceCache>,
-  resolver: Arc<CliGraphResolver>,
+  resolver: Arc<CliResolver>,
 }

 pub struct CliModuleLoaderFactory {

@@ -243,7 +242,7 @@ impl CliModuleLoaderFactory {
     npm_resolver: Arc<dyn CliNpmResolver>,
     npm_module_loader: NpmModuleLoader,
     parsed_source_cache: Arc<ParsedSourceCache>,
-    resolver: Arc<CliGraphResolver>,
+    resolver: Arc<CliResolver>,
   ) -> Self {
     Self {
       shared: Arc::new(SharedCliModuleLoaderState {

@@ -252,7 +251,6 @@ impl CliModuleLoaderFactory {
         lib_worker: options.ts_type_lib_worker(),
         initial_cwd: options.initial_cwd().to_path_buf(),
         is_inspecting: options.is_inspecting(),
-        is_npm_main: options.is_npm_main(),
         is_repl: matches!(
           options.sub_command(),
           DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_)

@@ -286,7 +284,6 @@ impl CliModuleLoaderFactory {
     Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
       lib,
       is_worker,
-      is_npm_main: self.shared.is_npm_main,
       parent_permissions,
       permissions,
       graph_container: graph_container.clone(),

@@ -295,13 +292,14 @@ impl CliModuleLoaderFactory {
       parsed_source_cache: self.shared.parsed_source_cache.clone(),
       shared: self.shared.clone(),
     })));
-    let node_require_loader = Rc::new(CliNodeRequireLoader::new(
-      self.shared.emitter.clone(),
-      self.shared.fs.clone(),
+    let node_require_loader = Rc::new(CliNodeRequireLoader {
+      cjs_tracker: self.shared.cjs_tracker.clone(),
+      emitter: self.shared.emitter.clone(),
+      fs: self.shared.fs.clone(),
       graph_container,
-      self.shared.in_npm_pkg_checker.clone(),
-      self.shared.npm_resolver.clone(),
-    ));
+      in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(),
+      npm_resolver: self.shared.npm_resolver.clone(),
+    });
     CreateModuleLoaderResult {
       module_loader,
       node_require_loader,

@@ -343,7 +341,6 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {

 struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
   lib: TsTypeLib,
-  is_npm_main: bool,
   is_worker: bool,
   /// The initial set of permissions used to resolve the static imports in the
   /// worker. These are "allow all" for main worker, and parent thread

@@ -450,7 +447,7 @@ impl<TGraphContainer: ModuleGraphContainer>
     let referrer = if referrer.is_empty() && self.shared.is_repl {
       // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL
       // and `Deno.core.evalContext` API. Ideally we should always have a referrer filled
-      "./$deno$repl.ts"
+      "./$deno$repl.mts"
     } else {
       referrer
     };

@@ -478,7 +475,12 @@ impl<TGraphContainer: ModuleGraphContainer>
         self
           .shared
           .node_resolver
-          .resolve(raw_specifier, referrer, NodeResolutionMode::Execution)?
+          .resolve(
+            raw_specifier,
+            referrer,
+            self.shared.cjs_tracker.get_referrer_kind(referrer),
+            NodeResolutionMode::Execution,
+          )?
           .into_url(),
       );
     }

@@ -508,6 +510,7 @@ impl<TGraphContainer: ModuleGraphContainer>
           start: deno_graph::Position::zeroed(),
           end: deno_graph::Position::zeroed(),
         },
+        self.shared.cjs_tracker.get_referrer_kind(referrer),
        ResolutionMode::Execution,
      )?),
    };

@@ -518,6 +521,7 @@ impl<TGraphContainer: ModuleGraphContainer>
       return self.shared.node_resolver.resolve_req_reference(
         &reference,
         referrer,
+        self.shared.cjs_tracker.get_referrer_kind(referrer),
         NodeResolutionMode::Execution,
       );
     }

@@ -538,6 +542,7 @@ impl<TGraphContainer: ModuleGraphContainer>
           &package_folder,
           module.nv_reference.sub_path(),
           Some(referrer),
+          self.shared.cjs_tracker.get_referrer_kind(referrer),
           NodeResolutionMode::Execution,
         )
         .with_context(|| {

@@ -668,14 +673,11 @@ impl<TGraphContainer: ModuleGraphContainer>
         is_script,
         ..
       })) => {
-        // todo(dsherret): revert in https://github.com/denoland/deno/pull/26439
-        if self.is_npm_main && *is_script
-          || self.shared.cjs_tracker.is_cjs_with_known_is_script(
-            specifier,
-            *media_type,
-            *is_script,
-          )?
-        {
+        if self.shared.cjs_tracker.is_cjs_with_known_is_script(
+          specifier,
+          *media_type,
+          *is_script,
+        )? {
           return Ok(Some(CodeOrDeferredEmit::Cjs {
             specifier,
             media_type: *media_type,

@@ -1031,6 +1033,7 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit {

 #[derive(Debug)]
 struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
+  cjs_tracker: Arc<CjsTracker>,
   emitter: Arc<Emitter>,
   fs: Arc<dyn FileSystem>,
   graph_container: TGraphContainer,

@@ -1038,26 +1041,6 @@ struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
   npm_resolver: Arc<dyn CliNpmResolver>,
 }

-impl<TGraphContainer: ModuleGraphContainer>
-  CliNodeRequireLoader<TGraphContainer>
-{
-  pub fn new(
-    emitter: Arc<Emitter>,
-    fs: Arc<dyn FileSystem>,
-    graph_container: TGraphContainer,
-    in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
-    npm_resolver: Arc<dyn CliNpmResolver>,
-  ) -> Self {
-    Self {
-      emitter,
-      fs,
-      graph_container,
-      in_npm_pkg_checker,
-      npm_resolver,
-    }
-  }
-}
-
 impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
   for CliNodeRequireLoader<TGraphContainer>
 {

@@ -1103,4 +1086,12 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
     Ok(text)
   }
+
+  fn is_maybe_cjs(
+    &self,
+    specifier: &ModuleSpecifier,
+  ) -> Result<bool, ClosestPkgJsonError> {
+    let media_type = MediaType::from_specifier(specifier);
+    self.cjs_tracker.is_maybe_cjs(specifier, media_type)
+  }
 }
12
cli/node.rs

@@ -62,10 +62,6 @@ pub struct CliCjsCodeAnalyzer {
   cjs_tracker: Arc<CjsTracker>,
   fs: deno_fs::FileSystemRc,
   parsed_source_cache: Option<Arc<ParsedSourceCache>>,
-  // todo(dsherret): hack, remove in https://github.com/denoland/deno/pull/26439
-  // For example, this does not properly handle if cjs analysis was already done
-  // and has been cached.
-  is_npm_main: bool,
 }

 impl CliCjsCodeAnalyzer {

@@ -74,14 +70,12 @@ impl CliCjsCodeAnalyzer {
     cjs_tracker: Arc<CjsTracker>,
     fs: deno_fs::FileSystemRc,
     parsed_source_cache: Option<Arc<ParsedSourceCache>>,
-    is_npm_main: bool,
   ) -> Self {
     Self {
       cache,
       cjs_tracker,
       fs,
       parsed_source_cache,
-      is_npm_main,
     }
   }

@@ -106,9 +100,7 @@ impl CliCjsCodeAnalyzer {
     }

     let cjs_tracker = self.cjs_tracker.clone();
-    let is_npm_main = self.is_npm_main;
-    let is_maybe_cjs =
-      cjs_tracker.is_maybe_cjs(specifier, media_type)? || is_npm_main;
+    let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?;
     let analysis = if is_maybe_cjs {
       let maybe_parsed_source = self
         .parsed_source_cache

@@ -135,7 +127,7 @@ impl CliCjsCodeAnalyzer {
           parsed_source.specifier(),
           media_type,
           is_script,
-        )? || is_script && is_npm_main;
+        )?;
         if is_cjs {
           let analysis = parsed_source.analyze_cjs();
           Ok(CliCjsAnalysis::Cjs {
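The removed `is_npm_main` comment above points at a caching pitfall worth spelling out: when an analysis result is memoized by (specifier, media type) alone, any extra input that can change the result silently poisons the cache. A self-contained sketch of that failure mode (hypothetical cache, not Deno's):

use std::collections::HashMap;

fn analyze(
    cache: &mut HashMap<(String, String), bool>,
    spec: &str,
    mt: &str,
    extra: bool,
) -> bool {
    *cache
        .entry((spec.to_string(), mt.to_string()))
        // first caller's `extra` wins; later callers get a stale answer
        .or_insert(extra)
}

fn main() {
    let mut cache = HashMap::new();
    assert!(analyze(&mut cache, "file:///a.js", "JavaScript", true));
    // same key, different `extra` -- still returns the cached `true`
    assert!(analyze(&mut cache, "file:///a.js", "JavaScript", false));
}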
@@ -18,6 +18,7 @@ pub struct BinEntries<'a> {
   seen_names: HashMap<&'a str, &'a NpmPackageId>,
   /// The bin entries
   entries: Vec<(&'a NpmResolutionPackage, PathBuf)>,
+  sorted: bool,
 }

 /// Returns the name of the default binary for the given package.

@@ -31,6 +32,20 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
     .map_or(package.id.nv.name.as_str(), |(_, name)| name)
 }

+pub fn warn_missing_entrypoint(
+  bin_name: &str,
+  package_path: &Path,
+  entrypoint: &Path,
+) {
+  log::warn!(
+    "{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
+    deno_terminal::colors::yellow("Warning"),
+    bin_name,
+    package_path.display(),
+    entrypoint.display()
+  );
+}
+
 impl<'a> BinEntries<'a> {
   pub fn new() -> Self {
     Self::default()

@@ -42,6 +57,7 @@ impl<'a> BinEntries<'a> {
     package: &'a NpmResolutionPackage,
     package_path: PathBuf,
   ) {
+    self.sorted = false;
     // check for a new collision, if we haven't already
     // found one
     match package.bin.as_ref().unwrap() {

@@ -79,16 +95,21 @@ impl<'a> BinEntries<'a> {
       &str, // bin name
       &str, // bin script
     ) -> Result<(), AnyError>,
+    mut filter: impl FnMut(&NpmResolutionPackage) -> bool,
   ) -> Result<(), AnyError> {
-    if !self.collisions.is_empty() {
+    if !self.collisions.is_empty() && !self.sorted {
       // walking the dependency tree to find out the depth of each package
       // is sort of expensive, so we only do it if there's a collision
       sort_by_depth(snapshot, &mut self.entries, &mut self.collisions);
+      self.sorted = true;
     }

     let mut seen = HashSet::new();

     for (package, package_path) in &self.entries {
+      if !filter(package) {
+        continue;
+      }
       if let Some(bin_entries) = &package.bin {
         match bin_entries {
           deno_npm::registry::NpmPackageVersionBinEntry::String(script) => {

@@ -118,8 +139,8 @@ impl<'a> BinEntries<'a> {
   }

   /// Collect the bin entries into a vec of (name, script path)
-  pub fn into_bin_files(
-    mut self,
+  pub fn collect_bin_files(
+    &mut self,
     snapshot: &NpmResolutionSnapshot,
   ) -> Vec<(String, PathBuf)> {
     let mut bins = Vec::new();

@@ -131,17 +152,18 @@ impl<'a> BinEntries<'a> {
         bins.push((name.to_string(), package_path.join(script)));
         Ok(())
       },
+      |_| true,
     )
     .unwrap();
     bins
   }

-  /// Finish setting up the bin entries, writing the necessary files
-  /// to disk.
-  pub fn finish(
+  fn set_up_entries_filtered(
     mut self,
     snapshot: &NpmResolutionSnapshot,
     bin_node_modules_dir_path: &Path,
+    filter: impl FnMut(&NpmResolutionPackage) -> bool,
+    mut handler: impl FnMut(&EntrySetupOutcome<'_>),
   ) -> Result<(), AnyError> {
     if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
       std::fs::create_dir_all(bin_node_modules_dir_path).with_context(

@@ -160,18 +182,54 @@ impl<'a> BinEntries<'a> {
         Ok(())
       },
       |package, package_path, name, script| {
-        set_up_bin_entry(
+        let outcome = set_up_bin_entry(
           package,
           name,
           script,
           package_path,
           bin_node_modules_dir_path,
-        )
+        )?;
+        handler(&outcome);
+        Ok(())
       },
+      filter,
     )?;

     Ok(())
   }

+  /// Finish setting up the bin entries, writing the necessary files
+  /// to disk.
+  pub fn finish(
+    self,
+    snapshot: &NpmResolutionSnapshot,
+    bin_node_modules_dir_path: &Path,
+    handler: impl FnMut(&EntrySetupOutcome<'_>),
+  ) -> Result<(), AnyError> {
+    self.set_up_entries_filtered(
+      snapshot,
+      bin_node_modules_dir_path,
+      |_| true,
+      handler,
+    )
+  }
+
+  /// Finish setting up the bin entries, writing the necessary files
+  /// to disk.
+  pub fn finish_only(
+    self,
+    snapshot: &NpmResolutionSnapshot,
+    bin_node_modules_dir_path: &Path,
+    handler: impl FnMut(&EntrySetupOutcome<'_>),
+    only: &HashSet<&NpmPackageId>,
+  ) -> Result<(), AnyError> {
+    self.set_up_entries_filtered(
+      snapshot,
+      bin_node_modules_dir_path,
+      |package| only.contains(&package.id),
+      handler,
+    )
+  }
 }

 // walk the dependency tree to find out the depth of each package

@@ -233,16 +291,17 @@ fn sort_by_depth(
   });
 }

-pub fn set_up_bin_entry(
-  package: &NpmResolutionPackage,
-  bin_name: &str,
+pub fn set_up_bin_entry<'a>(
+  package: &'a NpmResolutionPackage,
+  bin_name: &'a str,
   #[allow(unused_variables)] bin_script: &str,
-  #[allow(unused_variables)] package_path: &Path,
+  #[allow(unused_variables)] package_path: &'a Path,
   bin_node_modules_dir_path: &Path,
-) -> Result<(), AnyError> {
+) -> Result<EntrySetupOutcome<'a>, AnyError> {
   #[cfg(windows)]
   {
     set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?;
+    Ok(EntrySetupOutcome::Success)
   }
   #[cfg(unix)]
   {

@@ -252,9 +311,8 @@ pub fn set_up_bin_entry(
       bin_script,
       package_path,
       bin_node_modules_dir_path,
-    )?;
+    )
   }
-  Ok(())
 }

 #[cfg(windows)]

@@ -301,14 +359,39 @@ fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
   Ok(true)
 }

+pub enum EntrySetupOutcome<'a> {
+  #[cfg_attr(windows, allow(dead_code))]
+  MissingEntrypoint {
+    bin_name: &'a str,
+    package_path: &'a Path,
+    entrypoint: PathBuf,
+    package: &'a NpmResolutionPackage,
+  },
+  Success,
+}
+
+impl<'a> EntrySetupOutcome<'a> {
+  pub fn warn_if_failed(&self) {
+    match self {
+      EntrySetupOutcome::MissingEntrypoint {
+        bin_name,
+        package_path,
+        entrypoint,
+        ..
+      } => warn_missing_entrypoint(bin_name, package_path, entrypoint),
+      EntrySetupOutcome::Success => {}
+    }
+  }
+}
+
 #[cfg(unix)]
-fn symlink_bin_entry(
-  _package: &NpmResolutionPackage,
-  bin_name: &str,
+fn symlink_bin_entry<'a>(
+  package: &'a NpmResolutionPackage,
+  bin_name: &'a str,
   bin_script: &str,
-  package_path: &Path,
+  package_path: &'a Path,
   bin_node_modules_dir_path: &Path,
-) -> Result<(), AnyError> {
+) -> Result<EntrySetupOutcome<'a>, AnyError> {
   use std::io;
   use std::os::unix::fs::symlink;
   let link = bin_node_modules_dir_path.join(bin_name);

@@ -318,14 +401,12 @@ fn symlink_bin_entry(
     format!("Can't set up '{}' bin at {}", bin_name, original.display())
   })?;
   if !found {
-    log::warn!(
-      "{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
-      deno_terminal::colors::yellow("Warning"),
+    return Ok(EntrySetupOutcome::MissingEntrypoint {
       bin_name,
-      package_path.display(),
-      original.display()
-    );
-    return Ok(());
+      package_path,
+      entrypoint: original,
+      package,
+    });
   }

   let original_relative =

@@ -348,7 +429,7 @@ fn symlink_bin_entry(
         original_relative.display()
       )
     })?;
-    return Ok(());
+    return Ok(EntrySetupOutcome::Success);
   }
   return Err(err).with_context(|| {
     format!(

@@ -359,5 +440,5 @@ fn symlink_bin_entry(
   });
 }

-  Ok(())
+  Ok(EntrySetupOutcome::Success)
 }
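The refactor above replaces an eager log::warn! with a returned EntrySetupOutcome, so the caller decides whether to warn now, warn later, or stay silent. A minimal sketch of that outcome-instead-of-side-effect pattern under hypothetical types:

enum SetupOutcome {
    MissingEntrypoint { bin_name: String },
    Success,
}

impl SetupOutcome {
    fn warn_if_failed(&self) {
        if let SetupOutcome::MissingEntrypoint { bin_name } = self {
            eprintln!("Warning: entry point for '{bin_name}' doesn't exist");
        }
    }
}

fn set_up_bin(bin_name: &str, entrypoint_exists: bool) -> SetupOutcome {
    if entrypoint_exists {
        SetupOutcome::Success
    } else {
        SetupOutcome::MissingEntrypoint {
            bin_name: bin_name.to_string(),
        }
    }
}

fn main() {
    // caller chooses the policy; here: warn immediately
    set_up_bin("esbuild", false).warn_if_failed();
}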
@@ -10,6 +10,7 @@ use deno_runtime::deno_io::FromRawIoHandle;
 use deno_semver::package::PackageNv;
 use deno_semver::Version;
 use std::borrow::Cow;
+use std::collections::HashSet;
 use std::rc::Rc;

 use std::path::Path;

@@ -61,7 +62,7 @@ impl<'a> LifecycleScripts<'a> {
   }
 }

-fn has_lifecycle_scripts(
+pub fn has_lifecycle_scripts(
   package: &NpmResolutionPackage,
   package_path: &Path,
 ) -> bool {

@@ -83,7 +84,7 @@ fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
 }

 impl<'a> LifecycleScripts<'a> {
-  fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
+  pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
     if !self.strategy.can_run_scripts() {
       return false;
     }

@@ -98,6 +99,9 @@ impl<'a> LifecycleScripts<'a> {
       PackagesAllowedScripts::None => false,
     }
   }
+  pub fn has_run_scripts(&self, package: &NpmResolutionPackage) -> bool {
+    self.strategy.has_run(package)
+  }
   /// Register a package for running lifecycle scripts, if applicable.
   ///
   /// `package_path` is the path containing the package's code (its root dir).

@@ -110,12 +114,12 @@ impl<'a> LifecycleScripts<'a> {
   ) {
     if has_lifecycle_scripts(package, &package_path) {
       if self.can_run_scripts(&package.id.nv) {
-        if !self.strategy.has_run(package) {
+        if !self.has_run_scripts(package) {
           self
             .packages_with_scripts
             .push((package, package_path.into_owned()));
         }
-      } else if !self.strategy.has_run(package)
+      } else if !self.has_run_scripts(package)
         && (self.config.explicit_install || !self.strategy.has_warned(package))
       {
         // Skip adding `esbuild` as it is known that it can work properly without lifecycle script

@@ -149,22 +153,32 @@ impl<'a> LifecycleScripts<'a> {
     self,
     snapshot: &NpmResolutionSnapshot,
     packages: &[NpmResolutionPackage],
-    root_node_modules_dir_path: Option<&Path>,
+    root_node_modules_dir_path: &Path,
     progress_bar: &ProgressBar,
   ) -> Result<(), AnyError> {
     self.warn_not_run_scripts()?;
     let get_package_path =
       |p: &NpmResolutionPackage| self.strategy.package_path(p);
     let mut failed_packages = Vec::new();
+    let mut bin_entries = BinEntries::new();
     if !self.packages_with_scripts.is_empty() {
+      let package_ids = self
+        .packages_with_scripts
+        .iter()
+        .map(|(p, _)| &p.id)
+        .collect::<HashSet<_>>();
       // get custom commands for each bin available in the node_modules dir (essentially
       // the scripts that are in `node_modules/.bin`)
-      let base =
-        resolve_baseline_custom_commands(snapshot, packages, get_package_path)?;
+      let base = resolve_baseline_custom_commands(
+        &mut bin_entries,
+        snapshot,
+        packages,
+        get_package_path,
+      )?;
       let init_cwd = &self.config.initial_cwd;
       let process_state = crate::npm::managed::npm_process_state(
         snapshot.as_valid_serialized(),
-        root_node_modules_dir_path,
+        Some(root_node_modules_dir_path),
       );

       let mut env_vars = crate::task_runner::real_env_vars();

@@ -221,7 +235,7 @@ impl<'a> LifecycleScripts<'a> {
           custom_commands: custom_commands.clone(),
           init_cwd,
           argv: &[],
-          root_node_modules_dir: root_node_modules_dir_path,
+          root_node_modules_dir: Some(root_node_modules_dir_path),
           stdio: Some(crate::task_runner::TaskIo {
             stderr: TaskStdio::piped(),
             stdout: TaskStdio::piped(),

@@ -262,6 +276,17 @@ impl<'a> LifecycleScripts<'a> {
         }
         self.strategy.did_run_scripts(package)?;
       }
+
+      // re-set up bin entries for the packages which we've run scripts for.
+      // lifecycle scripts can create files that are linked to by bin entries,
+      // and the only reliable way to handle this is to re-link bin entries
+      // (this is what PNPM does as well)
+      bin_entries.finish_only(
+        snapshot,
+        &root_node_modules_dir_path.join(".bin"),
+        |outcome| outcome.warn_if_failed(),
+        &package_ids,
+      )?;
     }
     if failed_packages.is_empty() {
       Ok(())

@@ -281,9 +306,10 @@ impl<'a> LifecycleScripts<'a> {
 // take in all (non copy) packages from snapshot,
 // and resolve the set of available binaries to create
 // custom commands available to the task runner
-fn resolve_baseline_custom_commands(
-  snapshot: &NpmResolutionSnapshot,
-  packages: &[NpmResolutionPackage],
+fn resolve_baseline_custom_commands<'a>(
+  bin_entries: &mut BinEntries<'a>,
+  snapshot: &'a NpmResolutionSnapshot,
+  packages: &'a [NpmResolutionPackage],
   get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
 ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
   let mut custom_commands = crate::task_runner::TaskCustomCommands::new();

@@ -306,6 +332,7 @@ fn resolve_baseline_custom_commands(
   // doing it for packages that are set up already.
   // realistically, scripts won't be run very often so it probably isn't too big of an issue.
   resolve_custom_commands_from_packages(
+    bin_entries,
    custom_commands,
     snapshot,
     packages,

@@ -320,12 +347,12 @@ fn resolve_custom_commands_from_packages<
   'a,
   P: IntoIterator<Item = &'a NpmResolutionPackage>,
 >(
+  bin_entries: &mut BinEntries<'a>,
   mut commands: crate::task_runner::TaskCustomCommands,
   snapshot: &'a NpmResolutionSnapshot,
   packages: P,
   get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
 ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
-  let mut bin_entries = BinEntries::new();
   for package in packages {
     let package_path = get_package_path(package);

@@ -333,7 +360,7 @@ fn resolve_custom_commands_from_packages<
       bin_entries.add(package, package_path);
     }
   }
-  let bins = bin_entries.into_bin_files(snapshot);
+  let bins: Vec<(String, PathBuf)> = bin_entries.collect_bin_files(snapshot);
   for (bin_name, script_path) in bins {
     commands.insert(
       bin_name.clone(),

@@ -356,7 +383,9 @@ fn resolve_custom_commands_from_deps(
   snapshot: &NpmResolutionSnapshot,
   get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
 ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
+  let mut bin_entries = BinEntries::new();
   resolve_custom_commands_from_packages(
+    &mut bin_entries,
     baseline,
     snapshot,
     package
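The comment in the hunk above carries the key design point: bins are linked before lifecycle scripts run, but a script may create the very file a bin symlink targets, so bins are re-linked afterwards for exactly the packages whose scripts ran (as the comment notes, PNPM does the same). A sketch of that two-phase flow with hypothetical helpers:

use std::collections::HashSet;

fn link_bins(only: Option<&HashSet<&str>>) {
    match only {
        None => println!("linking all bin entries"),
        Some(pkgs) => println!("re-linking bins for {} package(s)", pkgs.len()),
    }
}

fn run_lifecycle_scripts(packages: &[&str]) -> HashSet<&'static str> {
    // pretend every package's postinstall ran successfully
    println!("running scripts for {:?}", packages);
    ["some-native-pkg"].into_iter().collect()
}

fn main() {
    link_bins(None); // phase 1: initial linking (may hit missing entrypoints)
    let ran = run_lifecycle_scripts(&["some-native-pkg"]);
    link_bins(Some(&ran)); // phase 2: fix up entries the scripts just created
}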
@@ -55,6 +55,7 @@ use crate::util::progress_bar::ProgressMessagePrompt;
 use super::super::cache::NpmCache;
 use super::super::cache::TarballCache;
 use super::super::resolution::NpmResolution;
+use super::common::bin_entries;
 use super::common::NpmPackageFsResolver;
 use super::common::RegistryReadPermissionChecker;

@@ -329,8 +330,7 @@ async fn sync_resolution_with_fs(
   let mut cache_futures = FuturesUnordered::new();
   let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
     HashMap::with_capacity(package_partitions.packages.len());
-  let bin_entries =
-    Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new()));
+  let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
   let mut lifecycle_scripts =
     super::common::lifecycle_scripts::LifecycleScripts::new(
       lifecycle_scripts,

@@ -658,7 +658,28 @@ async fn sync_resolution_with_fs(
   // 7. Set up `node_modules/.bin` entries for packages that need it.
   {
     let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut());
-    bin_entries.finish(snapshot, &bin_node_modules_dir_path)?;
+    bin_entries.finish(
+      snapshot,
+      &bin_node_modules_dir_path,
+      |setup_outcome| {
+        match setup_outcome {
+          bin_entries::EntrySetupOutcome::MissingEntrypoint {
+            package,
+            package_path,
+            ..
+          } if super::common::lifecycle_scripts::has_lifecycle_scripts(
+            package,
+            package_path,
+          ) && lifecycle_scripts.can_run_scripts(&package.id.nv)
+            && !lifecycle_scripts.has_run_scripts(package) =>
+          {
+            // ignore, it might get fixed when the lifecycle scripts run.
+            // if not, we'll warn then
+          }
+          outcome => outcome.warn_if_failed(),
+        }
+      },
+    )?;
   }

   // 8. Create symlinks for the workspace packages

@@ -708,7 +729,7 @@ async fn sync_resolution_with_fs(
     .finish(
       snapshot,
       &package_partitions.packages,
-      Some(root_node_modules_dir_path),
+      root_node_modules_dir_path,
       progress_bar,
     )
     .await?;
@@ -2,7 +2,6 @@

 use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering;
-use std::time;

 use deno_core::error::generic_error;
 use deno_core::error::type_error;

@@ -13,6 +12,7 @@ use deno_core::ModuleSpecifier;
 use deno_core::OpState;
 use deno_runtime::deno_permissions::ChildPermissionsArg;
 use deno_runtime::deno_permissions::PermissionsContainer;
+use deno_runtime::deno_web::StartTime;
 use tokio::sync::mpsc::UnboundedSender;
 use uuid::Uuid;

@@ -56,7 +56,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
 pub fn op_pledge_test_permissions(
   state: &mut OpState,
   #[serde] args: ChildPermissionsArg,
-) -> Result<Uuid, AnyError> {
+) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
   let token = Uuid::new_v4();
   let parent_permissions = state.borrow_mut::<PermissionsContainer>();
   let worker_permissions = parent_permissions.create_child_permissions(args)?;

@@ -147,8 +147,8 @@ fn op_dispatch_bench_event(state: &mut OpState, #[serde] event: BenchEvent) {

 #[op2(fast)]
 #[number]
-fn op_bench_now(state: &mut OpState) -> Result<u64, AnyError> {
-  let ns = state.borrow::<time::Instant>().elapsed().as_nanos();
+fn op_bench_now(state: &mut OpState) -> Result<u64, std::num::TryFromIntError> {
+  let ns = state.borrow::<StartTime>().elapsed().as_nanos();
   let ns_u64 = u64::try_from(ns)?;
   Ok(ns_u64)
 }
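The narrowed error type in op_bench_now above works because `Duration::as_nanos()` returns a u128, and the only way the body can fail is the u128-to-u64 conversion, whose error type is exactly `std::num::TryFromIntError`. A standalone illustration:

use std::time::Instant;

fn elapsed_ns(start: &Instant) -> Result<u64, std::num::TryFromIntError> {
    // as_nanos() is u128; try_from can only fail on overflow past u64::MAX
    u64::try_from(start.elapsed().as_nanos())
}

fn main() {
    let start = Instant::now();
    println!("{} ns", elapsed_ns(&start).unwrap());
}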
@@ -46,7 +46,7 @@ pub fn op_jupyter_input(
  state: &mut OpState,
  #[string] prompt: String,
  is_password: bool,
) -> Result<Option<String>, AnyError> {
) -> Option<String> {
  let (last_execution_request, stdin_connection_proxy) = {
    (
      state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(),
@@ -58,11 +58,11 @@ pub fn op_jupyter_input(
  if let Some(last_request) = maybe_last_request {
    let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content
    else {
      return Ok(None);
      return None;
    };

    if !msg.allow_stdin {
      return Ok(None);
      return None;
    }

    let content = InputRequest {
@@ -73,7 +73,7 @@ pub fn op_jupyter_input(
    let msg = JupyterMessage::new(content, Some(&last_request));

    let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else {
      return Ok(None);
      return None;
    };

    // Need to spawn a separate thread here, because `blocking_recv()` can't
@@ -82,17 +82,25 @@ pub fn op_jupyter_input(
      stdin_connection_proxy.lock().rx.blocking_recv()
    });
    let Ok(Some(response)) = join_handle.join() else {
      return Ok(None);
      return None;
    };

    let JupyterMessageContent::InputReply(msg) = response.content else {
      return Ok(None);
      return None;
    };

    return Ok(Some(msg.value));
    return Some(msg.value);
  }

  Ok(None)
  None
}

#[derive(Debug, thiserror::Error)]
pub enum JupyterBroadcastError {
  #[error(transparent)]
  SerdeJson(serde_json::Error),
  #[error(transparent)]
  ZeroMq(AnyError),
}

#[op2(async)]
@@ -102,7 +110,7 @@ pub async fn op_jupyter_broadcast(
  #[serde] content: serde_json::Value,
  #[serde] metadata: serde_json::Value,
  #[serde] buffers: Vec<deno_core::JsBuffer>,
) -> Result<(), AnyError> {
) -> Result<(), JupyterBroadcastError> {
  let (iopub_connection, last_execution_request) = {
    let s = state.borrow();
@@ -125,36 +133,35 @@ pub async fn op_jupyter_broadcast(
        content,
        err
      );
      err
      JupyterBroadcastError::SerdeJson(err)
    })?;

    let jupyter_message = JupyterMessage::new(content, Some(&last_request))
      .with_metadata(metadata)
      .with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect());

    iopub_connection.lock().send(jupyter_message).await?;
    iopub_connection
      .lock()
      .send(jupyter_message)
      .await
      .map_err(JupyterBroadcastError::ZeroMq)?;
  }

  Ok(())
}

#[op2(fast)]
pub fn op_print(
  state: &mut OpState,
  #[string] msg: &str,
  is_err: bool,
) -> Result<(), AnyError> {
pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) {
  let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>();

  if is_err {
    if let Err(err) = sender.send(StreamContent::stderr(msg)) {
      log::error!("Failed to send stderr message: {}", err);
    }
    return Ok(());
    return;
  }

  if let Err(err) = sender.send(StreamContent::stdout(msg)) {
    log::error!("Failed to send stdout message: {}", err);
  }
  Ok(())
}
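The broadcast op above swaps an opaque `AnyError` for a dedicated `JupyterBroadcastError` so callers can tell serialization failures from socket failures. A minimal sketch of this `thiserror` pattern, with a hypothetical `send_all` standing in for the real op body:

```rust
use thiserror::Error;

#[derive(Debug, Error)]
pub enum BroadcastError {
  // transparent: Display and source() defer to the wrapped error
  #[error(transparent)]
  SerdeJson(#[from] serde_json::Error),
  #[error("zeromq send failed: {0}")]
  ZeroMq(String),
}

// Hypothetical helper: serialize a value and "send" it, returning a
// typed error so callers can match on the failure kind.
fn send_all(value: &serde_json::Value) -> Result<(), BroadcastError> {
  let bytes = serde_json::to_vec(value)?; // -> BroadcastError::SerdeJson
  if bytes.is_empty() {
    return Err(BroadcastError::ZeroMq("empty frame".into()));
  }
  Ok(())
}
```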
@@ -51,7 +51,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
pub fn op_pledge_test_permissions(
  state: &mut OpState,
  #[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> {
) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
  let token = Uuid::new_v4();
  let parent_permissions = state.borrow_mut::<PermissionsContainer>();
  let worker_permissions = parent_permissions.create_child_permissions(args)?;
@@ -150,7 +150,7 @@ fn op_register_test_step(
  #[smi] parent_id: usize,
  #[smi] root_id: usize,
  #[string] root_name: String,
) -> Result<usize, AnyError> {
) -> usize {
  let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
  let origin = state.borrow::<ModuleSpecifier>().to_string();
  let description = TestStepDescription {
@@ -169,7 +169,7 @@ fn op_register_test_step(
  };
  let sender = state.borrow_mut::<TestEventSender>();
  sender.send(TestEvent::StepRegister(description)).ok();
  Ok(id)
  id
}

#[op2(fast)]
289 cli/resolver.rs
@@ -4,7 +4,6 @@ use async_trait::async_trait;
use dashmap::DashMap;
use dashmap::DashSet;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError;
@@ -17,9 +16,7 @@ use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::source::Resolver;
use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
@@ -52,7 +49,6 @@ use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;

use crate::args::JsxImportSourceConfig;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
@@ -108,7 +104,6 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {

#[derive(Debug)]
pub struct CliNodeResolver {
  cjs_tracker: Arc<CjsTracker>,
  fs: Arc<dyn deno_fs::FileSystem>,
  in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
  node_resolver: Arc<NodeResolver>,
@@ -117,14 +112,12 @@ pub struct CliNodeResolver {

impl CliNodeResolver {
  pub fn new(
    cjs_tracker: Arc<CjsTracker>,
    fs: Arc<dyn deno_fs::FileSystem>,
    in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
    node_resolver: Arc<NodeResolver>,
    npm_resolver: Arc<dyn CliNpmResolver>,
  ) -> Self {
    Self {
      cjs_tracker,
      fs,
      in_npm_pkg_checker,
      node_resolver,
@@ -140,9 +133,11 @@ impl CliNodeResolver {
    &self,
    specifier: &str,
    referrer: &ModuleSpecifier,
    referrer_kind: NodeModuleKind,
    mode: NodeResolutionMode,
  ) -> Result<Option<NodeResolution>, AnyError> {
    let resolution_result = self.resolve(specifier, referrer, mode);
    let resolution_result =
      self.resolve(specifier, referrer, referrer_kind, mode);
    match resolution_result {
      Ok(res) => Ok(Some(res)),
      Err(err) => {
@@ -213,35 +208,26 @@ impl CliNodeResolver {
    &self,
    specifier: &str,
    referrer: &ModuleSpecifier,
    referrer_kind: NodeModuleKind,
    mode: NodeResolutionMode,
  ) -> Result<NodeResolution, NodeResolveError> {
    let referrer_kind = if self
      .cjs_tracker
      .is_maybe_cjs(referrer, MediaType::from_specifier(referrer))
      .map_err(|err| NodeResolveErrorKind::PackageResolve(err.into()))?
    {
      NodeModuleKind::Cjs
    } else {
      NodeModuleKind::Esm
    };

    let res =
      self
        .node_resolver
        .resolve(specifier, referrer, referrer_kind, mode)?;
    Ok(res)
    self
      .node_resolver
      .resolve(specifier, referrer, referrer_kind, mode)
  }

  pub fn resolve_req_reference(
    &self,
    req_ref: &NpmPackageReqReference,
    referrer: &ModuleSpecifier,
    referrer_kind: NodeModuleKind,
    mode: NodeResolutionMode,
  ) -> Result<ModuleSpecifier, AnyError> {
    self.resolve_req_with_sub_path(
      req_ref.req(),
      req_ref.sub_path(),
      referrer,
      referrer_kind,
      mode,
    )
  }
@@ -251,6 +237,7 @@ impl CliNodeResolver {
    req: &PackageReq,
    sub_path: Option<&str>,
    referrer: &ModuleSpecifier,
    referrer_kind: NodeModuleKind,
    mode: NodeResolutionMode,
  ) -> Result<ModuleSpecifier, AnyError> {
    let package_folder = self
@@ -260,6 +247,7 @@ impl CliNodeResolver {
      &package_folder,
      sub_path,
      Some(referrer),
      referrer_kind,
      mode,
    );
    match resolution_result {
@@ -284,12 +272,14 @@ impl CliNodeResolver {
    package_folder: &Path,
    sub_path: Option<&str>,
    maybe_referrer: Option<&ModuleSpecifier>,
    referrer_kind: NodeModuleKind,
    mode: NodeResolutionMode,
  ) -> Result<ModuleSpecifier, PackageSubpathResolveError> {
    self.node_resolver.resolve_package_subpath_from_deno_module(
      package_folder,
      sub_path,
      maybe_referrer,
      referrer_kind,
      mode,
    )
  }
@@ -419,10 +409,6 @@ impl NpmModuleLoader {
  }
}

pub struct CjsTrackerOptions {
  pub unstable_detect_cjs: bool,
}

/// Keeps track of what module specifiers were resolved as CJS.
///
/// Modules that are `.js` or `.ts` are only known to be CJS or
@@ -430,22 +416,22 @@ pub struct CjsTrackerOptions {
/// will be "maybe CJS" until they're loaded.
#[derive(Debug)]
pub struct CjsTracker {
  in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
  pkg_json_resolver: Arc<PackageJsonResolver>,
  unstable_detect_cjs: bool,
  known: DashMap<ModuleSpecifier, ModuleKind>,
  is_cjs_resolver: IsCjsResolver,
  known: DashMap<ModuleSpecifier, NodeModuleKind>,
}

impl CjsTracker {
  pub fn new(
    in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
    pkg_json_resolver: Arc<PackageJsonResolver>,
    options: CjsTrackerOptions,
    options: IsCjsResolverOptions,
  ) -> Self {
    Self {
      in_npm_pkg_checker,
      pkg_json_resolver,
      unstable_detect_cjs: options.unstable_detect_cjs,
      is_cjs_resolver: IsCjsResolver::new(
        in_npm_pkg_checker,
        pkg_json_resolver,
        options,
      ),
      known: Default::default(),
    }
  }
@@ -485,47 +471,90 @@ impl CjsTracker {
      .get_known_kind_with_is_script(specifier, media_type, is_script)
    {
      Some(kind) => kind,
      None => self.check_based_on_pkg_json(specifier)?,
      None => self.is_cjs_resolver.check_based_on_pkg_json(specifier)?,
    };
    Ok(kind.is_cjs())
    Ok(kind == NodeModuleKind::Cjs)
  }

  pub fn get_known_kind(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
  ) -> Option<ModuleKind> {
  ) -> Option<NodeModuleKind> {
    self.get_known_kind_with_is_script(specifier, media_type, None)
  }

  pub fn get_referrer_kind(
    &self,
    specifier: &ModuleSpecifier,
  ) -> NodeModuleKind {
    if specifier.scheme() != "file" {
      return NodeModuleKind::Esm;
    }
    self
      .get_known_kind(specifier, MediaType::from_specifier(specifier))
      .unwrap_or(NodeModuleKind::Esm)
  }

  fn get_known_kind_with_is_script(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    is_script: Option<bool>,
  ) -> Option<ModuleKind> {
    if specifier.scheme() != "file" {
      return Some(ModuleKind::Esm);
    }
  ) -> Option<NodeModuleKind> {
    self.is_cjs_resolver.get_known_kind_with_is_script(
      specifier,
      media_type,
      is_script,
      &self.known,
    )
  }
}

    match media_type {
      MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(ModuleKind::Esm),
      MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(ModuleKind::Cjs),
#[derive(Debug)]
pub struct IsCjsResolverOptions {
  pub detect_cjs: bool,
  pub is_node_main: bool,
}

#[derive(Debug)]
pub struct IsCjsResolver {
  in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
  pkg_json_resolver: Arc<PackageJsonResolver>,
  options: IsCjsResolverOptions,
}

impl IsCjsResolver {
  pub fn new(
    in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
    pkg_json_resolver: Arc<PackageJsonResolver>,
    options: IsCjsResolverOptions,
  ) -> Self {
    Self {
      in_npm_pkg_checker,
      pkg_json_resolver,
      options,
    }
  }

  pub fn get_lsp_referrer_kind(
    &self,
    specifier: &ModuleSpecifier,
    is_script: Option<bool>,
  ) -> NodeModuleKind {
    if specifier.scheme() != "file" {
      return NodeModuleKind::Esm;
    }
    match MediaType::from_specifier(specifier) {
      MediaType::Mts | MediaType::Mjs | MediaType::Dmts => NodeModuleKind::Esm,
      MediaType::Cjs | MediaType::Cts | MediaType::Dcts => NodeModuleKind::Cjs,
      MediaType::Dts => {
        // dts files are always determined based on the package.json because
        // they contain imports/exports even when considered CJS
        if let Some(value) = self.known.get(specifier).map(|v| *v) {
          Some(value)
        } else {
          let value = self.check_based_on_pkg_json(specifier).ok();
          if let Some(value) = value {
            self.known.insert(specifier.clone(), value);
          }
          Some(value.unwrap_or(ModuleKind::Esm))
        }
        self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm)
      }
      MediaType::Wasm |
      MediaType::Json => Some(ModuleKind::Esm),
      MediaType::Json => NodeModuleKind::Esm,
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::TypeScript
@@ -534,18 +563,63 @@ impl CjsTracker {
      | MediaType::Css
      | MediaType::SourceMap
      | MediaType::Unknown => {
        if let Some(value) = self.known.get(specifier).map(|v| *v) {
          if value.is_cjs() && is_script == Some(false) {
        match is_script {
          Some(true) => self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm),
          Some(false) | None => NodeModuleKind::Esm,
        }
      }
    }
  }

  fn get_known_kind_with_is_script(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    is_script: Option<bool>,
    known_cache: &DashMap<ModuleSpecifier, NodeModuleKind>,
  ) -> Option<NodeModuleKind> {
    if specifier.scheme() != "file" {
      return Some(NodeModuleKind::Esm);
    }

    match media_type {
      MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(NodeModuleKind::Esm),
      MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(NodeModuleKind::Cjs),
      MediaType::Dts => {
        // dts files are always determined based on the package.json because
        // they contain imports/exports even when considered CJS
        if let Some(value) = known_cache.get(specifier).map(|v| *v) {
          Some(value)
        } else {
          let value = self.check_based_on_pkg_json(specifier).ok();
          if let Some(value) = value {
            known_cache.insert(specifier.clone(), value);
          }
          Some(value.unwrap_or(NodeModuleKind::Esm))
        }
      }
      MediaType::Wasm |
      MediaType::Json => Some(NodeModuleKind::Esm),
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::TypeScript
      | MediaType::Tsx
      // treat these as unknown
      | MediaType::Css
      | MediaType::SourceMap
      | MediaType::Unknown => {
        if let Some(value) = known_cache.get(specifier).map(|v| *v) {
          if value == NodeModuleKind::Cjs && is_script == Some(false) {
            // we now know this is actually esm
            self.known.insert(specifier.clone(), ModuleKind::Esm);
            Some(ModuleKind::Esm)
            known_cache.insert(specifier.clone(), NodeModuleKind::Esm);
            Some(NodeModuleKind::Esm)
          } else {
            Some(value)
          }
        } else if is_script == Some(false) {
          // we know this is esm
          self.known.insert(specifier.clone(), ModuleKind::Esm);
          Some(ModuleKind::Esm)
          known_cache.insert(specifier.clone(), NodeModuleKind::Esm);
          Some(NodeModuleKind::Esm)
        } else {
          None
        }
@@ -556,27 +630,38 @@ impl CjsTracker {
  fn check_based_on_pkg_json(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<ModuleKind, ClosestPkgJsonError> {
  ) -> Result<NodeModuleKind, ClosestPkgJsonError> {
    if self.in_npm_pkg_checker.in_npm_package(specifier) {
      if let Some(pkg_json) =
        self.pkg_json_resolver.get_closest_package_json(specifier)?
      {
        let is_file_location_cjs = pkg_json.typ != "module";
        Ok(ModuleKind::from_is_cjs(is_file_location_cjs))
        Ok(if is_file_location_cjs {
          NodeModuleKind::Cjs
        } else {
          NodeModuleKind::Esm
        })
      } else {
        Ok(ModuleKind::Cjs)
        Ok(NodeModuleKind::Cjs)
      }
    } else if self.unstable_detect_cjs {
    } else if self.options.detect_cjs || self.options.is_node_main {
      if let Some(pkg_json) =
        self.pkg_json_resolver.get_closest_package_json(specifier)?
      {
        let is_cjs_type = pkg_json.typ == "commonjs";
        Ok(ModuleKind::from_is_cjs(is_cjs_type))
        let is_cjs_type = pkg_json.typ == "commonjs"
          || self.options.is_node_main && pkg_json.typ == "none";
        Ok(if is_cjs_type {
          NodeModuleKind::Cjs
        } else {
          NodeModuleKind::Esm
        })
      } else if self.options.is_node_main {
        Ok(NodeModuleKind::Cjs)
      } else {
        Ok(ModuleKind::Esm)
        Ok(NodeModuleKind::Esm)
      }
    } else {
      Ok(ModuleKind::Esm)
      Ok(NodeModuleKind::Esm)
    }
  }
}
@@ -587,48 +672,33 @@ pub type CliSloppyImportsResolver =
/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug)]
pub struct CliGraphResolver {
pub struct CliResolver {
  node_resolver: Option<Arc<CliNodeResolver>>,
  npm_resolver: Option<Arc<dyn CliNpmResolver>>,
  sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
  workspace_resolver: Arc<WorkspaceResolver>,
  maybe_default_jsx_import_source: Option<String>,
  maybe_default_jsx_import_source_types: Option<String>,
  maybe_jsx_import_source_module: Option<String>,
  maybe_vendor_specifier: Option<ModuleSpecifier>,
  found_package_json_dep_flag: AtomicFlag,
  bare_node_builtins_enabled: bool,
  warned_pkgs: DashSet<PackageReq>,
}

pub struct CliGraphResolverOptions<'a> {
pub struct CliResolverOptions<'a> {
  pub node_resolver: Option<Arc<CliNodeResolver>>,
  pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
  pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
  pub workspace_resolver: Arc<WorkspaceResolver>,
  pub bare_node_builtins_enabled: bool,
  pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
  pub maybe_vendor_dir: Option<&'a PathBuf>,
}

impl CliGraphResolver {
  pub fn new(options: CliGraphResolverOptions) -> Self {
impl CliResolver {
  pub fn new(options: CliResolverOptions) -> Self {
    Self {
      node_resolver: options.node_resolver,
      npm_resolver: options.npm_resolver,
      sloppy_imports_resolver: options.sloppy_imports_resolver,
      workspace_resolver: options.workspace_resolver,
      maybe_default_jsx_import_source: options
        .maybe_jsx_import_source_config
        .as_ref()
        .and_then(|c| c.default_specifier.clone()),
      maybe_default_jsx_import_source_types: options
        .maybe_jsx_import_source_config
        .as_ref()
        .and_then(|c| c.default_types_specifier.clone()),
      maybe_jsx_import_source_module: options
        .maybe_jsx_import_source_config
        .map(|c| c.module),
      maybe_vendor_specifier: options
        .maybe_vendor_dir
        .and_then(|v| ModuleSpecifier::from_directory_path(v).ok()),
@@ -638,10 +708,6 @@ impl CliGraphResolver {
    }
  }

  pub fn as_graph_resolver(&self) -> &dyn Resolver {
    self
  }

  pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver {
    WorkerCliNpmGraphResolver {
      npm_resolver: self.npm_resolver.as_ref(),
@@ -649,28 +715,12 @@ impl CliGraphResolver {
      bare_node_builtins_enabled: self.bare_node_builtins_enabled,
    }
  }
}

impl Resolver for CliGraphResolver {
  fn default_jsx_import_source(&self) -> Option<String> {
    self.maybe_default_jsx_import_source.clone()
  }

  fn default_jsx_import_source_types(&self) -> Option<String> {
    self.maybe_default_jsx_import_source_types.clone()
  }

  fn jsx_import_source_module(&self) -> &str {
    self
      .maybe_jsx_import_source_module
      .as_deref()
      .unwrap_or(DEFAULT_JSX_IMPORT_SOURCE_MODULE)
  }

  fn resolve(
  pub fn resolve(
    &self,
    raw_specifier: &str,
    referrer_range: &deno_graph::Range,
    referrer_kind: NodeModuleKind,
    mode: ResolutionMode,
  ) -> Result<ModuleSpecifier, ResolveError> {
    fn to_node_mode(mode: ResolutionMode) -> NodeResolutionMode {
@@ -686,7 +736,7 @@ impl Resolver for CliGraphResolver {
    if let Some(node_resolver) = self.node_resolver.as_ref() {
      if referrer.scheme() == "file" && node_resolver.in_npm_package(referrer) {
        return node_resolver
          .resolve(raw_specifier, referrer, to_node_mode(mode))
          .resolve(raw_specifier, referrer, referrer_kind, to_node_mode(mode))
          .map(|res| res.into_url())
          .map_err(|e| ResolveError::Other(e.into()));
      }
@@ -759,6 +809,7 @@ impl Resolver for CliGraphResolver {
          pkg_json.dir_path(),
          sub_path.as_deref(),
          Some(referrer),
          referrer_kind,
          to_node_mode(mode),
        )
        .map_err(|e| ResolveError::Other(e.into())),
@@ -800,6 +851,7 @@ impl Resolver for CliGraphResolver {
          pkg_folder,
          sub_path.as_deref(),
          Some(referrer),
          referrer_kind,
          to_node_mode(mode),
        )
        .map_err(|e| ResolveError::Other(e.into()))
@@ -847,6 +899,7 @@ impl Resolver for CliGraphResolver {
          pkg_folder,
          npm_req_ref.sub_path(),
          Some(referrer),
          referrer_kind,
          to_node_mode(mode),
        )
        .map_err(|e| ResolveError::Other(e.into()));
@@ -855,7 +908,12 @@ impl Resolver for CliGraphResolver {
      // do npm resolution for byonm
      if is_byonm {
        return node_resolver
          .resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode))
          .resolve_req_reference(
            &npm_req_ref,
            referrer,
            referrer_kind,
            to_node_mode(mode),
          )
          .map_err(|err| err.into());
      }
    }
@@ -869,7 +927,12 @@ impl Resolver for CliGraphResolver {
    // If byonm, check if the bare specifier resolves to an npm package
    if is_byonm && referrer.scheme() == "file" {
      let maybe_resolution = node_resolver
        .resolve_if_for_npm_pkg(raw_specifier, referrer, to_node_mode(mode))
        .resolve_if_for_npm_pkg(
          raw_specifier,
          referrer,
          referrer_kind,
          to_node_mode(mode),
        )
        .map_err(ResolveError::Other)?;
      if let Some(res) = maybe_resolution {
        match res {
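The `check_based_on_pkg_json` logic above decides CJS versus ESM from the closest package.json `type` field. A simplified standalone sketch of that decision table (the `PkgJson` struct here is illustrative, not the real resolver types):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum NodeModuleKind { Cjs, Esm }

// Illustrative stand-in for the resolved closest package.json.
struct PkgJson { typ: String }

// Inside an npm package anything not `"type": "module"` is CJS; outside,
// CJS is only detected when enabled, with `is_node_main` also treating a
// missing `type` field ("none") as CJS.
fn kind_from_pkg_json(
  in_npm_package: bool,
  pkg_json: Option<&PkgJson>,
  detect_cjs: bool,
  is_node_main: bool,
) -> NodeModuleKind {
  if in_npm_package {
    match pkg_json {
      Some(p) if p.typ == "module" => NodeModuleKind::Esm,
      _ => NodeModuleKind::Cjs,
    }
  } else if detect_cjs || is_node_main {
    match pkg_json {
      Some(p) if p.typ == "commonjs" || (is_node_main && p.typ == "none") => {
        NodeModuleKind::Cjs
      }
      Some(_) => NodeModuleKind::Esm,
      None if is_node_main => NodeModuleKind::Cjs,
      None => NodeModuleKind::Esm,
    }
  } else {
    NodeModuleKind::Esm
  }
}
```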
@@ -528,7 +528,6 @@
"bare-node-builtins",
"byonm",
"cron",
"detect-cjs",
"ffi",
"fs",
"fmt-component",
@@ -47,6 +47,7 @@ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::ops::otel::OtelConfig;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
@@ -185,6 +186,7 @@ pub struct Metadata {
  pub entrypoint_key: String,
  pub node_modules: Option<NodeModules>,
  pub unstable_config: UnstableConfig,
  pub otel_config: Option<OtelConfig>, // None means disabled.
}

fn write_binary_bytes(
@@ -718,10 +720,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
  unstable_config: UnstableConfig {
    legacy_flag_enabled: false,
    bare_node_builtins: cli_options.unstable_bare_node_builtins(),
    detect_cjs: cli_options.unstable_detect_cjs(),
    sloppy_imports: cli_options.unstable_sloppy_imports(),
    features: cli_options.unstable_features(),
  },
  otel_config: cli_options.otel_config(),
};

write_binary_bytes(
@@ -45,6 +45,8 @@ use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use serialization::DenoCompileModuleSource;
use std::borrow::Cow;
@@ -76,9 +78,9 @@ use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CjsTrackerOptions;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNodeResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@@ -146,13 +148,27 @@ impl ModuleLoader for EmbeddedModuleLoader {
      type_error(format!("Referrer uses invalid specifier: {}", err))
    })?
  };
  let referrer_kind = if self
    .shared
    .cjs_tracker
    .is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))?
  {
    NodeModuleKind::Cjs
  } else {
    NodeModuleKind::Esm
  };

  if self.shared.node_resolver.in_npm_package(&referrer) {
    return Ok(
      self
        .shared
        .node_resolver
        .resolve(raw_specifier, &referrer, NodeResolutionMode::Execution)?
        .resolve(
          raw_specifier,
          &referrer,
          referrer_kind,
          NodeResolutionMode::Execution,
        )?
        .into_url(),
    );
  }
@@ -178,6 +194,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
      pkg_json.dir_path(),
      sub_path.as_deref(),
      Some(&referrer),
      referrer_kind,
      NodeResolutionMode::Execution,
    )?,
  ),
@@ -192,6 +209,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
      req,
      sub_path.as_deref(),
      &referrer,
      referrer_kind,
      NodeResolutionMode::Execution,
    )
  }
@@ -211,6 +229,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
      pkg_folder,
      sub_path.as_deref(),
      Some(&referrer),
      referrer_kind,
      NodeResolutionMode::Execution,
    )?,
  )
@@ -224,6 +243,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
    return self.shared.node_resolver.resolve_req_reference(
      &reference,
      &referrer,
      referrer_kind,
      NodeResolutionMode::Execution,
    );
  }
@@ -250,6 +270,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
    let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg(
      raw_specifier,
      &referrer,
      referrer_kind,
      NodeResolutionMode::Execution,
    )?;
    if let Some(res) = maybe_res {
@@ -429,6 +450,14 @@ impl NodeRequireLoader for EmbeddedModuleLoader {
  ) -> Result<String, AnyError> {
    Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?)
  }

  fn is_maybe_cjs(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<bool, ClosestPkgJsonError> {
    let media_type = MediaType::from_specifier(specifier);
    self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type)
  }
}

struct StandaloneModuleLoaderFactory {
@@ -628,14 +657,14 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
  let cjs_tracker = Arc::new(CjsTracker::new(
    in_npm_pkg_checker.clone(),
    pkg_json_resolver.clone(),
    CjsTrackerOptions {
      unstable_detect_cjs: metadata.unstable_config.detect_cjs,
    IsCjsResolverOptions {
      detect_cjs: !metadata.workspace_resolver.package_jsons.is_empty(),
      is_node_main: false,
    },
  ));
  let cache_db = Caches::new(deno_dir_provider.clone());
  let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
  let cli_node_resolver = Arc::new(CliNodeResolver::new(
    cjs_tracker.clone(),
    fs.clone(),
    in_npm_pkg_checker.clone(),
    node_resolver.clone(),
@@ -646,7 +675,6 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
    cjs_tracker.clone(),
    fs.clone(),
    None,
    false,
  );
  let node_code_translator = Arc::new(NodeCodeTranslator::new(
    cjs_esm_code_analyzer,
@@ -800,6 +828,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
      serve_port: None,
      serve_host: None,
    },
    metadata.otel_config,
  );

  // Initialize v8 once from the main thread.
@@ -350,6 +350,7 @@ impl<'a> VfsEntryRef<'a> {
  atime: None,
  birthtime: None,
  mtime: None,
  ctime: None,
  blksize: 0,
  size: 0,
  dev: 0,
@@ -372,6 +373,7 @@ impl<'a> VfsEntryRef<'a> {
  atime: None,
  birthtime: None,
  mtime: None,
  ctime: None,
  blksize: 0,
  size: file.len,
  dev: 0,
@@ -394,6 +396,7 @@ impl<'a> VfsEntryRef<'a> {
  atime: None,
  birthtime: None,
  mtime: None,
  ctime: None,
  blksize: 0,
  size: 0,
  dev: 0,
@@ -480,7 +480,7 @@ fn filter_coverages(
    .filter(|e| {
      let is_internal = e.url.starts_with("ext:")
        || e.url.ends_with("__anonymous__")
        || e.url.ends_with("$deno$test.js")
        || e.url.ends_with("$deno$test.mjs")
        || e.url.ends_with(".snap")
        || is_supported_test_path(Path::new(e.url.as_str()))
        || doc_test_re.is_match(e.url.as_str())
@@ -790,28 +790,26 @@ fn format_ensure_stable(
      return Ok(Some(current_text));
    }
    Err(err) => {
      panic!(
      bail!(
        concat!(
          "Formatting succeeded initially, but failed when ensuring a ",
          "stable format. This indicates a bug in the formatter where ",
          "the text it produces is not syntactically correct. As a temporary ",
          "workaround you can ignore this file ({}).\n\n{:#}"
          "workaround you can ignore this file.\n\n{:#}"
        ),
        file_path.display(),
        err,
      )
    }
  }
  count += 1;
  if count == 5 {
    panic!(
    bail!(
      concat!(
        "Formatting not stable. Bailed after {} tries. This indicates a bug ",
        "in the formatter where it formats the file ({}) differently each time. As a ",
        "in the formatter where it formats the file differently each time. As a ",
        "temporary workaround you can ignore this file."
      ),
      count,
      file_path.display(),
    )
  }
}
@@ -1215,6 +1213,8 @@ fn is_supported_ext_fmt(path: &Path) -> bool {

#[cfg(test)]
mod test {
  use test_util::assert_starts_with;

  use super::*;

  #[test]
@@ -1270,12 +1270,16 @@ mod test {
  }

  #[test]
  #[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")]
  fn test_format_ensure_stable_unstable_format() {
    format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
      Ok(Some(format!("1{file_text}")))
    })
    .unwrap();
    let err =
      format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
        Ok(Some(format!("1{file_text}")))
      })
      .unwrap_err();
    assert_starts_with!(
      err.to_string(),
      "Formatting not stable. Bailed after 5 tries."
    );
  }

  #[test]
@@ -1289,16 +1293,20 @@ mod test {
  }

  #[test]
  #[should_panic(expected = "Formatting succeeded initially, but failed when")]
  fn test_format_ensure_stable_error_second() {
    format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
      if file_text == "1" {
        Ok(Some("11".to_string()))
      } else {
        bail!("Error formatting.")
      }
    })
    .unwrap();
    let err =
      format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
        if file_text == "1" {
          Ok(Some("11".to_string()))
        } else {
          bail!("Error formatting.")
        }
      })
      .unwrap_err();
    assert_starts_with!(
      err.to_string(),
      "Formatting succeeded initially, but failed when"
    );
  }

  #[test]
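The formatter now surfaces instability as a recoverable error (`bail!`) rather than a process panic, which is what lets the tests above assert on `unwrap_err()` instead of `#[should_panic]`. A minimal sketch of the pattern, assuming the `anyhow` crate that `bail!` is re-exported from in this codebase:

```rust
use anyhow::{bail, Result};

// Retry a formatting function until its output stabilizes; give up with
// a recoverable error after five attempts instead of panicking.
fn ensure_stable(mut text: String, fmt: impl Fn(&str) -> String) -> Result<String> {
  for _ in 0..5 {
    let next = fmt(&text);
    if next == text {
      return Ok(text);
    }
    text = next;
  }
  bail!("Formatting not stable. Bailed after 5 tries.")
}
```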
@@ -1396,6 +1396,7 @@ mod tests {
  .env_clear()
  // use the deno binary in the target directory
  .env("PATH", test_util::target_dir())
  .env("RUST_BACKTRACE", "1")
  .spawn()
  .unwrap()
  .wait()
@@ -61,7 +61,7 @@ pub async fn kernel(
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let main_module =
    resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd())
    resolve_url_or_path("./$deno$jupyter.mts", cli_options.initial_cwd())
      .unwrap();
  // TODO(bartlomieju): should we run with all permissions?
  let permissions =
@@ -64,7 +64,7 @@ pub use rules::LintRuleProvider;

const JSON_SCHEMA_VERSION: u8 = 1;

static STDIN_FILE_NAME: &str = "$deno$stdin.ts";
static STDIN_FILE_NAME: &str = "$deno$stdin.mts";

pub async fn lint(
  flags: Arc<Flags>,
@@ -87,6 +87,7 @@ impl LintRule for NoSloppyImportsRule {
  captures: Default::default(),
};

// fill this and capture the sloppy imports in the resolver
deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions {
  graph_kind: deno_graph::GraphKind::All,
  specifier: context.specifier().clone(),
@@ -44,7 +44,11 @@ pub async fn cache_top_level_deps(

  let mut seen_reqs = std::collections::HashSet::new();

  for entry in import_map.imports().entries() {
  for entry in import_map.imports().entries().chain(
    import_map
      .scopes()
      .flat_map(|scope| scope.imports.entries()),
  ) {
    let Some(specifier) = entry.value else {
      continue;
    };
@@ -7,7 +7,7 @@ use crate::cdp;
use crate::colors;
use crate::lsp::ReplLanguageServer;
use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliResolver;
use crate::tools::test::report_tests;
use crate::tools::test::reporters::PrettyTestReporter;
use crate::tools::test::reporters::TestReporter;
@@ -44,12 +44,12 @@ use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_core::PollEventLoopOptions;
use deno_graph::source::ResolutionMode;
use deno_graph::source::Resolver;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy;
use regex::Match;
use regex::Regex;
@@ -180,7 +180,7 @@ struct ReplJsxState {

pub struct ReplSession {
  npm_resolver: Arc<dyn CliNpmResolver>,
  resolver: Arc<CliGraphResolver>,
  resolver: Arc<CliResolver>,
  pub worker: MainWorker,
  session: LocalInspectorSession,
  pub context_id: u64,
@@ -199,7 +199,7 @@ impl ReplSession {
  pub async fn initialize(
    cli_options: &CliOptions,
    npm_resolver: Arc<dyn CliNpmResolver>,
    resolver: Arc<CliGraphResolver>,
    resolver: Arc<CliResolver>,
    mut worker: MainWorker,
    main_module: ModuleSpecifier,
    test_event_receiver: TestEventReceiver,
@@ -245,7 +245,7 @@ impl ReplSession {
    assert_ne!(context_id, 0);

    let referrer =
      deno_core::resolve_path("./$deno$repl.ts", cli_options.initial_cwd())
      deno_core::resolve_path("./$deno$repl.mts", cli_options.initial_cwd())
        .unwrap();

    let cwd_url =
@@ -712,7 +712,12 @@ impl ReplSession {
      .flat_map(|i| {
        self
          .resolver
          .resolve(i, &referrer_range, ResolutionMode::Execution)
          .resolve(
            i,
            &referrer_range,
            NodeModuleKind::Esm,
            ResolutionMode::Execution,
          )
          .ok()
          .or_else(|| ModuleSpecifier::parse(i).ok())
      })
@@ -4,8 +4,6 @@ use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;

use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
@@ -18,7 +16,6 @@ use tokio::select;

use crate::cdp;
use crate::emit::Emitter;
use crate::resolver::CjsTracker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::file_watcher::WatcherRestartMode;

@@ -63,7 +60,6 @@ pub struct HmrRunner {
  session: LocalInspectorSession,
  watcher_communicator: Arc<WatcherCommunicator>,
  script_ids: HashMap<String, String>,
  cjs_tracker: Arc<CjsTracker>,
  emitter: Arc<Emitter>,
}

@@ -146,7 +142,6 @@ impl crate::worker::HmrRunner for HmrRunner {

  let source_code = self.emitter.load_and_emit_for_hmr(
    &module_url,
    ModuleKind::from_is_cjs(self.cjs_tracker.is_maybe_cjs(&module_url, MediaType::from_specifier(&module_url))?),
  ).await?;

  let mut tries = 1;
@@ -179,14 +174,12 @@ impl crate::worker::HmrRunner for HmrRunner {

impl HmrRunner {
  pub fn new(
    cjs_tracker: Arc<CjsTracker>,
    emitter: Arc<Emitter>,
    session: LocalInspectorSession,
    watcher_communicator: Arc<WatcherCommunicator>,
  ) -> Self {
    Self {
      session,
      cjs_tracker,
      emitter,
      watcher_communicator,
      script_ids: HashMap::new(),
@@ -121,8 +121,8 @@ delete Object.prototype.__proto__;
  /** @type {Map<string, ts.SourceFile>} */
  const sourceFileCache = new Map();

  /** @type {Map<string, string>} */
  const sourceTextCache = new Map();
  /** @type {Map<string, ts.IScriptSnapshot & { isCjs?: boolean; }>} */
  const scriptSnapshotCache = new Map();

  /** @type {Map<string, number>} */
  const sourceRefCounts = new Map();
@@ -133,9 +133,6 @@ delete Object.prototype.__proto__;
  /** @type {Map<string, boolean>} */
  const isNodeSourceFileCache = new Map();

  /** @type {Map<string, boolean>} */
  const isCjsCache = new Map();

  // Maps asset specifiers to the first scope that the asset was loaded into.
  /** @type {Map<string, string | null>} */
  const assetScopes = new Map();
@@ -210,12 +207,13 @@ delete Object.prototype.__proto__;
  const mapKey = path + key;
  let sourceFile = documentRegistrySourceFileCache.get(mapKey);
  if (!sourceFile || sourceFile.version !== version) {
    const isCjs = /** @type {any} */ (scriptSnapshot).isCjs;
    sourceFile = ts.createLanguageServiceSourceFile(
      fileName,
      scriptSnapshot,
      {
        ...getCreateSourceFileOptions(sourceFileOptions),
        impliedNodeFormat: (isCjsCache.get(fileName) ?? false)
        impliedNodeFormat: isCjs
          ? ts.ModuleKind.CommonJS
          : ts.ModuleKind.ESNext,
        // in the lsp we want to be able to show documentation
@@ -320,7 +318,7 @@ delete Object.prototype.__proto__;
  if (lastRequestMethod != "cleanupSemanticCache") {
    const mapKey = path + key;
    documentRegistrySourceFileCache.delete(mapKey);
    sourceTextCache.delete(path);
    scriptSnapshotCache.delete(path);
    ops.op_release(path);
  }
} else {
@@ -624,8 +622,6 @@ delete Object.prototype.__proto__;
  `"data" is unexpectedly null for "${specifier}".`,
);

isCjsCache.set(specifier, isCjs);

sourceFile = ts.createSourceFile(
  specifier,
  data,
@@ -699,7 +695,7 @@ delete Object.prototype.__proto__;
  /** @type {[string, ts.Extension] | undefined} */
  const resolved = ops.op_resolve(
    containingFilePath,
    isCjsCache.get(containingFilePath) ?? false,
    containingFileMode === ts.ModuleKind.CommonJS,
    [fileReference.fileName],
  )?.[0];
  if (resolved) {
@@ -723,7 +719,14 @@ delete Object.prototype.__proto__;
    }
  });
},
resolveModuleNames(specifiers, base) {
resolveModuleNames(
  specifiers,
  base,
  _reusedNames,
  _redirectedReference,
  _options,
  containingSourceFile,
) {
  if (logDebug) {
    debug(`host.resolveModuleNames()`);
    debug(`  base: ${base}`);
@@ -732,7 +735,7 @@ delete Object.prototype.__proto__;
  /** @type {Array<[string, ts.Extension] | undefined>} */
  const resolved = ops.op_resolve(
    base,
    isCjsCache.get(base) ?? false,
    containingSourceFile?.impliedNodeFormat === ts.ModuleKind.CommonJS,
    specifiers,
  );
  if (resolved) {
@@ -814,19 +817,19 @@ delete Object.prototype.__proto__;
    return ts.ScriptSnapshot.fromString(sourceFile.text);
  }
}
let sourceText = sourceTextCache.get(specifier);
if (sourceText == undefined) {
let scriptSnapshot = scriptSnapshotCache.get(specifier);
if (scriptSnapshot == undefined) {
  /** @type {{ data: string, version: string, isCjs: boolean }} */
  const fileInfo = ops.op_load(specifier);
  if (!fileInfo) {
    return undefined;
  }
  isCjsCache.set(specifier, fileInfo.isCjs);
  sourceTextCache.set(specifier, fileInfo.data);
  scriptSnapshot = ts.ScriptSnapshot.fromString(fileInfo.data);
  scriptSnapshot.isCjs = fileInfo.isCjs;
  scriptSnapshotCache.set(specifier, scriptSnapshot);
  scriptVersionCache.set(specifier, fileInfo.version);
  sourceText = fileInfo.data;
}
return ts.ScriptSnapshot.fromString(sourceText);
return scriptSnapshot;
},
};

@@ -1238,7 +1241,7 @@ delete Object.prototype.__proto__;
  closed = true;
}
scriptVersionCache.delete(script);
sourceTextCache.delete(script);
scriptSnapshotCache.delete(script);
}

if (newConfigsByScope || opened || closed) {
7 cli/tsc/dts/lib.deno.ns.d.ts vendored
@@ -2971,6 +2971,10 @@ declare namespace Deno {
   * field from `stat` on Mac/BSD and `ftCreationTime` on Windows. This may
   * not be available on all platforms. */
  birthtime: Date | null;
  /** The last change time of the file. This corresponds to the `ctime`
   * field from `stat` on Mac/BSD and `ChangeTime` on Windows. This may
   * not be available on all platforms. */
  ctime: Date | null;
  /** ID of the device containing the file. */
  dev: number;
  /** Inode number.
@@ -2979,8 +2983,7 @@ declare namespace Deno {
  ino: number | null;
  /** The underlying raw `st_mode` bits that contain the standard Unix
   * permissions for this file/directory.
   *
   * _Linux/Mac OS only._ */
   */
  mode: number | null;
  /** Number of hard links pointing to this file.
   *
102 cli/tsc/dts/lib.deno.unstable.d.ts vendored
@@ -1225,6 +1225,108 @@ declare namespace Deno {
    export {}; // only export exports
  }

  /**
   * @category Telemetry
   * @experimental
   */
  export namespace tracing {
    /**
     * Whether tracing is enabled.
     * @category Telemetry
     * @experimental
     */
    export const enabled: boolean;

    /**
     * Allowed attribute type.
     * @category Telemetry
     * @experimental
     */
    export type AttributeValue = string | number | boolean | bigint;

    /**
     * A tracing span.
     * @category Telemetry
     * @experimental
     */
    export class Span implements Disposable {
      readonly traceId: string;
      readonly spanId: string;
      readonly parentSpanId: string;
      readonly kind: string;
      readonly name: string;
      readonly startTime: number;
      readonly endTime: number;
      readonly status: null | { code: 1 } | { code: 2; message: string };
      readonly attributes: Record<string, AttributeValue>;
      readonly traceFlags: number;

      /**
       * Construct a new Span and enter it as the "current" span.
       */
      constructor(
        name: string,
        kind?: "internal" | "server" | "client" | "producer" | "consumer",
      );

      /**
       * Set an attribute on this span.
       */
      setAttribute(
        name: string,
        value: AttributeValue,
      ): void;

      /**
       * Enter this span as the "current" span.
       */
      enter(): void;

      /**
       * Exit this span as the "current" span and restore the previous one.
       */
      exit(): void;

      /**
       * End this span, and exit it as the "current" span.
       */
      end(): void;

      [Symbol.dispose](): void;

      /**
       * Get the "current" span, if one exists.
       */
      static current(): Span | undefined | null;
    }

    /**
     * A SpanExporter compatible with OpenTelemetry.js
     * https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_sdk_trace_base.SpanExporter.html
     * @category Telemetry
     * @experimental
     */
    export class SpanExporter {}

    /**
     * A ContextManager compatible with OpenTelemetry.js
     * https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_api.ContextManager.html
     * @category Telemetry
     * @experimental
     */
    export class ContextManager {}

    export {}; // only export exports
  }

  /**
   * @category Telemetry
   * @experimental
   */
  export namespace metrics {
    export {}; // only export exports
  }

  export {}; // only export exports
}
@@ -343,31 +343,36 @@ impl TypeCheckingCjsTracker {
    media_type: MediaType,
    code: &Arc<str>,
  ) -> bool {
    if let Some(module_kind) =
      self.cjs_tracker.get_known_kind(specifier, media_type)
    {
      module_kind.is_cjs()
    } else {
      let maybe_is_script = self
        .module_info_cache
        .as_module_analyzer()
        .analyze_sync(specifier, media_type, code)
        .ok()
        .map(|info| info.is_script);
      maybe_is_script
        .and_then(|is_script| {
          self
            .cjs_tracker
            .is_cjs_with_known_is_script(specifier, media_type, is_script)
            .ok()
        })
        .unwrap_or_else(|| {
          self
            .cjs_tracker
            .is_maybe_cjs(specifier, media_type)
            .unwrap_or(false)
        })
    }
    let maybe_is_script = self
      .module_info_cache
      .as_module_analyzer()
      .analyze_sync(specifier, media_type, code)
      .ok()
      .map(|info| info.is_script);
    maybe_is_script
      .and_then(|is_script| {
        self
          .cjs_tracker
          .is_cjs_with_known_is_script(specifier, media_type, is_script)
          .ok()
      })
      .unwrap_or_else(|| {
        self
          .cjs_tracker
          .is_maybe_cjs(specifier, media_type)
          .unwrap_or(false)
      })
  }

  pub fn is_cjs_with_known_is_script(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    is_script: bool,
  ) -> Result<bool, node_resolver::errors::ClosestPkgJsonError> {
    self
      .cjs_tracker
      .is_cjs_with_known_is_script(specifier, media_type, is_script)
  }
}

@@ -627,8 +632,12 @@ fn op_load_inner(
  match module {
    Module::Js(module) => {
      media_type = module.media_type;
      if matches!(media_type, MediaType::Cjs | MediaType::Cts) {
        is_cjs = true;
      if let Some(npm_state) = &state.maybe_npm {
        is_cjs = npm_state.cjs_tracker.is_cjs_with_known_is_script(
          specifier,
          module.media_type,
          module.is_script,
        )?;
      }
      let source = module
        .fast_check_module()
@@ -737,6 +746,7 @@ fn op_resolve_inner(
    "Error converting a string module specifier for \"op_resolve\".",
  )?
};
let referrer_module = state.graph.get(&referrer);
for specifier in args.specifiers {
  if specifier.starts_with("node:") {
    resolved.push((
@@ -752,16 +762,19 @@ fn op_resolve_inner(
    continue;
  }

  let graph = &state.graph;
  let resolved_dep = graph
    .get(&referrer)
  let resolved_dep = referrer_module
    .and_then(|m| m.js())
    .and_then(|m| m.dependencies_prefer_fast_check().get(&specifier))
    .and_then(|d| d.maybe_type.ok().or_else(|| d.maybe_code.ok()));

  let maybe_result = match resolved_dep {
    Some(ResolutionResolved { specifier, .. }) => {
      resolve_graph_specifier_types(specifier, &referrer, state)?
      resolve_graph_specifier_types(
        specifier,
        &referrer,
        referrer_kind,
        state,
      )?
    }
    _ => {
      match resolve_non_graph_specifier_types(
@@ -834,6 +847,7 @@ fn op_resolve_inner(
fn resolve_graph_specifier_types(
  specifier: &ModuleSpecifier,
  referrer: &ModuleSpecifier,
  referrer_kind: NodeModuleKind,
  state: &State,
) -> Result<Option<(ModuleSpecifier, MediaType)>, AnyError> {
  let graph = &state.graph;
@@ -886,6 +900,7 @@ fn resolve_graph_specifier_types(
  &package_folder,
  module.nv_reference.sub_path(),
  Some(referrer),
  referrer_kind,
  NodeResolutionMode::Types,
);
let maybe_url = match res_result {
@@ -965,6 +980,7 @@ fn resolve_non_graph_specifier_types(
  &package_folder,
  npm_req_ref.sub_path(),
  Some(referrer),
  referrer_kind,
  NodeResolutionMode::Types,
);
let maybe_url = match res_result {
@@ -586,7 +586,10 @@ fn generate_pseudo_file(
    wrap_kind,
  }));

  let source = deno_ast::swc::codegen::to_code(&transformed);
  let source = deno_ast::swc::codegen::to_code_with_comments(
    Some(&parsed.comments().as_single_threaded()),
    &transformed,
  );

  log::debug!("{}:\n{}", file.specifier, source);

@@ -1165,6 +1168,33 @@ Deno.test("file:///main.ts$3-6.ts", async ()=>{
      media_type: MediaType::TypeScript,
    }],
  },
  // https://github.com/denoland/deno/issues/26728
  Test {
    input: Input {
      source: r#"
/**
 * ```ts
 * // @ts-expect-error: can only add numbers
 * add('1', '2');
 * ```
 */
export function add(first: number, second: number) {
  return first + second;
}
"#,
      specifier: "file:///main.ts",
    },
    expected: vec![Expected {
      source: r#"import { add } from "file:///main.ts";
Deno.test("file:///main.ts$3-7.ts", async ()=>{
  // @ts-expect-error: can only add numbers
  add('1', '2');
});
"#,
      specifier: "file:///main.ts$3-7.ts",
      media_type: MediaType::TypeScript,
    }],
  },
];

for test in tests {
@@ -1376,6 +1406,31 @@ console.log(Foo);
      media_type: MediaType::TypeScript,
    }],
  },
  // https://github.com/denoland/deno/issues/26728
  Test {
    input: Input {
      source: r#"
/**
 * ```ts
 * // @ts-expect-error: can only add numbers
 * add('1', '2');
 * ```
 */
export function add(first: number, second: number) {
  return first + second;
}
"#,
      specifier: "file:///main.ts",
    },
    expected: vec![Expected {
      source: r#"import { add } from "file:///main.ts";
// @ts-expect-error: can only add numbers
add('1', '2');
"#,
      specifier: "file:///main.ts$3-7.ts",
      media_type: MediaType::TypeScript,
    }],
  },
];

for test in tests {
@@ -30,6 +30,7 @@ use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::ops::otel::OtelConfig;
use deno_runtime::ops::process::NpmProcessStateProviderRc;
use deno_runtime::ops::worker_host::CreateWebWorkerCb;
use deno_runtime::web_worker::WebWorker;
@@ -43,6 +44,7 @@ use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use deno_terminal::colors;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use tokio::select;

@@ -142,6 +144,7 @@ struct SharedWorkerState {
  storage_key_resolver: StorageKeyResolver,
  options: CliMainWorkerOptions,
  subcommand: DenoSubcommand,
  otel_config: Option<OtelConfig>, // `None` means OpenTelemetry is disabled.
}

impl SharedWorkerState {
@@ -405,6 +408,7 @@ impl CliMainWorkerFactory {
  storage_key_resolver: StorageKeyResolver,
  subcommand: DenoSubcommand,
  options: CliMainWorkerOptions,
  otel_config: Option<OtelConfig>,
) -> Self {
  Self {
    shared: Arc::new(SharedWorkerState {
@@ -427,6 +431,7 @@ impl CliMainWorkerFactory {
      storage_key_resolver,
      options,
      subcommand,
      otel_config,
    }),
  }
}
@@ -576,6 +581,7 @@ impl CliMainWorkerFactory {
  mode,
  serve_port: shared.options.serve_port,
  serve_host: shared.options.serve_host.clone(),
  otel_config: shared.otel_config.clone(),
},
extensions: custom_extensions,
startup_snapshot: crate::js::deno_isolate_init(),
@@ -675,6 +681,7 @@ impl CliMainWorkerFactory {
  package_folder,
  sub_path,
  /* referrer */ None,
  NodeModuleKind::Esm,
  NodeResolutionMode::Execution,
)?;
if specifier
@@ -775,6 +782,7 @@ fn create_web_worker_callback(
  mode: WorkerExecutionMode::Worker,
  serve_port: shared.options.serve_port,
  serve_host: shared.options.serve_host.clone(),
  otel_config: shared.otel_config.clone(),
},
extensions: vec![],
startup_snapshot: crate::js::deno_isolate_init(),
@@ -2,7 +2,7 @@

[package]
name = "deno_broadcast_channel"
version = "0.170.0"
version = "0.171.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
2 ext/cache/Cargo.toml vendored
@@ -2,7 +2,7 @@

[package]
name = "deno_cache"
version = "0.108.0"
version = "0.109.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
6 ext/cache/lib.rs vendored
@@ -33,7 +33,9 @@ pub enum CacheError {
}

#[derive(Clone)]
pub struct CreateCache<C: Cache + 'static>(pub Arc<dyn Fn() -> C>);
pub struct CreateCache<C: Cache + 'static>(
  pub Arc<dyn Fn() -> Result<C, CacheError>>,
);

deno_core::extension!(deno_cache,
  deps = [ deno_webidl, deno_web, deno_url, deno_fetch ],
@@ -231,7 +233,7 @@ where
  if let Some(cache) = state.try_borrow::<CA>() {
    Ok(cache.clone())
  } else if let Some(create_cache) = state.try_borrow::<CreateCache<CA>>() {
    let cache = create_cache.0();
    let cache = create_cache.0()?;
    state.put(cache);
    Ok(state.borrow::<CA>().clone())
  } else {
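`CreateCache` above now holds a fallible constructor, so cache backends can surface setup errors instead of panicking inside the extension. A hedged sketch of registering such a factory, assuming the public `deno_cache` paths shown in this diff (the surrounding helper is illustrative):

```rust
use std::path::PathBuf;
use std::sync::Arc;

// Illustrative: a fallible factory the extension calls lazily the first
// time the Cache API is used; errors propagate as `CacheError`.
fn make_create_cache(
  dir: PathBuf,
) -> deno_cache::CreateCache<deno_cache::SqliteBackedCache> {
  deno_cache::CreateCache(Arc::new(move || {
    deno_cache::SqliteBackedCache::new(dir.clone())
  }))
}
```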
23 ext/cache/sqlite.rs vendored
@ -42,7 +42,7 @@ pub struct SqliteBackedCache {
|
|||
}
|
||||
|
||||
impl SqliteBackedCache {
|
||||
pub fn new(cache_storage_dir: PathBuf) -> Self {
|
||||
pub fn new(cache_storage_dir: PathBuf) -> Result<Self, CacheError> {
|
||||
{
|
||||
std::fs::create_dir_all(&cache_storage_dir)
|
||||
.expect("failed to create cache dir");
|
||||
|
@ -57,18 +57,14 @@ impl SqliteBackedCache {
|
|||
PRAGMA synchronous=NORMAL;
|
||||
PRAGMA optimize;
|
||||
";
|
||||
connection
|
||||
.execute_batch(initial_pragmas)
|
||||
.expect("failed to execute pragmas");
|
||||
connection
|
||||
.execute(
|
||||
"CREATE TABLE IF NOT EXISTS cache_storage (
|
||||
connection.execute_batch(initial_pragmas)?;
|
||||
connection.execute(
|
||||
"CREATE TABLE IF NOT EXISTS cache_storage (
|
||||
id INTEGER PRIMARY KEY,
|
||||
cache_name TEXT NOT NULL UNIQUE
|
||||
)",
|
||||
(),
|
||||
)
|
||||
.expect("failed to create cache_storage table");
|
||||
(),
|
||||
)?;
|
||||
connection
|
||||
.execute(
|
||||
"CREATE TABLE IF NOT EXISTS request_response_list (
|
||||
|
@ -86,12 +82,11 @@ impl SqliteBackedCache {
|
|||
UNIQUE (cache_id, request_url)
|
||||
)",
|
||||
(),
|
||||
)
|
||||
.expect("failed to create request_response_list table");
|
||||
SqliteBackedCache {
|
||||
)?;
|
||||
Ok(SqliteBackedCache {
|
||||
connection: Arc::new(Mutex::new(connection)),
|
||||
cache_storage_dir,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
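With the constructor now returning `Result`, each `.expect(...)` on the SQLite setup path becomes a `?`. A minimal sketch of that shape, assuming a `CacheError` with a `From<rusqlite::Error>` impl (the real error type lives in `deno_cache`):

    use rusqlite::Connection;

    #[derive(Debug)]
    enum CacheError {
      Sqlite(rusqlite::Error),
    }

    impl From<rusqlite::Error> for CacheError {
      fn from(e: rusqlite::Error) -> Self {
        CacheError::Sqlite(e)
      }
    }

    fn open_cache_db(path: &str) -> Result<Connection, CacheError> {
      let connection = Connection::open(path)?;
      // `?` propagates the error to the caller instead of panicking
      connection.execute_batch("PRAGMA journal_mode=WAL; PRAGMA synchronous=NORMAL;")?;
      connection.execute(
        "CREATE TABLE IF NOT EXISTS cache_storage (
           id INTEGER PRIMARY KEY,
           cache_name TEXT NOT NULL UNIQUE
         )",
        (),
      )?;
      Ok(connection)
    }
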
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_canvas"
|
||||
version = "0.45.0"
|
||||
version = "0.46.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@@ -2,7 +2,7 @@

 [package]
 name = "deno_console"
-version = "0.176.0"
+version = "0.177.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -2,7 +2,7 @@

 [package]
 name = "deno_cron"
-version = "0.56.0"
+version = "0.57.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -2,7 +2,7 @@

 [package]
 name = "deno_crypto"
-version = "0.190.0"
+version = "0.191.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -269,12 +269,6 @@ class Request {
   /** @type {AbortSignal} */
   get [_signal]() {
     const signal = this[_signalCache];
-    // This signal not been created yet, and the request is still in progress
-    if (signal === undefined) {
-      const signal = newSignal();
-      this[_signalCache] = signal;
-      return signal;
-    }
     // This signal has not been created yet, but the request has already completed
     if (signal === false) {
       const signal = newSignal();
@@ -282,6 +276,18 @@ class Request {
       signal[signalAbort](signalAbortError);
       return signal;
     }
+
+    // This signal not been created yet, and the request is still in progress
+    if (signal === undefined) {
+      const signal = newSignal();
+      this[_signalCache] = signal;
+      this[_request].onCancel?.(() => {
+        signal[signalAbort](signalAbortError);
+      });
+
+      return signal;
+    }
+
     return signal;
   }
   get [_mimeType]() {
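The reordered `get [_signal]` above distinguishes three cache states: a signal already created, `false` meaning the request already completed, and `undefined` meaning still in flight (which now also wires up an `onCancel` hook). A rough Rust model of that tri-state, with hypothetical names, just to make the state machine explicit:

    #[derive(Clone)]
    struct AbortSignal {
      aborted: bool,
    }

    enum SignalCache {
      /// request still in progress, signal not created yet (`undefined` in the JS)
      NotCreated,
      /// request already completed, signal not created yet (`false` in the JS)
      CompletedNotCreated,
      /// signal was created and cached
      Created(AbortSignal),
    }

    fn get_signal(cache: &mut SignalCache) -> AbortSignal {
      match cache {
        SignalCache::Created(s) => s.clone(),
        SignalCache::CompletedNotCreated => {
          // completed request: hand out an already-aborted signal
          AbortSignal { aborted: true }
        }
        SignalCache::NotCreated => {
          // in-flight request: create, cache, and (in the real code) register onCancel
          let s = AbortSignal { aborted: false };
          *cache = SignalCache::Created(s.clone());
          s
        }
      }
    }
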
@@ -2,7 +2,7 @@

 [package]
 name = "deno_fetch"
-version = "0.200.0"
+version = "0.201.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -2,7 +2,7 @@

 [package]
 name = "deno_ffi"
-version = "0.163.0"
+version = "0.164.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
|
|||
mtime: "date",
|
||||
atime: "date",
|
||||
birthtime: "date",
|
||||
ctime: "date",
|
||||
dev: "u64",
|
||||
ino: "?u64",
|
||||
mode: "?u64",
|
||||
mode: "u64",
|
||||
nlink: "?u64",
|
||||
uid: "?u64",
|
||||
gid: "?u64",
|
||||
|
@ -377,9 +378,10 @@ function parseFileInfo(response) {
|
|||
birthtime: response.birthtimeSet === true
|
||||
? new Date(response.birthtime)
|
||||
: null,
|
||||
ctime: response.ctimeSet === true ? new Date(response.ctime) : null,
|
||||
dev: response.dev,
|
||||
mode: response.mode,
|
||||
ino: unix ? response.ino : null,
|
||||
mode: unix ? response.mode : null,
|
||||
nlink: unix ? response.nlink : null,
|
||||
uid: unix ? response.uid : null,
|
||||
gid: unix ? response.gid : null,
|
||||
|
|
|
@@ -2,7 +2,7 @@

 [package]
 name = "deno_fs"
-version = "0.86.0"
+version = "0.87.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -229,6 +229,7 @@ impl FileSystem for InMemoryFs {
       mtime: None,
       atime: None,
       birthtime: None,
+      ctime: None,
       dev: 0,
       ino: 0,
       mode: 0,
@@ -251,6 +252,7 @@ impl FileSystem for InMemoryFs {
       mtime: None,
       atime: None,
       birthtime: None,
+      ctime: None,
       dev: 0,
       ino: 0,
       mode: 0,
@@ -1795,6 +1795,8 @@ create_struct_writer! {
     atime: u64,
     birthtime_set: bool,
     birthtime: u64,
+    ctime_set: bool,
+    ctime: u64,
     // Following are only valid under Unix.
     dev: u64,
     ino: u64,
@@ -1826,6 +1828,8 @@ impl From<FsStat> for SerializableStat {
       atime: stat.atime.unwrap_or(0),
       birthtime_set: stat.birthtime.is_some(),
       birthtime: stat.birthtime.unwrap_or(0),
+      ctime_set: stat.ctime.is_some(),
+      ctime: stat.ctime.unwrap_or(0),

       dev: stat.dev,
       ino: stat.ino,
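The `*_set: bool` plus zeroed value pair is how the stat serializer flattens an `Option<u64>` timestamp across the op boundary. A small standalone sketch of the round trip, using hypothetical helper names:

    fn encode_opt(v: Option<u64>) -> (bool, u64) {
      (v.is_some(), v.unwrap_or(0))
    }

    fn decode_opt(set: bool, value: u64) -> Option<u64> {
      if set { Some(value) } else { None }
    }

    fn main() {
      assert_eq!(encode_opt(Some(42)), (true, 42));
      assert_eq!(encode_opt(None), (false, 0));
      assert_eq!(decode_opt(true, 42), Some(42));
      assert_eq!(decode_opt(false, 0), None);
    }
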
@@ -821,24 +821,46 @@ fn stat_extra(
     Ok(info.dwVolumeSerialNumber as u64)
   }

+  const WINDOWS_TICK: i64 = 10_000; // 100-nanosecond intervals in a millisecond
+  const SEC_TO_UNIX_EPOCH: i64 = 11_644_473_600; // Seconds between Windows epoch and Unix epoch
+
+  fn windows_time_to_unix_time_msec(windows_time: &i64) -> i64 {
+    let milliseconds_since_windows_epoch = windows_time / WINDOWS_TICK;
+    milliseconds_since_windows_epoch - SEC_TO_UNIX_EPOCH * 1000
+  }
+
   use windows_sys::Wdk::Storage::FileSystem::FILE_ALL_INFORMATION;
   use windows_sys::Win32::Foundation::NTSTATUS;

   unsafe fn query_file_information(
     handle: winapi::shared::ntdef::HANDLE,
-  ) -> std::io::Result<FILE_ALL_INFORMATION> {
+  ) -> Result<FILE_ALL_INFORMATION, NTSTATUS> {
     use windows_sys::Wdk::Storage::FileSystem::NtQueryInformationFile;
+    use windows_sys::Win32::Foundation::RtlNtStatusToDosError;
+    use windows_sys::Win32::Foundation::ERROR_MORE_DATA;
     use windows_sys::Win32::System::IO::IO_STATUS_BLOCK;

     let mut info = std::mem::MaybeUninit::<FILE_ALL_INFORMATION>::zeroed();
+    let mut io_status_block =
+      std::mem::MaybeUninit::<IO_STATUS_BLOCK>::zeroed();
     let status = NtQueryInformationFile(
       handle as _,
-      std::ptr::null_mut(),
+      io_status_block.as_mut_ptr(),
       info.as_mut_ptr() as *mut _,
       std::mem::size_of::<FILE_ALL_INFORMATION>() as _,
       18, /* FileAllInformation */
     );

     if status < 0 {
-      return Err(std::io::Error::last_os_error());
+      let converted_status = RtlNtStatusToDosError(status);
+
+      // If error more data is returned, then it means that the buffer is too small to get full filename information
+      // to have that we should retry. However, since we only use BasicInformation and StandardInformation, it is fine to ignore it
+      // since struct is populated with other data anyway.
+      // https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/nf-ntifs-ntqueryinformationfile#remarks
+      if converted_status != ERROR_MORE_DATA {
+        return Err(converted_status as NTSTATUS);
+      }
     }

     Ok(info.assume_init())
@@ -862,10 +884,13 @@ fn stat_extra(
   }

   let result = get_dev(file_handle);
-  CloseHandle(file_handle);
   fsstat.dev = result?;

+  if let Ok(file_info) = query_file_information(file_handle) {
+    fsstat.ctime = Some(windows_time_to_unix_time_msec(
+      &file_info.BasicInformation.ChangeTime,
+    ) as u64);
+
+    if file_info.BasicInformation.FileAttributes
+      & winapi::um::winnt::FILE_ATTRIBUTE_REPARSE_POINT
+      != 0
@@ -898,6 +923,7 @@ fn stat_extra(
     }
   }

+  CloseHandle(file_handle);
   Ok(())
 }
 }
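The conversion above divides a Windows FILETIME (100-nanosecond ticks since 1601-01-01) by 10,000 to get milliseconds, then subtracts the 11,644,473,600-second offset between the Windows and Unix epochs. A standalone check against a known constant (the Unix epoch expressed as a FILETIME):

    const WINDOWS_TICK: i64 = 10_000;
    const SEC_TO_UNIX_EPOCH: i64 = 11_644_473_600;

    fn windows_time_to_unix_time_msec(windows_time: &i64) -> i64 {
      let milliseconds_since_windows_epoch = windows_time / WINDOWS_TICK;
      milliseconds_since_windows_epoch - SEC_TO_UNIX_EPOCH * 1000
    }

    fn main() {
      // 1970-01-01T00:00:00Z as a FILETIME is 116_444_736_000_000_000 ticks.
      assert_eq!(windows_time_to_unix_time_msec(&116_444_736_000_000_000), 0);
      // One second later is 10_000_000 more ticks, i.e. 1000 ms.
      assert_eq!(windows_time_to_unix_time_msec(&116_444_736_010_000_000), 1000);
    }
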
@@ -14,6 +14,7 @@ import {
   op_http_get_request_headers,
   op_http_get_request_method_and_url,
   op_http_read_request_body,
+  op_http_request_on_cancel,
   op_http_serve,
   op_http_serve_on,
   op_http_set_promise_complete,
@@ -41,6 +42,10 @@ const {
   Uint8Array,
   Promise,
 } = primordials;
+const {
+  getAsyncContext,
+  setAsyncContext,
+} = core;

 import { InnerBody } from "ext:deno_fetch/22_body.js";
 import { Event } from "ext:deno_web/02_event.js";
@@ -373,6 +378,18 @@ class InnerRequest {
   get external() {
     return this.#external;
   }
+
+  onCancel(callback) {
+    if (this.#external === null) {
+      callback();
+      return;
+    }
+
+    PromisePrototypeThen(
+      op_http_request_on_cancel(this.#external),
+      callback,
+    );
+  }
 }

 class CallbackContext {
@@ -384,8 +401,10 @@ class CallbackContext {
   /** @type {Promise<void> | undefined} */
   closing;
   listener;
+  asyncContext;

   constructor(signal, args, listener) {
+    this.asyncContext = getAsyncContext();
     // The abort signal triggers a non-graceful shutdown
     signal?.addEventListener(
       "abort",
@@ -495,82 +514,89 @@ function fastSyncResponseOrStream(
  */
 function mapToCallback(context, callback, onError) {
   return async function (req) {
-    // Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback
-    // 500 error.
-    let innerRequest;
-    let response;
-    try {
-      innerRequest = new InnerRequest(req, context);
-      const request = fromInnerRequest(innerRequest, "immutable");
-      innerRequest.request = request;
-      response = await callback(
-        request,
-        new ServeHandlerInfo(innerRequest),
-      );
-
-      // Throwing Error if the handler return value is not a Response class
-      if (!ObjectPrototypeIsPrototypeOf(ResponsePrototype, response)) {
-        throw new TypeError(
-          "Return value from serve handler must be a response or a promise resolving to a response",
-        );
-      }
-
-      if (response.type === "error") {
-        throw new TypeError(
-          "Return value from serve handler must not be an error response (like Response.error())",
-        );
-      }
-
-      if (response.bodyUsed) {
-        throw new TypeError(
-          "The body of the Response returned from the serve handler has already been consumed",
-        );
-      }
-    } catch (error) {
-      try {
-        response = await onError(error);
-        if (!ObjectPrototypeIsPrototypeOf(ResponsePrototype, response)) {
-          throw new TypeError(
-            "Return value from onError handler must be a response or a promise resolving to a response",
-          );
-        }
-      } catch (error) {
-        // deno-lint-ignore no-console
-        console.error("Exception in onError while handling exception", error);
-        response = internalServerError();
-      }
-    }
-    const inner = toInnerResponse(response);
-    if (innerRequest?.[_upgraded]) {
-      // We're done here as the connection has been upgraded during the callback and no longer requires servicing.
-      if (response !== UPGRADE_RESPONSE_SENTINEL) {
-        // deno-lint-ignore no-console
-        console.error("Upgrade response was not returned from callback");
-        context.close();
-      }
-      innerRequest?.[_upgraded]();
-      return;
-    }
-
-    // Did everything shut down while we were waiting?
-    if (context.closed) {
-      // We're shutting down, so this status shouldn't make it back to the client but "Service Unavailable" seems appropriate
-      innerRequest?.close();
-      op_http_set_promise_complete(req, 503);
-      return;
-    }
-
-    const status = inner.status;
-    const headers = inner.headerList;
-    if (headers && headers.length > 0) {
-      if (headers.length == 1) {
-        op_http_set_response_header(req, headers[0][0], headers[0][1]);
-      } else {
-        op_http_set_response_headers(req, headers);
-      }
-    }
-
-    fastSyncResponseOrStream(req, inner.body, status, innerRequest);
+    const asyncContext = getAsyncContext();
+    setAsyncContext(context.asyncContext);
+
+    try {
+      // Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback
+      // 500 error.
+      let innerRequest;
+      let response;
+      try {
+        innerRequest = new InnerRequest(req, context);
+        const request = fromInnerRequest(innerRequest, "immutable");
+        innerRequest.request = request;
+        response = await callback(
+          request,
+          new ServeHandlerInfo(innerRequest),
+        );
+
+        // Throwing Error if the handler return value is not a Response class
+        if (!ObjectPrototypeIsPrototypeOf(ResponsePrototype, response)) {
+          throw new TypeError(
+            "Return value from serve handler must be a response or a promise resolving to a response",
+          );
+        }
+
+        if (response.type === "error") {
+          throw new TypeError(
+            "Return value from serve handler must not be an error response (like Response.error())",
+          );
+        }
+
+        if (response.bodyUsed) {
+          throw new TypeError(
+            "The body of the Response returned from the serve handler has already been consumed",
+          );
+        }
+      } catch (error) {
+        try {
+          response = await onError(error);
+          if (!ObjectPrototypeIsPrototypeOf(ResponsePrototype, response)) {
+            throw new TypeError(
+              "Return value from onError handler must be a response or a promise resolving to a response",
+            );
+          }
+        } catch (error) {
+          // deno-lint-ignore no-console
+          console.error("Exception in onError while handling exception", error);
+          response = internalServerError();
+        }
+      }
+      const inner = toInnerResponse(response);
+      if (innerRequest?.[_upgraded]) {
+        // We're done here as the connection has been upgraded during the callback and no longer requires servicing.
+        if (response !== UPGRADE_RESPONSE_SENTINEL) {
+          // deno-lint-ignore no-console
+          console.error("Upgrade response was not returned from callback");
+          context.close();
+        }
+        innerRequest?.[_upgraded]();
+        return;
+      }
+
+      // Did everything shut down while we were waiting?
+      if (context.closed) {
+        // We're shutting down, so this status shouldn't make it back to the client but "Service Unavailable" seems appropriate
+        innerRequest?.close();
+        op_http_set_promise_complete(req, 503);
+        return;
+      }
+
+      const status = inner.status;
+      const headers = inner.headerList;
+      if (headers && headers.length > 0) {
+        if (headers.length == 1) {
+          op_http_set_response_header(req, headers[0][0], headers[0][1]);
+        } else {
+          op_http_set_response_headers(req, headers);
+        }
+      }
+
+      fastSyncResponseOrStream(req, inner.body, status, innerRequest);
+    } finally {
+      setAsyncContext(asyncContext);
+    }
   };
 }
@@ -2,7 +2,7 @@

 [package]
 name = "deno_http"
-version = "0.174.0"
+version = "0.175.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -564,6 +564,7 @@ fn is_request_compressible(
   match accept_encoding.to_str() {
     // Firefox and Chrome send this -- no need to parse
     Ok("gzip, deflate, br") => return Compression::Brotli,
+    Ok("gzip, deflate, br, zstd") => return Compression::Brotli,
     Ok("gzip") => return Compression::GZip,
     Ok("br") => return Compression::Brotli,
     _ => (),
@@ -700,6 +701,27 @@ fn set_response(
   http.complete();
 }

+#[op2(fast)]
+pub fn op_http_get_request_cancelled(external: *const c_void) -> bool {
+  let http =
+    // SAFETY: op is called with external.
+    unsafe { clone_external!(external, "op_http_get_request_cancelled") };
+  http.cancelled()
+}
+
+#[op2(async)]
+pub async fn op_http_request_on_cancel(external: *const c_void) {
+  let http =
+    // SAFETY: op is called with external.
+    unsafe { clone_external!(external, "op_http_request_on_cancel") };
+  let (tx, rx) = tokio::sync::oneshot::channel();
+
+  http.on_cancel(tx);
+  drop(http);
+
+  rx.await.ok();
+}
+
 /// Returned promise resolves when body streaming finishes.
 /// Call [`op_http_close_after_finish`] when done with the external.
 #[op2(async)]
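`op_http_request_on_cancel` parks a oneshot receiver until the record's close path fires the sender, which is how the JS-side `onCancel` callback gets woken. A minimal tokio sketch of that handshake, independent of the real HTTP types:

    use tokio::sync::oneshot;

    #[tokio::main]
    async fn main() {
      let (tx, rx) = oneshot::channel::<()>();

      // stands in for the op: resolve once the request is cancelled
      let waiter = tokio::spawn(async move {
        // Err(RecvError) if the sender is dropped; either way we just wake up
        rx.await.ok();
        println!("request cancelled");
      });

      // stands in for HttpRecord::close() sending on closed_channel
      let _ = tx.send(());
      waiter.await.unwrap();
    }
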
@@ -112,7 +112,9 @@ deno_core::extension!(
     http_next::op_http_close_after_finish,
     http_next::op_http_get_request_header,
     http_next::op_http_get_request_headers,
+    http_next::op_http_request_on_cancel,
     http_next::op_http_get_request_method_and_url<HTTP>,
+    http_next::op_http_get_request_cancelled,
     http_next::op_http_read_request_body,
     http_next::op_http_serve_on<HTTP>,
     http_next::op_http_serve<HTTP>,
@@ -27,6 +27,7 @@ use std::rc::Rc;
 use std::task::Context;
 use std::task::Poll;
 use std::task::Waker;
+use tokio::sync::oneshot;

 pub type Request = hyper::Request<Incoming>;
 pub type Response = hyper::Response<HttpRecordResponse>;
@@ -211,6 +212,7 @@ pub struct UpgradeUnavailableError;

 struct HttpRecordInner {
   server_state: SignallingRc<HttpServerState>,
+  closed_channel: Option<oneshot::Sender<()>>,
   request_info: HttpConnectionProperties,
   request_parts: http::request::Parts,
   request_body: Option<RequestBodyState>,
@@ -276,6 +278,7 @@ impl HttpRecord {
       response_body_finished: false,
       response_body_waker: None,
       trailers: None,
+      closed_channel: None,
       been_dropped: false,
       finished: false,
       needs_close_after_finish: false,
@@ -312,6 +315,10 @@ impl HttpRecord {
     RefMut::map(self.self_mut(), |inner| &mut inner.needs_close_after_finish)
   }

+  pub fn on_cancel(&self, sender: oneshot::Sender<()>) {
+    self.self_mut().closed_channel = Some(sender);
+  }
+
   fn recycle(self: Rc<Self>) {
     assert!(
       Rc::strong_count(&self) == 1,
@@ -390,6 +397,9 @@ impl HttpRecord {
     inner.been_dropped = true;
     // The request body might include actual resources.
     inner.request_body.take();
+    if let Some(closed_channel) = inner.closed_channel.take() {
+      let _ = closed_channel.send(());
+    }
   }

   /// Complete this record, potentially expunging it if it is fully complete (ie: cancelled as well).
@@ -2,7 +2,7 @@

 [package]
 name = "deno_io"
-version = "0.86.0"
+version = "0.87.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
12 ext/io/fs.rs

@@ -94,6 +94,7 @@ pub struct FsStat {
   pub mtime: Option<u64>,
   pub atime: Option<u64>,
   pub birthtime: Option<u64>,
+  pub ctime: Option<u64>,

   pub dev: u64,
   pub ino: u64,
@@ -153,6 +154,16 @@ impl FsStat {
     }
   }

+  #[inline(always)]
+  fn get_ctime(ctime_or_0: i64) -> Option<u64> {
+    if ctime_or_0 > 0 {
+      // ctime return seconds since epoch, but we need milliseconds
+      return Some(ctime_or_0 as u64 * 1000);
+    }
+
+    None
+  }
+
   Self {
     is_file: metadata.is_file(),
     is_directory: metadata.is_dir(),
@@ -162,6 +173,7 @@ impl FsStat {
     mtime: to_msec(metadata.modified()),
     atime: to_msec(metadata.accessed()),
     birthtime: to_msec(metadata.created()),
+    ctime: get_ctime(unix_or_zero!(ctime)),

     dev: unix_or_zero!(dev),
     ino: unix_or_zero!(ino),
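`get_ctime` treats zero (the non-Unix placeholder produced by `unix_or_zero!`) as "absent" and otherwise scales the Unix `st_ctime` seconds up to the millisecond resolution the rest of `FsStat` uses. A standalone check of that behavior:

    fn get_ctime(ctime_or_0: i64) -> Option<u64> {
      if ctime_or_0 > 0 {
        // seconds since the epoch, converted to milliseconds
        return Some(ctime_or_0 as u64 * 1000);
      }
      None
    }

    fn main() {
      assert_eq!(get_ctime(0), None); // non-Unix platforms report 0
      assert_eq!(get_ctime(1_700_000_000), Some(1_700_000_000_000));
    }
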
@@ -2,7 +2,7 @@

 [package]
 name = "deno_kv"
-version = "0.84.0"
+version = "0.85.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -1,16 +1,17 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

+#[derive(Clone, Copy, Debug)]
 pub struct KvConfig {
-  pub(crate) max_write_key_size_bytes: usize,
-  pub(crate) max_read_key_size_bytes: usize,
-  pub(crate) max_value_size_bytes: usize,
-  pub(crate) max_read_ranges: usize,
-  pub(crate) max_read_entries: usize,
-  pub(crate) max_checks: usize,
-  pub(crate) max_mutations: usize,
-  pub(crate) max_watched_keys: usize,
-  pub(crate) max_total_mutation_size_bytes: usize,
-  pub(crate) max_total_key_size_bytes: usize,
+  pub max_write_key_size_bytes: usize,
+  pub max_read_key_size_bytes: usize,
+  pub max_value_size_bytes: usize,
+  pub max_read_ranges: usize,
+  pub max_read_entries: usize,
+  pub max_checks: usize,
+  pub max_mutations: usize,
+  pub max_watched_keys: usize,
+  pub max_total_mutation_size_bytes: usize,
+  pub max_total_key_size_bytes: usize,
 }

 impl KvConfig {
@@ -2,7 +2,7 @@

 [package]
 name = "deno_napi"
-version = "0.107.0"
+version = "0.108.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -2,7 +2,7 @@

 [package]
 name = "napi_sym"
-version = "0.106.0"
+version = "0.107.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -2,7 +2,7 @@

 [package]
 name = "deno_net"
-version = "0.168.0"
+version = "0.169.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -17,11 +17,11 @@ path = "lib.rs"
 deno_core.workspace = true
 deno_permissions.workspace = true
 deno_tls.workspace = true
+hickory-proto = "0.24"
+hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
 pin-project.workspace = true
 rustls-tokio-stream.workspace = true
 serde.workspace = true
 socket2.workspace = true
 thiserror.workspace = true
 tokio.workspace = true
-trust-dns-proto = "0.23"
-trust-dns-resolver = { version = "0.23", features = ["tokio-runtime", "serde-config"] }
@@ -18,6 +18,16 @@ use deno_core::OpState;
 use deno_core::RcRef;
 use deno_core::Resource;
 use deno_core::ResourceId;
+use hickory_proto::rr::rdata::caa::Value;
+use hickory_proto::rr::record_data::RData;
+use hickory_proto::rr::record_type::RecordType;
+use hickory_resolver::config::NameServerConfigGroup;
+use hickory_resolver::config::ResolverConfig;
+use hickory_resolver::config::ResolverOpts;
+use hickory_resolver::error::ResolveError;
+use hickory_resolver::error::ResolveErrorKind;
+use hickory_resolver::system_conf;
+use hickory_resolver::AsyncResolver;
 use serde::Deserialize;
 use serde::Serialize;
 use socket2::Domain;
@@ -33,16 +43,6 @@ use std::rc::Rc;
 use std::str::FromStr;
 use tokio::net::TcpStream;
 use tokio::net::UdpSocket;
-use trust_dns_proto::rr::rdata::caa::Value;
-use trust_dns_proto::rr::record_data::RData;
-use trust_dns_proto::rr::record_type::RecordType;
-use trust_dns_resolver::config::NameServerConfigGroup;
-use trust_dns_resolver::config::ResolverConfig;
-use trust_dns_resolver::config::ResolverOpts;
-use trust_dns_resolver::error::ResolveError;
-use trust_dns_resolver::error::ResolveErrorKind;
-use trust_dns_resolver::system_conf;
-use trust_dns_resolver::AsyncResolver;

 #[derive(Serialize, Clone, Debug)]
 #[serde(rename_all = "camelCase")]
@@ -828,6 +828,21 @@ mod tests {
   use deno_core::JsRuntime;
   use deno_core::RuntimeOptions;
   use deno_permissions::PermissionCheckError;
+  use hickory_proto::rr::rdata::a::A;
+  use hickory_proto::rr::rdata::aaaa::AAAA;
+  use hickory_proto::rr::rdata::caa::KeyValue;
+  use hickory_proto::rr::rdata::caa::CAA;
+  use hickory_proto::rr::rdata::mx::MX;
+  use hickory_proto::rr::rdata::name::ANAME;
+  use hickory_proto::rr::rdata::name::CNAME;
+  use hickory_proto::rr::rdata::name::NS;
+  use hickory_proto::rr::rdata::name::PTR;
+  use hickory_proto::rr::rdata::naptr::NAPTR;
+  use hickory_proto::rr::rdata::srv::SRV;
+  use hickory_proto::rr::rdata::txt::TXT;
+  use hickory_proto::rr::rdata::SOA;
+  use hickory_proto::rr::record_data::RData;
+  use hickory_proto::rr::Name;
   use socket2::SockRef;
   use std::net::Ipv4Addr;
   use std::net::Ipv6Addr;
@@ -836,21 +851,6 @@ mod tests {
   use std::path::PathBuf;
   use std::sync::Arc;
   use std::sync::Mutex;
-  use trust_dns_proto::rr::rdata::a::A;
-  use trust_dns_proto::rr::rdata::aaaa::AAAA;
-  use trust_dns_proto::rr::rdata::caa::KeyValue;
-  use trust_dns_proto::rr::rdata::caa::CAA;
-  use trust_dns_proto::rr::rdata::mx::MX;
-  use trust_dns_proto::rr::rdata::name::ANAME;
-  use trust_dns_proto::rr::rdata::name::CNAME;
-  use trust_dns_proto::rr::rdata::name::NS;
-  use trust_dns_proto::rr::rdata::name::PTR;
-  use trust_dns_proto::rr::rdata::naptr::NAPTR;
-  use trust_dns_proto::rr::rdata::srv::SRV;
-  use trust_dns_proto::rr::rdata::txt::TXT;
-  use trust_dns_proto::rr::rdata::SOA;
-  use trust_dns_proto::rr::record_data::RData;
-  use trust_dns_proto::rr::Name;

   #[test]
   fn rdata_to_return_record_a() {
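The dependency change is a rename, not a rewrite: trust-dns continued development as hickory-dns, and the import swap above maps `trust_dns_proto`/`trust_dns_resolver` 0.23 one-to-one onto `hickory_proto`/`hickory_resolver` 0.24. A hedged sketch of the smallest possible migration shim, assuming only the drop-in compatibility the import swap itself implies:

    // Old call sites can keep compiling under the new crate via a rename-import:
    use hickory_resolver as trust_dns_resolver;

    fn main() {
      // the path below now resolves into hickory_resolver
      let _opts = trust_dns_resolver::config::ResolverOpts::default();
    }
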
@@ -2,7 +2,7 @@

 [package]
 name = "deno_node"
-version = "0.113.0"
+version = "0.114.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -14,6 +14,7 @@ use deno_core::url::Url;
 #[allow(unused_imports)]
 use deno_core::v8;
 use deno_core::v8::ExternalReference;
+use node_resolver::errors::ClosestPkgJsonError;
 use node_resolver::NpmResolverRc;
 use once_cell::sync::Lazy;

@@ -157,6 +158,10 @@ pub trait NodeRequireLoader {
   ) -> Result<Cow<'a, Path>, AnyError>;

   fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError>;
+
+  /// Get if the module kind is maybe CJS and loading should determine
+  /// if its CJS or ESM.
+  fn is_maybe_cjs(&self, specifier: &Url) -> Result<bool, ClosestPkgJsonError>;
 }

 pub static NODE_ENV_VAR_ALLOWLIST: Lazy<HashSet<String>> = Lazy::new(|| {
@@ -345,6 +350,7 @@ deno_core::extension!(deno_node,
     ops::zlib::op_zlib_write,
     ops::zlib::op_zlib_init,
     ops::zlib::op_zlib_reset,
+    ops::zlib::op_zlib_crc32,
     ops::zlib::brotli::op_brotli_compress,
     ops::zlib::brotli::op_brotli_compress_async,
     ops::zlib::brotli::op_create_brotli_compress,
@@ -384,6 +390,7 @@ deno_core::extension!(deno_node,
     ops::require::op_require_proxy_path,
     ops::require::op_require_is_deno_dir_package,
     ops::require::op_require_resolve_deno_dir,
+    ops::require::op_require_is_maybe_cjs,
     ops::require::op_require_is_request_relative,
     ops::require::op_require_resolve_lookup_paths,
     ops::require::op_require_try_self_parent_path<P>,
@@ -397,7 +404,6 @@ deno_core::extension!(deno_node,
     ops::require::op_require_read_file<P>,
     ops::require::op_require_as_file_path,
     ops::require::op_require_resolve_exports<P>,
     ops::require::op_require_read_closest_package_json<P>,
     ops::require::op_require_read_package_scope<P>,
     ops::require::op_require_package_imports_resolve<P>,
     ops::require::op_require_break_on_next_statement,
@@ -4,9 +4,6 @@ use aes::cipher::block_padding::Pkcs7;
 use aes::cipher::BlockDecryptMut;
 use aes::cipher::BlockEncryptMut;
 use aes::cipher::KeyIvInit;
-use deno_core::error::range_error;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::Resource;
 use digest::generic_array::GenericArray;
 use digest::KeyInit;
@@ -50,8 +47,22 @@ pub struct DecipherContext {
   decipher: Rc<RefCell<Decipher>>,
 }

+#[derive(Debug, thiserror::Error)]
+pub enum CipherContextError {
+  #[error("Cipher context is already in use")]
+  ContextInUse,
+  #[error("{0}")]
+  Resource(deno_core::error::AnyError),
+  #[error(transparent)]
+  Cipher(#[from] CipherError),
+}
+
 impl CipherContext {
-  pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
+  pub fn new(
+    algorithm: &str,
+    key: &[u8],
+    iv: &[u8],
+  ) -> Result<Self, CipherContextError> {
     Ok(Self {
       cipher: Rc::new(RefCell::new(Cipher::new(algorithm, key, iv)?)),
     })
@@ -74,16 +85,31 @@ impl CipherContext {
     auto_pad: bool,
     input: &[u8],
     output: &mut [u8],
-  ) -> Result<Tag, AnyError> {
+  ) -> Result<Tag, CipherContextError> {
     Rc::try_unwrap(self.cipher)
-      .map_err(|_| type_error("Cipher context is already in use"))?
+      .map_err(|_| CipherContextError::ContextInUse)?
       .into_inner()
       .r#final(auto_pad, input, output)
+      .map_err(Into::into)
   }
 }

+#[derive(Debug, thiserror::Error)]
+pub enum DecipherContextError {
+  #[error("Decipher context is already in use")]
+  ContextInUse,
+  #[error("{0}")]
+  Resource(deno_core::error::AnyError),
+  #[error(transparent)]
+  Decipher(#[from] DecipherError),
+}
+
 impl DecipherContext {
-  pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
+  pub fn new(
+    algorithm: &str,
+    key: &[u8],
+    iv: &[u8],
+  ) -> Result<Self, DecipherContextError> {
     Ok(Self {
       decipher: Rc::new(RefCell::new(Decipher::new(algorithm, key, iv)?)),
     })
@@ -103,11 +129,12 @@ impl DecipherContext {
     input: &[u8],
     output: &mut [u8],
     auth_tag: &[u8],
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), DecipherContextError> {
     Rc::try_unwrap(self.decipher)
-      .map_err(|_| type_error("Decipher context is already in use"))?
+      .map_err(|_| DecipherContextError::ContextInUse)?
       .into_inner()
       .r#final(auto_pad, input, output, auth_tag)
+      .map_err(Into::into)
   }
 }

@@ -123,12 +150,26 @@ impl Resource for DecipherContext {
   }
 }

+#[derive(Debug, thiserror::Error)]
+pub enum CipherError {
+  #[error("IV length must be 12 bytes")]
+  InvalidIvLength,
+  #[error("Invalid key length")]
+  InvalidKeyLength,
+  #[error("Invalid initialization vector")]
+  InvalidInitializationVector,
+  #[error("Cannot pad the input data")]
+  CannotPadInputData,
+  #[error("Unknown cipher {0}")]
+  UnknownCipher(String),
+}
+
 impl Cipher {
   fn new(
     algorithm_name: &str,
     key: &[u8],
     iv: &[u8],
-  ) -> Result<Self, AnyError> {
+  ) -> Result<Self, CipherError> {
     use Cipher::*;
     Ok(match algorithm_name {
       "aes-128-cbc" => {
@@ -139,7 +180,7 @@ impl Cipher {
       "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Encryptor::new(key.into()))),
       "aes-128-gcm" => {
         if iv.len() != 12 {
-          return Err(type_error("IV length must be 12 bytes"));
+          return Err(CipherError::InvalidIvLength);
         }

         let cipher =
@@ -149,7 +190,7 @@ impl Cipher {
       }
       "aes-256-gcm" => {
         if iv.len() != 12 {
-          return Err(type_error("IV length must be 12 bytes"));
+          return Err(CipherError::InvalidIvLength);
         }

         let cipher =
@@ -159,15 +200,15 @@ impl Cipher {
       }
       "aes256" | "aes-256-cbc" => {
         if key.len() != 32 {
-          return Err(range_error("Invalid key length"));
+          return Err(CipherError::InvalidKeyLength);
         }
         if iv.len() != 16 {
-          return Err(type_error("Invalid initialization vector"));
+          return Err(CipherError::InvalidInitializationVector);
         }

         Aes256Cbc(Box::new(cbc::Encryptor::new(key.into(), iv.into())))
       }
-      _ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
+      _ => return Err(CipherError::UnknownCipher(algorithm_name.to_string())),
     })
   }

@@ -235,14 +276,14 @@ impl Cipher {
     auto_pad: bool,
     input: &[u8],
     output: &mut [u8],
-  ) -> Result<Tag, AnyError> {
+  ) -> Result<Tag, CipherError> {
     assert!(input.len() < 16);
     use Cipher::*;
     match (self, auto_pad) {
       (Aes128Cbc(encryptor), true) => {
         let _ = (*encryptor)
           .encrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot pad the input data"))?;
+          .map_err(|_| CipherError::CannotPadInputData)?;
         Ok(None)
       }
       (Aes128Cbc(mut encryptor), false) => {
@@ -255,7 +296,7 @@ impl Cipher {
       (Aes128Ecb(encryptor), true) => {
         let _ = (*encryptor)
           .encrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot pad the input data"))?;
+          .map_err(|_| CipherError::CannotPadInputData)?;
         Ok(None)
       }
       (Aes128Ecb(mut encryptor), false) => {
@@ -268,7 +309,7 @@ impl Cipher {
       (Aes192Ecb(encryptor), true) => {
         let _ = (*encryptor)
           .encrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot pad the input data"))?;
+          .map_err(|_| CipherError::CannotPadInputData)?;
         Ok(None)
       }
       (Aes192Ecb(mut encryptor), false) => {
@@ -281,7 +322,7 @@ impl Cipher {
       (Aes256Ecb(encryptor), true) => {
         let _ = (*encryptor)
           .encrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot pad the input data"))?;
+          .map_err(|_| CipherError::CannotPadInputData)?;
         Ok(None)
       }
       (Aes256Ecb(mut encryptor), false) => {
@@ -296,7 +337,7 @@ impl Cipher {
       (Aes256Cbc(encryptor), true) => {
         let _ = (*encryptor)
           .encrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot pad the input data"))?;
+          .map_err(|_| CipherError::CannotPadInputData)?;
         Ok(None)
       }
       (Aes256Cbc(mut encryptor), false) => {
@@ -319,12 +360,32 @@ impl Cipher {
   }
 }

+#[derive(Debug, thiserror::Error)]
+pub enum DecipherError {
+  #[error("IV length must be 12 bytes")]
+  InvalidIvLength,
+  #[error("Invalid key length")]
+  InvalidKeyLength,
+  #[error("Invalid initialization vector")]
+  InvalidInitializationVector,
+  #[error("Cannot unpad the input data")]
+  CannotUnpadInputData,
+  #[error("Failed to authenticate data")]
+  DataAuthenticationFailed,
+  #[error("setAutoPadding(false) not supported for Aes128Gcm yet")]
+  SetAutoPaddingFalseAes128GcmUnsupported,
+  #[error("setAutoPadding(false) not supported for Aes256Gcm yet")]
+  SetAutoPaddingFalseAes256GcmUnsupported,
+  #[error("Unknown cipher {0}")]
+  UnknownCipher(String),
+}
+
 impl Decipher {
   fn new(
     algorithm_name: &str,
     key: &[u8],
     iv: &[u8],
-  ) -> Result<Self, AnyError> {
+  ) -> Result<Self, DecipherError> {
     use Decipher::*;
     Ok(match algorithm_name {
       "aes-128-cbc" => {
@@ -335,7 +396,7 @@ impl Decipher {
       "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Decryptor::new(key.into()))),
       "aes-128-gcm" => {
         if iv.len() != 12 {
-          return Err(type_error("IV length must be 12 bytes"));
+          return Err(DecipherError::InvalidIvLength);
         }

         let decipher =
@@ -345,7 +406,7 @@ impl Decipher {
       }
       "aes-256-gcm" => {
         if iv.len() != 12 {
-          return Err(type_error("IV length must be 12 bytes"));
+          return Err(DecipherError::InvalidIvLength);
         }

         let decipher =
@@ -355,15 +416,17 @@ impl Decipher {
       }
       "aes256" | "aes-256-cbc" => {
         if key.len() != 32 {
-          return Err(range_error("Invalid key length"));
+          return Err(DecipherError::InvalidKeyLength);
         }
         if iv.len() != 16 {
-          return Err(type_error("Invalid initialization vector"));
+          return Err(DecipherError::InvalidInitializationVector);
         }

         Aes256Cbc(Box::new(cbc::Decryptor::new(key.into(), iv.into())))
       }
-      _ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
+      _ => {
+        return Err(DecipherError::UnknownCipher(algorithm_name.to_string()))
+      }
     })
   }

@@ -432,14 +495,14 @@ impl Decipher {
     input: &[u8],
     output: &mut [u8],
     auth_tag: &[u8],
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), DecipherError> {
     use Decipher::*;
     match (self, auto_pad) {
       (Aes128Cbc(decryptor), true) => {
         assert!(input.len() == 16);
         let _ = (*decryptor)
           .decrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot unpad the input data"))?;
+          .map_err(|_| DecipherError::CannotUnpadInputData)?;
         Ok(())
       }
       (Aes128Cbc(mut decryptor), false) => {
@@ -453,7 +516,7 @@ impl Decipher {
         assert!(input.len() == 16);
         let _ = (*decryptor)
           .decrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot unpad the input data"))?;
+          .map_err(|_| DecipherError::CannotUnpadInputData)?;
         Ok(())
       }
       (Aes128Ecb(mut decryptor), false) => {
@@ -467,7 +530,7 @@ impl Decipher {
         assert!(input.len() == 16);
         let _ = (*decryptor)
           .decrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot unpad the input data"))?;
+          .map_err(|_| DecipherError::CannotUnpadInputData)?;
         Ok(())
       }
       (Aes192Ecb(mut decryptor), false) => {
@@ -481,7 +544,7 @@ impl Decipher {
         assert!(input.len() == 16);
         let _ = (*decryptor)
           .decrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot unpad the input data"))?;
+          .map_err(|_| DecipherError::CannotUnpadInputData)?;
         Ok(())
       }
       (Aes256Ecb(mut decryptor), false) => {
@@ -496,28 +559,28 @@ impl Decipher {
         if tag.as_slice() == auth_tag {
           Ok(())
         } else {
-          Err(type_error("Failed to authenticate data"))
+          Err(DecipherError::DataAuthenticationFailed)
         }
       }
-      (Aes128Gcm(_), false) => Err(type_error(
-        "setAutoPadding(false) not supported for Aes256Gcm yet",
-      )),
+      (Aes128Gcm(_), false) => {
+        Err(DecipherError::SetAutoPaddingFalseAes128GcmUnsupported)
+      }
       (Aes256Gcm(decipher), true) => {
         let tag = decipher.finish();
         if tag.as_slice() == auth_tag {
           Ok(())
         } else {
-          Err(type_error("Failed to authenticate data"))
+          Err(DecipherError::DataAuthenticationFailed)
        }
       }
-      (Aes256Gcm(_), false) => Err(type_error(
-        "setAutoPadding(false) not supported for Aes256Gcm yet",
-      )),
+      (Aes256Gcm(_), false) => {
+        Err(DecipherError::SetAutoPaddingFalseAes256GcmUnsupported)
+      }
       (Aes256Cbc(decryptor), true) => {
         assert!(input.len() == 16);
         let _ = (*decryptor)
           .decrypt_padded_b2b_mut::<Pkcs7>(input, output)
-          .map_err(|_| type_error("Cannot unpad the input data"))?;
+          .map_err(|_| DecipherError::CannotUnpadInputData)?;
         Ok(())
       }
       (Aes256Cbc(mut decryptor), false) => {
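The crypto refactor above replaces stringly-typed `type_error(...)`/`range_error(...)` values with dedicated `thiserror` enums, so each failure becomes a typed variant with its message attached. The core of that pattern, reduced to a sketch (the function and its dispatch are illustrative, not the real API):

    #[derive(Debug, thiserror::Error)]
    pub enum CipherError {
      #[error("IV length must be 12 bytes")]
      InvalidIvLength,
      #[error("Unknown cipher {0}")]
      UnknownCipher(String),
    }

    fn check_iv(algorithm: &str, iv: &[u8]) -> Result<(), CipherError> {
      match algorithm {
        // GCM modes require a 96-bit (12-byte) IV
        "aes-128-gcm" | "aes-256-gcm" if iv.len() != 12 => {
          Err(CipherError::InvalidIvLength)
        }
        "aes-128-gcm" | "aes-256-gcm" => Ok(()),
        other => Err(CipherError::UnknownCipher(other.to_string())),
      }
    }
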
@@ -1,6 +1,4 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::generic_error;
-use deno_core::error::AnyError;
 use deno_core::GarbageCollected;
 use digest::Digest;
 use digest::DynDigest;
@@ -19,7 +17,7 @@ impl Hasher {
   pub fn new(
     algorithm: &str,
     output_length: Option<usize>,
-  ) -> Result<Self, AnyError> {
+  ) -> Result<Self, HashError> {
     let hash = Hash::new(algorithm, output_length)?;

     Ok(Self {
@@ -44,7 +42,7 @@ impl Hasher {
   pub fn clone_inner(
     &self,
     output_length: Option<usize>,
-  ) -> Result<Option<Self>, AnyError> {
+  ) -> Result<Option<Self>, HashError> {
     let hash = self.hash.borrow();
     let Some(hash) = hash.as_ref() else {
       return Ok(None);
@@ -184,11 +182,19 @@ pub enum Hash {

 use Hash::*;

+#[derive(Debug, thiserror::Error)]
+pub enum HashError {
+  #[error("Output length mismatch for non-extendable algorithm")]
+  OutputLengthMismatch,
+  #[error("Digest method not supported: {0}")]
+  DigestMethodUnsupported(String),
+}
+
 impl Hash {
   pub fn new(
     algorithm_name: &str,
     output_length: Option<usize>,
-  ) -> Result<Self, AnyError> {
+  ) -> Result<Self, HashError> {
     match algorithm_name {
       "shake128" => return Ok(Shake128(Default::default(), output_length)),
       "shake256" => return Ok(Shake256(Default::default(), output_length)),
@@ -201,17 +207,13 @@ impl Hash {
         let digest: D = Digest::new();
         if let Some(length) = output_length {
           if length != digest.output_size() {
-            return Err(generic_error(
-              "Output length mismatch for non-extendable algorithm",
-            ));
+            return Err(HashError::OutputLengthMismatch);
           }
         }
         FixedSize(Box::new(digest))
       },
       _ => {
-        return Err(generic_error(format!(
-          "Digest method not supported: {algorithm_name}"
-        )))
+        return Err(HashError::DigestMethodUnsupported(algorithm_name.to_string()))
       }
     );
@@ -243,14 +245,12 @@ impl Hash {
   pub fn clone_hash(
     &self,
     output_length: Option<usize>,
-  ) -> Result<Self, AnyError> {
+  ) -> Result<Self, HashError> {
     let hash = match self {
       FixedSize(context) => {
         if let Some(length) = output_length {
           if length != context.output_size() {
-            return Err(generic_error(
-              "Output length mismatch for non-extendable algorithm",
-            ));
+            return Err(HashError::OutputLengthMismatch);
           }
         }
         FixedSize(context.box_clone())
File diff suppressed because it is too large
|
@ -1,7 +1,6 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::op2;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_core::JsBuffer;
|
||||
|
@ -34,14 +33,14 @@ use rsa::Pkcs1v15Encrypt;
|
|||
use rsa::RsaPrivateKey;
|
||||
use rsa::RsaPublicKey;
|
||||
|
||||
mod cipher;
|
||||
pub mod cipher;
|
||||
mod dh;
|
||||
mod digest;
|
||||
pub mod digest;
|
||||
pub mod keys;
|
||||
mod md5_sha1;
|
||||
mod pkcs3;
|
||||
mod primes;
|
||||
mod sign;
|
||||
pub mod sign;
|
||||
pub mod x509;
|
||||
|
||||
use self::digest::match_fixed_digest_with_eager_block_buffer;
|
||||
|
@ -58,38 +57,31 @@ pub fn op_node_check_prime(
|
|||
pub fn op_node_check_prime_bytes(
|
||||
#[anybuffer] bytes: &[u8],
|
||||
#[number] checks: usize,
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> bool {
|
||||
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
|
||||
Ok(primes::is_probably_prime(&candidate, checks))
|
||||
primes::is_probably_prime(&candidate, checks)
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
pub async fn op_node_check_prime_async(
|
||||
#[bigint] num: i64,
|
||||
#[number] checks: usize,
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> Result<bool, tokio::task::JoinError> {
|
||||
// TODO(@littledivy): use rayon for CPU-bound tasks
|
||||
Ok(
|
||||
spawn_blocking(move || {
|
||||
primes::is_probably_prime(&BigInt::from(num), checks)
|
||||
})
|
||||
.await?,
|
||||
)
|
||||
spawn_blocking(move || primes::is_probably_prime(&BigInt::from(num), checks))
|
||||
.await
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
pub fn op_node_check_prime_bytes_async(
|
||||
#[anybuffer] bytes: &[u8],
|
||||
#[number] checks: usize,
|
||||
) -> Result<impl Future<Output = Result<bool, AnyError>>, AnyError> {
|
||||
) -> impl Future<Output = Result<bool, tokio::task::JoinError>> {
|
||||
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
|
||||
// TODO(@littledivy): use rayon for CPU-bound tasks
|
||||
Ok(async move {
|
||||
Ok(
|
||||
spawn_blocking(move || primes::is_probably_prime(&candidate, checks))
|
||||
.await?,
|
||||
)
|
||||
})
|
||||
async move {
|
||||
spawn_blocking(move || primes::is_probably_prime(&candidate, checks)).await
|
||||
}
|
||||
}
|
||||
|
||||
#[op2]
|
||||
|
@ -97,7 +89,7 @@ pub fn op_node_check_prime_bytes_async(
|
|||
pub fn op_node_create_hash(
|
||||
#[string] algorithm: &str,
|
||||
output_length: Option<u32>,
|
||||
) -> Result<digest::Hasher, AnyError> {
|
||||
) -> Result<digest::Hasher, digest::HashError> {
|
||||
digest::Hasher::new(algorithm, output_length.map(|l| l as usize))
|
||||
}
|
||||
|
||||
|
@ -145,17 +137,31 @@ pub fn op_node_hash_digest_hex(
|
|||
pub fn op_node_hash_clone(
|
||||
#[cppgc] hasher: &digest::Hasher,
|
||||
output_length: Option<u32>,
|
||||
) -> Result<Option<digest::Hasher>, AnyError> {
|
||||
) -> Result<Option<digest::Hasher>, digest::HashError> {
|
||||
hasher.clone_inner(output_length.map(|l| l as usize))
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum PrivateEncryptDecryptError {
|
||||
#[error(transparent)]
|
||||
Pkcs8(#[from] pkcs8::Error),
|
||||
#[error(transparent)]
|
||||
Spki(#[from] spki::Error),
|
||||
#[error(transparent)]
|
||||
Utf8(#[from] std::str::Utf8Error),
|
||||
#[error(transparent)]
|
||||
Rsa(#[from] rsa::Error),
|
||||
#[error("Unknown padding")]
|
||||
UnknownPadding,
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub fn op_node_private_encrypt(
|
||||
#[serde] key: StringOrBuffer,
|
||||
#[serde] msg: StringOrBuffer,
|
||||
#[smi] padding: u32,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
|
||||
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
|
@ -172,7 +178,7 @@ pub fn op_node_private_encrypt(
|
|||
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
|
||||
.into(),
|
||||
),
|
||||
_ => Err(type_error("Unknown padding")),
|
||||
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -182,13 +188,13 @@ pub fn op_node_private_decrypt(
|
|||
#[serde] key: StringOrBuffer,
|
||||
#[serde] msg: StringOrBuffer,
|
||||
#[smi] padding: u32,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
|
||||
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
|
||||
|
||||
match padding {
|
||||
1 => Ok(key.decrypt(Pkcs1v15Encrypt, &msg)?.into()),
|
||||
4 => Ok(key.decrypt(Oaep::new::<sha1::Sha1>(), &msg)?.into()),
|
||||
_ => Err(type_error("Unknown padding")),
|
||||
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -198,7 +204,7 @@ pub fn op_node_public_encrypt(
|
|||
#[serde] key: StringOrBuffer,
|
||||
#[serde] msg: StringOrBuffer,
|
||||
#[smi] padding: u32,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
|
||||
let key = RsaPublicKey::from_public_key_pem((&key).try_into()?)?;
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
|
@ -209,7 +215,7 @@ pub fn op_node_public_encrypt(
|
|||
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
|
||||
.into(),
|
||||
),
|
||||
_ => Err(type_error("Unknown padding")),
|
||||
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -220,7 +226,7 @@ pub fn op_node_create_cipheriv(
|
|||
#[string] algorithm: &str,
|
||||
#[buffer] key: &[u8],
|
||||
#[buffer] iv: &[u8],
|
||||
) -> Result<u32, AnyError> {
|
||||
) -> Result<u32, cipher::CipherContextError> {
|
||||
let context = cipher::CipherContext::new(algorithm, key, iv)?;
|
||||
Ok(state.resource_table.add(context))
|
||||
}
|
||||
|
@ -262,11 +268,14 @@ pub fn op_node_cipheriv_final(
|
|||
auto_pad: bool,
|
||||
#[buffer] input: &[u8],
|
||||
#[anybuffer] output: &mut [u8],
|
||||
) -> Result<Option<Vec<u8>>, AnyError> {
|
||||
let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
|
||||
) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::CipherContext>(rid)
|
||||
.map_err(cipher::CipherContextError::Resource)?;
|
||||
let context = Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
context.r#final(auto_pad, input, output)
|
||||
.map_err(|_| cipher::CipherContextError::ContextInUse)?;
|
||||
context.r#final(auto_pad, input, output).map_err(Into::into)
|
||||
}
|
||||
|
||||
#[op2]
|
||||
|
@ -274,10 +283,13 @@ pub fn op_node_cipheriv_final(
|
|||
pub fn op_node_cipheriv_take(
|
||||
state: &mut OpState,
|
||||
#[smi] rid: u32,
|
||||
) -> Result<Option<Vec<u8>>, AnyError> {
|
||||
let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
|
||||
) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::CipherContext>(rid)
|
||||
.map_err(cipher::CipherContextError::Resource)?;
|
||||
let context = Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
.map_err(|_| cipher::CipherContextError::ContextInUse)?;
|
||||
Ok(context.take_tag())
|
||||
}
|
||||
|
||||
|
@ -288,7 +300,7 @@ pub fn op_node_create_decipheriv(
|
|||
#[string] algorithm: &str,
|
||||
#[buffer] key: &[u8],
|
||||
#[buffer] iv: &[u8],
|
||||
) -> Result<u32, AnyError> {
|
||||
) -> Result<u32, cipher::DecipherContextError> {
|
||||
let context = cipher::DecipherContext::new(algorithm, key, iv)?;
|
||||
Ok(state.resource_table.add(context))
|
||||
}
|
||||
|
@ -326,10 +338,13 @@ pub fn op_node_decipheriv_decrypt(
|
|||
pub fn op_node_decipheriv_take(
|
||||
state: &mut OpState,
|
||||
#[smi] rid: u32,
|
||||
) -> Result<(), AnyError> {
|
||||
let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
|
||||
) -> Result<(), cipher::DecipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::DecipherContext>(rid)
|
||||
.map_err(cipher::DecipherContextError::Resource)?;
|
||||
Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
.map_err(|_| cipher::DecipherContextError::ContextInUse)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -341,11 +356,16 @@ pub fn op_node_decipheriv_final(
|
|||
#[buffer] input: &[u8],
|
||||
#[anybuffer] output: &mut [u8],
|
||||
#[buffer] auth_tag: &[u8],
|
||||
) -> Result<(), AnyError> {
|
||||
let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
|
||||
) -> Result<(), cipher::DecipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::DecipherContext>(rid)
|
||||
.map_err(cipher::DecipherContextError::Resource)?;
|
||||
let context = Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
context.r#final(auto_pad, input, output, auth_tag)
|
||||
.map_err(|_| cipher::DecipherContextError::ContextInUse)?;
|
||||
context
|
||||
.r#final(auto_pad, input, output, auth_tag)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
#[op2]
|
||||
|
@ -356,7 +376,7 @@ pub fn op_node_sign(
|
|||
#[string] digest_type: &str,
|
||||
#[smi] pss_salt_length: Option<u32>,
|
||||
#[smi] dsa_signature_encoding: u32,
|
||||
) -> Result<Box<[u8]>, AnyError> {
|
||||
) -> Result<Box<[u8]>, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
|
||||
handle.sign_prehashed(
|
||||
digest_type,
|
||||
digest,
|
||||
|
@ -373,7 +393,7 @@ pub fn op_node_verify(
|
|||
#[buffer] signature: &[u8],
|
||||
#[smi] pss_salt_length: Option<u32>,
|
||||
#[smi] dsa_signature_encoding: u32,
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> Result<bool, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
|
||||
handle.verify_prehashed(
|
||||
digest_type,
|
||||
digest,
|
||||
|
@ -383,13 +403,21 @@ pub fn op_node_verify(
|
|||
)
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Pbkdf2Error {
|
||||
#[error("unsupported digest: {0}")]
|
||||
UnsupportedDigest(String),
|
||||
#[error(transparent)]
|
||||
Join(#[from] tokio::task::JoinError),
|
||||
}
|
||||
|
||||
fn pbkdf2_sync(
|
||||
password: &[u8],
|
||||
salt: &[u8],
|
||||
iterations: u32,
|
||||
algorithm_name: &str,
|
||||
derived_key: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), Pbkdf2Error> {
|
||||
match_fixed_digest_with_eager_block_buffer!(
|
||||
algorithm_name,
|
||||
fn <D>() {
|
||||
|
@ -397,10 +425,7 @@ fn pbkdf2_sync(
|
|||
Ok(())
|
||||
},
|
||||
_ => {
|
||||
Err(type_error(format!(
|
||||
"unsupported digest: {}",
|
||||
algorithm_name
|
||||
)))
|
||||
Err(Pbkdf2Error::UnsupportedDigest(algorithm_name.to_string()))
|
||||
}
|
||||
)
|
||||
}
|
||||
|
@@ -424,7 +449,7 @@ pub async fn op_node_pbkdf2_async(
  #[smi] iterations: u32,
  #[string] digest: String,
  #[number] keylen: usize,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, Pbkdf2Error> {
  spawn_blocking(move || {
    let mut derived_key = vec![0; keylen];
    pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key)

@@ -450,15 +475,27 @@ pub async fn op_node_fill_random_async(#[smi] len: i32) -> ToJsBuffer {
    .unwrap()
}

#[derive(Debug, thiserror::Error)]
pub enum HkdfError {
  #[error("expected secret key")]
  ExpectedSecretKey,
  #[error("HKDF-Expand failed")]
  HkdfExpandFailed,
  #[error("Unsupported digest: {0}")]
  UnsupportedDigest(String),
  #[error(transparent)]
  Join(#[from] tokio::task::JoinError),
}

fn hkdf_sync(
  digest_algorithm: &str,
  handle: &KeyObjectHandle,
  salt: &[u8],
  info: &[u8],
  okm: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), HkdfError> {
  let Some(ikm) = handle.as_secret_key() else {
    return Err(type_error("expected secret key"));
    return Err(HkdfError::ExpectedSecretKey);
  };

  match_fixed_digest_with_eager_block_buffer!(

@@ -466,10 +503,10 @@ fn hkdf_sync(
    fn <D>() {
      let hk = Hkdf::<D>::new(Some(salt), ikm);
      hk.expand(info, okm)
        .map_err(|_| type_error("HKDF-Expand failed"))
        .map_err(|_| HkdfError::HkdfExpandFailed)
    },
    _ => {
      Err(type_error(format!("Unsupported digest: {}", digest_algorithm)))
      Err(HkdfError::UnsupportedDigest(digest_algorithm.to_string()))
    }
  )
}

@@ -481,7 +518,7 @@ pub fn op_node_hkdf(
  #[buffer] salt: &[u8],
  #[buffer] info: &[u8],
  #[buffer] okm: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), HkdfError> {
  hkdf_sync(digest_algorithm, handle, salt, info, okm)
}

@@ -493,7 +530,7 @@ pub async fn op_node_hkdf_async(
  #[buffer] salt: JsBuffer,
  #[buffer] info: JsBuffer,
  #[number] okm_len: usize,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, HkdfError> {
  let handle = handle.clone();
  spawn_blocking(move || {
    let mut okm = vec![0u8; okm_len];

@@ -509,27 +546,24 @@ pub fn op_node_dh_compute_secret(
  #[buffer] prime: JsBuffer,
  #[buffer] private_key: JsBuffer,
  #[buffer] their_public_key: JsBuffer,
) -> Result<ToJsBuffer, AnyError> {
) -> ToJsBuffer {
  let pubkey: BigUint = BigUint::from_bytes_be(their_public_key.as_ref());
  let privkey: BigUint = BigUint::from_bytes_be(private_key.as_ref());
  let primei: BigUint = BigUint::from_bytes_be(prime.as_ref());
  let shared_secret: BigUint = pubkey.modpow(&privkey, &primei);

  Ok(shared_secret.to_bytes_be().into())
  shared_secret.to_bytes_be().into()
}

#[op2(fast)]
#[number]
pub fn op_node_random_int(
  #[number] min: i64,
  #[number] max: i64,
) -> Result<i64, AnyError> {
pub fn op_node_random_int(#[number] min: i64, #[number] max: i64) -> i64 {
  let mut rng = rand::thread_rng();
  // Uniform distribution is required to avoid Modulo Bias
  // https://en.wikipedia.org/wiki/Fisher–Yates_shuffle#Modulo_bias
  let dist = Uniform::from(min..max);

  Ok(dist.sample(&mut rng))
  dist.sample(&mut rng)
}
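The comment above points at modulo bias; a small self-contained illustration of the `Uniform` sampling the op relies on (the free function is illustrative; it assumes the rand 0.8 API used elsewhere in this file):

    use rand::distributions::{Distribution, Uniform};

    fn random_int(min: i64, max: i64) -> i64 {
      let mut rng = rand::thread_rng();
      // Uniform samples every value in [min, max) with equal probability by
      // rejecting out-of-range raw draws internally. A plain `raw % range`
      // reduction would over-weight small values whenever the generator's
      // span is not an exact multiple of `range`.
      Uniform::from(min..max).sample(&mut rng)
    }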
#[allow(clippy::too_many_arguments)]

@@ -542,7 +576,7 @@ fn scrypt(
  parallelization: u32,
  _maxmem: u32,
  output_buffer: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
  // Construct Params
  let params = scrypt::Params::new(
    cost as u8,

@@ -573,7 +607,7 @@ pub fn op_node_scrypt_sync(
  #[smi] parallelization: u32,
  #[smi] maxmem: u32,
  #[anybuffer] output_buffer: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
  scrypt(
    password,
    salt,

@@ -586,6 +620,14 @@ pub fn op_node_scrypt_sync(
  )
}

#[derive(Debug, thiserror::Error)]
pub enum ScryptAsyncError {
  #[error(transparent)]
  Join(#[from] tokio::task::JoinError),
  #[error(transparent)]
  Other(deno_core::error::AnyError),
}

#[op2(async)]
#[serde]
pub async fn op_node_scrypt_async(

@@ -596,10 +638,11 @@ pub async fn op_node_scrypt_async(
  #[smi] block_size: u32,
  #[smi] parallelization: u32,
  #[smi] maxmem: u32,
) -> Result<ToJsBuffer, AnyError> {
) -> Result<ToJsBuffer, ScryptAsyncError> {
  spawn_blocking(move || {
    let mut output_buffer = vec![0u8; keylen as usize];
    let res = scrypt(

    scrypt(
      password,
      salt,
      keylen,

@@ -608,25 +651,30 @@ pub async fn op_node_scrypt_async(
      parallelization,
      maxmem,
      &mut output_buffer,
    );

    if res.is_ok() {
      Ok(output_buffer.into())
    } else {
      // TODO(lev): rethrow the error?
      Err(generic_error("scrypt failure"))
    }
    )
    .map(|_| output_buffer.into())
    .map_err(ScryptAsyncError::Other)
  })
  .await?
}
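A minimal sketch of the `spawn_blocking` plus typed-error shape the async op above now uses (the function name and `blocking_fill` helper are hypothetical stand-ins, not from this diff):

    use deno_core::error::AnyError;

    #[derive(Debug, thiserror::Error)]
    pub enum AsyncOpError {
      #[error(transparent)]
      Join(#[from] tokio::task::JoinError),
      #[error(transparent)]
      Other(AnyError),
    }

    // Hypothetical stand-in for the real CPU-bound work.
    fn blocking_fill(out: &mut [u8]) -> Result<(), AnyError> {
      out.fill(0x2a);
      Ok(())
    }

    async fn derive(keylen: usize) -> Result<Vec<u8>, AsyncOpError> {
      tokio::task::spawn_blocking(move || {
        let mut out = vec![0u8; keylen];
        blocking_fill(&mut out).map_err(AsyncOpError::Other)?;
        Ok(out)
      })
      // `?` converts the JoinError via #[from]; the inner Result is the
      // function's tail value, matching the `.await?` in the op above.
      .await?
    }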
#[derive(Debug, thiserror::Error)]
pub enum EcdhEncodePubKey {
  #[error("Invalid public key")]
  InvalidPublicKey,
  #[error("Unsupported curve")]
  UnsupportedCurve,
  #[error(transparent)]
  Sec1(#[from] sec1::Error),
}

#[op2]
#[buffer]
pub fn op_node_ecdh_encode_pubkey(
  #[string] curve: &str,
  #[buffer] pubkey: &[u8],
  compress: bool,
) -> Result<Vec<u8>, AnyError> {
) -> Result<Vec<u8>, EcdhEncodePubKey> {
  use elliptic_curve::sec1::FromEncodedPoint;

  match curve {

@@ -639,7 +687,7 @@ pub fn op_node_ecdh_encode_pubkey(
      );
      // CtOption does not expose its variants.
      if pubkey.is_none().into() {
        return Err(type_error("Invalid public key"));
        return Err(EcdhEncodePubKey::InvalidPublicKey);
      }

      let pubkey = pubkey.unwrap();

@@ -652,7 +700,7 @@ pub fn op_node_ecdh_encode_pubkey(
      );
      // CtOption does not expose its variants.
      if pubkey.is_none().into() {
        return Err(type_error("Invalid public key"));
        return Err(EcdhEncodePubKey::InvalidPublicKey);
      }

      let pubkey = pubkey.unwrap();

@@ -665,7 +713,7 @@ pub fn op_node_ecdh_encode_pubkey(
      );
      // CtOption does not expose its variants.
      if pubkey.is_none().into() {
        return Err(type_error("Invalid public key"));
        return Err(EcdhEncodePubKey::InvalidPublicKey);
      }

      let pubkey = pubkey.unwrap();

@@ -678,14 +726,14 @@ pub fn op_node_ecdh_encode_pubkey(
      );
      // CtOption does not expose its variants.
      if pubkey.is_none().into() {
        return Err(type_error("Invalid public key"));
        return Err(EcdhEncodePubKey::InvalidPublicKey);
      }

      let pubkey = pubkey.unwrap();

      Ok(pubkey.to_encoded_point(compress).as_ref().to_vec())
    }
    &_ => Err(type_error("Unsupported curve")),
    &_ => Err(EcdhEncodePubKey::UnsupportedCurve),
  }
}
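A note on the repeated `if pubkey.is_none().into()` checks above: `CtOption` (from the subtle crate, re-exported through the elliptic-curve stack) keeps its state constant-time, so `is_none()` yields a `Choice` rather than a `bool`. A hedged sketch against subtle directly:

    use subtle::{Choice, CtOption};

    fn to_option(opt: CtOption<u32>) -> Option<u32> {
      // `is_none()` returns a constant-time `Choice`; `.into()` collapses
      // it to a plain bool only at the branch point.
      if opt.is_none().into() {
        return None;
      }
      Some(opt.unwrap())
    }

    fn main() {
      assert_eq!(to_option(CtOption::new(5u32, Choice::from(1))), Some(5));
      assert_eq!(to_option(CtOption::new(5u32, Choice::from(0))), None);
    }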
@@ -695,7 +743,7 @@ pub fn op_node_ecdh_generate_keys(
  #[buffer] pubbuf: &mut [u8],
  #[buffer] privbuf: &mut [u8],
  #[string] format: &str,
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
  let mut rng = rand::thread_rng();
  let compress = format == "compressed";
  match curve {

@@ -742,7 +790,7 @@ pub fn op_node_ecdh_compute_secret(
  #[buffer] this_priv: Option<JsBuffer>,
  #[buffer] their_pub: &mut [u8],
  #[buffer] secret: &mut [u8],
) -> Result<(), AnyError> {
) {
  match curve {
    "secp256k1" => {
      let their_public_key =

@@ -760,8 +808,6 @@ pub fn op_node_ecdh_compute_secret(
        their_public_key.as_affine(),
      );
      secret.copy_from_slice(shared_secret.raw_secret_bytes());

      Ok(())
    }
    "prime256v1" | "secp256r1" => {
      let their_public_key =

@@ -776,8 +822,6 @@ pub fn op_node_ecdh_compute_secret(
        their_public_key.as_affine(),
      );
      secret.copy_from_slice(shared_secret.raw_secret_bytes());

      Ok(())
    }
    "secp384r1" => {
      let their_public_key =

@@ -792,8 +836,6 @@ pub fn op_node_ecdh_compute_secret(
        their_public_key.as_affine(),
      );
      secret.copy_from_slice(shared_secret.raw_secret_bytes());

      Ok(())
    }
    "secp224r1" => {
      let their_public_key =

@@ -808,8 +850,6 @@ pub fn op_node_ecdh_compute_secret(
        their_public_key.as_affine(),
      );
      secret.copy_from_slice(shared_secret.raw_secret_bytes());

      Ok(())
    }
    &_ => todo!(),
  }

@@ -820,7 +860,7 @@ pub fn op_node_ecdh_compute_public_key(
  #[string] curve: &str,
  #[buffer] privkey: &[u8],
  #[buffer] pubkey: &mut [u8],
) -> Result<(), AnyError> {
) {
  match curve {
    "secp256k1" => {
      let this_private_key =

@@ -828,8 +868,6 @@ pub fn op_node_ecdh_compute_public_key(
        .expect("bad private key");
      let public_key = this_private_key.public_key();
      pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());

      Ok(())
    }
    "prime256v1" | "secp256r1" => {
      let this_private_key =

@@ -837,7 +875,6 @@ pub fn op_node_ecdh_compute_public_key(
        .expect("bad private key");
      let public_key = this_private_key.public_key();
      pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
      Ok(())
    }
    "secp384r1" => {
      let this_private_key =

@@ -845,7 +882,6 @@ pub fn op_node_ecdh_compute_public_key(
        .expect("bad private key");
      let public_key = this_private_key.public_key();
      pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
      Ok(())
    }
    "secp224r1" => {
      let this_private_key =

@@ -853,7 +889,6 @@ pub fn op_node_ecdh_compute_public_key(
        .expect("bad private key");
      let public_key = this_private_key.public_key();
      pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
      Ok(())
    }
    &_ => todo!(),
  }

@@ -874,8 +909,20 @@ pub fn op_node_gen_prime(#[number] size: usize) -> ToJsBuffer {
#[serde]
pub async fn op_node_gen_prime_async(
  #[number] size: usize,
) -> Result<ToJsBuffer, AnyError> {
  Ok(spawn_blocking(move || gen_prime(size)).await?)
) -> Result<ToJsBuffer, tokio::task::JoinError> {
  spawn_blocking(move || gen_prime(size)).await
}

#[derive(Debug, thiserror::Error)]
pub enum DiffieHellmanError {
  #[error("Expected private key")]
  ExpectedPrivateKey,
  #[error("Expected public key")]
  ExpectedPublicKey,
  #[error("DH parameters mismatch")]
  DhParametersMismatch,
  #[error("Unsupported key type for diffie hellman, or key type mismatch")]
  UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
}

#[op2]
@@ -883,117 +930,134 @@ pub async fn op_node_gen_prime_async(
pub fn op_node_diffie_hellman(
  #[cppgc] private: &KeyObjectHandle,
  #[cppgc] public: &KeyObjectHandle,
) -> Result<Box<[u8]>, AnyError> {
) -> Result<Box<[u8]>, DiffieHellmanError> {
  let private = private
    .as_private_key()
    .ok_or_else(|| type_error("Expected private key"))?;
    .ok_or(DiffieHellmanError::ExpectedPrivateKey)?;
  let public = public
    .as_public_key()
    .ok_or_else(|| type_error("Expected public key"))?;
    .ok_or(DiffieHellmanError::ExpectedPublicKey)?;

  let res = match (private, &*public) {
    (
      AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
      AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
    ) => p224::ecdh::diffie_hellman(
      private.to_nonzero_scalar(),
      public.as_affine(),
    )
    .raw_secret_bytes()
    .to_vec()
    .into_boxed_slice(),
    (
      AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
      AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
    ) => p256::ecdh::diffie_hellman(
      private.to_nonzero_scalar(),
      public.as_affine(),
    )
    .raw_secret_bytes()
    .to_vec()
    .into_boxed_slice(),
    (
      AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
      AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
    ) => p384::ecdh::diffie_hellman(
      private.to_nonzero_scalar(),
      public.as_affine(),
    )
    .raw_secret_bytes()
    .to_vec()
    .into_boxed_slice(),
    (
      AsymmetricPrivateKey::X25519(private),
      AsymmetricPublicKey::X25519(public),
    ) => private
      .diffie_hellman(public)
      .to_bytes()
      .into_iter()
      .collect(),
    (AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
      if private.params.prime != public.params.prime
        || private.params.base != public.params.base
      {
        return Err(type_error("DH parameters mismatch"));
  let res =
    match (private, &*public) {
      (
        AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
        AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
      ) => p224::ecdh::diffie_hellman(
        private.to_nonzero_scalar(),
        public.as_affine(),
      )
      .raw_secret_bytes()
      .to_vec()
      .into_boxed_slice(),
      (
        AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
        AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
      ) => p256::ecdh::diffie_hellman(
        private.to_nonzero_scalar(),
        public.as_affine(),
      )
      .raw_secret_bytes()
      .to_vec()
      .into_boxed_slice(),
      (
        AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
        AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
      ) => p384::ecdh::diffie_hellman(
        private.to_nonzero_scalar(),
        public.as_affine(),
      )
      .raw_secret_bytes()
      .to_vec()
      .into_boxed_slice(),
      (
        AsymmetricPrivateKey::X25519(private),
        AsymmetricPublicKey::X25519(public),
      ) => private
        .diffie_hellman(public)
        .to_bytes()
        .into_iter()
        .collect(),
      (AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
        if private.params.prime != public.params.prime
          || private.params.base != public.params.base
        {
          return Err(DiffieHellmanError::DhParametersMismatch);
        }

        // OSIP - Octet-String-to-Integer primitive
        let public_key = public.key.clone().into_vec();
        let pubkey = BigUint::from_bytes_be(&public_key);

        // Exponentiation (z = y^x mod p)
        let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
        let private_key = private.key.clone().into_vec();
        let private_key = BigUint::from_bytes_be(&private_key);
        let shared_secret = pubkey.modpow(&private_key, &prime);

        shared_secret.to_bytes_be().into()
      }

      // OSIP - Octet-String-to-Integer primitive
      let public_key = public.key.clone().into_vec();
      let pubkey = BigUint::from_bytes_be(&public_key);

      // Exponentiation (z = y^x mod p)
      let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
      let private_key = private.key.clone().into_vec();
      let private_key = BigUint::from_bytes_be(&private_key);
      let shared_secret = pubkey.modpow(&private_key, &prime);

      shared_secret.to_bytes_be().into()
    }
    _ => {
      return Err(type_error(
        "Unsupported key type for diffie hellman, or key type mismatch",
      ))
    }
  };
      _ => return Err(
        DiffieHellmanError::UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
      ),
    };

  Ok(res)
}
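A self-contained sketch of the finite-field exchange the `Dh` arm above performs with `BigUint::modpow` (toy parameters for illustration only; real DH parameters are enormous primes):

    use num_bigint::BigUint;

    fn main() {
      let p = BigUint::from(103u32); // shared prime modulus
      let g = BigUint::from(5u32);   // shared base/generator
      let a = BigUint::from(17u32);  // our private exponent
      let b = BigUint::from(29u32);  // their private exponent

      let our_pub = g.modpow(&a, &p);   // g^a mod p
      let their_pub = g.modpow(&b, &p); // g^b mod p

      // Both sides derive the same secret: (g^b)^a == (g^a)^b (mod p).
      assert_eq!(their_pub.modpow(&a, &p), our_pub.modpow(&b, &p));
    }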
#[derive(Debug, thiserror::Error)]
pub enum SignEd25519Error {
  #[error("Expected private key")]
  ExpectedPrivateKey,
  #[error("Expected Ed25519 private key")]
  ExpectedEd25519PrivateKey,
  #[error("Invalid Ed25519 private key")]
  InvalidEd25519PrivateKey,
}

#[op2(fast)]
pub fn op_node_sign_ed25519(
  #[cppgc] key: &KeyObjectHandle,
  #[buffer] data: &[u8],
  #[buffer] signature: &mut [u8],
) -> Result<(), AnyError> {
) -> Result<(), SignEd25519Error> {
  let private = key
    .as_private_key()
    .ok_or_else(|| type_error("Expected private key"))?;
    .ok_or(SignEd25519Error::ExpectedPrivateKey)?;

  let ed25519 = match private {
    AsymmetricPrivateKey::Ed25519(private) => private,
    _ => return Err(type_error("Expected Ed25519 private key")),
    _ => return Err(SignEd25519Error::ExpectedEd25519PrivateKey),
  };

  let pair = Ed25519KeyPair::from_seed_unchecked(ed25519.as_bytes().as_slice())
    .map_err(|_| type_error("Invalid Ed25519 private key"))?;
    .map_err(|_| SignEd25519Error::InvalidEd25519PrivateKey)?;
  signature.copy_from_slice(pair.sign(data).as_ref());

  Ok(())
}

#[derive(Debug, thiserror::Error)]
pub enum VerifyEd25519Error {
  #[error("Expected public key")]
  ExpectedPublicKey,
  #[error("Expected Ed25519 public key")]
  ExpectedEd25519PublicKey,
}

#[op2(fast)]
pub fn op_node_verify_ed25519(
  #[cppgc] key: &KeyObjectHandle,
  #[buffer] data: &[u8],
  #[buffer] signature: &[u8],
) -> Result<bool, AnyError> {
) -> Result<bool, VerifyEd25519Error> {
  let public = key
    .as_public_key()
    .ok_or_else(|| type_error("Expected public key"))?;
    .ok_or(VerifyEd25519Error::ExpectedPublicKey)?;

  let ed25519 = match &*public {
    AsymmetricPublicKey::Ed25519(public) => public,
    _ => return Err(type_error("Expected Ed25519 public key")),
    _ => return Err(VerifyEd25519Error::ExpectedEd25519PublicKey),
  };

  let verified = ring::signature::UnparsedPublicKey::new(
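The verify call is cut off by the hunk boundary above; for orientation, a hedged sketch of how ring's untyped public-key verification is normally driven (independent of this file's key plumbing):

    use ring::signature::{UnparsedPublicKey, ED25519};

    fn verify_ed25519(public_key: &[u8], data: &[u8], sig: &[u8]) -> bool {
      // `verify` returns Err(Unspecified) on any failure, so the boolean
      // comes from is_ok() rather than from an error message.
      UnparsedPublicKey::new(&ED25519, public_key)
        .verify(data, sig)
        .is_ok()
    }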
@@ -1,7 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::generic_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use rand::rngs::OsRng;
use rsa::signature::hazmat::PrehashSigner as _;
use rsa::signature::hazmat::PrehashVerifier as _;

@@ -26,7 +23,7 @@ use elliptic_curve::FieldBytesSize;
fn dsa_signature<C: elliptic_curve::PrimeCurve>(
  encoding: u32,
  signature: ecdsa::Signature<C>,
) -> Result<Box<[u8]>, AnyError>
) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError>
where
  MaxSize<C>: ArrayLength<u8>,
  <FieldBytesSize<C> as Add>::Output: Add<MaxOverhead> + ArrayLength<u8>,

@@ -36,10 +33,54 @@ where
    0 => Ok(signature.to_der().to_bytes().to_vec().into_boxed_slice()),
    // IEEE P1363
    1 => Ok(signature.to_bytes().to_vec().into_boxed_slice()),
    _ => Err(type_error("invalid DSA signature encoding")),
    _ => Err(
      KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding,
    ),
  }
}

#[derive(Debug, thiserror::Error)]
pub enum KeyObjectHandlePrehashedSignAndVerifyError {
  #[error("invalid DSA signature encoding")]
  InvalidDsaSignatureEncoding,
  #[error("key is not a private key")]
  KeyIsNotPrivate,
  #[error("digest not allowed for RSA signature: {0}")]
  DigestNotAllowedForRsaSignature(String),
  #[error("failed to sign digest with RSA")]
  FailedToSignDigestWithRsa,
  #[error("digest not allowed for RSA-PSS signature: {0}")]
  DigestNotAllowedForRsaPssSignature(String),
  #[error("failed to sign digest with RSA-PSS")]
  FailedToSignDigestWithRsaPss,
  #[error("failed to sign digest with DSA")]
  FailedToSignDigestWithDsa,
  #[error("rsa-pss with different mf1 hash algorithm and hash algorithm is not supported")]
  RsaPssHashAlgorithmUnsupported,
  #[error(
    "private key does not allow {actual} to be used, expected {expected}"
  )]
  PrivateKeyDisallowsUsage { actual: String, expected: String },
  #[error("failed to sign digest")]
  FailedToSignDigest,
  #[error("x25519 key cannot be used for signing")]
  X25519KeyCannotBeUsedForSigning,
  #[error("Ed25519 key cannot be used for prehashed signing")]
  Ed25519KeyCannotBeUsedForPrehashedSigning,
  #[error("DH key cannot be used for signing")]
  DhKeyCannotBeUsedForSigning,
  #[error("key is not a public or private key")]
  KeyIsNotPublicOrPrivate,
  #[error("Invalid DSA signature")]
  InvalidDsaSignature,
  #[error("x25519 key cannot be used for verification")]
  X25519KeyCannotBeUsedForVerification,
  #[error("Ed25519 key cannot be used for prehashed verification")]
  Ed25519KeyCannotBeUsedForPrehashedVerification,
  #[error("DH key cannot be used for verification")]
  DhKeyCannotBeUsedForVerification,
}
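A quick illustration of how thiserror renders the struct-variant message above (only the thiserror crate is assumed; names are illustrative):

    #[derive(Debug, thiserror::Error)]
    enum DemoError {
      #[error("private key does not allow {actual} to be used, expected {expected}")]
      PrivateKeyDisallowsUsage { actual: String, expected: String },
    }

    fn main() {
      let err = DemoError::PrivateKeyDisallowsUsage {
        actual: "sha1".to_string(),
        expected: "sha256".to_string(),
      };
      // Prints: private key does not allow sha1 to be used, expected sha256
      println!("{err}");
    }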
impl KeyObjectHandle {
  pub fn sign_prehashed(
    &self,

@@ -47,10 +88,10 @@ impl KeyObjectHandle {
    digest: &[u8],
    pss_salt_length: Option<u32>,
    dsa_signature_encoding: u32,
  ) -> Result<Box<[u8]>, AnyError> {
  ) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError> {
    let private_key = self
      .as_private_key()
      .ok_or_else(|| type_error("key is not a private key"))?;
      .ok_or(KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate)?;

    match private_key {
      AsymmetricPrivateKey::Rsa(key) => {

@@ -63,17 +104,14 @@ impl KeyObjectHandle {
            rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
          },
          _ => {
            return Err(type_error(format!(
              "digest not allowed for RSA signature: {}",
              digest_type
            )))
            return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
          }
        )
      };

      let signature = signer
        .sign(Some(&mut OsRng), key, digest)
        .map_err(|_| generic_error("failed to sign digest with RSA"))?;
        .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa)?;
      Ok(signature.into())
    }
    AsymmetricPrivateKey::RsaPss(key) => {

@@ -81,9 +119,7 @@ impl KeyObjectHandle {
      let mut salt_length = None;
      if let Some(details) = &key.details {
        if details.hash_algorithm != details.mf1_hash_algorithm {
          return Err(type_error(
            "rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
          ));
          return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
        }
        hash_algorithm = Some(details.hash_algorithm);
        salt_length = Some(details.salt_length as usize);

@@ -96,10 +132,10 @@ impl KeyObjectHandle {
        fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
          if let Some(hash_algorithm) = hash_algorithm.take() {
            if Some(hash_algorithm) != algorithm {
              return Err(type_error(format!(
                "private key does not allow {} to be used, expected {}",
                digest_type, hash_algorithm.as_str()
              )));
              return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
                actual: digest_type.to_string(),
                expected: hash_algorithm.as_str().to_string(),
              });
            }
          }
          if let Some(salt_length) = salt_length {

@@ -109,15 +145,12 @@ impl KeyObjectHandle {
          }
        },
        _ => {
          return Err(type_error(format!(
            "digest not allowed for RSA-PSS signature: {}",
            digest_type
          )))
          return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
        }
      );
      let signature = pss
        .sign(Some(&mut OsRng), &key.key, digest)
        .map_err(|_| generic_error("failed to sign digest with RSA-PSS"))?;
        .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss)?;
      Ok(signature.into())
    }
    AsymmetricPrivateKey::Dsa(key) => {

@@ -127,15 +160,12 @@ impl KeyObjectHandle {
          key.sign_prehashed_rfc6979::<D>(digest)
        },
        _ => {
          return Err(type_error(format!(
            "digest not allowed for RSA signature: {}",
            digest_type
          )))
          return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
        }
      );

      let signature =
        res.map_err(|_| generic_error("failed to sign digest with DSA"))?;
        res.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa)?;
      Ok(signature.into())
    }
    AsymmetricPrivateKey::Ec(key) => match key {

@@ -143,7 +173,7 @@ impl KeyObjectHandle {
        let signing_key = p224::ecdsa::SigningKey::from(key);
        let signature: p224::ecdsa::Signature = signing_key
          .sign_prehash(digest)
          .map_err(|_| type_error("failed to sign digest"))?;
          .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;

        dsa_signature(dsa_signature_encoding, signature)
      }

@@ -151,7 +181,7 @@ impl KeyObjectHandle {
        let signing_key = p256::ecdsa::SigningKey::from(key);
        let signature: p256::ecdsa::Signature = signing_key
          .sign_prehash(digest)
          .map_err(|_| type_error("failed to sign digest"))?;
          .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;

        dsa_signature(dsa_signature_encoding, signature)
      }

@@ -159,19 +189,17 @@ impl KeyObjectHandle {
        let signing_key = p384::ecdsa::SigningKey::from(key);
        let signature: p384::ecdsa::Signature = signing_key
          .sign_prehash(digest)
          .map_err(|_| type_error("failed to sign digest"))?;
          .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;

        dsa_signature(dsa_signature_encoding, signature)
      }
    },
    AsymmetricPrivateKey::X25519(_) => {
      Err(type_error("x25519 key cannot be used for signing"))
      Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning)
    }
    AsymmetricPrivateKey::Ed25519(_) => Err(type_error(
      "Ed25519 key cannot be used for prehashed signing",
    )),
    AsymmetricPrivateKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning),
    AsymmetricPrivateKey::Dh(_) => {
      Err(type_error("DH key cannot be used for signing"))
      Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning)
    }
  }
}

@@ -183,10 +211,10 @@ impl KeyObjectHandle {
    signature: &[u8],
    pss_salt_length: Option<u32>,
    dsa_signature_encoding: u32,
  ) -> Result<bool, AnyError> {
    let public_key = self
      .as_public_key()
      .ok_or_else(|| type_error("key is not a public or private key"))?;
  ) -> Result<bool, KeyObjectHandlePrehashedSignAndVerifyError> {
    let public_key = self.as_public_key().ok_or(
      KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate,
    )?;

    match &*public_key {
      AsymmetricPublicKey::Rsa(key) => {

@@ -199,10 +227,7 @@ impl KeyObjectHandle {
            rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
          },
          _ => {
            return Err(type_error(format!(
              "digest not allowed for RSA signature: {}",
              digest_type
            )))
            return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
          }
        )
      };

@@ -214,9 +239,7 @@ impl KeyObjectHandle {
      let mut salt_length = None;
      if let Some(details) = &key.details {
        if details.hash_algorithm != details.mf1_hash_algorithm {
          return Err(type_error(
            "rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
          ));
          return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
        }
        hash_algorithm = Some(details.hash_algorithm);
        salt_length = Some(details.salt_length as usize);

@@ -229,10 +252,10 @@ impl KeyObjectHandle {
        fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
          if let Some(hash_algorithm) = hash_algorithm.take() {
            if Some(hash_algorithm) != algorithm {
              return Err(type_error(format!(
                "private key does not allow {} to be used, expected {}",
                digest_type, hash_algorithm.as_str()
              )));
              return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
                actual: digest_type.to_string(),
                expected: hash_algorithm.as_str().to_string(),
              });
            }
          }
          if let Some(salt_length) = salt_length {

@@ -242,17 +265,14 @@ impl KeyObjectHandle {
          }
        },
        _ => {
          return Err(type_error(format!(
            "digest not allowed for RSA-PSS signature: {}",
            digest_type
          )))
          return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
        }
      );
      Ok(pss.verify(&key.key, digest, signature).is_ok())
    }
    AsymmetricPublicKey::Dsa(key) => {
      let signature = dsa::Signature::from_der(signature)
        .map_err(|_| type_error("Invalid DSA signature"))?;
        .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature)?;
      Ok(key.verify_prehash(digest, &signature).is_ok())
    }
    AsymmetricPublicKey::Ec(key) => match key {

@@ -294,13 +314,11 @@ impl KeyObjectHandle {
      }
    },
    AsymmetricPublicKey::X25519(_) => {
      Err(type_error("x25519 key cannot be used for verification"))
      Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification)
    }
    AsymmetricPublicKey::Ed25519(_) => Err(type_error(
      "Ed25519 key cannot be used for prehashed verification",
    )),
    AsymmetricPublicKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification),
    AsymmetricPublicKey::Dh(_) => {
      Err(type_error("DH key cannot be used for verification"))
      Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification)
    }
  }
}
@@ -1,11 +1,11 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::error::AnyError;
use deno_core::op2;

use x509_parser::der_parser::asn1_rs::Any;
use x509_parser::der_parser::asn1_rs::Tag;
use x509_parser::der_parser::oid::Oid;
pub use x509_parser::error::X509Error;
use x509_parser::extensions;
use x509_parser::pem;
use x509_parser::prelude::*;

@@ -65,7 +65,7 @@ impl<'a> Deref for CertificateView<'a> {
#[cppgc]
pub fn op_node_x509_parse(
  #[buffer] buf: &[u8],
) -> Result<Certificate, AnyError> {
) -> Result<Certificate, X509Error> {
  let source = match pem::parse_x509_pem(buf) {
    Ok((_, pem)) => CertificateSources::Pem(pem),
    Err(_) => CertificateSources::Der(buf.to_vec().into_boxed_slice()),

@@ -81,7 +81,7 @@ pub fn op_node_x509_parse(
        X509Certificate::from_der(buf).map(|(_, cert)| cert)?
      }
    };
    Ok::<_, AnyError>(CertificateView { cert })
    Ok::<_, X509Error>(CertificateView { cert })
  },
)?;

@@ -89,23 +89,23 @@ pub fn op_node_x509_parse(
}

#[op2(fast)]
pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> Result<bool, AnyError> {
pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> bool {
  let cert = cert.inner.get().deref();
  Ok(cert.is_ca())
  cert.is_ca()
}

#[op2(fast)]
pub fn op_node_x509_check_email(
  #[cppgc] cert: &Certificate,
  #[string] email: &str,
) -> Result<bool, AnyError> {
) -> bool {
  let cert = cert.inner.get().deref();
  let subject = cert.subject();
  if subject
    .iter_email()
    .any(|e| e.as_str().unwrap_or("") == email)
  {
    return Ok(true);
    return true;
  }

  let subject_alt = cert

@@ -121,62 +121,60 @@ pub fn op_node_x509_check_email(
    for name in &subject_alt.general_names {
      if let extensions::GeneralName::RFC822Name(n) = name {
        if *n == email {
          return Ok(true);
          return true;
        }
      }
    }
  }

  Ok(false)
  false
}

#[op2]
#[string]
pub fn op_node_x509_fingerprint(
  #[cppgc] cert: &Certificate,
) -> Result<Option<String>, AnyError> {
  Ok(cert.fingerprint::<sha1::Sha1>())
pub fn op_node_x509_fingerprint(#[cppgc] cert: &Certificate) -> Option<String> {
  cert.fingerprint::<sha1::Sha1>()
}

#[op2]
#[string]
pub fn op_node_x509_fingerprint256(
  #[cppgc] cert: &Certificate,
) -> Result<Option<String>, AnyError> {
  Ok(cert.fingerprint::<sha2::Sha256>())
) -> Option<String> {
  cert.fingerprint::<sha2::Sha256>()
}

#[op2]
#[string]
pub fn op_node_x509_fingerprint512(
  #[cppgc] cert: &Certificate,
) -> Result<Option<String>, AnyError> {
  Ok(cert.fingerprint::<sha2::Sha512>())
) -> Option<String> {
  cert.fingerprint::<sha2::Sha512>()
}

#[op2]
#[string]
pub fn op_node_x509_get_issuer(
  #[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
) -> Result<String, X509Error> {
  let cert = cert.inner.get().deref();
  Ok(x509name_to_string(cert.issuer(), oid_registry())?)
  x509name_to_string(cert.issuer(), oid_registry())
}

#[op2]
#[string]
pub fn op_node_x509_get_subject(
  #[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
) -> Result<String, X509Error> {
  let cert = cert.inner.get().deref();
  Ok(x509name_to_string(cert.subject(), oid_registry())?)
  x509name_to_string(cert.subject(), oid_registry())
}

#[op2]
#[cppgc]
pub fn op_node_x509_public_key(
  #[cppgc] cert: &Certificate,
) -> Result<KeyObjectHandle, AnyError> {
) -> Result<KeyObjectHandle, super::keys::X509PublicKeyError> {
  let cert = cert.inner.get().deref();
  let public_key = &cert.tbs_certificate.subject_pki;

@@ -245,37 +243,29 @@ fn x509name_to_string(

#[op2]
#[string]
pub fn op_node_x509_get_valid_from(
  #[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
pub fn op_node_x509_get_valid_from(#[cppgc] cert: &Certificate) -> String {
  let cert = cert.inner.get().deref();
  Ok(cert.validity().not_before.to_string())
  cert.validity().not_before.to_string()
}

#[op2]
#[string]
pub fn op_node_x509_get_valid_to(
  #[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
pub fn op_node_x509_get_valid_to(#[cppgc] cert: &Certificate) -> String {
  let cert = cert.inner.get().deref();
  Ok(cert.validity().not_after.to_string())
  cert.validity().not_after.to_string()
}

#[op2]
#[string]
pub fn op_node_x509_get_serial_number(
  #[cppgc] cert: &Certificate,
) -> Result<String, AnyError> {
pub fn op_node_x509_get_serial_number(#[cppgc] cert: &Certificate) -> String {
  let cert = cert.inner.get().deref();
  let mut s = cert.serial.to_str_radix(16);
  s.make_ascii_uppercase();
  Ok(s)
  s
}

#[op2(fast)]
pub fn op_node_x509_key_usage(
  #[cppgc] cert: &Certificate,
) -> Result<u16, AnyError> {
pub fn op_node_x509_key_usage(#[cppgc] cert: &Certificate) -> u16 {
  let cert = cert.inner.get().deref();
  let key_usage = cert
    .extensions()

@@ -286,5 +276,5 @@ pub fn op_node_x509_key_usage(
    _ => None,
  });

  Ok(key_usage.map(|k| k.flags).unwrap_or(0))
  key_usage.map(|k| k.flags).unwrap_or(0)
}
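Several ops in this file drop a `Result` wrapper that had no error path; a toy before/after sketch of that cleanup (the struct is illustrative, not the real `Certificate`):

    struct Cert { ca: bool }

    // Before: callers must unwrap a Result even though Err never occurs.
    fn is_ca_fallible(cert: &Cert) -> Result<bool, std::convert::Infallible> {
      Ok(cert.ca)
    }

    // After: the infallible signature these ops move toward.
    fn is_ca(cert: &Cert) -> bool {
      cert.ca
    }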
@@ -1,16 +1,18 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::url::Url;
use deno_core::v8;
use deno_core::JsRuntimeInspector;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_fs::FileSystemRc;
use deno_package_json::NodeModuleKind;
use deno_package_json::PackageJsonRc;
use deno_path_util::normalize_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use node_resolver::NodeModuleKind;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolutionMode;
use node_resolver::REQUIRE_CONDITIONS;
use std::borrow::Cow;

@@ -217,17 +219,17 @@ pub fn op_require_resolve_deno_dir(
  state: &mut OpState,
  #[string] request: String,
  #[string] parent_filename: String,
) -> Option<String> {
) -> Result<Option<String>, AnyError> {
  let resolver = state.borrow::<NpmResolverRc>();
  resolver
    .resolve_package_folder_from_package(
      &request,
      &ModuleSpecifier::from_file_path(&parent_filename).unwrap_or_else(|_| {
        panic!("Url::from_file_path: [{:?}]", parent_filename)
      }),
    )
    .ok()
    .map(|p| p.to_string_lossy().into_owned())
  Ok(
    resolver
      .resolve_package_folder_from_package(
        &request,
        &url_from_file_path(&PathBuf::from(parent_filename))?,
      )
      .ok()
      .map(|p| p.to_string_lossy().into_owned()),
  )
}

#[op2(fast)]

@@ -564,19 +566,17 @@ where
  }))
}

#[op2]
#[serde]
pub fn op_require_read_closest_package_json<P>(
#[op2(fast)]
pub fn op_require_is_maybe_cjs(
  state: &mut OpState,
  #[string] filename: String,
) -> Result<Option<PackageJsonRc>, node_resolver::errors::ClosestPkgJsonError>
where
  P: NodePermissions + 'static,
{
) -> Result<bool, ClosestPkgJsonError> {
  let filename = PathBuf::from(filename);
  // permissions: allow reading the closest package.json files
  let pkg_json_resolver = state.borrow::<PackageJsonResolverRc>();
  pkg_json_resolver.get_closest_package_json_from_path(&filename)
  let Ok(url) = url_from_file_path(&filename) else {
    return Ok(false);
  };
  let loader = state.borrow::<NodeRequireLoaderRc>();
  loader.is_maybe_cjs(&url)
}

#[op2]
@@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::op2;
use libc::c_ulong;
use std::borrow::Cow;
use std::cell::RefCell;
use zlib::*;

@@ -381,6 +382,15 @@ pub fn op_zlib_close_if_pending(
  Ok(())
}

#[op2(fast)]
#[smi]
pub fn op_zlib_crc32(#[buffer] data: &[u8], #[smi] value: u32) -> u32 {
  // SAFETY: `data` is a valid buffer.
  unsafe {
    zlib::crc32(value as c_ulong, data.as_ptr(), data.len() as u32) as u32
  }
}

#[cfg(test)]
mod tests {
  use super::*;
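The `value` parameter of `op_zlib_crc32` lets callers continue a running checksum across chunks; a small illustration of that rolling property (using the crc32fast crate for the sketch, not the zlib binding in this file):

    fn main() {
      // Hashing in chunks must agree with hashing in one shot.
      let mut hasher = crc32fast::Hasher::new();
      hasher.update(b"hello ");
      hasher.update(b"world");
      assert_eq!(hasher.finalize(), crc32fast::hash(b"hello world"));
    }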
@@ -11,6 +11,7 @@ import {
  op_require_can_parse_as_esm,
  op_require_init_paths,
  op_require_is_deno_dir_package,
  op_require_is_maybe_cjs,
  op_require_is_request_relative,
  op_require_node_module_paths,
  op_require_package_imports_resolve,

@@ -19,7 +20,6 @@ import {
  op_require_path_is_absolute,
  op_require_path_resolve,
  op_require_proxy_path,
  op_require_read_closest_package_json,
  op_require_read_file,
  op_require_read_package_scope,
  op_require_real_path,

@@ -1060,36 +1060,13 @@ Module.prototype._compile = function (content, filename, format) {
  return result;
};

Module._extensions[".js"] = function (module, filename) {
  const content = op_require_read_file(filename);

  let format;
  if (StringPrototypeEndsWith(filename, ".js")) {
    const pkg = op_require_read_closest_package_json(filename);
    if (pkg?.type === "module") {
      format = "module";
    } else if (pkg?.type === "commonjs") {
      format = "commonjs";
    }
  }

  module._compile(content, filename, format);
};

Module._extensions[".ts"] =
Module._extensions[".js"] =
  Module._extensions[".ts"] =
  Module._extensions[".jsx"] =
  Module._extensions[".tsx"] =
    function (module, filename) {
      const content = op_require_read_file(filename);

      let format;
      const pkg = op_require_read_closest_package_json(filename);
      if (pkg?.type === "module") {
        format = "module";
      } else if (pkg?.type === "commonjs") {
        format = "commonjs";
      }

      const format = op_require_is_maybe_cjs(filename) ? undefined : "module";
      module._compile(content, filename, format);
    };

@@ -1233,6 +1210,24 @@ function isBuiltin(moduleName) {
    !StringPrototypeStartsWith(moduleName, "internal/");
}

function getBuiltinModule(id) {
  if (!isBuiltin(id)) {
    return undefined;
  }

  if (StringPrototypeStartsWith(id, "node:")) {
    // Slice 'node:' prefix
    id = StringPrototypeSlice(id, 5);
  }

  const mod = loadNativeModule(id, id);
  if (mod) {
    return mod.exports;
  }

  return undefined;
}

Module.isBuiltin = isBuiltin;

Module.createRequire = createRequire;

@@ -1327,7 +1322,7 @@ export function register(_specifier, _parentUrl, _options) {
  return undefined;
}

export { builtinModules, createRequire, isBuiltin, Module };
export { builtinModules, createRequire, getBuiltinModule, isBuiltin, Module };
export const _cache = Module._cache;
export const _extensions = Module._extensions;
export const _findPath = Module._findPath;
@@ -290,8 +290,8 @@ export function convertFileInfoToStats(origin: Deno.FileInfo): Stats {
    isFIFO: () => false,
    isCharacterDevice: () => false,
    isSocket: () => false,
    ctime: origin.mtime,
    ctimeMs: origin.mtime?.getTime() || null,
    ctime: origin.ctime,
    ctimeMs: origin.ctime?.getTime() || null,
  });

  return stats;

@@ -336,9 +336,9 @@ export function convertFileInfoToBigIntStats(
    isFIFO: () => false,
    isCharacterDevice: () => false,
    isSocket: () => false,
    ctime: origin.mtime,
    ctimeMs: origin.mtime ? BigInt(origin.mtime.getTime()) : null,
    ctimeNs: origin.mtime ? BigInt(origin.mtime.getTime()) * 1000000n : null,
    ctime: origin.ctime,
    ctimeMs: origin.ctime ? BigInt(origin.ctime.getTime()) : null,
    ctimeNs: origin.ctime ? BigInt(origin.ctime.getTime()) * 1000000n : null,
  });
  return stats;
}
@@ -17,6 +17,7 @@ const {
import { TextDecoder, TextEncoder } from "ext:deno_web/08_text_encoding.js";
import { errorMap } from "ext:deno_node/internal_binding/uv.ts";
import { codes } from "ext:deno_node/internal/error_codes.ts";
import { ERR_NOT_IMPLEMENTED } from "ext:deno_node/internal/errors.ts";

export type BinaryEncodings = "binary";

@@ -34,8 +35,7 @@ export type TextEncodings =
export type Encodings = BinaryEncodings | TextEncodings;

export function notImplemented(msg: string): never {
  const message = msg ? `Not implemented: ${msg}` : "Not implemented";
  throw new Error(message);
  throw new ERR_NOT_IMPLEMENTED(msg);
}

export function warnNotImplemented(msg?: string) {
@@ -18,7 +18,7 @@
 */

import { primordials } from "ext:core/mod.js";
const { JSONStringify, SymbolFor } = primordials;
const { JSONStringify, SafeArrayIterator, SymbolFor } = primordials;
import { format, inspect } from "ext:deno_node/internal/util/inspect.mjs";
import { codes } from "ext:deno_node/internal/error_codes.ts";
import {

@@ -1874,6 +1874,11 @@ export class ERR_SOCKET_CLOSED extends NodeError {
    super("ERR_SOCKET_CLOSED", `Socket is closed`);
  }
}
export class ERR_SOCKET_CONNECTION_TIMEOUT extends NodeError {
  constructor() {
    super("ERR_SOCKET_CONNECTION_TIMEOUT", `Socket connection timeout`);
  }
}
export class ERR_SOCKET_DGRAM_IS_CONNECTED extends NodeError {
  constructor() {
    super("ERR_SOCKET_DGRAM_IS_CONNECTED", `Already connected`);

@@ -2385,6 +2390,15 @@ export class ERR_INVALID_RETURN_VALUE extends NodeTypeError {
  }
}

export class ERR_NOT_IMPLEMENTED extends NodeError {
  constructor(message?: string) {
    super(
      "ERR_NOT_IMPLEMENTED",
      message ? `Not implemented: ${message}` : "Not implemented",
    );
  }
}

export class ERR_INVALID_URL extends NodeTypeError {
  input: string;
  constructor(input: string) {

@@ -2633,11 +2647,30 @@ export function aggregateTwoErrors(
  }
  return innerError || outerError;
}

export class NodeAggregateError extends AggregateError {
  code: string;
  constructor(errors, message) {
    super(new SafeArrayIterator(errors), message);
    this.code = errors[0]?.code;
  }

  get [kIsNodeError]() {
    return true;
  }

  // deno-lint-ignore adjacent-overload-signatures
  get ["constructor"]() {
    return AggregateError;
  }
}

codes.ERR_IPC_CHANNEL_CLOSED = ERR_IPC_CHANNEL_CLOSED;
codes.ERR_INVALID_ARG_TYPE = ERR_INVALID_ARG_TYPE;
codes.ERR_INVALID_ARG_VALUE = ERR_INVALID_ARG_VALUE;
codes.ERR_OUT_OF_RANGE = ERR_OUT_OF_RANGE;
codes.ERR_SOCKET_BAD_PORT = ERR_SOCKET_BAD_PORT;
codes.ERR_SOCKET_CONNECTION_TIMEOUT = ERR_SOCKET_CONNECTION_TIMEOUT;
codes.ERR_BUFFER_OUT_OF_BOUNDS = ERR_BUFFER_OUT_OF_BOUNDS;
codes.ERR_UNKNOWN_ENCODING = ERR_UNKNOWN_ENCODING;
codes.ERR_PARSE_ARGS_INVALID_OPTION_VALUE = ERR_PARSE_ARGS_INVALID_OPTION_VALUE;

@@ -2838,6 +2871,7 @@ export default {
  ERR_INVALID_SYNC_FORK_INPUT,
  ERR_INVALID_THIS,
  ERR_INVALID_TUPLE,
  ERR_NOT_IMPLEMENTED,
  ERR_INVALID_URI,
  ERR_INVALID_URL,
  ERR_INVALID_URL_SCHEME,
@@ -95,4 +95,5 @@ export function makeSyncWrite(fd: number) {
  };
}

export const kReinitializeHandle = Symbol("kReinitializeHandle");
export const normalizedArgsSymbol = Symbol("normalizedArgs");

@@ -530,10 +530,12 @@ export function mapSysErrnoToUvErrno(sysErrno: number): number {

export const UV_EAI_MEMORY = codeMap.get("EAI_MEMORY")!;
export const UV_EBADF = codeMap.get("EBADF")!;
export const UV_ECANCELED = codeMap.get("ECANCELED")!;
export const UV_EEXIST = codeMap.get("EEXIST");
export const UV_EINVAL = codeMap.get("EINVAL")!;
export const UV_ENOENT = codeMap.get("ENOENT");
export const UV_ENOTSOCK = codeMap.get("ENOTSOCK")!;
export const UV_ETIMEDOUT = codeMap.get("ETIMEDOUT")!;
export const UV_UNKNOWN = codeMap.get("UNKNOWN")!;

export function errname(errno: number): string {
@@ -31,6 +31,7 @@ import {
  isIP,
  isIPv4,
  isIPv6,
  kReinitializeHandle,
  normalizedArgsSymbol,
} from "ext:deno_node/internal/net.ts";
import { Duplex } from "node:stream";

@@ -50,9 +51,11 @@ import {
  ERR_SERVER_ALREADY_LISTEN,
  ERR_SERVER_NOT_RUNNING,
  ERR_SOCKET_CLOSED,
  ERR_SOCKET_CONNECTION_TIMEOUT,
  errnoException,
  exceptionWithHostPort,
  genericNodeError,
  NodeAggregateError,
  uvExceptionWithHostPort,
} from "ext:deno_node/internal/errors.ts";
import type { ErrnoException } from "ext:deno_node/internal/errors.ts";

@@ -80,6 +83,7 @@ import { Buffer } from "node:buffer";
import type { LookupOneOptions } from "ext:deno_node/internal/dns/utils.ts";
import {
  validateAbortSignal,
  validateBoolean,
  validateFunction,
  validateInt32,
  validateNumber,

@@ -100,13 +104,25 @@ import { ShutdownWrap } from "ext:deno_node/internal_binding/stream_wrap.ts";
import { assert } from "ext:deno_node/_util/asserts.ts";
import { isWindows } from "ext:deno_node/_util/os.ts";
import { ADDRCONFIG, lookup as dnsLookup } from "node:dns";
import { codeMap } from "ext:deno_node/internal_binding/uv.ts";
import {
  codeMap,
  UV_ECANCELED,
  UV_ETIMEDOUT,
} from "ext:deno_node/internal_binding/uv.ts";
import { guessHandleType } from "ext:deno_node/internal_binding/util.ts";
import { debuglog } from "ext:deno_node/internal/util/debuglog.ts";
import type { DuplexOptions } from "ext:deno_node/_stream.d.ts";
import type { BufferEncoding } from "ext:deno_node/_global.d.ts";
import type { Abortable } from "ext:deno_node/_events.d.ts";
import { channel } from "node:diagnostics_channel";
import { primordials } from "ext:core/mod.js";

const {
  ArrayPrototypeIncludes,
  ArrayPrototypePush,
  FunctionPrototypeBind,
  MathMax,
} = primordials;

let debug = debuglog("net", (fn) => {
  debug = fn;

@@ -120,6 +136,9 @@ const kBytesWritten = Symbol("kBytesWritten");
const DEFAULT_IPV4_ADDR = "0.0.0.0";
const DEFAULT_IPV6_ADDR = "::";

let autoSelectFamilyDefault = true;
let autoSelectFamilyAttemptTimeoutDefault = 250;

type Handle = TCP | Pipe;

interface HandleOptions {

@@ -214,6 +233,8 @@ interface TcpSocketConnectOptions extends ConnectOptions {
  hints?: number;
  family?: number;
  lookup?: LookupFunction;
  autoSelectFamily?: boolean | undefined;
  autoSelectFamilyAttemptTimeout?: number | undefined;
}

interface IpcSocketConnectOptions extends ConnectOptions {

@@ -316,12 +337,6 @@ export function _normalizeArgs(args: unknown[]): NormalizedArgs {
  return arr;
}

function _isTCPConnectWrap(
  req: TCPConnectWrap | PipeConnectWrap,
): req is TCPConnectWrap {
  return "localAddress" in req && "localPort" in req;
}

function _afterConnect(
  status: number,
  // deno-lint-ignore no-explicit-any

@@ -372,7 +387,7 @@ function _afterConnect(
    socket.connecting = false;
    let details;

    if (_isTCPConnectWrap(req)) {
    if (req.localAddress && req.localPort) {
      details = req.localAddress + ":" + req.localPort;
    }

@@ -384,7 +399,7 @@ function _afterConnect(
      details,
    );

    if (_isTCPConnectWrap(req)) {
    if (details) {
      ex.localAddress = req.localAddress;
      ex.localPort = req.localPort;
    }

@@ -393,6 +408,107 @@ function _afterConnect(
  }
}

function _createConnectionError(req, status) {
  let details;

  if (req.localAddress && req.localPort) {
    details = req.localAddress + ":" + req.localPort;
  }

  const ex = exceptionWithHostPort(
    status,
    "connect",
    req.address,
    req.port,
    details,
  );
  if (details) {
    ex.localAddress = req.localAddress;
    ex.localPort = req.localPort;
  }

  return ex;
}

function _afterConnectMultiple(
  context,
  current,
  status,
  handle,
  req,
  readable,
  writable,
) {
  debug(
    "connect/multiple: connection attempt to %s:%s completed with status %s",
    req.address,
    req.port,
    status,
  );

  // Make sure another connection is not spawned
  clearTimeout(context[kTimeout]);

  // One of the connections has already completed and been dispatched; this
  // one finished after the timeout, so ignore it
  if (status === 0 && current !== context.current - 1) {
    debug(
      "connect/multiple: ignoring successful but timedout connection to %s:%s",
      req.address,
      req.port,
    );
    handle.close();
    return;
  }

  const self = context.socket;

  // Some error occurred, add to the list of exceptions
  if (status !== 0) {
    const ex = _createConnectionError(req, status);
    ArrayPrototypePush(context.errors, ex);

    self.emit(
      "connectionAttemptFailed",
      req.address,
      req.port,
      req.addressType,
      ex,
    );

    // Try the next address, unless we were aborted
    if (context.socket.connecting) {
      _internalConnectMultiple(context, status === UV_ECANCELED);
    }

    return;
  }

  _afterConnect(status, self._handle, req, readable, writable);
}

function _internalConnectMultipleTimeout(context, req, handle) {
  debug(
    "connect/multiple: connection to %s:%s timed out",
    req.address,
    req.port,
  );
  context.socket.emit(
    "connectionAttemptTimeout",
    req.address,
    req.port,
    req.addressType,
  );

  req.oncomplete = undefined;
  ArrayPrototypePush(context.errors, _createConnectionError(req, UV_ETIMEDOUT));
  handle.close();

  // Try the next address, unless we were aborted
  if (context.socket.connecting) {
    _internalConnectMultiple(context);
  }
}
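For orientation, a compact Rust sketch of the per-attempt timeout policy the functions above implement, simplified to sequential attempts (Node's real behavior overlaps attempts, emits events, and aggregates failures into a NodeAggregateError):

    use std::net::{SocketAddr, TcpStream};
    use std::time::Duration;

    // Try each resolved address in order, giving every attempt its own
    // timeout and collecting failures, like context.errors above.
    fn connect_any(
      addrs: &[SocketAddr],
      per_attempt: Duration,
    ) -> Result<TcpStream, Vec<std::io::Error>> {
      let mut errors = Vec::new();
      for addr in addrs {
        match TcpStream::connect_timeout(addr, per_attempt) {
          Ok(stream) => return Ok(stream),
          Err(e) => errors.push(e),
        }
      }
      Err(errors)
    }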
function _checkBindError(err: number, port: number, handle: TCP) {
|
||||
// EADDRINUSE may not be reported until we call `listen()` or `connect()`.
|
||||
// To complicate matters, a failed `bind()` followed by `listen()` or `connect()`
|
||||
|
@ -495,6 +611,131 @@ function _internalConnect(
|
|||
}
|
||||
}
|
||||
|
||||
function _internalConnectMultiple(context, canceled?: boolean) {
|
||||
clearTimeout(context[kTimeout]);
|
||||
const self = context.socket;
|
||||
|
||||
// We were requested to abort. Stop all operations
|
||||
if (self._aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
// All connections have been tried without success, destroy with error
|
||||
if (canceled || context.current === context.addresses.length) {
|
||||
if (context.errors.length === 0) {
|
||||
self.destroy(new ERR_SOCKET_CONNECTION_TIMEOUT());
|
||||
return;
|
||||
}
|
||||
|
||||
self.destroy(new NodeAggregateError(context.errors));
|
||||
return;
|
||||
}
|
||||
|
||||
assert(self.connecting);
|
||||
|
||||
const current = context.current++;
|
||||
|
||||
if (current > 0) {
|
||||
self[kReinitializeHandle](new TCP(TCPConstants.SOCKET));
|
||||
}
|
||||
|
||||
const { localPort, port, flags } = context;
|
||||
const { address, family: addressType } = context.addresses[current];
|
||||
let localAddress;
|
||||
let err;
|
||||
|
||||
if (localPort) {
|
||||
if (addressType === 4) {
|
||||
localAddress = DEFAULT_IPV4_ADDR;
|
||||
err = self._handle.bind(localAddress, localPort);
|
||||
} else { // addressType === 6
|
||||
localAddress = DEFAULT_IPV6_ADDR;
|
||||
err = self._handle.bind6(localAddress, localPort, flags);
|
||||
}
|
||||
|
||||
debug(
|
||||
"connect/multiple: binding to localAddress: %s and localPort: %d (addressType: %d)",
|
||||
localAddress,
|
||||
localPort,
|
||||
addressType,
|
||||
);
|
||||
|
||||
err = _checkBindError(err, localPort, self._handle);
|
||||
if (err) {
|
||||
ArrayPrototypePush(
|
||||
context.errors,
|
||||
exceptionWithHostPort(err, "bind", localAddress, localPort),
|
||||
);
|
||||
_internalConnectMultiple(context);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
debug(
|
||||
"connect/multiple: attempting to connect to %s:%d (addressType: %d)",
|
||||
address,
|
||||
port,
|
||||
addressType,
|
||||
);
|
||||
self.emit("connectionAttempt", address, port, addressType);
|
||||
|
||||
const req = new TCPConnectWrap();
|
||||
req.oncomplete = FunctionPrototypeBind(
|
||||
_afterConnectMultiple,
|
||||
undefined,
|
||||
context,
|
||||
current,
|
||||
);
|
||||
req.address = address;
|
||||
req.port = port;
|
||||
req.localAddress = localAddress;
|
||||
req.localPort = localPort;
|
||||
req.addressType = addressType;
|
||||
|
||||
ArrayPrototypePush(
|
||||
self.autoSelectFamilyAttemptedAddresses,
|
||||
`${address}:${port}`,
|
||||
);
|
||||
|
||||
if (addressType === 4) {
|
||||
err = self._handle.connect(req, address, port);
|
||||
} else {
|
||||
err = self._handle.connect6(req, address, port);
|
||||
}
|
||||
|
||||
if (err) {
|
||||
const sockname = self._getsockname();
|
||||
let details;
|
||||
|
||||
if (sockname) {
|
||||
details = sockname.address + ":" + sockname.port;
|
||||
}
|
||||
|
||||
const ex = exceptionWithHostPort(err, "connect", address, port, details);
|
||||
ArrayPrototypePush(context.errors, ex);
|
||||
|
||||
self.emit("connectionAttemptFailed", address, port, addressType, ex);
|
||||
_internalConnectMultiple(context);
|
||||
return;
|
||||
}
|
||||
|
||||
if (current < context.addresses.length - 1) {
|
||||
debug(
|
||||
"connect/multiple: setting the attempt timeout to %d ms",
|
||||
context.timeout,
|
||||
);
|
||||
|
||||
// If the attempt has not returned an error, start the connection timer
|
||||
context[kTimeout] = setTimeout(
|
||||
_internalConnectMultipleTimeout,
|
||||
context.timeout,
|
||||
context,
|
||||
req,
|
||||
self._handle,
|
||||
);
|
||||
}
|
||||
}
|
||||

// Provide a better error message when we call end() as a result
// of the other side sending a FIN. The standard "write after end"
// is overly vague, and makes it seem like the user's code is to blame.

@@ -597,7 +838,7 @@ function _lookupAndConnect(
) {
  const { localAddress, localPort } = options;
  const host = options.host || "localhost";
  let { port } = options;
  let { port, autoSelectFamilyAttemptTimeout, autoSelectFamily } = options;

  if (localAddress && !isIP(localAddress)) {
    throw new ERR_INVALID_IP_ADDRESS(localAddress);
@@ -621,6 +862,22 @@ function _lookupAndConnect(

  port |= 0;

  if (autoSelectFamily != null) {
    validateBoolean(autoSelectFamily, "options.autoSelectFamily");
  } else {
    autoSelectFamily = autoSelectFamilyDefault;
  }

  if (autoSelectFamilyAttemptTimeout !== undefined) {
    validateInt32(autoSelectFamilyAttemptTimeout);

    if (autoSelectFamilyAttemptTimeout < 10) {
      autoSelectFamilyAttemptTimeout = 10;
    }
  } else {
    autoSelectFamilyAttemptTimeout = autoSelectFamilyAttemptTimeoutDefault;
  }

  // If host is an IP, skip performing a lookup
  const addressType = isIP(host);
  if (addressType) {
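As a usage sketch of the per-connection options validated above (behavior per Node's net documentation; the exact set of emitted events and populated properties may vary by version, and `autoSelectFamilyAttemptedAddresses` stays undefined when only one address is found):

import net from "node:net";

// Opt a single connection into family autoselection and give every
// per-family attempt 300 ms (values below 10 are clamped to 10 above).
const socket = net.connect({
  host: "example.com",
  port: 443,
  autoSelectFamily: true,
  autoSelectFamilyAttemptTimeout: 300,
});

socket.on("connect", () => {
  console.log("attempted:", socket.autoSelectFamilyAttemptedAddresses);
  socket.end();
});
socket.on("error", (err) => console.error("failed:", err));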
@@ -649,6 +906,7 @@ function _lookupAndConnect(
  const dnsOpts = {
    family: options.family,
    hints: options.hints || 0,
    all: false,
  };

  if (
@@ -665,6 +923,31 @@ function _lookupAndConnect(
  self._host = host;
  const lookup = options.lookup || dnsLookup;

  if (
    dnsOpts.family !== 4 && dnsOpts.family !== 6 && !localAddress &&
    autoSelectFamily
  ) {
    debug("connect: autodetecting");

    dnsOpts.all = true;
    defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () {
      _lookupAndConnectMultiple(
        self,
        asyncIdSymbol,
        lookup,
        host,
        options,
        dnsOpts,
        port,
        localAddress,
        localPort,
        autoSelectFamilyAttemptTimeout,
      );
    });

    return;
  }

  defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () {
    lookup(
      host,
@@ -719,6 +1002,143 @@ function _lookupAndConnect(
  });
}

function _lookupAndConnectMultiple(
  self: Socket,
  asyncIdSymbol: number,
  // deno-lint-ignore no-explicit-any
  lookup: any,
  host: string,
  options: TcpSocketConnectOptions,
  dnsopts,
  port: number,
  localAddress: string,
  localPort: number,
  timeout: number | undefined,
) {
  defaultTriggerAsyncIdScope(self[asyncIdSymbol], function emitLookup() {
    lookup(host, dnsopts, function emitLookup(err, addresses) {
      // It's possible we were destroyed while looking this up.
      // XXX it would be great if we could cancel the promise returned by
      // the lookup.
      if (!self.connecting) {
        return;
      } else if (err) {
        self.emit("lookup", err, undefined, undefined, host);

        // net.createConnection() creates a net.Socket object and immediately
        // calls net.Socket.connect() on it (that's us). There are no event
        // listeners registered yet so defer the error event to the next tick.
        nextTick(_connectErrorNT, self, err);
        return;
      }

      // Filter addresses by only keeping the ones that are either IPv4 or IPv6.
      // The first valid address determines which group has preference on the
      // alternate family sorting which happens later.
      const validAddresses = [[], []];
      const validIps = [[], []];
      let destinations;
      for (let i = 0, l = addresses.length; i < l; i++) {
        const address = addresses[i];
        const { address: ip, family: addressType } = address;
        self.emit("lookup", err, ip, addressType, host);
        // It's possible we were destroyed while looking this up.
        if (!self.connecting) {
          return;
        }
        if (isIP(ip) && (addressType === 4 || addressType === 6)) {
          destinations ||= addressType === 6 ? { 6: 0, 4: 1 } : { 4: 0, 6: 1 };

          const destination = destinations[addressType];

          // Only try an address once
          if (!ArrayPrototypeIncludes(validIps[destination], ip)) {
            ArrayPrototypePush(validAddresses[destination], address);
            ArrayPrototypePush(validIps[destination], ip);
          }
        }
      }

      // When no AAAA or A records are available, fail on the first one
      if (!validAddresses[0].length && !validAddresses[1].length) {
        const { address: firstIp, family: firstAddressType } = addresses[0];

        if (!isIP(firstIp)) {
          err = new ERR_INVALID_IP_ADDRESS(firstIp);
          nextTick(_connectErrorNT, self, err);
        } else if (firstAddressType !== 4 && firstAddressType !== 6) {
          err = new ERR_INVALID_ADDRESS_FAMILY(
            firstAddressType,
            options.host,
            options.port,
          );
          nextTick(_connectErrorNT, self, err);
        }

        return;
      }

      // Sort addresses alternating families
      const toAttempt = [];
      for (
        let i = 0,
          l = MathMax(validAddresses[0].length, validAddresses[1].length);
        i < l;
        i++
      ) {
        if (i in validAddresses[0]) {
          ArrayPrototypePush(toAttempt, validAddresses[0][i]);
        }
        if (i in validAddresses[1]) {
          ArrayPrototypePush(toAttempt, validAddresses[1][i]);
        }
      }

      if (toAttempt.length === 1) {
        debug(
          "connect/multiple: only one address found, switching back to single connection",
        );
        const { address: ip, family: addressType } = toAttempt[0];

        self._unrefTimer();
        defaultTriggerAsyncIdScope(
          self[asyncIdSymbol],
          _internalConnect,
          self,
          ip,
          port,
          addressType,
          localAddress,
          localPort,
        );

        return;
      }

      self.autoSelectFamilyAttemptedAddresses = [];
      debug("connect/multiple: will try the following addresses", toAttempt);

      const context = {
        socket: self,
        addresses: toAttempt,
        current: 0,
        port,
        localPort,
        timeout,
        [kTimeout]: null,
        errors: [],
      };

      self._unrefTimer();
      defaultTriggerAsyncIdScope(
        self[asyncIdSymbol],
        _internalConnectMultiple,
        context,
      );
    });
  });
}
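The alternating-family sort above is the heart of the Happy Eyeballs ordering: results are grouped by the family of the first valid address, then interleaved so successive attempts flip between IPv6 and IPv4. A standalone sketch of the same idea (names are illustrative, not from this diff):

type Addr = { address: string; family: 4 | 6 };

function interleaveByFamily(addresses: Addr[]): Addr[] {
  const groups: [Addr[], Addr[]] = [[], []];
  let primary: 4 | 6 | undefined;
  for (const addr of addresses) {
    primary ??= addr.family; // the first result decides which family leads
    groups[addr.family === primary ? 0 : 1].push(addr);
  }
  // Zip the two groups together, tolerating unequal lengths.
  const out: Addr[] = [];
  for (let i = 0; i < Math.max(groups[0].length, groups[1].length); i++) {
    if (i < groups[0].length) out.push(groups[0][i]);
    if (i < groups[1].length) out.push(groups[1][i]);
  }
  return out;
}

// interleaveByFamily([{ address: "::1", family: 6 },
//                     { address: "127.0.0.1", family: 4 }])
// tries ::1 first, then 127.0.0.1.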

function _afterShutdown(this: ShutdownWrap<TCP>) {
  // deno-lint-ignore no-explicit-any
  const self: any = this.handle[ownerSymbol];
@@ -777,6 +1197,7 @@ export class Socket extends Duplex {
  _host: string | null = null;
  // deno-lint-ignore no-explicit-any
  _parent: any = null;
  autoSelectFamilyAttemptedAddresses: AddressInfo[] | undefined = undefined;

  constructor(options: SocketOptions | number) {
    if (typeof options === "number") {
@@ -1546,6 +1967,16 @@ export class Socket extends Duplex {
  set _handle(v: Handle | null) {
    this[kHandle] = v;
  }

  // deno-lint-ignore no-explicit-any
  [kReinitializeHandle](handle: any) {
    this._handle?.close();

    this._handle = handle;
    this._handle[ownerSymbol] = this;

    _initSocketHandle(this);
  }
}

export const Stream = Socket;
@@ -1593,6 +2024,33 @@ export function connect(...args: unknown[]) {

export const createConnection = connect;

/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamily */
export function getDefaultAutoSelectFamily() {
  return autoSelectFamilyDefault;
}

/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamily */
export function setDefaultAutoSelectFamily(value: boolean) {
  validateBoolean(value, "value");
  autoSelectFamilyDefault = value;
}

/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamilyattempttimeout */
export function getDefaultAutoSelectFamilyAttemptTimeout() {
  return autoSelectFamilyAttemptTimeoutDefault;
}

/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamilyattempttimeout */
export function setDefaultAutoSelectFamilyAttemptTimeout(value: number) {
  validateInt32(value, "value", 1);

  if (value < 10) {
    value = 10;
  }

  autoSelectFamilyAttemptTimeoutDefault = value;
}

export interface ListenOptions extends Abortable {
  fd?: number;
  port?: number | undefined;
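A quick usage sketch of the four module-level helpers added above (semantics per the linked docs: the setters change the process-wide defaults that new sockets pick up):

import net from "node:net";

// Make every new socket autodetect its address family by default.
net.setDefaultAutoSelectFamily(true);
// Raise the default per-attempt timeout; values below 10 are clamped to 10.
net.setDefaultAutoSelectFamilyAttemptTimeout(250);

console.log(net.getDefaultAutoSelectFamily()); // true
console.log(net.getDefaultAutoSelectFamilyAttemptTimeout()); // 250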
@@ -2478,15 +2936,19 @@ export { BlockList, isIP, isIPv4, isIPv6, SocketAddress };
export default {
  _createServerHandle,
  _normalizeArgs,
  isIP,
  isIPv4,
  isIPv6,
  BlockList,
  SocketAddress,
  connect,
  createConnection,
  createServer,
  getDefaultAutoSelectFamily,
  getDefaultAutoSelectFamilyAttemptTimeout,
  isIP,
  isIPv4,
  isIPv6,
  Server,
  setDefaultAutoSelectFamily,
  setDefaultAutoSelectFamilyAttemptTimeout,
  Socket,
  SocketAddress,
  Stream,
};

@@ -15,7 +15,7 @@ import {

import { warnNotImplemented } from "ext:deno_node/_utils.ts";
import { EventEmitter } from "node:events";
import Module from "node:module";
import Module, { getBuiltinModule } from "node:module";
import { report } from "ext:deno_node/internal/process/report.ts";
import { validateString } from "ext:deno_node/internal/validators.mjs";
import {
@@ -38,7 +38,15 @@ import {
  versions,
} from "ext:deno_node/_process/process.ts";
import { _exiting } from "ext:deno_node/_process/exiting.ts";
export { _nextTick as nextTick, chdir, cwd, env, version, versions };
export {
  _nextTick as nextTick,
  chdir,
  cwd,
  env,
  getBuiltinModule,
  version,
  versions,
};
import {
  createWritableStdioStream,
  initStdin,
@@ -728,6 +736,8 @@ Process.prototype.getegid = getegid;
/** This method is removed on Windows */
Process.prototype.geteuid = geteuid;

Process.prototype.getBuiltinModule = getBuiltinModule;

// TODO(kt3k): Implement this when we add the -e option to node compat mode
Process.prototype._eval = undefined;

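A usage sketch of the newly wired-up method (per Node's process.getBuiltinModule() semantics: it returns the built-in module directly, or undefined for an unknown id, rather than throwing):

const fs = process.getBuiltinModule("node:fs");
console.log(typeof fs?.readFileSync); // "function"
console.log(process.getBuiltinModule("not-a-builtin")); // undefined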
@@ -909,7 +919,7 @@ Object.defineProperty(argv, "1", {
    if (Deno.mainModule?.startsWith("file:")) {
      return pathFromURL(new URL(Deno.mainModule));
    } else {
      return join(Deno.cwd(), "$deno$node.js");
      return join(Deno.cwd(), "$deno$node.mjs");
    }
  },
});

@@ -40,6 +40,58 @@ import {
  createBrotliCompress,
  createBrotliDecompress,
} from "ext:deno_node/_brotli.js";
import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts";
import { validateUint32 } from "ext:deno_node/internal/validators.mjs";
import { op_zlib_crc32 } from "ext:core/ops";
import { core, primordials } from "ext:core/mod.js";
import { TextEncoder } from "ext:deno_web/08_text_encoding.js";
const {
  Uint8Array,
  TypedArrayPrototypeGetBuffer,
  TypedArrayPrototypeGetByteLength,
  TypedArrayPrototypeGetByteOffset,
  DataViewPrototypeGetBuffer,
  DataViewPrototypeGetByteLength,
  DataViewPrototypeGetByteOffset,
} = primordials;
const { isTypedArray, isDataView } = core;

const enc = new TextEncoder();
const toU8 = (input) => {
  if (typeof input === "string") {
    return enc.encode(input);
  }

  if (isTypedArray(input)) {
    return new Uint8Array(
      TypedArrayPrototypeGetBuffer(input),
      TypedArrayPrototypeGetByteOffset(input),
      TypedArrayPrototypeGetByteLength(input),
    );
  } else if (isDataView(input)) {
    return new Uint8Array(
      DataViewPrototypeGetBuffer(input),
      DataViewPrototypeGetByteOffset(input),
      DataViewPrototypeGetByteLength(input),
    );
  }

  return input;
};

export function crc32(data, value = 0) {
  if (typeof data !== "string" && !isArrayBufferView(data)) {
    throw new ERR_INVALID_ARG_TYPE("data", [
      "Buffer",
      "TypedArray",
      "DataView",
      "string",
    ], data);
  }
  validateUint32(value, "value");

  return op_zlib_crc32(toU8(data), value);
}
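A usage sketch of the new checksum export (per Node's zlib.crc32(data[, value]) semantics, where the optional second argument threads a running checksum through successive calls):

import zlib from "node:zlib";

const partial = zlib.crc32("hello ");
const rolling = zlib.crc32("world", partial); // continue from the previous CRC
console.log(rolling === zlib.crc32("hello world")); // true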

export class Options {
  constructor() {
@@ -87,6 +139,7 @@ export default {
  BrotliOptions,
  codes,
  constants,
  crc32,
  createBrotliCompress,
  createBrotliDecompress,
  createDeflate,

@@ -2,7 +2,7 @@

[package]
name = "deno_tls"
version = "0.163.0"
version = "0.164.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_url"
version = "0.176.0"
version = "0.177.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

Some files were not shown because too many files have changed in this diff.