
Merge remote-tracking branch 'upstream/main' into check-workspace-member-compiler-options

Nayeem Rahman 2025-01-06 13:26:51 +00:00
commit f61286bf4b
80 changed files with 1603 additions and 996 deletions

Cargo.lock (generated)

@ -1355,7 +1355,7 @@ dependencies = [
"typed-arena",
"uuid",
"walkdir",
"which 4.4.2",
"which",
"winapi",
"winres",
"zeromq",
@ -1749,9 +1749,9 @@ dependencies = [
[[package]]
name = "deno_graph"
version = "0.86.6"
version = "0.86.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83af194ca492ea7b624d21055f933676d3f3d27586de93be31c8f1babcc73510"
checksum = "ace3acf321fac446636ae605b01723f2120b40ab3d32c6836aeb7d603a8e08f9"
dependencies = [
"anyhow",
"async-trait",
@ -1904,9 +1904,9 @@ dependencies = [
[[package]]
name = "deno_media_type"
version = "0.2.2"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaa135b8a9febc9a51c16258e294e268a1276750780d69e46edb31cced2826e4"
checksum = "a417f8bd3f1074185c4c8ccb6ea6261ae173781596cc358e68ad07aaac11009d"
dependencies = [
"data-url",
"serde",
@ -1995,7 +1995,6 @@ dependencies = [
"faster-hex",
"h2 0.4.4",
"hkdf",
"home",
"http 1.1.0",
"http-body-util",
"hyper 1.4.1",
@ -2178,7 +2177,7 @@ dependencies = [
"percent-encoding",
"serde",
"thiserror 2.0.3",
"which 4.4.2",
"which",
"winapi",
]
@ -2267,7 +2266,7 @@ dependencies = [
"tokio-metrics",
"twox-hash",
"uuid",
"which 4.4.2",
"which",
"winapi",
"windows-sys 0.59.0",
]
@ -4518,12 +4517,12 @@ dependencies = [
[[package]]
name = "junction"
version = "0.2.0"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be39922b087cecaba4e2d5592dedfc8bda5d4a5a1231f143337cca207950b61d"
checksum = "72bbdfd737a243da3dfc1f99ee8d6e166480f17ab4ac84d7c34aacd73fc7bd16"
dependencies = [
"scopeguard",
"winapi",
"windows-sys 0.52.0",
]
[[package]]
@ -7679,9 +7678,9 @@ dependencies = [
[[package]]
name = "sys_traits"
version = "0.1.4"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6683465f4e1d8fd75069cbc36c646258c05b7d8d6676bcb5d71968b99b7d5ae2"
checksum = "b1c12873696bde6de3aea3cd27de8e52897177c5b368a6a30987fd4926e30f85"
dependencies = [
"filetime",
"getrandom",
@ -8431,7 +8430,7 @@ dependencies = [
"miniz_oxide",
"once_cell",
"paste",
"which 6.0.1",
"which",
]
[[package]]
@ -8741,18 +8740,6 @@ dependencies = [
"web-sys",
]
[[package]]
name = "which"
version = "4.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7"
dependencies = [
"either",
"home",
"once_cell",
"rustix",
]
[[package]]
name = "which"
version = "6.0.1"


@ -54,7 +54,7 @@ deno_bench_util = { version = "0.178.0", path = "./bench_util" }
# TODO(nayeemrmn): Use proper version when https://github.com/denoland/deno_config/pull/143 lands!
deno_config = { git = "https://github.com/denoland/deno_config.git", rev = "4cbb63704442a7834dc6bed2e7e310a0d46ade09", features = ["workspace", "sync"] }
deno_lockfile = "=0.24.0"
deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
deno_media_type = { version = "0.2.3", features = ["module_specifier"] }
deno_npm = "=0.27.0"
deno_path_util = "=0.3.0"
deno_permissions = { version = "0.43.0", path = "./runtime/permissions" }
@ -194,7 +194,7 @@ slab = "0.4"
smallvec = "1.8"
socket2 = { version = "0.5.3", features = ["all"] }
spki = "0.7.2"
sys_traits = "=0.1.4"
sys_traits = "=0.1.6"
tar = "=0.4.40"
tempfile = "3.4.0"
termcolor = "1.1.3"
@ -213,7 +213,7 @@ url = { version = "2.5", features = ["serde", "expose_internals"] }
uuid = { version = "1.3.0", features = ["v4"] }
webpki-root-certs = "0.26.5"
webpki-roots = "0.26"
which = "4.2.5"
which = "6"
yoke = { version = "0.7.4", features = ["derive"] }
zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4"
@ -241,7 +241,7 @@ syn = { version = "2", features = ["full", "extra-traits"] }
nix = "=0.27.1"
# windows deps
junction = "=0.2.0"
junction = "=1.2.0"
winapi = "=0.3.9"
windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel", "Win32_System_Threading", "Win32_UI", "Win32_UI_Shell"] }
winres = "=0.1.12"


@ -74,7 +74,7 @@ deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] }
deno_error.workspace = true
deno_graph = { version = "=0.86.6" }
deno_graph = { version = "=0.86.7" }
deno_lint = { version = "=0.68.2", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm.workspace = true


@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::env;
use std::ffi::OsString;
@ -34,7 +33,6 @@ use deno_core::url::Url;
use deno_graph::GraphKind;
use deno_path_util::normalize_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_permissions::SysDescriptor;
use deno_telemetry::OtelConfig;
use deno_telemetry::OtelConsoleConfig;
@ -44,8 +42,6 @@ use serde::Deserialize;
use serde::Serialize;
use super::flags_net;
use super::jsr_url;
use crate::args::resolve_no_prompt;
use crate::util::fs::canonicalize_path;
#[derive(Clone, Debug, Default, Eq, PartialEq)]
@ -692,97 +688,6 @@ impl PermissionFlags {
|| self.deny_write.is_some()
|| self.allow_import.is_some()
}
pub fn to_options(&self, cli_arg_urls: &[Cow<Url>]) -> PermissionsOptions {
fn handle_allow<T: Default>(
allow_all: bool,
value: Option<T>,
) -> Option<T> {
if allow_all {
assert!(value.is_none());
Some(T::default())
} else {
value
}
}
fn handle_imports(
cli_arg_urls: &[Cow<Url>],
imports: Option<Vec<String>>,
) -> Option<Vec<String>> {
if imports.is_some() {
return imports;
}
let builtin_allowed_import_hosts = [
"jsr.io:443",
"deno.land:443",
"esm.sh:443",
"cdn.jsdelivr.net:443",
"raw.githubusercontent.com:443",
"gist.githubusercontent.com:443",
];
let mut imports =
Vec::with_capacity(builtin_allowed_import_hosts.len() + 1);
imports
.extend(builtin_allowed_import_hosts.iter().map(|s| s.to_string()));
// also add the JSR_URL env var
if let Some(jsr_host) = allow_import_host_from_url(jsr_url()) {
imports.push(jsr_host);
}
// include the cli arg urls
for url in cli_arg_urls {
if let Some(host) = allow_import_host_from_url(url) {
imports.push(host);
}
}
Some(imports)
}
PermissionsOptions {
allow_all: self.allow_all,
allow_env: handle_allow(self.allow_all, self.allow_env.clone()),
deny_env: self.deny_env.clone(),
allow_net: handle_allow(self.allow_all, self.allow_net.clone()),
deny_net: self.deny_net.clone(),
allow_ffi: handle_allow(self.allow_all, self.allow_ffi.clone()),
deny_ffi: self.deny_ffi.clone(),
allow_read: handle_allow(self.allow_all, self.allow_read.clone()),
deny_read: self.deny_read.clone(),
allow_run: handle_allow(self.allow_all, self.allow_run.clone()),
deny_run: self.deny_run.clone(),
allow_sys: handle_allow(self.allow_all, self.allow_sys.clone()),
deny_sys: self.deny_sys.clone(),
allow_write: handle_allow(self.allow_all, self.allow_write.clone()),
deny_write: self.deny_write.clone(),
allow_import: handle_imports(
cli_arg_urls,
handle_allow(self.allow_all, self.allow_import.clone()),
),
prompt: !resolve_no_prompt(self),
}
}
}
/// Gets the --allow-import host from the provided url
fn allow_import_host_from_url(url: &Url) -> Option<String> {
let host = url.host()?;
if let Some(port) = url.port() {
Some(format!("{}:{}", host, port))
} else {
use deno_core::url::Host::*;
match host {
Domain(domain) if domain == "jsr.io" && url.scheme() == "https" => None,
_ => match url.scheme() {
"https" => Some(format!("{}:443", host)),
"http" => Some(format!("{}:80", host)),
_ => None,
},
}
}
}
fn join_paths(allowlist: &[String], d: &str) -> String {
@ -11557,8 +11462,6 @@ mod tests {
..Default::default()
}
);
// just make sure this doesn't panic
let _ = flags.permissions.to_options(&[]);
}
#[test]
@ -11634,29 +11537,6 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
)
}
#[test]
fn test_allow_import_host_from_url() {
fn parse(text: &str) -> Option<String> {
allow_import_host_from_url(&Url::parse(text).unwrap())
}
assert_eq!(parse("https://jsr.io"), None);
assert_eq!(
parse("http://127.0.0.1:4250"),
Some("127.0.0.1:4250".to_string())
);
assert_eq!(parse("http://jsr.io"), Some("jsr.io:80".to_string()));
assert_eq!(
parse("https://example.com"),
Some("example.com:443".to_string())
);
assert_eq!(
parse("http://example.com"),
Some("example.com:80".to_string())
);
assert_eq!(parse("file:///example.com"), None);
}
#[test]
fn allow_all_conflicts_allow_perms() {
let flags = [


@ -1564,20 +1564,100 @@ impl CliOptions {
self.flags.no_npm
}
pub fn permission_flags(&self) -> &PermissionFlags {
&self.flags.permissions
}
pub fn permissions_options(&self) -> PermissionsOptions {
fn files_to_urls(files: &[String]) -> Vec<Cow<'_, Url>> {
files
.iter()
.filter_map(|f| Url::parse(f).ok().map(Cow::Owned))
.collect()
// bury this in here to ensure people use cli_options.permissions_options()
fn flags_to_options(flags: &PermissionFlags) -> PermissionsOptions {
fn handle_allow<T: Default>(
allow_all: bool,
value: Option<T>,
) -> Option<T> {
if allow_all {
assert!(value.is_none());
Some(T::default())
} else {
value
}
}
PermissionsOptions {
allow_all: flags.allow_all,
allow_env: handle_allow(flags.allow_all, flags.allow_env.clone()),
deny_env: flags.deny_env.clone(),
allow_net: handle_allow(flags.allow_all, flags.allow_net.clone()),
deny_net: flags.deny_net.clone(),
allow_ffi: handle_allow(flags.allow_all, flags.allow_ffi.clone()),
deny_ffi: flags.deny_ffi.clone(),
allow_read: handle_allow(flags.allow_all, flags.allow_read.clone()),
deny_read: flags.deny_read.clone(),
allow_run: handle_allow(flags.allow_all, flags.allow_run.clone()),
deny_run: flags.deny_run.clone(),
allow_sys: handle_allow(flags.allow_all, flags.allow_sys.clone()),
deny_sys: flags.deny_sys.clone(),
allow_write: handle_allow(flags.allow_all, flags.allow_write.clone()),
deny_write: flags.deny_write.clone(),
allow_import: handle_allow(flags.allow_all, flags.allow_import.clone()),
prompt: !resolve_no_prompt(flags),
}
}
// get a list of urls to imply for --allow-import
let cli_arg_urls = self
let mut permissions_options = flags_to_options(&self.flags.permissions);
self.augment_import_permissions(&mut permissions_options);
permissions_options
}
fn augment_import_permissions(&self, options: &mut PermissionsOptions) {
// do not add if the user specified --allow-all or --allow-import
if !options.allow_all && options.allow_import.is_none() {
options.allow_import = Some(self.implicit_allow_import());
}
}
fn implicit_allow_import(&self) -> Vec<String> {
// allow importing from anywhere when using cached only
if self.cache_setting() == CacheSetting::Only {
vec![] // allow all imports
} else {
// implicitly allow some trusted hosts and the CLI arg urls
let cli_arg_urls = self.get_cli_arg_urls();
let builtin_allowed_import_hosts = [
"jsr.io:443",
"deno.land:443",
"esm.sh:443",
"cdn.jsdelivr.net:443",
"raw.githubusercontent.com:443",
"gist.githubusercontent.com:443",
];
let mut imports = Vec::with_capacity(
builtin_allowed_import_hosts.len() + cli_arg_urls.len() + 1,
);
imports
.extend(builtin_allowed_import_hosts.iter().map(|s| s.to_string()));
// also add the JSR_URL env var
if let Some(jsr_host) = allow_import_host_from_url(jsr_url()) {
if jsr_host != "jsr.io:443" {
imports.push(jsr_host);
}
}
// include the cli arg urls
for url in cli_arg_urls {
if let Some(host) = allow_import_host_from_url(&url) {
imports.push(host);
}
}
imports
}
}
fn get_cli_arg_urls(&self) -> Vec<Cow<'_, Url>> {
fn files_to_urls(files: &[String]) -> Vec<Cow<'_, Url>> {
files.iter().filter_map(|f| file_to_url(f)).collect()
}
fn file_to_url(file: &str) -> Option<Cow<'_, Url>> {
Url::parse(file).ok().map(Cow::Owned)
}
self
.resolve_main_module()
.ok()
.map(|url| vec![Cow::Borrowed(url)])
@ -1589,18 +1669,18 @@ impl CliOptions {
Some(files_to_urls(&check_flags.files))
}
DenoSubcommand::Install(InstallFlags::Global(flags)) => {
Url::parse(&flags.module_url)
.ok()
.map(|url| vec![Cow::Owned(url)])
file_to_url(&flags.module_url).map(|url| vec![url])
}
DenoSubcommand::Doc(DocFlags {
source_files: DocSourceFileFlag::Paths(paths),
..
}) => Some(files_to_urls(paths)),
DenoSubcommand::Info(InfoFlags {
file: Some(file), ..
}) => file_to_url(file).map(|url| vec![url]),
_ => None,
})
.unwrap_or_default();
self.flags.permissions.to_options(&cli_arg_urls)
.unwrap_or_default()
}
pub fn reload_flag(&self) -> bool {
@ -2036,6 +2116,20 @@ fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) {
}
}
/// Gets the --allow-import host from the provided url
fn allow_import_host_from_url(url: &Url) -> Option<String> {
let host = url.host()?;
if let Some(port) = url.port() {
Some(format!("{}:{}", host, port))
} else {
match url.scheme() {
"https" => Some(format!("{}:443", host)),
"http" => Some(format!("{}:80", host)),
_ => None,
}
}
}
#[derive(Debug, Clone, Copy)]
pub enum NpmCachingStrategy {
Eager,
@ -2043,7 +2137,7 @@ pub enum NpmCachingStrategy {
Manual,
}
pub(crate) fn otel_runtime_config() -> OtelRuntimeConfig {
pub fn otel_runtime_config() -> OtelRuntimeConfig {
OtelRuntimeConfig {
runtime_name: Cow::Borrowed("deno"),
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
@ -2140,4 +2234,26 @@ mod test {
let reg_api_url = jsr_api_url();
assert!(reg_api_url.as_str().ends_with('/'));
}
#[test]
fn test_allow_import_host_from_url() {
fn parse(text: &str) -> Option<String> {
allow_import_host_from_url(&Url::parse(text).unwrap())
}
assert_eq!(
parse("http://127.0.0.1:4250"),
Some("127.0.0.1:4250".to_string())
);
assert_eq!(parse("http://jsr.io"), Some("jsr.io:80".to_string()));
assert_eq!(
parse("https://example.com"),
Some("example.com:443".to_string())
);
assert_eq!(
parse("http://example.com"),
Some("example.com:80".to_string())
);
assert_eq!(parse("file:///example.com"), None);
}
}
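
A note for context, not part of the diff: the snippet below is a self-contained sketch of how the implicit --allow-import list above is assembled for a typical invocation. The host-mapping helper mirrors allow_import_host_from_url(); the standalone function name, the main() wrapper, and the example URLs are illustrative only, and the sketch assumes the url crate as a dependency.

use url::Url;

// Mirrors allow_import_host_from_url(): map a URL to a "host:port" entry,
// defaulting the port from the scheme when none is given.
fn allow_import_host(url: &Url) -> Option<String> {
    let host = url.host()?;
    if let Some(port) = url.port() {
        return Some(format!("{}:{}", host, port));
    }
    match url.scheme() {
        "https" => Some(format!("{}:443", host)),
        "http" => Some(format!("{}:80", host)),
        _ => None, // e.g. file: URLs contribute nothing
    }
}

fn main() {
    // The built-in trusted hosts listed in implicit_allow_import() above.
    // (The real code also appends the JSR_URL host when it differs from
    // jsr.io:443, and under cached-only mode returns an empty list to allow
    // imports from anywhere.)
    let mut allowed: Vec<String> = [
        "jsr.io:443",
        "deno.land:443",
        "esm.sh:443",
        "cdn.jsdelivr.net:443",
        "raw.githubusercontent.com:443",
        "gist.githubusercontent.com:443",
    ]
    .iter()
    .map(|s| s.to_string())
    .collect();

    // CLI argument URLs (illustrative) contribute their hosts as well.
    for arg in ["https://example.com/main.ts", "file:///tmp/local.ts"] {
        if let Ok(url) = Url::parse(arg) {
            if let Some(host) = allow_import_host(&url) {
                allowed.push(host);
            }
        }
    }

    // Prints the built-in hosts plus "example.com:443"; the file: URL adds nothing.
    println!("{:?}", allowed);
}
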


@ -26,51 +26,55 @@ fn get_diagnostic_class(_: &ParseDiagnostic) -> &'static str {
"SyntaxError"
}
fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
use deno_graph::JsrLoadError;
use deno_graph::NpmLoadError;
pub fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str {
match err {
ModuleGraphError::ResolutionError(err)
| ModuleGraphError::TypesResolutionError(err) => {
get_resolution_error_class(err)
}
ModuleGraphError::ModuleError(err) => match err {
ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
ModuleError::WasmParseErr(..) => "SyntaxError",
ModuleError::UnsupportedMediaType { .. }
| ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
"NotFound"
}
ModuleError::LoadingErr(_, _, err) => match err {
ModuleLoadError::Loader(err) => get_error_class_name(err.as_ref()),
ModuleLoadError::HttpsChecksumIntegrity(_)
| ModuleLoadError::TooManyRedirects => "Error",
ModuleLoadError::NodeUnknownBuiltinModule(_) => "NotFound",
ModuleLoadError::Decode(_) => "TypeError",
ModuleLoadError::Npm(err) => match err {
NpmLoadError::NotSupportedEnvironment
| NpmLoadError::PackageReqResolution(_)
| NpmLoadError::RegistryInfo(_) => "Error",
NpmLoadError::PackageReqReferenceParse(_) => "TypeError",
},
ModuleLoadError::Jsr(err) => match err {
JsrLoadError::UnsupportedManifestChecksum
| JsrLoadError::PackageFormat(_) => "TypeError",
JsrLoadError::ContentLoadExternalSpecifier
| JsrLoadError::ContentLoad(_)
| JsrLoadError::ContentChecksumIntegrity(_)
| JsrLoadError::PackageManifestLoad(_, _)
| JsrLoadError::PackageVersionManifestChecksumIntegrity(..)
| JsrLoadError::PackageVersionManifestLoad(_, _)
| JsrLoadError::RedirectInPackage(_) => "Error",
JsrLoadError::PackageNotFound(_)
| JsrLoadError::PackageReqNotFound(_)
| JsrLoadError::PackageVersionNotFound(_)
| JsrLoadError::UnknownExport { .. } => "NotFound",
},
ModuleGraphError::ModuleError(err) => get_module_error_class(err),
}
}
pub fn get_module_error_class(err: &ModuleError) -> &'static str {
use deno_graph::JsrLoadError;
use deno_graph::NpmLoadError;
match err {
ModuleError::InvalidTypeAssertion { .. } => "SyntaxError",
ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic),
ModuleError::WasmParseErr(..) => "SyntaxError",
ModuleError::UnsupportedMediaType { .. }
| ModuleError::UnsupportedImportAttributeType { .. } => "TypeError",
ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => {
"NotFound"
}
ModuleError::LoadingErr(_, _, err) => match err {
ModuleLoadError::Loader(err) => get_error_class_name(err.as_ref()),
ModuleLoadError::HttpsChecksumIntegrity(_)
| ModuleLoadError::TooManyRedirects => "Error",
ModuleLoadError::NodeUnknownBuiltinModule(_) => "NotFound",
ModuleLoadError::Decode(_) => "TypeError",
ModuleLoadError::Npm(err) => match err {
NpmLoadError::NotSupportedEnvironment
| NpmLoadError::PackageReqResolution(_)
| NpmLoadError::RegistryInfo(_) => "Error",
NpmLoadError::PackageReqReferenceParse(_) => "TypeError",
},
ModuleLoadError::Jsr(err) => match err {
JsrLoadError::UnsupportedManifestChecksum
| JsrLoadError::PackageFormat(_) => "TypeError",
JsrLoadError::ContentLoadExternalSpecifier
| JsrLoadError::ContentLoad(_)
| JsrLoadError::ContentChecksumIntegrity(_)
| JsrLoadError::PackageManifestLoad(_, _)
| JsrLoadError::PackageVersionManifestChecksumIntegrity(..)
| JsrLoadError::PackageVersionManifestLoad(_, _)
| JsrLoadError::RedirectInPackage(_) => "Error",
JsrLoadError::PackageNotFound(_)
| JsrLoadError::PackageReqNotFound(_)
| JsrLoadError::PackageVersionNotFound(_)
| JsrLoadError::UnknownExport { .. } => "NotFound",
},
},
}
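
For context (not part of the diff): the strings returned by these helpers are JS error class names, which callers pair with a separately formatted message through deno_core's custom_error. Below is a minimal illustrative wrapper, assuming it lives in this same module so get_module_graph_error_class is in scope; the real call sites are graph_walk_errors() and the module loader hunks further down.

use deno_core::error::{custom_error, AnyError};
use deno_graph::ModuleGraphError;

// Illustrative only: classification and message formatting stay independent,
// so the same mapping serves whole-graph walks and single-module loads alike.
fn graph_error_to_any_error(err: &ModuleGraphError, message: String) -> AnyError {
    custom_error(get_module_graph_error_class(err), message)
}
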


@ -81,6 +81,8 @@ use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::NpmRegistryReadPermissionChecker;
use crate::npm::NpmRegistryReadPermissionCheckerMode;
use crate::resolver::CjsTracker;
use crate::resolver::CliDenoResolver;
use crate::resolver::CliNpmReqResolver;
@ -764,6 +766,7 @@ impl CliFactory {
self.module_graph_builder().await?.clone(),
self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),
self.sys(),
)))
})
.await
@ -954,6 +957,19 @@ impl CliFactory {
let cjs_tracker = self.cjs_tracker()?.clone();
let pkg_json_resolver = self.pkg_json_resolver().clone();
let npm_req_resolver = self.npm_req_resolver().await?;
let npm_registry_permission_checker = {
let mode = if cli_options.use_byonm() {
NpmRegistryReadPermissionCheckerMode::Byonm
} else if let Some(node_modules_dir) = cli_options.node_modules_dir_path()
{
NpmRegistryReadPermissionCheckerMode::Local(node_modules_dir.clone())
} else {
NpmRegistryReadPermissionCheckerMode::Global(
self.npm_cache_dir()?.root_dir().to_path_buf(),
)
};
Arc::new(NpmRegistryReadPermissionChecker::new(self.sys(), mode))
};
Ok(CliMainWorkerFactory::new(
self.blob_store().clone(),
@ -981,13 +997,14 @@ impl CliFactory {
self.module_load_preparer().await?.clone(),
node_code_translator.clone(),
node_resolver.clone(),
npm_req_resolver.clone(),
cli_npm_resolver.clone(),
NpmModuleLoader::new(
self.cjs_tracker()?.clone(),
fs.clone(),
node_code_translator.clone(),
),
npm_registry_permission_checker,
npm_req_resolver.clone(),
cli_npm_resolver.clone(),
self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
self.sys(),


@ -48,6 +48,7 @@ use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::errors::get_error_class_name;
use crate::errors::get_module_graph_error_class;
use crate::file_fetcher::CliFileFetcher;
use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker;
@ -161,29 +162,15 @@ pub fn graph_walk_errors<'a>(
roots.contains(error.specifier())
}
};
let mut message = match &error {
ModuleGraphError::ResolutionError(resolution_error) => {
enhanced_resolution_error_message(resolution_error)
}
ModuleGraphError::TypesResolutionError(resolution_error) => {
format!(
"Failed resolving types. {}",
enhanced_resolution_error_message(resolution_error)
)
}
ModuleGraphError::ModuleError(error) => {
enhanced_integrity_error_message(error)
.or_else(|| enhanced_sloppy_imports_error_message(sys, error))
.unwrap_or_else(|| format_deno_graph_error(error))
}
};
if let Some(range) = error.maybe_range() {
if !is_root && !range.specifier.as_str().contains("/$deno$eval") {
message.push_str("\n at ");
message.push_str(&format_range_with_colors(range));
}
}
let message = enhance_graph_error(
sys,
&error,
if is_root {
EnhanceGraphErrorMode::HideRange
} else {
EnhanceGraphErrorMode::ShowRange
},
);
if graph.graph_kind() == GraphKind::TypesOnly
&& matches!(
@ -195,10 +182,61 @@ pub fn graph_walk_errors<'a>(
return None;
}
Some(custom_error(get_error_class_name(&error.into()), message))
if graph.graph_kind().include_types()
&& (message.contains(RUN_WITH_SLOPPY_IMPORTS_MSG)
|| matches!(
error,
ModuleGraphError::ModuleError(ModuleError::Missing(..))
))
{
// ignore and let typescript surface this as a diagnostic instead
log::debug!("Ignoring: {}", message);
return None;
}
Some(custom_error(get_module_graph_error_class(&error), message))
})
}
#[derive(Debug, PartialEq, Eq)]
pub enum EnhanceGraphErrorMode {
ShowRange,
HideRange,
}
pub fn enhance_graph_error(
sys: &CliSys,
error: &ModuleGraphError,
mode: EnhanceGraphErrorMode,
) -> String {
let mut message = match &error {
ModuleGraphError::ResolutionError(resolution_error) => {
enhanced_resolution_error_message(resolution_error)
}
ModuleGraphError::TypesResolutionError(resolution_error) => {
format!(
"Failed resolving types. {}",
enhanced_resolution_error_message(resolution_error)
)
}
ModuleGraphError::ModuleError(error) => {
enhanced_integrity_error_message(error)
.or_else(|| enhanced_sloppy_imports_error_message(sys, error))
.unwrap_or_else(|| format_deno_graph_error(error))
}
};
if let Some(range) = error.maybe_range() {
if mode == EnhanceGraphErrorMode::ShowRange
&& !range.specifier.as_str().contains("/$deno$eval")
{
message.push_str("\n at ");
message.push_str(&format_range_with_colors(range));
}
}
message
}
pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
for error in graph.module_errors() {
exit_for_integrity_error(error);
@ -833,6 +871,9 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
message
}
static RUN_WITH_SLOPPY_IMPORTS_MSG: &str =
"or run with --unstable-sloppy-imports";
fn enhanced_sloppy_imports_error_message(
sys: &CliSys,
error: &ModuleError,
@ -840,11 +881,9 @@ fn enhanced_sloppy_imports_error_message(
match error {
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => {
let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone()))
.resolve(specifier, SloppyImportsResolutionKind::Execution)?
.as_suggestion_message();
let additional_message = maybe_additional_sloppy_imports_message(sys, specifier)?;
Some(format!(
"{} {} or run with --unstable-sloppy-imports",
"{} {}",
error,
additional_message,
))
@ -853,6 +892,19 @@ fn enhanced_sloppy_imports_error_message(
}
}
pub fn maybe_additional_sloppy_imports_message(
sys: &CliSys,
specifier: &ModuleSpecifier,
) -> Option<String> {
Some(format!(
"{} {}",
CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone()))
.resolve(specifier, SloppyImportsResolutionKind::Execution)?
.as_suggestion_message(),
RUN_WITH_SLOPPY_IMPORTS_MSG
))
}
fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
match err {
ModuleError::LoadingErr(


@ -64,6 +64,9 @@ pub enum LanguageId {
Markdown,
Html,
Css,
Scss,
Sass,
Less,
Yaml,
Sql,
Svelte,
@ -86,6 +89,9 @@ impl LanguageId {
LanguageId::Markdown => Some("md"),
LanguageId::Html => Some("html"),
LanguageId::Css => Some("css"),
LanguageId::Scss => Some("scss"),
LanguageId::Sass => Some("sass"),
LanguageId::Less => Some("less"),
LanguageId::Yaml => Some("yaml"),
LanguageId::Sql => Some("sql"),
LanguageId::Svelte => Some("svelte"),
@ -107,6 +113,9 @@ impl LanguageId {
LanguageId::Markdown => Some("text/markdown"),
LanguageId::Html => Some("text/html"),
LanguageId::Css => Some("text/css"),
LanguageId::Scss => None,
LanguageId::Sass => None,
LanguageId::Less => None,
LanguageId::Yaml => Some("application/yaml"),
LanguageId::Sql => None,
LanguageId::Svelte => None,
@ -140,6 +149,9 @@ impl FromStr for LanguageId {
"markdown" => Ok(Self::Markdown),
"html" => Ok(Self::Html),
"css" => Ok(Self::Css),
"scss" => Ok(Self::Scss),
"sass" => Ok(Self::Sass),
"less" => Ok(Self::Less),
"yaml" => Ok(Self::Yaml),
"sql" => Ok(Self::Sql),
"svelte" => Ok(Self::Svelte),


@ -36,6 +36,7 @@ use deno_graph::JsModule;
use deno_graph::JsonModule;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::Resolution;
use deno_graph::WasmModule;
use deno_runtime::code_cache;
@ -58,14 +59,18 @@ use crate::cache::CodeCache;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::errors::get_module_error_class;
use crate::graph_container::MainModuleGraphContainer;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::enhance_graph_error;
use crate::graph_util::CreateGraphOptions;
use crate::graph_util::EnhanceGraphErrorMode;
use crate::graph_util::ModuleGraphBuilder;
use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::npm::CliNpmResolver;
use crate::npm::NpmRegistryReadPermissionChecker;
use crate::resolver::CjsTracker;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
@ -222,9 +227,10 @@ struct SharedCliModuleLoaderState {
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: NpmModuleLoader,
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
sys: CliSys,
@ -282,9 +288,10 @@ impl CliModuleLoaderFactory {
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: NpmModuleLoader,
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
sys: CliSys,
@ -308,9 +315,10 @@ impl CliModuleLoaderFactory {
module_load_preparer,
node_code_translator,
node_resolver,
npm_module_loader,
npm_registry_permission_checker,
npm_req_resolver,
npm_resolver,
npm_module_loader,
parsed_source_cache,
resolver,
sys,
@ -349,7 +357,10 @@ impl CliModuleLoaderFactory {
sys: self.shared.sys.clone(),
graph_container,
in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(),
npm_resolver: self.shared.npm_resolver.clone(),
npm_registry_permission_checker: self
.shared
.npm_registry_permission_checker
.clone(),
});
CreateModuleLoaderResult {
module_loader,
@ -697,7 +708,21 @@ impl<TGraphContainer: ModuleGraphContainer>
unreachable!("Deno bug. {} was misconfigured internally.", specifier);
}
match graph.get(specifier) {
let maybe_module = match graph.try_get(specifier) {
Ok(module) => module,
Err(err) => {
return Err(custom_error(
get_module_error_class(err),
enhance_graph_error(
&self.shared.sys,
&ModuleGraphError::ModuleError(err.clone()),
EnhanceGraphErrorMode::ShowRange,
),
))
}
};
match maybe_module {
Some(deno_graph::Module::Json(JsonModule {
source,
media_type,
@ -1096,7 +1121,7 @@ struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
sys: CliSys,
graph_container: TGraphContainer,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
npm_registry_permission_checker: Arc<NpmRegistryReadPermissionChecker>,
}
impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
@ -1113,7 +1138,9 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
return Ok(std::borrow::Cow::Borrowed(path));
}
}
self.npm_resolver.ensure_read_permission(permissions, path)
self
.npm_registry_permission_checker
.ensure_read_permission(permissions, path)
}
fn load_text_file_lossy(


@ -1,15 +1,12 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider;
use node_resolver::NpmPackageFolderResolver;
@ -73,21 +70,6 @@ impl CliNpmResolver for CliByonmNpmResolver {
self.root_node_modules_dir()
}
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
if !path
.components()
.any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
{
permissions.check_read_path(path).map_err(Into::into)
} else {
Ok(Cow::Borrowed(path))
}
}
fn check_state_hash(&self) -> Option<u64> {
// it is very difficult to determine the check state hash for byonm
// so we just return None to signify check caching is not supported


@ -24,7 +24,6 @@ use deno_npm_cache::NpmCacheSetting;
use deno_path_util::fs::canonicalize_path_maybe_not_exists;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::colors;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
@ -167,6 +166,7 @@ fn create_inner(
sys.clone(),
npm_rc.clone(),
));
let fs_resolver = create_npm_fs_resolver(
npm_cache.clone(),
&npm_install_deps_provider,
@ -754,14 +754,6 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.fs_resolver.node_modules_path()
}
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
self.fs_resolver.ensure_read_permission(permissions, path)
}
fn check_state_hash(&self) -> Option<u64> {
// We could go further and check all the individual
// npm packages, but that's probably overkill.


@ -3,30 +3,17 @@
pub mod bin_entries;
pub mod lifecycle_scripts;
use std::borrow::Cow;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::StreamExt;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;
use sys_traits::FsCanonicalize;
use super::super::PackageCaching;
use crate::npm::CliNpmTarballCache;
use crate::sys::CliSys;
/// Part of the resolution that interacts with the file system.
#[async_trait(?Send)]
@ -63,101 +50,4 @@ pub trait NpmPackageFsResolver: Send + Sync {
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError>;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError>;
}
#[derive(Debug)]
pub struct RegistryReadPermissionChecker {
sys: CliSys,
cache: Mutex<HashMap<PathBuf, PathBuf>>,
registry_path: PathBuf,
}
impl RegistryReadPermissionChecker {
pub fn new(sys: CliSys, registry_path: PathBuf) -> Self {
Self {
sys,
registry_path,
cache: Default::default(),
}
}
pub fn ensure_registry_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
if permissions.query_read_all() {
return Ok(Cow::Borrowed(path)); // skip permissions checks below
}
// allow reading if it's in the node_modules
let is_path_in_node_modules = path.starts_with(&self.registry_path)
&& path
.components()
.all(|c| !matches!(c, std::path::Component::ParentDir));
if is_path_in_node_modules {
let mut cache = self.cache.lock().unwrap();
let mut canonicalize =
|path: &Path| -> Result<Option<PathBuf>, AnyError> {
match cache.get(path) {
Some(canon) => Ok(Some(canon.clone())),
None => match self.sys.fs_canonicalize(path) {
Ok(canon) => {
cache.insert(path.to_path_buf(), canon.clone());
Ok(Some(canon))
}
Err(e) => {
if e.kind() == ErrorKind::NotFound {
return Ok(None);
}
Err(AnyError::from(e)).with_context(|| {
format!("failed canonicalizing '{}'", path.display())
})
}
},
}
};
if let Some(registry_path_canon) = canonicalize(&self.registry_path)? {
if let Some(path_canon) = canonicalize(path)? {
if path_canon.starts_with(registry_path_canon) {
return Ok(Cow::Owned(path_canon));
}
} else if path.starts_with(registry_path_canon)
|| path.starts_with(&self.registry_path)
{
return Ok(Cow::Borrowed(path));
}
}
}
permissions.check_read_path(path).map_err(Into::into)
}
}
/// Caches all the packages in parallel.
pub async fn cache_packages(
packages: &[NpmResolutionPackage],
tarball_cache: &Arc<CliNpmTarballCache>,
) -> Result<(), AnyError> {
let mut futures_unordered = futures::stream::FuturesUnordered::new();
for package in packages {
futures_unordered.push(async move {
tarball_cache
.ensure_package(&package.id.nv, &package.dist)
.await
});
}
while let Some(result) = futures_unordered.next().await {
// surface the first error
result?;
}
Ok(())
}


@ -10,27 +10,25 @@ use std::sync::Arc;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use super::super::resolution::NpmResolution;
use super::common::cache_packages;
use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
use super::common::NpmPackageFsResolver;
use super::common::RegistryReadPermissionChecker;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;
use crate::colors;
use crate::npm::managed::PackageCaching;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use crate::sys::CliSys;
/// Resolves packages from the global npm cache.
#[derive(Debug)]
@ -39,7 +37,6 @@ pub struct GlobalNpmPackageResolver {
tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker,
lifecycle_scripts: LifecycleScriptsConfig,
}
@ -48,15 +45,10 @@ impl GlobalNpmPackageResolver {
cache: Arc<CliNpmCache>,
tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
sys: CliSys,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
) -> Self {
Self {
registry_read_permission_checker: RegistryReadPermissionChecker::new(
sys,
cache.root_dir_path().to_path_buf(),
),
cache,
tarball_cache,
resolution,
@ -186,16 +178,25 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
Ok(())
}
}
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
self
.registry_read_permission_checker
.ensure_registry_read_permission(permissions, path)
async fn cache_packages(
packages: &[NpmResolutionPackage],
tarball_cache: &Arc<CliNpmTarballCache>,
) -> Result<(), AnyError> {
let mut futures_unordered = FuturesUnordered::new();
for package in packages {
futures_unordered.push(async move {
tarball_cache
.ensure_package(&package.id.nv, &package.dist)
.await
});
}
while let Some(result) = futures_unordered.next().await {
// surface the first error
result?;
}
Ok(())
}
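
For readers unfamiliar with the pattern (not part of the diff): cache_packages above uses the usual FuturesUnordered approach of pushing one future per package and then draining the set, surfacing the first error. A small self-contained sketch of that shape follows; fetch_one is an illustrative stand-in for tarball_cache.ensure_package().

use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;

// Illustrative stand-in for tarball_cache.ensure_package().
async fn fetch_one(name: &str) -> Result<(), AnyError> {
    log::debug!("cached {}", name);
    Ok(())
}

// Same shape as cache_packages(): run everything concurrently,
// stop at the first error.
async fn cache_all(names: &[&str]) -> Result<(), AnyError> {
    let mut futures_unordered = FuturesUnordered::new();
    for &name in names {
        futures_unordered.push(fetch_one(name));
    }
    while let Some(result) = futures_unordered.next().await {
        // surface the first error
        result?;
    }
    Ok(())
}
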
struct GlobalLifecycleScripts<'a> {


@ -33,7 +33,6 @@ use deno_npm::NpmSystemInfo;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_path_util::fs::canonicalize_path_maybe_not_exists;
use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder;
use deno_runtime::deno_node::NodePermissions;
use deno_semver::package::PackageNv;
use deno_semver::StackString;
use node_resolver::errors::PackageFolderResolveError;
@ -47,7 +46,6 @@ use sys_traits::FsMetadata;
use super::super::resolution::NpmResolution;
use super::common::bin_entries;
use super::common::NpmPackageFsResolver;
use super::common::RegistryReadPermissionChecker;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::cache::CACHE_PERM;
@ -75,7 +73,6 @@ pub struct LocalNpmPackageResolver {
root_node_modules_path: PathBuf,
root_node_modules_url: Url,
system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker,
lifecycle_scripts: LifecycleScriptsConfig,
}
@ -98,10 +95,6 @@ impl LocalNpmPackageResolver {
progress_bar,
resolution,
tarball_cache,
registry_read_permission_checker: RegistryReadPermissionChecker::new(
sys.clone(),
node_modules_folder.clone(),
),
sys,
root_node_modules_url: Url::from_directory_path(&node_modules_folder)
.unwrap(),
@ -275,16 +268,6 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
)
.await
}
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
self
.registry_read_permission_checker
.ensure_registry_read_permission(permissions, path)
}
}
/// `node_modules/.deno/<package>/node_modules/<package_name>`
@ -450,7 +433,11 @@ async fn sync_resolution_with_fs(
deno_core::unsync::spawn_blocking({
let package_path = package_path.clone();
move || {
clone_dir_recursive(&cache_folder, &package_path)?;
clone_dir_recursive(
&crate::sys::CliSys::default(),
&cache_folder,
&package_path,
)?;
// write out a file that indicates this folder has been initialized
fs::write(initialized_file, tags)?;
@ -507,7 +494,11 @@ async fn sync_resolution_with_fs(
&package.id.nv.name,
);
clone_dir_recursive(&source_path, &package_path)?;
clone_dir_recursive(
&crate::sys::CliSys::default(),
&source_path,
&package_path,
)?;
// write out a file that indicates this folder has been initialized
fs::write(initialized_file, "")?;
}
@ -1074,7 +1065,8 @@ fn symlink_package_dir(
}
#[cfg(not(windows))]
{
symlink_dir(&old_path_relative, new_path).map_err(Into::into)
symlink_dir(&crate::sys::CliSys::default(), &old_path_relative, new_path)
.map_err(Into::into)
}
}
@ -1096,7 +1088,8 @@ fn junction_or_symlink_dir(
.context("Failed creating junction in node_modules folder");
}
match symlink_dir(old_path_relative, new_path) {
match symlink_dir(&crate::sys::CliSys::default(), old_path_relative, new_path)
{
Ok(()) => Ok(()),
Err(symlink_err)
if symlink_err.kind() == std::io::ErrorKind::PermissionDenied =>


@ -48,7 +48,6 @@ pub fn create_npm_fs_resolver(
npm_cache,
tarball_cache,
resolution,
sys,
system_info,
lifecycle_scripts,
)),


@ -2,8 +2,8 @@
mod byonm;
mod managed;
mod permission_checker;
use std::borrow::Cow;
use std::path::Path;
use std::sync::Arc;
@ -17,7 +17,6 @@ use deno_resolver::npm::ByonmInNpmPackageChecker;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::CliNpmReqResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
@ -34,6 +33,8 @@ pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver;
pub use self::managed::PackageCaching;
pub use self::permission_checker::NpmRegistryReadPermissionChecker;
pub use self::permission_checker::NpmRegistryReadPermissionCheckerMode;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::sys::CliSys;
@ -183,12 +184,6 @@ pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
fn root_node_modules_path(&self) -> Option<&Path>;
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError>;
/// Returns a hash returning the state of the npm resolver
/// or `None` if the state currently can't be determined.
fn check_state_hash(&self) -> Option<u64>;


@ -0,0 +1,105 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_runtime::deno_node::NodePermissions;
use sys_traits::FsCanonicalize;
use crate::sys::CliSys;
#[derive(Debug)]
pub enum NpmRegistryReadPermissionCheckerMode {
Byonm,
Global(PathBuf),
Local(PathBuf),
}
#[derive(Debug)]
pub struct NpmRegistryReadPermissionChecker {
sys: CliSys,
cache: Mutex<HashMap<PathBuf, PathBuf>>,
mode: NpmRegistryReadPermissionCheckerMode,
}
impl NpmRegistryReadPermissionChecker {
pub fn new(sys: CliSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self {
Self {
sys,
cache: Default::default(),
mode,
}
}
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
pub fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
if permissions.query_read_all() {
return Ok(Cow::Borrowed(path)); // skip permissions checks below
}
match &self.mode {
NpmRegistryReadPermissionCheckerMode::Byonm => {
if path.components().any(|c| c.as_os_str() == "node_modules") {
Ok(Cow::Borrowed(path))
} else {
permissions.check_read_path(path).map_err(Into::into)
}
}
NpmRegistryReadPermissionCheckerMode::Global(registry_path)
| NpmRegistryReadPermissionCheckerMode::Local(registry_path) => {
// allow reading if it's in the node_modules
let is_path_in_node_modules = path.starts_with(registry_path)
&& path
.components()
.all(|c| !matches!(c, std::path::Component::ParentDir));
if is_path_in_node_modules {
let mut cache = self.cache.lock();
let mut canonicalize =
|path: &Path| -> Result<Option<PathBuf>, AnyError> {
match cache.get(path) {
Some(canon) => Ok(Some(canon.clone())),
None => match self.sys.fs_canonicalize(path) {
Ok(canon) => {
cache.insert(path.to_path_buf(), canon.clone());
Ok(Some(canon))
}
Err(e) => {
if e.kind() == ErrorKind::NotFound {
return Ok(None);
}
Err(AnyError::from(e)).with_context(|| {
format!("failed canonicalizing '{}'", path.display())
})
}
},
}
};
if let Some(registry_path_canon) = canonicalize(registry_path)? {
if let Some(path_canon) = canonicalize(path)? {
if path_canon.starts_with(registry_path_canon) {
return Ok(Cow::Owned(path_canon));
}
} else if path.starts_with(registry_path_canon)
|| path.starts_with(registry_path)
{
return Ok(Cow::Borrowed(path));
}
}
}
permissions.check_read_path(path).map_err(Into::into)
}
}
}
}
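
To summarize what the new checker grants in each mode, here is a simplified, self-contained model (not the real API: the actual ensure_read_permission also canonicalizes paths through the cache above and falls back to NodePermissions::check_read_path when the fast path does not apply).

use std::path::{Component, Path, PathBuf};

// Simplified stand-in for NpmRegistryReadPermissionCheckerMode.
enum Mode {
    Byonm,
    Global(PathBuf),
    Local(PathBuf),
}

// True when a read is granted without consulting the permission system,
// mirroring the fast paths of the real checker.
fn read_allowed_without_prompt(mode: &Mode, path: &Path) -> bool {
    match mode {
        // BYONM: anything inside a node_modules directory is allowed.
        Mode::Byonm => path.components().any(|c| c.as_os_str() == "node_modules"),
        // Managed global cache or local node_modules: the path must stay
        // inside the registry directory and must not escape it via "..".
        Mode::Global(root) | Mode::Local(root) => {
            path.starts_with(root)
                && path.components().all(|c| !matches!(c, Component::ParentDir))
        }
    }
}

fn main() {
    let mode = Mode::Local(PathBuf::from("/project/node_modules"));
    assert!(read_allowed_without_prompt(
        &mode,
        Path::new("/project/node_modules/chalk/index.js")
    ));
    assert!(!read_allowed_without_prompt(&mode, Path::new("/etc/passwd")));
}
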


@ -51,6 +51,7 @@ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
@ -188,7 +189,7 @@ pub struct Metadata {
pub argv: Vec<String>,
pub seed: Option<u64>,
pub code_cache_key: Option<u64>,
pub permissions: PermissionFlags,
pub permissions: PermissionsOptions,
pub location: Option<Url>,
pub v8_flags: Vec<String>,
pub log_level: Option<Level>,
@ -793,7 +794,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
seed: self.cli_options.seed(),
code_cache_key,
location: self.cli_options.location_flag().clone(),
permissions: self.cli_options.permission_flags().clone(),
permissions: self.cli_options.permissions_options(),
v8_flags: self.cli_options.v8_flags().clone(),
unsafely_ignore_certificate_errors: self
.cli_options


@ -23,6 +23,7 @@ use sys_traits::boxed::BoxedFsDirEntry;
use sys_traits::boxed::BoxedFsMetadataValue;
use sys_traits::boxed::FsMetadataBoxed;
use sys_traits::boxed::FsReadDirBoxed;
use sys_traits::FsCopy;
use sys_traits::FsMetadata;
use super::virtual_fs::FileBackedVfs;
@ -47,24 +48,32 @@ impl DenoCompileFileSystem {
}
}
fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
fn copy_to_real_path(
&self,
oldpath: &Path,
newpath: &Path,
) -> std::io::Result<u64> {
let old_file = self.0.file_entry(oldpath)?;
let old_file_bytes =
self.0.read_file_all(old_file, VfsFileSubDataKind::Raw)?;
RealFs.write_file_sync(
newpath,
OpenOptions {
read: false,
write: true,
create: true,
truncate: true,
append: false,
create_new: false,
mode: None,
},
None,
&old_file_bytes,
)
let len = old_file_bytes.len() as u64;
RealFs
.write_file_sync(
newpath,
OpenOptions {
read: false,
write: true,
create: true,
truncate: true,
append: false,
create_new: false,
mode: None,
},
None,
&old_file_bytes,
)
.map_err(|err| err.into_io_error())?;
Ok(len)
}
}
@ -191,7 +200,10 @@ impl FileSystem for DenoCompileFileSystem {
fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
self.error_if_in_vfs(newpath)?;
if self.0.is_path_within(oldpath) {
self.copy_to_real_path(oldpath, newpath)
self
.copy_to_real_path(oldpath, newpath)
.map(|_| ())
.map_err(FsError::Io)
} else {
RealFs.copy_file_sync(oldpath, newpath)
}
@ -206,6 +218,8 @@ impl FileSystem for DenoCompileFileSystem {
let fs = self.clone();
tokio::task::spawn_blocking(move || {
fs.copy_to_real_path(&oldpath, &newpath)
.map(|_| ())
.map_err(FsError::Io)
})
.await?
} else {
@ -593,6 +607,32 @@ impl sys_traits::BaseFsMetadata for DenoCompileFileSystem {
}
}
impl sys_traits::BaseFsCopy for DenoCompileFileSystem {
#[inline]
fn base_fs_copy(&self, from: &Path, to: &Path) -> std::io::Result<u64> {
self
.error_if_in_vfs(to)
.map_err(|err| err.into_io_error())?;
if self.0.is_path_within(from) {
self.copy_to_real_path(from, to)
} else {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.fs_copy(from, to)
}
}
}
impl sys_traits::BaseFsCloneFile for DenoCompileFileSystem {
fn base_fs_clone_file(
&self,
_from: &Path,
_to: &Path,
) -> std::io::Result<()> {
// will cause a fallback in the code that uses this
Err(not_supported("cloning files"))
}
}
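
The impl above deliberately reports not_supported so that callers built on sys_traits fall back to a plain copy. A minimal sketch of that caller-side pattern follows, with two stated assumptions: that the crate exposes FsCloneFile/FsCopy convenience traits matching the Base* impls here, and that the unsupported error surfaces as io::ErrorKind::Unsupported. Treat it as illustrative rather than the exact fallback used upstream.

use std::io;
use std::path::Path;
use sys_traits::{FsCloneFile, FsCopy};

// Try a cheap copy-on-write clone first; if the sys (or the underlying
// filesystem) cannot clone, fall back to copying the bytes.
fn clone_or_copy<S: FsCloneFile + FsCopy>(sys: &S, from: &Path, to: &Path) -> io::Result<()> {
    match sys.fs_clone_file(from, to) {
        Ok(()) => Ok(()),
        Err(err) if err.kind() == io::ErrorKind::Unsupported => {
            // fs_copy returns the number of bytes copied; only success matters here.
            sys.fs_copy(from, to).map(|_| ())
        }
        Err(err) => Err(err),
    }
}
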
impl sys_traits::BaseFsCreateDir for DenoCompileFileSystem {
#[inline]
fn base_fs_create_dir(
@ -794,6 +834,14 @@ impl sys_traits::BaseFsOpen for DenoCompileFileSystem {
}
}
impl sys_traits::BaseFsSymlinkDir for DenoCompileFileSystem {
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
self
.symlink_sync(src, dst, Some(FsFileType::Directory))
.map_err(|err| err.into_io_error())
}
}
impl sys_traits::SystemRandom for DenoCompileFileSystem {
#[inline]
fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {


@ -6,6 +6,7 @@
#![allow(unused_imports)]
use std::borrow::Cow;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
@ -88,6 +89,8 @@ use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::NpmRegistryReadPermissionChecker;
use crate::npm::NpmRegistryReadPermissionCheckerMode;
use crate::resolver::CjsTracker;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::NpmModuleLoader;
@ -123,6 +126,7 @@ struct SharedModuleLoaderState {
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: Arc<NpmModuleLoader>,
npm_registry_permission_checker: NpmRegistryReadPermissionChecker,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
source_maps: SourceMapStore,
@ -557,7 +561,7 @@ impl NodeRequireLoader for EmbeddedModuleLoader {
self
.shared
.npm_resolver
.npm_registry_permission_checker
.ensure_read_permission(permissions, path)
}
@ -662,6 +666,23 @@ pub async fn run(
let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
let cache_setting = CacheSetting::Only;
let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new(sys.clone()));
let npm_registry_permission_checker = {
let mode = match &metadata.node_modules {
Some(binary::NodeModules::Managed {
node_modules_dir: Some(path),
}) => NpmRegistryReadPermissionCheckerMode::Local(PathBuf::from(path)),
Some(binary::NodeModules::Byonm { .. }) => {
NpmRegistryReadPermissionCheckerMode::Byonm
}
Some(binary::NodeModules::Managed {
node_modules_dir: None,
})
| None => NpmRegistryReadPermissionCheckerMode::Global(
npm_global_cache_dir.clone(),
),
};
NpmRegistryReadPermissionChecker::new(sys.clone(), mode)
};
let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules {
Some(binary::NodeModules::Managed { node_modules_dir }) => {
// create an npmrc that uses the fake npm_registry_url to resolve packages
@ -889,6 +910,7 @@ pub async fn run(
fs.clone(),
node_code_translator,
)),
npm_registry_permission_checker,
npm_resolver: npm_resolver.clone(),
npm_req_resolver,
source_maps,
@ -898,8 +920,7 @@ pub async fn run(
};
let permissions = {
let mut permissions =
metadata.permissions.to_options(/* cli_arg_urls */ &[]);
let mut permissions = metadata.permissions;
// grant read access to the vfs
match &mut permissions.allow_read {
Some(vec) if vec.is_empty() => {


@ -1685,6 +1685,7 @@ mod test {
temp_dir.write("src/a.txt", "data");
temp_dir.write("src/b.txt", "data");
util::fs::symlink_dir(
&crate::sys::CliSys::default(),
temp_dir_path.join("src/nested/sub_dir").as_path(),
temp_dir_path.join("src/sub_dir_link").as_path(),
)


@ -7,6 +7,8 @@
// denort or the deno binary. We should extract out denort to a separate binary.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use sys_traits::boxed::BoxedFsDirEntry;
use sys_traits::boxed::BoxedFsFile;
@ -35,12 +37,35 @@ impl Default for CliSys {
impl deno_runtime::deno_node::ExtNodeSys for CliSys {}
impl sys_traits::BaseFsCloneFile for CliSys {
fn base_fs_clone_file(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_clone_file(src, dst),
Self::DenoCompile(sys) => sys.base_fs_clone_file(src, dst),
}
}
}
impl sys_traits::BaseFsSymlinkDir for CliSys {
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_symlink_dir(src, dst),
Self::DenoCompile(sys) => sys.base_fs_symlink_dir(src, dst),
}
}
}
impl sys_traits::BaseFsCopy for CliSys {
fn base_fs_copy(&self, src: &Path, dst: &Path) -> std::io::Result<u64> {
match self {
Self::Real(sys) => sys.base_fs_copy(src, dst),
Self::DenoCompile(sys) => sys.base_fs_copy(src, dst),
}
}
}
impl sys_traits::BaseFsHardLink for CliSys {
fn base_fs_hard_link(
&self,
src: &std::path::Path,
dst: &std::path::Path,
) -> std::io::Result<()> {
fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_hard_link(src, dst),
Self::DenoCompile(sys) => sys.base_fs_hard_link(src, dst),
@ -49,10 +74,7 @@ impl sys_traits::BaseFsHardLink for CliSys {
}
impl sys_traits::BaseFsRead for CliSys {
fn base_fs_read(
&self,
p: &std::path::Path,
) -> std::io::Result<Cow<'static, [u8]>> {
fn base_fs_read(&self, p: &Path) -> std::io::Result<Cow<'static, [u8]>> {
match self {
Self::Real(sys) => sys.base_fs_read(p),
Self::DenoCompile(sys) => sys.base_fs_read(p),
@ -65,7 +87,7 @@ impl sys_traits::BaseFsReadDir for CliSys {
fn base_fs_read_dir(
&self,
p: &std::path::Path,
p: &Path,
) -> std::io::Result<
Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
> {
@ -77,10 +99,7 @@ impl sys_traits::BaseFsReadDir for CliSys {
}
impl sys_traits::BaseFsCanonicalize for CliSys {
fn base_fs_canonicalize(
&self,
p: &std::path::Path,
) -> std::io::Result<std::path::PathBuf> {
fn base_fs_canonicalize(&self, p: &Path) -> std::io::Result<PathBuf> {
match self {
Self::Real(sys) => sys.base_fs_canonicalize(p),
Self::DenoCompile(sys) => sys.base_fs_canonicalize(p),
@ -91,10 +110,7 @@ impl sys_traits::BaseFsCanonicalize for CliSys {
impl sys_traits::BaseFsMetadata for CliSys {
type Metadata = BoxedFsMetadataValue;
fn base_fs_metadata(
&self,
path: &std::path::Path,
) -> std::io::Result<Self::Metadata> {
fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
match self {
Self::Real(sys) => sys.fs_metadata_boxed(path),
Self::DenoCompile(sys) => sys.fs_metadata_boxed(path),
@ -103,7 +119,7 @@ impl sys_traits::BaseFsMetadata for CliSys {
fn base_fs_symlink_metadata(
&self,
path: &std::path::Path,
path: &Path,
) -> std::io::Result<Self::Metadata> {
match self {
Self::Real(sys) => sys.fs_symlink_metadata_boxed(path),
@ -115,7 +131,7 @@ impl sys_traits::BaseFsMetadata for CliSys {
impl sys_traits::BaseFsCreateDir for CliSys {
fn base_fs_create_dir(
&self,
p: &std::path::Path,
p: &Path,
options: &CreateDirOptions,
) -> std::io::Result<()> {
match self {
@ -130,7 +146,7 @@ impl sys_traits::BaseFsOpen for CliSys {
fn base_fs_open(
&self,
path: &std::path::Path,
path: &Path,
options: &sys_traits::OpenOptions,
) -> std::io::Result<Self::File> {
match self {
@ -141,7 +157,7 @@ impl sys_traits::BaseFsOpen for CliSys {
}
impl sys_traits::BaseFsRemoveFile for CliSys {
fn base_fs_remove_file(&self, p: &std::path::Path) -> std::io::Result<()> {
fn base_fs_remove_file(&self, p: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_remove_file(p),
Self::DenoCompile(sys) => sys.base_fs_remove_file(p),
@ -150,11 +166,7 @@ impl sys_traits::BaseFsRemoveFile for CliSys {
}
impl sys_traits::BaseFsRename for CliSys {
fn base_fs_rename(
&self,
old: &std::path::Path,
new: &std::path::Path,
) -> std::io::Result<()> {
fn base_fs_rename(&self, old: &Path, new: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_rename(old, new),
Self::DenoCompile(sys) => sys.base_fs_rename(old, new),
@ -190,7 +202,7 @@ impl sys_traits::ThreadSleep for CliSys {
}
impl sys_traits::EnvCurrentDir for CliSys {
fn env_current_dir(&self) -> std::io::Result<std::path::PathBuf> {
fn env_current_dir(&self) -> std::io::Result<PathBuf> {
match self {
Self::Real(sys) => sys.env_current_dir(),
Self::DenoCompile(sys) => sys.env_current_dir(),
@ -211,7 +223,7 @@ impl sys_traits::BaseEnvVar for CliSys {
}
impl sys_traits::EnvHomeDir for CliSys {
fn env_home_dir(&self) -> Option<std::path::PathBuf> {
fn env_home_dir(&self) -> Option<PathBuf> {
#[allow(clippy::disallowed_types)] // ok because sys impl
sys_traits::impls::RealSys.env_home_dir()
}
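
These delegating impls are what let CliSys (both the real-FS and compiled-binary variants) satisfy the sys_traits bounds that helpers such as clone_dir_recursive and symlink_dir now take, as seen in the node_modules resolver hunks above. A small sketch of such a bound; mirror_entry is illustrative, and it assumes the crate's FsCopy/FsSymlinkDir convenience traits corresponding to the Base* impls here.

use std::io;
use std::path::Path;
use sys_traits::{FsCopy, FsSymlinkDir};

// Generic over any sys that can copy files and create directory symlinks;
// CliSys::Real and CliSys::DenoCompile both qualify after this change.
fn mirror_entry<S: FsCopy + FsSymlinkDir>(
    sys: &S,
    src_file: &Path,
    dst_file: &Path,
    link_target: &Path,
    link_path: &Path,
) -> io::Result<()> {
    sys.fs_copy(src_file, dst_file)?; // returns the bytes copied, ignored here
    sys.fs_symlink_dir(link_target, link_path)
}
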


@ -11,7 +11,9 @@ use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleLoadError;
use deno_terminal::colors;
use once_cell::sync::Lazy;
use regex::Regex;
@ -31,6 +33,7 @@ use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache;
use crate::factory::SpecifierInfo;
use crate::factory::WorkspaceFilesFactory;
use crate::graph_util::maybe_additional_sloppy_imports_message;
use crate::graph_util::BuildFastCheckGraphOptions;
use crate::graph_util::ModuleGraphBuilder;
use crate::node::CliNodeResolver;
@ -133,6 +136,7 @@ pub struct TypeChecker {
module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
sys: CliSys,
}
impl TypeChecker {
@ -143,6 +147,7 @@ impl TypeChecker {
module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
sys: CliSys,
) -> Self {
Self {
caches,
@ -151,6 +156,7 @@ impl TypeChecker {
module_graph_builder,
node_resolver,
npm_resolver,
sys,
}
}
@ -205,58 +211,8 @@ impl TypeChecker {
let type_check_mode = options.type_check_mode;
let ts_config = ts_config_result.ts_config;
let maybe_check_hash = match self.npm_resolver.check_state_hash() {
Some(npm_check_hash) => {
match get_check_hash(
&graph,
npm_check_hash,
type_check_mode,
&ts_config,
) {
CheckHashResult::NoFiles => {
return Ok((graph.into(), Default::default()))
}
CheckHashResult::Hash(hash) => Some(hash),
}
}
None => None, // we can't determine a check hash
};
// do not type check if we know this is type checked
let cache = TypeCheckCache::new(self.caches.type_checking_cache_db());
if !options.reload {
if let Some(check_hash) = maybe_check_hash {
if cache.has_check_hash(check_hash) {
log::debug!("Already type checked.");
return Ok((graph.into(), Default::default()));
}
}
}
for root in &graph.roots {
let root_str = root.as_str();
log::info!(
"{} {}",
colors::green("Check"),
to_percent_decoded_str(root_str)
);
}
let check_js = ts_config.get_check_js();
// while there might be multiple roots, we can't "merge" the build info, so we
// try to retrieve the build info for first root, which is the most common use
// case.
let maybe_tsbuildinfo = if options.reload {
None
} else {
cache.get_tsbuildinfo(&graph.roots[0])
};
// to make tsc build info work, we need to consistently hash modules, so that
// tsc can better determine if an emit is still valid or not, so we provide
// that data here.
let hash_data = FastInsecureHasher::new_deno_versioned()
.write(&ts_config.as_bytes())
.finish();
// add fast check to the graph before getting the roots
if options.build_fast_check_graph {
@ -268,24 +224,7 @@ impl TypeChecker {
)?;
}
let root_names = get_tsc_roots(&graph, check_js);
let graph = Arc::new(graph);
let response = tsc::exec(tsc::Request {
config: ts_config,
debug: self.cli_options.log_level() == Some(log::Level::Debug),
graph: graph.clone(),
hash_data,
maybe_npm: Some(tsc::RequestNpmState {
cjs_tracker: self.cjs_tracker.clone(),
node_resolver: self.node_resolver.clone(),
npm_resolver: self.npm_resolver.clone(),
}),
maybe_tsbuildinfo,
root_names,
check_mode: type_check_mode,
})?;
let mut diagnostics = response.diagnostics.filter(|d| {
let is_visible_diagnostic = |d: &tsc::Diagnostic| {
if self.is_remote_diagnostic(d) {
return type_check_mode == TypeCheckMode::All
&& d.include_when_remote()
@ -314,7 +253,79 @@ impl TypeChecker {
.iter()
.rfind(|s| specifier.as_str().starts_with(s.as_str()));
scope == scope_options.scope.as_ref()
});
};
let TscRoots {
roots: root_names,
missing_diagnostics,
maybe_check_hash,
} = get_tsc_roots(
&self.sys,
&graph,
check_js,
self.npm_resolver.check_state_hash(),
type_check_mode,
&ts_config,
);
let missing_diagnostics = missing_diagnostics.filter(is_visible_diagnostic);
if root_names.is_empty() && missing_diagnostics.is_empty() {
return Ok((graph.into(), Default::default()));
}
if !options.reload {
// do not type check if we know this is type checked
if let Some(check_hash) = maybe_check_hash {
if cache.has_check_hash(check_hash) {
log::debug!("Already type checked.");
return Ok((graph.into(), Default::default()));
}
}
}
for root in &graph.roots {
let root_str = root.as_str();
log::info!(
"{} {}",
colors::green("Check"),
to_percent_decoded_str(root_str)
);
}
// while there might be multiple roots, we can't "merge" the build info, so we
// try to retrieve the build info for first root, which is the most common use
// case.
let maybe_tsbuildinfo = if options.reload {
None
} else {
cache.get_tsbuildinfo(&graph.roots[0])
};
// to make tsc build info work, we need to consistently hash modules, so that
// tsc can better determine if an emit is still valid or not, so we provide
// that data here.
let tsconfig_hash_data = FastInsecureHasher::new_deno_versioned()
.write(&ts_config.as_bytes())
.finish();
let graph = Arc::new(graph);
let response = tsc::exec(tsc::Request {
config: ts_config,
debug: self.cli_options.log_level() == Some(log::Level::Debug),
graph: graph.clone(),
hash_data: tsconfig_hash_data,
maybe_npm: Some(tsc::RequestNpmState {
cjs_tracker: self.cjs_tracker.clone(),
node_resolver: self.node_resolver.clone(),
npm_resolver: self.npm_resolver.clone(),
}),
maybe_tsbuildinfo,
root_names,
check_mode: type_check_mode,
})?;
let response_diagnostics =
response.diagnostics.filter(is_visible_diagnostic);
let mut diagnostics = missing_diagnostics;
diagnostics.extend(response_diagnostics);
diagnostics.apply_fast_check_source_maps(&graph);
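
The control flow above amounts to a cache gate: hash everything that can affect the check (config, check mode, module sources, npm state) and skip invoking tsc when that hash was already recorded. A rough, std-only sketch of the idea, with `DefaultHasher` and a `HashSet` standing in for the real `FastInsecureHasher` and sqlite-backed `TypeCheckCache`:

use std::collections::hash_map::DefaultHasher;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

struct CheckCache {
    seen_hashes: HashSet<u64>, // stand-in for the persistent TypeCheckCache
}

impl CheckCache {
    fn should_skip(&mut self, ts_config: &str, type_check_mode: u8, sources: &[&str]) -> bool {
        let mut hasher = DefaultHasher::new();
        ts_config.hash(&mut hasher);
        type_check_mode.hash(&mut hasher);
        for source in sources {
            source.hash(&mut hasher);
        }
        let hash = hasher.finish();
        // If this exact input set was seen before, type checking is a no-op.
        !self.seen_hashes.insert(hash)
    }
}

fn main() {
    let mut cache = CheckCache { seen_hashes: HashSet::new() };
    assert!(!cache.should_skip("{\"checkJs\":false}", 1, &["const a: number = 1;"]));
    assert!(cache.should_skip("{\"checkJs\":false}", 1, &["const a: number = 1;"]));
}
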
@ -348,108 +359,10 @@ impl TypeChecker {
}
}
enum CheckHashResult {
Hash(CacheDBHash),
NoFiles,
}
/// Gets a hash of the inputs for type checking. This can then
/// be used to tell
fn get_check_hash(
graph: &ModuleGraph,
package_reqs_hash: u64,
type_check_mode: TypeCheckMode,
ts_config: &TsConfig,
) -> CheckHashResult {
let mut hasher = FastInsecureHasher::new_deno_versioned();
hasher.write_u8(match type_check_mode {
TypeCheckMode::All => 0,
TypeCheckMode::Local => 1,
TypeCheckMode::None => 2,
});
hasher.write(&ts_config.as_bytes());
let check_js = ts_config.get_check_js();
let mut has_file = false;
let mut has_file_to_type_check = false;
// this iterator of modules is already deterministic, so no need to sort it
for module in graph.modules() {
match module {
Module::Js(module) => {
let ts_check = has_ts_check(module.media_type, &module.source);
if ts_check {
has_file_to_type_check = true;
}
match module.media_type {
MediaType::TypeScript
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Mts
| MediaType::Cts
| MediaType::Tsx => {
has_file = true;
has_file_to_type_check = true;
}
MediaType::JavaScript
| MediaType::Mjs
| MediaType::Cjs
| MediaType::Jsx => {
has_file = true;
if !check_js && !ts_check {
continue;
}
}
MediaType::Json
| MediaType::Css
| MediaType::SourceMap
| MediaType::Wasm
| MediaType::Unknown => continue,
}
hasher.write_str(module.specifier.as_str());
hasher.write_str(
// the fast check module will only be set when publishing
module
.fast_check_module()
.map(|s| s.source.as_ref())
.unwrap_or(&module.source),
);
}
Module::Node(_) => {
// the @types/node package will be in the resolved
// snapshot below so don't bother including it here
}
Module::Npm(_) => {
// don't bother adding this specifier to the hash
// because what matters is the resolved npm snapshot,
// which is hashed below
}
Module::Json(module) => {
has_file_to_type_check = true;
hasher.write_str(module.specifier.as_str());
hasher.write_str(&module.source);
}
Module::Wasm(module) => {
has_file_to_type_check = true;
hasher.write_str(module.specifier.as_str());
hasher.write_str(&module.source_dts);
}
Module::External(module) => {
hasher.write_str(module.specifier.as_str());
}
}
}
hasher.write_hashable(package_reqs_hash);
if !has_file || !check_js && !has_file_to_type_check {
// no files to type check
CheckHashResult::NoFiles
} else {
CheckHashResult::Hash(CacheDBHash::new(hasher.finish()))
}
struct TscRoots {
roots: Vec<(ModuleSpecifier, MediaType)>,
missing_diagnostics: tsc::Diagnostics,
maybe_check_hash: Option<CacheDBHash>,
}
/// Transform the graph into root specifiers that we can feed `tsc`. We have to
@ -459,52 +372,115 @@ fn get_check_hash(
/// the roots, so they get type checked and optionally emitted,
/// otherwise they would be ignored if only imported into JavaScript.
fn get_tsc_roots(
sys: &CliSys,
graph: &ModuleGraph,
check_js: bool,
) -> Vec<(ModuleSpecifier, MediaType)> {
npm_cache_state_hash: Option<u64>,
type_check_mode: TypeCheckMode,
ts_config: &TsConfig,
) -> TscRoots {
fn maybe_get_check_entry(
module: &deno_graph::Module,
check_js: bool,
hasher: Option<&mut FastInsecureHasher>,
) -> Option<(ModuleSpecifier, MediaType)> {
match module {
Module::Js(module) => match module.media_type {
MediaType::TypeScript
| MediaType::Tsx
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts => {
Some((module.specifier.clone(), module.media_type))
}
MediaType::JavaScript
| MediaType::Mjs
| MediaType::Cjs
| MediaType::Jsx => {
if check_js || has_ts_check(module.media_type, &module.source) {
Module::Js(module) => {
let result = match module.media_type {
MediaType::TypeScript
| MediaType::Tsx
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts => {
Some((module.specifier.clone(), module.media_type))
} else {
None
}
MediaType::JavaScript
| MediaType::Mjs
| MediaType::Cjs
| MediaType::Jsx => {
if check_js || has_ts_check(module.media_type, &module.source) {
Some((module.specifier.clone(), module.media_type))
} else {
None
}
}
MediaType::Json
| MediaType::Wasm
| MediaType::Css
| MediaType::SourceMap
| MediaType::Unknown => None,
};
if result.is_some() {
if let Some(hasher) = hasher {
hasher.write_str(module.specifier.as_str());
hasher.write_str(
// the fast check module will only be set when publishing
module
.fast_check_module()
.map(|s| s.source.as_ref())
.unwrap_or(&module.source),
);
}
}
MediaType::Json
| MediaType::Wasm
| MediaType::Css
| MediaType::SourceMap
| MediaType::Unknown => None,
},
Module::Wasm(module) => Some((module.specifier.clone(), MediaType::Dmts)),
Module::External(_)
| Module::Node(_)
| Module::Npm(_)
| Module::Json(_) => None,
result
}
Module::Node(_) => {
// the @types/node package will be in the resolved
// snapshot so don't bother including it in the hash
None
}
Module::Npm(_) => {
// don't bother adding this specifier to the hash
// because what matters is the resolved npm snapshot,
// which is hashed below
None
}
Module::Json(module) => {
if let Some(hasher) = hasher {
hasher.write_str(module.specifier.as_str());
hasher.write_str(&module.source);
}
None
}
Module::Wasm(module) => {
if let Some(hasher) = hasher {
hasher.write_str(module.specifier.as_str());
hasher.write_str(&module.source_dts);
}
Some((module.specifier.clone(), MediaType::Dmts))
}
Module::External(module) => {
if let Some(hasher) = hasher {
hasher.write_str(module.specifier.as_str());
}
None
}
}
}
let mut result = Vec::with_capacity(graph.specifiers_count());
let mut result = TscRoots {
roots: Vec::with_capacity(graph.specifiers_count()),
missing_diagnostics: Default::default(),
maybe_check_hash: None,
};
let mut maybe_hasher = npm_cache_state_hash.map(|npm_cache_state_hash| {
let mut hasher = FastInsecureHasher::new_deno_versioned();
hasher.write_hashable(npm_cache_state_hash);
hasher.write_u8(match type_check_mode {
TypeCheckMode::All => 0,
TypeCheckMode::Local => 1,
TypeCheckMode::None => 2,
});
hasher.write_hashable(graph.has_node_specifier);
hasher.write(&ts_config.as_bytes());
hasher
});
if graph.has_node_specifier {
// inject a specifier that will resolve node types
result.push((
result.roots.push((
ModuleSpecifier::parse("asset:///node_types.d.ts").unwrap(),
MediaType::Dts,
));
@ -515,65 +491,134 @@ fn get_tsc_roots(
let mut pending = VecDeque::new();
// put in the global types first so that they're resolved before anything else
for import in graph.imports.values() {
for dep in import.dependencies.values() {
let specifier = dep.get_type().or_else(|| dep.get_code());
if let Some(specifier) = &specifier {
let specifier = graph.resolve(specifier);
if seen.insert(specifier.clone()) {
pending.push_back(specifier);
}
}
let get_import_specifiers = || {
graph
.imports
.values()
.flat_map(|i| i.dependencies.values())
.filter_map(|dep| dep.get_type().or_else(|| dep.get_code()))
};
for specifier in get_import_specifiers() {
let specifier = graph.resolve(specifier);
if seen.insert(specifier) {
pending.push_back((specifier, false));
}
}
// then the roots
for root in &graph.roots {
let specifier = graph.resolve(root);
if seen.insert(specifier.clone()) {
pending.push_back(specifier);
if seen.insert(specifier) {
pending.push_back((specifier, false));
}
}
// now walk the graph that only includes the fast check dependencies
while let Some(specifier) = pending.pop_front() {
let Some(module) = graph.get(specifier) else {
continue;
while let Some((specifier, is_dynamic)) = pending.pop_front() {
let module = match graph.try_get(specifier) {
Ok(Some(module)) => module,
Ok(None) => continue,
Err(ModuleError::Missing(specifier, maybe_range)) => {
if !is_dynamic {
result
.missing_diagnostics
.push(tsc::Diagnostic::from_missing_error(
specifier,
maybe_range.as_ref(),
maybe_additional_sloppy_imports_message(sys, specifier),
));
}
continue;
}
Err(ModuleError::LoadingErr(
specifier,
maybe_range,
ModuleLoadError::Loader(_),
)) => {
// these will be errors like attempting to load a directory
if !is_dynamic {
result
.missing_diagnostics
.push(tsc::Diagnostic::from_missing_error(
specifier,
maybe_range.as_ref(),
maybe_additional_sloppy_imports_message(sys, specifier),
));
}
continue;
}
Err(_) => continue,
};
if let Some(entry) = maybe_get_check_entry(module, check_js) {
result.push(entry);
if is_dynamic && !seen.insert(specifier) {
continue;
}
if let Some(module) = module.js() {
let deps = module.dependencies_prefer_fast_check();
if let Some(entry) =
maybe_get_check_entry(module, check_js, maybe_hasher.as_mut())
{
result.roots.push(entry);
}
let mut maybe_module_dependencies = None;
let mut maybe_types_dependency = None;
if let Module::Js(module) = module {
maybe_module_dependencies = Some(module.dependencies_prefer_fast_check());
maybe_types_dependency = module
.maybe_types_dependency
.as_ref()
.and_then(|d| d.dependency.ok());
} else if let Module::Wasm(module) = module {
maybe_module_dependencies = Some(&module.dependencies);
}
fn handle_specifier<'a>(
graph: &'a ModuleGraph,
seen: &mut HashSet<&'a ModuleSpecifier>,
pending: &mut VecDeque<(&'a ModuleSpecifier, bool)>,
specifier: &'a ModuleSpecifier,
is_dynamic: bool,
) {
let specifier = graph.resolve(specifier);
if is_dynamic {
if !seen.contains(specifier) {
pending.push_back((specifier, true));
}
} else if seen.insert(specifier) {
pending.push_back((specifier, false));
}
}
if let Some(deps) = maybe_module_dependencies {
for dep in deps.values() {
// walk both the code and type dependencies
if let Some(specifier) = dep.get_code() {
let specifier = graph.resolve(specifier);
if seen.insert(specifier.clone()) {
pending.push_back(specifier);
}
handle_specifier(
graph,
&mut seen,
&mut pending,
specifier,
dep.is_dynamic,
);
}
if let Some(specifier) = dep.get_type() {
let specifier = graph.resolve(specifier);
if seen.insert(specifier.clone()) {
pending.push_back(specifier);
}
}
}
if let Some(dep) = module
.maybe_types_dependency
.as_ref()
.and_then(|d| d.dependency.ok())
{
let specifier = graph.resolve(&dep.specifier);
if seen.insert(specifier.clone()) {
pending.push_back(specifier);
handle_specifier(
graph,
&mut seen,
&mut pending,
specifier,
dep.is_dynamic,
);
}
}
}
if let Some(dep) = maybe_types_dependency {
handle_specifier(graph, &mut seen, &mut pending, &dep.specifier, false);
}
}
result.maybe_check_hash =
maybe_hasher.map(|hasher| CacheDBHash::new(hasher.finish()));
result
}
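
The rewritten `get_tsc_roots` above is at heart a breadth-first walk of the module graph that collects checkable roots, turns unresolved modules into "cannot find module" diagnostics unless they were only reached through a dynamic import, and optionally feeds everything to a hasher. A stripped-down sketch of just the walk, with toy types in place of deno_graph and without the hashing, fast-check, or type-dependency handling the real function does:

use std::collections::{HashMap, HashSet, VecDeque};

// Toy stand-ins for deno_graph; a None entry models a missing module.
struct Module {
    deps: Vec<(String, bool)>, // (specifier, is_dynamic)
}

struct Graph {
    roots: Vec<String>,
    modules: HashMap<String, Option<Module>>,
}

// Returns (roots to type check, missing-module messages).
fn walk(graph: &Graph) -> (Vec<String>, Vec<String>) {
    let mut roots = Vec::new();
    let mut missing = Vec::new();
    let mut seen: HashSet<&str> = HashSet::new();
    let mut pending: VecDeque<(&str, bool)> = VecDeque::new();

    for root in &graph.roots {
        if seen.insert(root.as_str()) {
            pending.push_back((root.as_str(), false));
        }
    }

    while let Some((specifier, is_dynamic)) = pending.pop_front() {
        match graph.modules.get(specifier) {
            None => continue, // not part of the graph at all
            Some(None) => {
                // Missing module: only report it when reached statically.
                if !is_dynamic {
                    missing.push(format!("Cannot find module '{specifier}'."));
                }
            }
            Some(Some(module)) => {
                roots.push(specifier.to_string());
                for (dep, dep_is_dynamic) in &module.deps {
                    if seen.insert(dep.as_str()) {
                        pending.push_back((dep.as_str(), *dep_is_dynamic));
                    }
                }
            }
        }
    }
    (roots, missing)
}
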

View file

@ -409,9 +409,20 @@ delete Object.prototype.__proto__;
messageText = formatMessage(msgText, ri.code);
}
if (start !== undefined && length !== undefined && file) {
const startPos = file.getLineAndCharacterOfPosition(start);
const sourceLine = file.getFullText().split("\n")[startPos.line];
const fileName = file.fileName;
let startPos = file.getLineAndCharacterOfPosition(start);
let sourceLine = file.getFullText().split("\n")[startPos.line];
const originalFileName = file.fileName;
const fileName = ops.op_remap_specifier
? (ops.op_remap_specifier(file.fileName) ?? file.fileName)
: file.fileName;
// Bit of a hack to detect when we have a .wasm file and want to hide
// the .d.ts text. This is not perfect, but will work in most scenarios
if (
fileName.endsWith(".wasm") && originalFileName.endsWith(".wasm.d.mts")
) {
startPos = { line: 0, character: 0 };
sourceLine = undefined;
}
return {
start: startPos,
end: file.getLineAndCharacterOfPosition(start + length),
@ -475,6 +486,9 @@ delete Object.prototype.__proto__;
2792,
// TS2307: Cannot find module '{0}' or its corresponding type declarations.
2307,
// Relative import errors to add an extension
2834,
2835,
// TS5009: Cannot find the common subdirectory path for the input files.
5009,
// TS5055: Cannot write file
@ -1037,24 +1051,27 @@ delete Object.prototype.__proto__;
configFileParsingDiagnostics,
});
const checkFiles = localOnly
? rootNames
.filter((n) => !n.startsWith("http"))
.map((checkName) => {
const sourceFile = program.getSourceFile(checkName);
if (sourceFile == null) {
throw new Error("Could not find source file for: " + checkName);
}
return sourceFile;
})
: undefined;
let checkFiles = undefined;
if (localOnly) {
const checkFileNames = new Set();
checkFiles = [];
for (const checkName of rootNames) {
if (checkName.startsWith("http")) {
continue;
}
const sourceFile = program.getSourceFile(checkName);
if (sourceFile != null) {
checkFiles.push(sourceFile);
}
checkFileNames.add(checkName);
}
if (checkFiles != null) {
// When calling program.getSemanticDiagnostics(...) with a source file, we
// need to call this code first in order to get it to invalidate cached
// diagnostics correctly. This is what program.getSemanticDiagnostics()
// does internally when calling without any arguments.
const checkFileNames = new Set(checkFiles.map((f) => f.fileName));
while (
program.getSemanticDiagnosticsOfNextAffectedFile(
undefined,

View file

@ -90,9 +90,9 @@ impl DiagnosticMessageChain {
s.push_str(&" ".repeat(level * 2));
s.push_str(&self.message_text);
if let Some(next) = &self.next {
s.push('\n');
let arr = next.clone();
for dm in arr {
s.push('\n');
s.push_str(&dm.format_message(level + 1));
}
}
@ -110,6 +110,15 @@ pub struct Position {
pub character: u64,
}
impl Position {
pub fn from_deno_graph(deno_graph_position: deno_graph::Position) -> Self {
Self {
line: deno_graph_position.line as u64,
character: deno_graph_position.character as u64,
}
}
}
#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Diagnostic {
@ -142,6 +151,38 @@ pub struct Diagnostic {
}
impl Diagnostic {
pub fn from_missing_error(
specifier: &ModuleSpecifier,
maybe_range: Option<&deno_graph::Range>,
additional_message: Option<String>,
) -> Self {
Self {
category: DiagnosticCategory::Error,
code: 2307,
start: maybe_range.map(|r| Position::from_deno_graph(r.range.start)),
end: maybe_range.map(|r| Position::from_deno_graph(r.range.end)),
original_source_start: None, // will be applied later
message_text: Some(format!(
"Cannot find module '{}'.{}{}",
specifier,
if additional_message.is_none() {
""
} else {
" "
},
additional_message.unwrap_or_default()
)),
message_chain: None,
source: None,
source_line: None,
file_name: maybe_range.map(|r| r.specifier.to_string()),
related_information: None,
reports_deprecated: None,
reports_unnecessary: None,
other: Default::default(),
}
}
/// If this diagnostic should be included when it comes from a remote module.
pub fn include_when_remote(&self) -> bool {
/// TS6133: value is declared but its value is never read (noUnusedParameters and noUnusedLocals)
@ -299,6 +340,14 @@ impl Diagnostics {
});
}
pub fn push(&mut self, diagnostic: Diagnostic) {
self.0.push(diagnostic);
}
pub fn extend(&mut self, diagnostic: Diagnostics) {
self.0.extend(diagnostic.0);
}
/// Return a set of diagnostics where only the values where the predicate
/// returns `true` are included.
pub fn filter<P>(self, predicate: P) -> Self
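
The message assembly in `from_missing_error` is easy to get subtly wrong, since the extra space must appear only when there is an additional hint. A small standalone reproduction of just that formatting, with plain strings standing in for the real `ModuleSpecifier` and range types:

fn missing_module_message(specifier: &str, additional_message: Option<&str>) -> String {
    format!(
        "Cannot find module '{}'.{}{}",
        specifier,
        if additional_message.is_none() { "" } else { " " },
        additional_message.unwrap_or_default()
    )
}

fn main() {
    assert_eq!(
        missing_module_message("file:///app/b", None),
        "Cannot find module 'file:///app/b'."
    );
    assert_eq!(
        missing_module_message(
            "file:///app/b",
            Some("Maybe add a '.js' extension or run with --unstable-sloppy-imports")
        ),
        "Cannot find module 'file:///app/b'. Maybe add a '.js' extension or run with --unstable-sloppy-imports"
    );
}
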

View file

@ -293,7 +293,8 @@ declare namespace Deno {
* executions. Each element in the array represents the number of milliseconds
* to wait before retrying the execution. For example, `[1000, 5000, 10000]`
* means that a failed execution will be retried at most 3 times, with 1
* second, 5 seconds, and 10 seconds delay between each retry.
* second, 5 seconds, and 10 seconds delay between each retry. There is a
* limit of 5 retries and a maximum interval of 1 hour (3600000 milliseconds).
*
* @category Cloud
* @experimental

View file

@ -129,6 +129,7 @@ fn get_asset_texts_from_new_runtime() -> Result<Vec<AssetText>, AnyError> {
op_emit,
op_is_node_file,
op_load,
op_remap_specifier,
op_resolve,
op_respond,
]
@ -275,30 +276,6 @@ fn hash_url(specifier: &ModuleSpecifier, media_type: MediaType) -> String {
)
}
/// If the provided URL's derivable tsc media type doesn't match the media type,
/// we will add an extension to the output. This is to avoid issues with
/// specifiers that don't have extensions, that tsc refuses to emit because they
/// think a `.js` version exists, when it doesn't.
fn maybe_remap_specifier(
specifier: &ModuleSpecifier,
media_type: MediaType,
) -> Option<String> {
let path = if specifier.scheme() == "file" {
if let Ok(path) = specifier.to_file_path() {
path
} else {
PathBuf::from(specifier.path())
}
} else {
PathBuf::from(specifier.path())
};
if path.extension().is_none() {
Some(format!("{}{}", specifier, media_type.as_ts_extension()))
} else {
None
}
}
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct EmittedFile {
pub data: String,
@ -316,7 +293,7 @@ pub fn into_specifier_and_media_type(
(specifier, media_type)
}
None => (
Url::parse("internal:///missing_dependency.d.ts").unwrap(),
Url::parse(MISSING_DEPENDENCY_SPECIFIER).unwrap(),
MediaType::Dts,
),
}
@ -422,6 +399,8 @@ struct State {
maybe_tsbuildinfo: Option<String>,
maybe_response: Option<RespondArgs>,
maybe_npm: Option<RequestNpmState>,
// todo(dsherret): it looks like the remapped_specifiers and
// root_map could be combined... what is the point of the separation?
remapped_specifiers: HashMap<String, ModuleSpecifier>,
root_map: HashMap<String, ModuleSpecifier>,
current_dir: PathBuf,
@ -463,6 +442,16 @@ impl State {
current_dir,
}
}
pub fn maybe_remapped_specifier(
&self,
specifier: &str,
) -> Option<&ModuleSpecifier> {
self
.remapped_specifiers
.get(specifier)
.or_else(|| self.root_map.get(specifier))
}
}
fn normalize_specifier(
@ -607,10 +596,7 @@ fn op_load_inner(
maybe_source.map(Cow::Borrowed)
} else {
let specifier = if let Some(remapped_specifier) =
state.remapped_specifiers.get(load_specifier)
{
remapped_specifier
} else if let Some(remapped_specifier) = state.root_map.get(load_specifier)
state.maybe_remapped_specifier(load_specifier)
{
remapped_specifier
} else {
@ -713,6 +699,18 @@ pub struct ResolveArgs {
pub specifiers: Vec<(bool, String)>,
}
#[op2]
#[string]
fn op_remap_specifier(
state: &mut OpState,
#[string] specifier: &str,
) -> Option<String> {
let state = state.borrow::<State>();
state
.maybe_remapped_specifier(specifier)
.map(|url| url.to_string())
}
#[op2]
#[serde]
fn op_resolve(
@ -732,11 +730,9 @@ fn op_resolve_inner(
let mut resolved: Vec<(String, &'static str)> =
Vec::with_capacity(args.specifiers.len());
let referrer = if let Some(remapped_specifier) =
state.remapped_specifiers.get(&args.base)
state.maybe_remapped_specifier(&args.base)
{
remapped_specifier.clone()
} else if let Some(remapped_base) = state.root_map.get(&args.base) {
remapped_base.clone()
} else {
normalize_specifier(&args.base, &state.current_dir).context(
"Error converting a string module specifier for \"op_resolve\".",
@ -759,8 +755,12 @@ fn op_resolve_inner(
}
let resolved_dep = referrer_module
.and_then(|m| m.js())
.and_then(|m| m.dependencies_prefer_fast_check().get(&specifier))
.and_then(|m| match m {
Module::Js(m) => m.dependencies_prefer_fast_check().get(&specifier),
Module::Json(_) => None,
Module::Wasm(m) => m.dependencies.get(&specifier),
Module::Npm(_) | Module::Node(_) | Module::External(_) => None,
})
.and_then(|d| d.maybe_type.ok().or_else(|| d.maybe_code.ok()));
let resolution_mode = if is_cjs {
ResolutionMode::Require
@ -816,7 +816,7 @@ fn op_resolve_inner(
}
_ => {
if let Some(specifier_str) =
maybe_remap_specifier(&specifier, media_type)
mapped_specifier_for_tsc(&specifier, media_type)
{
state
.remapped_specifiers
@ -840,7 +840,7 @@ fn op_resolve_inner(
MediaType::Dts.as_ts_extension(),
),
};
log::debug!("Resolved {} to {:?}", specifier, result);
log::debug!("Resolved {} from {} to {:?}", specifier, referrer, result);
resolved.push(result);
}
@ -1072,6 +1072,7 @@ pub fn exec(request: Request) -> Result<Response, AnyError> {
op_emit,
op_is_node_file,
op_load,
op_remap_specifier,
op_resolve,
op_respond,
],
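
`maybe_remapped_specifier` collapses what used to be two separate if/else lookups into one Option chain over the two maps, remapped specifiers first, root map second. A self-contained illustration of that lookup order, with `String` keys and values standing in for the real `ModuleSpecifier`:

use std::collections::HashMap;

struct State {
    remapped_specifiers: HashMap<String, String>,
    root_map: HashMap<String, String>,
}

impl State {
    // Check the remap table first, then fall back to the root map,
    // mirroring State::maybe_remapped_specifier in the diff above.
    fn maybe_remapped_specifier(&self, specifier: &str) -> Option<&String> {
        self.remapped_specifiers
            .get(specifier)
            .or_else(|| self.root_map.get(specifier))
    }
}

fn main() {
    let mut state = State {
        remapped_specifiers: HashMap::new(),
        root_map: HashMap::new(),
    };
    state
        .root_map
        .insert("file:///a".to_string(), "file:///a.ts".to_string());
    assert_eq!(
        state.maybe_remapped_specifier("file:///a").map(String::as_str),
        Some("file:///a.ts")
    );
    assert_eq!(state.maybe_remapped_specifier("file:///missing"), None);
}
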

View file

@ -17,6 +17,9 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::unsync::spawn_blocking;
use deno_core::ModuleSpecifier;
use sys_traits::FsCreateDirAll;
use sys_traits::FsDirEntry;
use sys_traits::FsSymlinkDir;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
@ -148,87 +151,74 @@ pub async fn remove_dir_all_if_exists(path: &Path) -> std::io::Result<()> {
}
}
mod clone_dir_imp {
#[cfg(target_vendor = "apple")]
mod apple {
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use deno_core::error::AnyError;
use super::super::copy_dir_recursive;
fn clonefile(from: &Path, to: &Path) -> std::io::Result<()> {
let from = std::ffi::CString::new(from.as_os_str().as_bytes())?;
let to = std::ffi::CString::new(to.as_os_str().as_bytes())?;
// SAFETY: `from` and `to` are valid C strings.
let ret = unsafe { libc::clonefile(from.as_ptr(), to.as_ptr(), 0) };
if ret != 0 {
return Err(std::io::Error::last_os_error());
}
Ok(())
}
pub fn clone_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
if let Some(parent) = to.parent() {
std::fs::create_dir_all(parent)?;
}
// Try to clone the whole directory
if let Err(err) = clonefile(from, to) {
if err.kind() != std::io::ErrorKind::AlreadyExists {
log::warn!(
"Failed to clone dir {:?} to {:?} via clonefile: {}",
from,
to,
err
);
}
// clonefile won't overwrite existing files, so if the dir exists
// we need to handle it recursively.
copy_dir_recursive(from, to)?;
}
Ok(())
}
}
#[cfg(target_vendor = "apple")]
pub(super) use apple::clone_dir_recursive;
#[cfg(not(target_vendor = "apple"))]
pub(super) fn clone_dir_recursive(
from: &std::path::Path,
to: &std::path::Path,
) -> Result<(), deno_core::error::AnyError> {
use crate::sys::CliSys;
if let Err(e) =
deno_npm_cache::hard_link_dir_recursive(&CliSys::default(), from, to)
{
log::debug!("Failed to hard link dir {:?} to {:?}: {}", from, to, e);
super::copy_dir_recursive(from, to)?;
}
Ok(())
}
}
/// Clones a directory to another directory. The exact method
/// is not guaranteed - it may be a hardlink, copy, or other platform-specific
/// operation.
///
/// Note: Does not handle symlinks.
pub fn clone_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
clone_dir_imp::clone_dir_recursive(from, to)
pub fn clone_dir_recursive<
TSys: sys_traits::FsCopy
+ sys_traits::FsCloneFile
+ sys_traits::FsCloneFile
+ sys_traits::FsCreateDir
+ sys_traits::FsHardLink
+ sys_traits::FsReadDir
+ sys_traits::FsRemoveFile
+ sys_traits::ThreadSleep,
>(
sys: &TSys,
from: &Path,
to: &Path,
) -> Result<(), AnyError> {
if cfg!(target_vendor = "apple") {
if let Some(parent) = to.parent() {
sys.fs_create_dir_all(parent)?;
}
// Try to clone the whole directory
if let Err(err) = sys.fs_clone_file(from, to) {
if !matches!(
err.kind(),
std::io::ErrorKind::AlreadyExists | std::io::ErrorKind::Unsupported
) {
log::warn!(
"Failed to clone dir {:?} to {:?} via clonefile: {}",
from,
to,
err
);
}
// clonefile won't overwrite existing files, so if the dir exists
// we need to handle it recursively.
copy_dir_recursive(sys, from, to)?;
}
} else if let Err(e) = deno_npm_cache::hard_link_dir_recursive(sys, from, to)
{
log::debug!("Failed to hard link dir {:?} to {:?}: {}", from, to, e);
copy_dir_recursive(sys, from, to)?;
}
Ok(())
}
/// Copies a directory to another directory.
///
/// Note: Does not handle symlinks.
pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
std::fs::create_dir_all(to)
pub fn copy_dir_recursive<
TSys: sys_traits::FsCopy
+ sys_traits::FsCloneFile
+ sys_traits::FsCreateDir
+ sys_traits::FsHardLink
+ sys_traits::FsReadDir,
>(
sys: &TSys,
from: &Path,
to: &Path,
) -> Result<(), AnyError> {
sys
.fs_create_dir_all(to)
.with_context(|| format!("Creating {}", to.display()))?;
let read_dir = std::fs::read_dir(from)
let read_dir = sys
.fs_read_dir(from)
.with_context(|| format!("Reading {}", from.display()))?;
for entry in read_dir {
@ -238,11 +228,11 @@ pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
let new_to = to.join(entry.file_name());
if file_type.is_dir() {
copy_dir_recursive(&new_from, &new_to).with_context(|| {
copy_dir_recursive(sys, &new_from, &new_to).with_context(|| {
format!("Dir {} to {}", new_from.display(), new_to.display())
})?;
} else if file_type.is_file() {
std::fs::copy(&new_from, &new_to).with_context(|| {
sys.fs_copy(&new_from, &new_to).with_context(|| {
format!("Copying {} to {}", new_from.display(), new_to.display())
})?;
}
@ -251,7 +241,11 @@ pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
Ok(())
}
pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> {
pub fn symlink_dir<TSys: sys_traits::BaseFsSymlinkDir>(
sys: &TSys,
oldpath: &Path,
newpath: &Path,
) -> Result<(), Error> {
let err_mapper = |err: Error, kind: Option<ErrorKind>| {
Error::new(
kind.unwrap_or_else(|| err.kind()),
@ -263,26 +257,18 @@ pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> {
),
)
};
#[cfg(unix)]
{
use std::os::unix::fs::symlink;
symlink(oldpath, newpath).map_err(|e| err_mapper(e, None))?;
}
#[cfg(not(unix))]
{
use std::os::windows::fs::symlink_dir;
symlink_dir(oldpath, newpath).map_err(|err| {
if let Some(code) = err.raw_os_error() {
if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD
|| code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION
{
return err_mapper(err, Some(ErrorKind::PermissionDenied));
}
sys.fs_symlink_dir(oldpath, newpath).map_err(|err| {
#[cfg(windows)]
if let Some(code) = err.raw_os_error() {
if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD
|| code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION
{
return err_mapper(err, Some(ErrorKind::PermissionDenied));
}
err_mapper(err, None)
})?;
}
Ok(())
}
err_mapper(err, None)
})
}
/// Gets the total size (in bytes) of a directory.
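
The fs helpers above are now generic over capability traits instead of calling std::fs directly, which is what lets the same code run against the real filesystem or the embedded one used by deno compile. A toy version of that pattern, with hypothetical FsCreateDirAll / FsCopy traits rather than the real sys_traits ones:

use std::io;
use std::path::Path;

// Hypothetical capability traits; the real ones live in the sys_traits crate.
trait FsCreateDirAll {
    fn fs_create_dir_all(&self, path: &Path) -> io::Result<()>;
}
trait FsCopy {
    fn fs_copy(&self, from: &Path, to: &Path) -> io::Result<u64>;
}

struct RealSys;

impl FsCreateDirAll for RealSys {
    fn fs_create_dir_all(&self, path: &Path) -> io::Result<()> {
        std::fs::create_dir_all(path)
    }
}
impl FsCopy for RealSys {
    fn fs_copy(&self, from: &Path, to: &Path) -> io::Result<u64> {
        std::fs::copy(from, to)
    }
}

// Callers ask only for the capabilities they actually use.
fn copy_file_into_dir<TSys: FsCreateDirAll + FsCopy>(
    sys: &TSys,
    from: &Path,
    to_dir: &Path,
) -> io::Result<u64> {
    sys.fs_create_dir_all(to_dir)?;
    let file_name = from.file_name().ok_or_else(|| {
        io::Error::new(io::ErrorKind::InvalidInput, "expected a file path")
    })?;
    sys.fs_copy(from, &to_dir.join(file_name))
}
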

View file

@ -370,7 +370,25 @@ class InnerRequest {
return null;
}
this.#streamRid = op_http_read_request_body(this.#external);
this.#body = new InnerBody(readableStreamForRid(this.#streamRid, false));
this.#body = new InnerBody(
readableStreamForRid(
this.#streamRid,
false,
undefined,
(controller, error) => {
if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) {
// TODO(kt3k): We would like to pass `error` as `cause` when BadResource supports it.
controller.error(
new error.constructor(
`Cannot read request body as underlying resource unavailable`,
),
);
} else {
controller.error(error);
}
},
),
);
return this.#body;
}

View file

@ -77,7 +77,9 @@ const maxQueueBackoffInterval = 60 * 60 * 1000;
function validateBackoffSchedule(backoffSchedule: number[]) {
if (backoffSchedule.length > maxQueueBackoffIntervals) {
throw new TypeError("Invalid backoffSchedule");
throw new TypeError(
`Invalid backoffSchedule, max ${maxQueueBackoffIntervals} intervals allowed`,
);
}
for (let i = 0; i < backoffSchedule.length; ++i) {
const interval = backoffSchedule[i];
@ -85,7 +87,9 @@ function validateBackoffSchedule(backoffSchedule: number[]) {
interval < 0 || interval > maxQueueBackoffInterval ||
NumberIsNaN(interval)
) {
throw new TypeError("Invalid backoffSchedule");
throw new TypeError(
`Invalid backoffSchedule, interval at index ${i} is invalid`,
);
}
}
}

View file

@ -49,7 +49,6 @@ errno = "0.2.8"
faster-hex.workspace = true
h2.workspace = true
hkdf.workspace = true
home = "0.5.9"
http.workspace = true
http-body-util.workspace = true
hyper.workspace = true
@ -93,7 +92,7 @@ simd-json = "0.14.0"
sm3 = "0.4.2"
spki.workspace = true
stable_deref_trait = "1.2.0"
sys_traits = { workspace = true, features = ["real"] }
sys_traits = { workspace = true, features = ["real", "winapi", "libc"] }
thiserror.workspace = true
tokio.workspace = true
tokio-eld = "0.2"

View file

@ -4,6 +4,7 @@ use std::mem::MaybeUninit;
use deno_core::op2;
use deno_core::OpState;
use sys_traits::EnvHomeDir;
use crate::NodePermissions;
@ -282,5 +283,9 @@ where
permissions.check_sys("homedir", "node:os.homedir()")?;
}
Ok(home::home_dir().map(|path| path.to_string_lossy().to_string()))
Ok(
sys_traits::impls::RealSys
.env_home_dir()
.map(|path| path.to_string_lossy().to_string()),
)
}
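
The change above drops the home crate in favor of sys_traits' RealSys. Assuming the crate is available with its "real" feature enabled (as the Cargo.toml change earlier in this commit suggests), standalone usage would look roughly like this:

use sys_traits::EnvHomeDir;

fn home_dir_string() -> Option<String> {
    // RealSys asks the OS; the lossy conversion matches the op above.
    sys_traits::impls::RealSys
        .env_home_dir()
        .map(|path| path.to_string_lossy().to_string())
}
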

View file

@ -908,7 +908,7 @@ const _original = Symbol("[[original]]");
* @param {boolean=} autoClose If the resource should be auto-closed when the stream closes. Defaults to true.
* @returns {ReadableStream<Uint8Array>}
*/
function readableStreamForRid(rid, autoClose = true, Super) {
function readableStreamForRid(rid, autoClose = true, Super, onError) {
const stream = new (Super ?? ReadableStream)(_brand);
stream[_resourceBacking] = { rid, autoClose };
@ -947,7 +947,11 @@ function readableStreamForRid(rid, autoClose = true, Super) {
controller.byobRequest.respond(bytesRead);
}
} catch (e) {
controller.error(e);
if (onError) {
onError(controller, e);
} else {
controller.error(e);
}
tryClose();
}
},

View file

@ -1510,11 +1510,10 @@ impl AllowRunDescriptor {
match which::which_in(text, std::env::var_os("PATH"), cwd) {
Ok(path) => path,
Err(err) => match err {
which::Error::BadAbsolutePath | which::Error::BadRelativePath => {
which::Error::CannotGetCurrentDirAndPathListEmpty => {
return Err(err);
}
which::Error::CannotFindBinaryPath
| which::Error::CannotGetCurrentDir
| which::Error::CannotCanonicalize => {
return Ok(AllowRunDescriptorParseResult::Unresolved(Box::new(err)))
}
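
The permissions change tracks the which crate's reshuffled error enum: only the "no cwd and no PATH to search" case stays a hard error, everything else becomes an unresolved descriptor to retry later. A rough sketch of that classification, assuming a which release that exposes the variants named in the diff above:

// Cargo.toml (assumed): a `which` release with the error variants shown above.
use std::path::{Path, PathBuf};

enum ResolveOutcome {
    Resolved(PathBuf),
    // Could not be resolved now; keep the original text and retry at run time.
    Unresolved(which::Error),
}

fn resolve_allow_run_entry(text: &str, cwd: &Path) -> Result<ResolveOutcome, which::Error> {
    match which::which_in(text, std::env::var_os("PATH"), cwd) {
        Ok(path) => Ok(ResolveOutcome::Resolved(path)),
        // No current dir and an empty search path: nothing could ever resolve.
        Err(err @ which::Error::CannotGetCurrentDirAndPathListEmpty) => Err(err),
        // Not found (or not canonicalizable) today; defer instead of failing.
        Err(err) => Ok(ResolveOutcome::Unresolved(err)),
    }
}
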

View file

@ -66,12 +66,11 @@ fn fast_check_cache() {
// ensure cache works
let output = check_debug_cmd.run();
assert_contains!(output.combined_output(), "Already type checked.");
let building_fast_check_msg = "Building fast check graph";
assert_not_contains!(output.combined_output(), building_fast_check_msg);
// now validated
type_check_cache_path.remove_file();
let output = check_debug_cmd.run();
let building_fast_check_msg = "Building fast check graph";
assert_contains!(output.combined_output(), building_fast_check_msg);
assert_contains!(
output.combined_output(),

View file

@ -11889,13 +11889,22 @@ fn lsp_format_html() {
fn lsp_format_css() {
let context = TestContextBuilder::new().use_temp_cwd().build();
let temp_dir = context.temp_dir();
let file = source_file(temp_dir.path().join("file.css"), " foo {}");
let css_file = source_file(temp_dir.path().join("file.css"), " foo {}\n");
let scss_file = source_file(temp_dir.path().join("file.scss"), " $font-stack: Helvetica, sans-serif;\n\nbody {\n font: 100% $font-stack;\n}\n");
let sass_file = source_file(
temp_dir.path().join("file.sass"),
" $font-stack: Helvetica, sans-serif\n\nbody\n font: 100% $font-stack\n",
);
let less_file = source_file(
temp_dir.path().join("file.less"),
" @width: 10px;\n\n#header {\n width: @width;\n}\n",
);
let mut client = context.new_lsp_command().build();
client.initialize_default();
let res = client.write_request(
"textDocument/formatting",
json!({
"textDocument": { "uri": file.url() },
"textDocument": { "uri": css_file.url() },
"options": {
"tabSize": 2,
"insertSpaces": true,
@ -11912,12 +11921,71 @@ fn lsp_format_css() {
},
"newText": "",
},
]),
);
let res = client.write_request(
"textDocument/formatting",
json!({
"textDocument": { "uri": scss_file.url() },
"options": {
"tabSize": 2,
"insertSpaces": true,
},
}),
);
assert_eq!(
res,
json!([
{
"range": {
"start": { "line": 0, "character": 8 },
"end": { "line": 0, "character": 8 },
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 2 },
},
"newText": "\n",
"newText": "",
},
]),
);
let res = client.write_request(
"textDocument/formatting",
json!({
"textDocument": { "uri": sass_file.url() },
"options": {
"tabSize": 2,
"insertSpaces": true,
},
}),
);
assert_eq!(
res,
json!([
{
"range": {
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 2 },
},
"newText": "",
},
]),
);
let res = client.write_request(
"textDocument/formatting",
json!({
"textDocument": { "uri": less_file.url() },
"options": {
"tabSize": 2,
"insertSpaces": true,
},
}),
);
assert_eq!(
res,
json!([
{
"range": {
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 2 },
},
"newText": "",
},
]),
);

View file

@ -10,6 +10,10 @@
"args": "check not_exists.ts",
"output": "not_exists.out",
"exitCode": 1
}, {
"args": "run --check not_exists.ts",
"output": "not_exists.out",
"exitCode": 1
}, {
"args": "check exists_and_try_uses.ts",
"output": "exists_and_try_uses.out",

View file

@ -1,2 +1,3 @@
error: Module not found "file:///[WILDLINE]/not_exists.css".
Check [WILDLINE]exists.ts
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/not_exists.css'.
at file:///[WILDLINE]/not_exists.ts:1:8

View file

@ -1,2 +1,3 @@
error: Module not found "file:///[WILDLINE]/test".
Check file:///[WILDLINE]/dts_importing_non_existent/index.js
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/test'.
at file:///[WILDLINE]/index.d.ts:1:22

View file

@ -0,0 +1,14 @@
{
"tests": {
"not_all": {
"args": "check --allow-import import_remote.ts",
"output": "[WILDCARD]",
"exitCode": 0
},
"all": {
"args": "check --all --allow-import import_remote.ts",
"output": "check_all.out",
"exitCode": 1
}
}
}

View file

@ -0,0 +1,5 @@
Download http://localhost:4545/check/import_non_existent.ts
Download http://localhost:4545/check/non-existent-module.ts
Check file:///[WILDLINE]/import_remote.ts
error: TS2307 [ERROR]: Cannot find module 'http://localhost:4545/check/non-existent-module.ts'.
at http://localhost:4545/check/import_non_existent.ts:1:22

View file

@ -0,0 +1,3 @@
import { Other } from "http://localhost:4545/check/import_non_existent.ts";
console.log(Other);

View file

@ -0,0 +1,6 @@
// Regression test for https://github.com/denoland/deno/issues/27411.
{
"args": "check --quiet message_chain_formatting.ts",
"output": "message_chain_formatting.out",
"exitCode": 1
}

View file

@ -0,0 +1,10 @@
error: TS2769 [ERROR]: No overload matches this call.
Overload 1 of 3, '(s: string, b: boolean): void', gave the following error.
Argument of type 'number' is not assignable to parameter of type 'boolean'.
Overload 2 of 3, '(ss: string[], b: boolean): void', gave the following error.
Argument of type 'string' is not assignable to parameter of type 'string[]'.
Overload 3 of 3, '(ss: string[], b: Date): void', gave the following error.
Argument of type 'string' is not assignable to parameter of type 'string[]'.
foo("hello", 42);
~~~
at [WILDLINE]/message_chain_formatting.ts:8:1

View file

@ -0,0 +1,8 @@
function foo(s: string, b: boolean): void;
function foo(ss: string[], b: boolean): void;
function foo(ss: string[], b: Date): void;
function foo(sOrSs: string | string[], b: boolean | Date): void {
console.log(sOrSs, b);
}
foo("hello", 42);

View file

@ -0,0 +1,24 @@
{
"tests": {
"check": {
"args": "check --allow-import main.ts",
"output": "main.out",
"exitCode": 1
},
"run": {
"args": "run --check --allow-import main.ts",
"output": "main.out",
"exitCode": 1
},
"missing_local_root": {
"args": "check --allow-import non_existent.ts",
"output": "missing_local_root.out",
"exitCode": 1
},
"missing_remote_root": {
"args": "check --allow-import http://localhost:4545/missing_non_existent.ts",
"output": "missing_remote_root.out",
"exitCode": 1
}
}
}

View file

@ -0,0 +1,9 @@
Download http://localhost:4545/remote.ts
Check file:///[WILDLINE]/module_not_found/main.ts
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/other.js'.
at file:///[WILDLINE]/main.ts:1:22
TS2307 [ERROR]: Cannot find module 'http://localhost:4545/remote.ts'.
at file:///[WILDLINE]/main.ts:2:24
Found 2 errors.

View file

@ -0,0 +1,5 @@
import { Test } from "./other.js";
import { Remote } from "http://localhost:4545/remote.ts";
console.log(new Test());
console.log(new Remote());

View file

@ -0,0 +1,2 @@
Check file:///[WILDLINE]/non_existent.ts
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/non_existent.ts'.

View file

@ -0,0 +1,3 @@
Download http://localhost:4545/missing_non_existent.ts
Check http://localhost:4545/missing_non_existent.ts
error: TS2307 [ERROR]: Cannot find module 'http://localhost:4545/missing_non_existent.ts'.

View file

@ -1,3 +1,4 @@
[# It should be resolving relative to the config in sub_dir instead of the cwd]
error: Module not found "file:///[WILDLINE]/sub_dir/a.d.ts".
Check file:///[WILDLINE]/main.ts
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/sub_dir/a.d.ts'.
at file:///[WILDLINE]/sub_dir/deno.json:1:1

View file

@ -1,9 +1,5 @@
{
"steps": [
{
"args": "info preact/debug",
"output": "with_import_map.out",
"exitCode": 0
}
]
"args": "info preact/debug",
"output": "with_import_map.out",
"exitCode": 0
}

View file

@ -0,0 +1,22 @@
{
"tempDir": true,
"tests": {
"no_flag": {
// ensure what we're testing will fail without the flags
"args": "run main.ts",
"output": "fail.out",
"exitCode": 1
},
"with_flags": {
"steps": [{
"args": "cache --allow-import main.ts",
"output": "[WILDLINE]",
"exitCode": 0
}, {
"args": "run --cached-only main.ts",
"output": "success.out",
"exitCode": 0
}]
}
}
}

View file

@ -0,0 +1,3 @@
{
"lock": true
}

View file

@ -0,0 +1,2 @@
error: Requires import access to "localhost:4545", run again with the --allow-import flag
at file:///[WILDLINE]/main.ts:1:8

View file

@ -0,0 +1 @@
import "http://localhost:4545/welcome.ts";

View file

@ -0,0 +1 @@
Welcome to Deno!

View file

@ -0,0 +1,22 @@
{
"envs": {
"JSR_URL": ""
},
"tests": {
"implicit": {
"args": "info http://localhost:4545/welcome.ts",
"output": "success.out",
"exitCode": 0
},
"via_import_not_allowed": {
"args": "info main.ts",
"output": "import_not_allowed.out",
"exitCode": 0
},
"via_import_allowed": {
"args": "info --allow-import main.ts",
"output": "import_allowed.out",
"exitCode": 0
}
}
}

View file

@ -0,0 +1,8 @@
Download http://localhost:4545/welcome.ts
local: [WILDLINE]
type: TypeScript
dependencies: [WILDLINE]
size: [WILDLINE]
file:///[WILDLINE]/main.ts ([WILDLINE])
└── http://localhost:4545/welcome.ts ([WILDLINE]B)

View file

@ -0,0 +1,7 @@
local: [WILDLINE]
type: TypeScript
dependencies: [WILDLINE]
size: [WILDLINE]
file:///[WILDLINE]/allow_import_info/main.ts ([WILDLINE])
└── http://localhost:4545/welcome.ts (not capable, requires --allow-import)

View file

@ -0,0 +1 @@
import "http://localhost:4545/welcome.ts";

View file

@ -0,0 +1,7 @@
Download http://localhost:4545/welcome.ts
local: [WILDLINE]
type: TypeScript
dependencies: [WILDLINE]
size: [WILDLINE]
http://localhost:4545/welcome.ts ([WILDLINE]B)

View file

@ -1,2 +1,3 @@
error: [WILDCARD] Maybe specify path to 'index.ts' file in directory instead or run with --unstable-sloppy-imports
at file:///[WILDCARD]/mod.ts:1:20
Check file:///[WILDLINE]/mod.ts
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/b'. Maybe specify path to 'index.ts' file in directory instead or run with --unstable-sloppy-imports
at file:///[WILDLINE]/mod.ts:1:20

View file

@ -1,2 +1,3 @@
error: Module not found "file:///[WILDCARD]/nonexistent/jsx-runtime".
at file:///[WILDCARD]/jsx_import_source_no_pragma.tsx:1:1
Check file:///[WILDLINE]/jsx_import_source_no_pragma.tsx
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDCARD]/nonexistent/jsx-runtime'.
at file:///[WILDLINE]/jsx_import_source_no_pragma.tsx:1:1

View file

@ -1,2 +1,3 @@
error: Module not found "file:///[WILDCARD]/nonexistent.d.ts".
at file:///[WILDCARD]/reference_types_error.js:1:22
Check file:///[WILDLINE]/reference_types_error.js
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/nonexistent.d.ts'.
at file:///[WILDLINE]/reference_types_error.js:1:22

View file

@ -1,2 +1,3 @@
error: Module not found "file:///[WILDCARD]/nonexistent.d.ts".
at file:///[WILDCARD]/reference_types_error.js:1:22
Check file:///[WILDLINE]/reference_types_error.js
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/nonexistent.d.ts'.
at file:///[WILDLINE]/reference_types_error.js:1:22

View file

@ -1,2 +1,26 @@
error: Module not found "file:///[WILDCARD]/a.js". Maybe change the extension to '.ts' or run with --unstable-sloppy-imports
Check file:///[WILDLINE]/main.ts
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/a.js'. Maybe change the extension to '.ts' or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:1:20
TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/b'. Maybe add a '.js' extension or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:2:20
TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/c'. Maybe add a '.mts' extension or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:3:20
TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/d'. Maybe add a '.mjs' extension or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:4:20
TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/e'. Maybe add a '.tsx' extension or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:5:20
TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/e.js'. Maybe change the extension to '.tsx' or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:6:21
TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/f'. Maybe add a '.jsx' extension or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:7:20
TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/dir'. Maybe specify path to 'index.tsx' file in directory instead or run with --unstable-sloppy-imports
at file:///[WILDLINE]/main.ts:8:20
Found 8 errors.

View file

@ -1,5 +1,14 @@
{
"args": "--allow-import main.js",
"output": "main.out",
"exitCode": 1
"tests": {
"run": {
"args": "--allow-import main.js",
"output": "main.out",
"exitCode": 1
},
"check": {
"args": "check --all --allow-import main.js",
"output": "check.out",
"exitCode": 1
}
}
}

View file

@ -0,0 +1,4 @@
Download http://localhost:4545/wasm/math_with_import.wasm
Check file:///[WILDLINE]/main.js
error: TS2307 [ERROR]: Cannot find module 'file:///[WILDLINE]/local_math.ts'.
at http://localhost:4545/wasm/math_with_import.wasm:1:87

View file

@ -1,3 +1,4 @@
// @ts-check
import {
add,
subtract,

View file

@ -1,3 +1,3 @@
Download http://localhost:4545/wasm/math_with_import.wasm
error: Module not found "file:///[WILDLINE]/local_math.ts".
at http://localhost:4545/wasm/math_with_import.wasm:1:8
at http://localhost:4545/wasm/math_with_import.wasm:1:87

View file

@ -1,5 +1,14 @@
{
"args": "--allow-import main.js",
"output": "main.out",
"exitCode": 1
"tests": {
"run": {
"args": "--allow-import main.js",
"output": "main.out",
"exitCode": 1
},
"check": {
"args": "check --all --allow-import main.js",
"output": "check.out",
"exitCode": 1
}
}
}

View file

@ -0,0 +1,9 @@
Download http://localhost:4545/wasm/math_with_import.wasm
Check file:///[WILDLINE]/main.js
error: TS2305 [ERROR]: Module '"file:///[WILDLINE]/local_math.ts"' has no exported member '"add"'.
at http://localhost:4545/wasm/math_with_import.wasm:1:1
TS2305 [ERROR]: Module '"file:///[WILDLINE]/local_math.ts"' has no exported member '"subtract"'.
at http://localhost:4545/wasm/math_with_import.wasm:1:1
Found 2 errors.

View file

@ -1,3 +1,4 @@
// @ts-check
import {
add,
subtract,

View file

@ -0,0 +1,5 @@
import { Test } from "./non-existent-module.ts";
console.log(Test);
export class Other {}

View file

@ -1951,14 +1951,14 @@ dbTest("Invalid backoffSchedule", async (db) => {
await db.enqueue("foo", { backoffSchedule: [1, 1, 1, 1, 1, 1] });
},
TypeError,
"Invalid backoffSchedule",
"Invalid backoffSchedule, max 5 intervals allowed",
);
await assertRejects(
async () => {
await db.enqueue("foo", { backoffSchedule: [3600001] });
},
TypeError,
"Invalid backoffSchedule",
"Invalid backoffSchedule, interval at index 0 is invalid",
);
});

View file

@ -2,7 +2,7 @@
// deno-lint-ignore-file no-console
import { assertMatch, assertRejects } from "@std/assert";
import { assertIsError, assertMatch, assertRejects } from "@std/assert";
import { Buffer, BufReader, BufWriter, type Reader } from "@std/io";
import { TextProtoReader } from "../testdata/run/textproto.ts";
import {
@ -387,7 +387,7 @@ Deno.test(async function httpServerCanResolveHostnames() {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (_req) => new Response("ok"),
hostname: "localhost",
port: servePort,
@ -410,7 +410,7 @@ Deno.test(async function httpServerRejectsOnAddrInUse() {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (_req) => new Response("ok"),
hostname: "localhost",
port: servePort,
@ -441,7 +441,7 @@ Deno.test({ permissions: { net: true } }, async function httpServerBasic() {
const deferred = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers<Deno.NetAddr>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request, { remoteAddr }) => {
// FIXME(bartlomieju):
// make sure that request can be inspected
@ -483,7 +483,7 @@ Deno.test(
const deferred = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers();
const listener = Deno.listen({ port: servePort });
const server = serveHttpOnListener(
await using server = serveHttpOnListener(
listener,
ac.signal,
async (
@ -532,7 +532,7 @@ Deno.test(
headers: { "connection": "close" },
});
const server = serveHttpOnConnection(
await using server = serveHttpOnConnection(
await acceptPromise,
ac.signal,
async (
@ -572,7 +572,7 @@ Deno.test({ permissions: { net: true } }, async function httpServerOnError() {
const { promise, resolve } = Promise.withResolvers<void>();
let requestStash: Request | null;
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request: Request) => {
requestStash = request;
await new Promise((r) => setTimeout(r, 100));
@ -607,7 +607,7 @@ Deno.test(
// deno-lint-ignore no-unused-vars
let requestStash: Request | null;
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request: Request) => {
requestStash = request;
await new Promise((r) => setTimeout(r, 100));
@ -640,7 +640,7 @@ Deno.test(
const { promise, resolve } = Promise.withResolvers<void>();
const response = new Response("Hello World");
let hadError = false;
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
return response;
},
@ -684,7 +684,7 @@ Deno.test(
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
let hadError = false;
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
return Response.error();
},
@ -717,7 +717,7 @@ Deno.test({ permissions: { net: true } }, async function httpServerOverload1() {
const deferred = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
port: servePort,
signal: ac.signal,
onListen: onListen(listeningDeferred.resolve),
@ -752,7 +752,7 @@ Deno.test({ permissions: { net: true } }, async function httpServerOverload2() {
const deferred = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
port: servePort,
signal: ac.signal,
onListen: onListen(listeningDeferred.resolve),
@ -807,7 +807,7 @@ Deno.test(
Deno.test({ permissions: { net: true } }, async function httpServerPort0() {
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler() {
return new Response("Hello World");
},
@ -841,7 +841,7 @@ Deno.test(
};
try {
const server = Deno.serve({
await using server = Deno.serve({
handler() {
return new Response("Hello World");
},
@ -866,7 +866,7 @@ Deno.test(
const ac = new AbortController();
let headers: Headers;
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
await request.text();
headers = request.headers;
@ -896,7 +896,7 @@ Deno.test(
);
Deno.test({ permissions: { net: true } }, async function validPortString() {
const server = Deno.serve({
await using server = Deno.serve({
handler: (_request) => new Response(),
port: "4501" as unknown as number,
});
@ -921,7 +921,7 @@ Deno.test({ permissions: { net: true } }, async function ipv6Hostname() {
};
try {
const server = Deno.serve({
await using server = Deno.serve({
handler: () => new Response(),
hostname: "::1",
port: 0,
@ -1017,7 +1017,7 @@ function createUrlTest(
const listeningDeferred = Promise.withResolvers<number>();
const urlDeferred = Promise.withResolvers<string>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request: Request) => {
urlDeferred.resolve(request.url);
return new Response("");
@ -1117,7 +1117,7 @@ Deno.test(
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
assertEquals(request.body, null);
deferred.resolve();
@ -1157,7 +1157,7 @@ Deno.test(
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
await assertRejects(async () => {
await request.text();
@ -1221,7 +1221,7 @@ function createStreamTest(count: number, delay: number, action: string) {
Deno.test(`httpServerStreamCount${count}Delay${delay}${action}`, async () => {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (_request) => {
return new Response(makeStream(count, delay));
},
@ -1275,7 +1275,7 @@ Deno.test(
writer.close();
const { promise, resolve } = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
const reqBody = await request.text();
assertEquals("hello world", reqBody);
@ -1303,7 +1303,7 @@ Deno.test(
Deno.test({ permissions: { net: true } }, async function httpServerClose() {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => new Response("ok"),
port: servePort,
signal: ac.signal,
@ -1323,7 +1323,7 @@ Deno.test({ permissions: { net: true } }, async function httpServerCloseGet() {
const listeningDeferred = Promise.withResolvers<void>();
const requestDeferred = Promise.withResolvers<void>();
const responseDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async () => {
requestDeferred.resolve();
await new Promise((r) => setTimeout(r, 500));
@ -1349,13 +1349,12 @@ Deno.test({ permissions: { net: true } }, async function httpServerCloseGet() {
await server.finished;
});
// FIXME:
Deno.test(
{ permissions: { net: true } },
async function httpServerEmptyBlobResponse() {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => new Response(new Blob([])),
port: servePort,
signal: ac.signal,
@ -1380,7 +1379,7 @@ Deno.test(
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const errorDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
const body = new ReadableStream({
start(controller) {
@ -1421,7 +1420,7 @@ Deno.test(
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => new Response("韓國".repeat(10)),
port: servePort,
signal: ac.signal,
@ -1456,7 +1455,7 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() {
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const doneDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
const {
response,
@ -1501,7 +1500,7 @@ Deno.test(
async function httpServerWebSocketRaw() {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
const { conn, response } = upgradeHttpRaw(request);
const buf = new Uint8Array(1024);
@ -1581,7 +1580,7 @@ Deno.test(
const ac = new AbortController();
const done = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
const {
response,
@ -1635,7 +1634,7 @@ Deno.test(
const ac = new AbortController();
const done = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
const {
response,
@ -1673,7 +1672,7 @@ Deno.test(
const ac = new AbortController();
const done = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
const {
response,
@ -1723,7 +1722,7 @@ Deno.test(
const ac = new AbortController();
let headers: Headers;
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
headers = request.headers;
deferred.resolve();
@ -1762,7 +1761,7 @@ Deno.test(
let headers: Headers;
let text: string;
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
headers = request.headers;
text = await request.text();
@ -1807,7 +1806,7 @@ Deno.test(
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
deferred.resolve();
return new Response("");
@ -1858,7 +1857,7 @@ Deno.test(
const { promise, resolve } = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve(
await using server = Deno.serve(
{ port: servePort, signal: ac.signal },
(request: Request) => {
assert(request.body);
@ -1889,7 +1888,7 @@ Deno.test(
const { promise, resolve } = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve(
await using server = Deno.serve(
{ port: servePort, signal: ac.signal },
(request: Request) => {
assert(request.body);
@ -2005,7 +2004,7 @@ Deno.test(
}).pipeThrough(new TextEncoderStream());
}
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
deferred.resolve();
return new Response(periodicStream());
@ -2037,7 +2036,7 @@ Deno.test(
{ permissions: { net: true } },
async function httpLargeReadableStreamChunk() {
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler() {
return new Response(
new ReadableStream({
@ -2077,7 +2076,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const deferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
assertEquals(request.headers.get("X-Header-Test"), "á");
deferred.resolve();
@ -2123,7 +2122,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
// FIXME:
// assertEquals(new URL(request.url).href, `http://127.0.0.1:${servePort}/`);
@ -2177,7 +2176,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
assertEquals(await request.text(), "");
assertEquals(request.headers.get("cookie"), "foo=bar; bar=foo");
@ -2221,7 +2220,7 @@ Deno.test(
const hostname = "localhost";
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
deferred.resolve();
return new Response("ok");
@ -2256,7 +2255,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
assertEquals(request.body, null);
deferred.resolve();
@ -2292,7 +2291,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
assertEquals(request.method, "GET");
assertEquals(request.headers.get("host"), "deno.land");
@ -2326,7 +2325,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
assertEquals(request.method, "GET");
assertEquals(request.headers.get("server"), "hello\tworld");
@ -2360,7 +2359,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
assertEquals(request.method, "GET");
assertEquals(await request.text(), "");
@ -2396,7 +2395,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
assertEquals(request.method, "POST");
assertEquals(await request.text(), "I'm a good request.");
@ -2443,7 +2442,7 @@ function createServerLengthTest(name: string, testCase: TestCase) {
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
assertEquals(request.method, "GET");
deferred.resolve();
@ -2575,7 +2574,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
assertEquals(request.method, "POST");
assertEquals(request.headers.get("content-length"), "5");
@ -2611,7 +2610,7 @@ Deno.test(
async function httpServerPostWithInvalidPrefixContentLength() {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
throw new Error("unreachable");
},
@ -2651,7 +2650,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
assertEquals(request.method, "POST");
assertEquals(await request.text(), "qwert");
@ -2688,7 +2687,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (r) => {
deferred.resolve();
assertEquals(await r.text(), "12345");
@ -2724,7 +2723,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
deferred.resolve();
return new Response("NaN".repeat(100));
@ -2867,7 +2866,7 @@ for (const testCase of compressionTestCases) {
const deferred = Promise.withResolvers<void>();
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (_request) => {
const f = await makeTempFile(testCase.length);
deferred.resolve();
@ -2923,7 +2922,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (request) => {
assertEquals(
await request.bytes(),
@ -2971,7 +2970,7 @@ for (const delay of ["delay", "nodelay"]) {
const listeningDeferred = Promise.withResolvers<void>();
const waitForAbort = Promise.withResolvers<void>();
const waitForRequest = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
port: servePort,
signal: ac.signal,
onListen: onListen(listeningDeferred.resolve),
@ -3121,7 +3120,7 @@ Deno.test(
const { promise, resolve } = Promise.withResolvers<void>();
const hostname = "127.0.0.1";
const server = Deno.serve({
await using server = Deno.serve({
handler: () => new Response("Hello World"),
hostname,
port: servePort,
@ -3151,7 +3150,7 @@ Deno.test(
const { promise, resolve } = Promise.withResolvers<void>();
const hostname = "127.0.0.1";
const server = Deno.serve({
await using server = Deno.serve({
handler: () => new Response("Hello World"),
hostname,
port: servePort,
@ -3186,7 +3185,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const deferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (req) => {
assertEquals(await req.text(), "");
deferred.resolve();
@ -3221,7 +3220,7 @@ Deno.test(
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
throw new Error("oops");
},
@ -3268,7 +3267,7 @@ Deno.test(
async function httpServer204ResponseDoesntSendContentLength() {
const { promise, resolve } = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: (_request) => new Response(null, { status: 204 }),
port: servePort,
signal: ac.signal,
@ -3298,7 +3297,7 @@ Deno.test(
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
deferred.resolve();
return new Response(null, { status: 304 });
@ -3343,7 +3342,7 @@ Deno.test(
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (req) => {
deferred.resolve();
assertEquals(await req.text(), "hello");
@ -3404,7 +3403,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (req) => {
deferred.resolve();
assertEquals(await req.text(), "");
@ -3458,7 +3457,7 @@ for (const [name, req] of badRequests) {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
throw new Error("oops");
},
@ -3505,7 +3504,7 @@ Deno.test(
let reqCount = -1;
let timerId: number | undefined;
const server = Deno.serve({
await using server = Deno.serve({
handler: (_req) => {
reqCount++;
if (reqCount === 0) {
@ -3600,7 +3599,7 @@ Deno.test(
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
let count = 0;
const server = Deno.serve({
await using server = Deno.serve({
async onListen({ port }: { port: number }) {
const res1 = await fetch(`http://localhost:${port}/`);
assertEquals(await res1.text(), "hello world 1");
@ -3630,7 +3629,7 @@ Deno.test(
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<number>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (req) => {
const cloned = req.clone();
assertEquals(req.headers, cloned.headers);
@ -3684,7 +3683,7 @@ Deno.test(
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<number>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (req) => {
await req.text();
@ -3733,7 +3732,7 @@ Deno.test({
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<number>();
const server = Deno.serve({
await using server = Deno.serve({
handler: async (req) => {
const _reader = req.body?.getReader();
@ -3780,7 +3779,7 @@ Deno.test(
async function testIssue16567() {
const ac = new AbortController();
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
async onListen({ port }) {
const res1 = await fetch(`http://localhost:${port}/`);
assertEquals((await res1.text()).length, 40 * 50_000);
@ -3947,7 +3946,7 @@ Deno.test(
},
async function httpServeCurlH2C() {
const ac = new AbortController();
const server = Deno.serve(
await using server = Deno.serve(
{ port: servePort, signal: ac.signal },
() => new Response("hello world!"),
);
@ -3982,7 +3981,7 @@ Deno.test(
const ac = new AbortController();
const { resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: () => {
const response = new Response("Hello World", {
headers: {
@ -4025,7 +4024,7 @@ Deno.test(
},
async function httpsServeCurlH2C() {
const ac = new AbortController();
const server = Deno.serve(
await using server = Deno.serve(
{
signal: ac.signal,
port: servePort,
@ -4082,7 +4081,7 @@ Deno.test(
const { promise, resolve } = Promise.withResolvers<Deno.UnixAddr>();
const ac = new AbortController();
const filePath = tmpUnixSocketPath();
const server = Deno.serve(
await using server = Deno.serve(
{
signal: ac.signal,
path: filePath,
@ -4115,7 +4114,7 @@ Deno.test(
const listeningDeferred = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve(
await using server = Deno.serve(
{
port: servePort,
onListen: onListen(listeningDeferred.resolve),
@ -4151,7 +4150,7 @@ Deno.test(
const { promise, resolve } = Promise.withResolvers<void>();
const ac = new AbortController();
const server = Deno.serve(
await using server = Deno.serve(
{
port: servePort,
onListen: onListen(resolve),
@ -4187,7 +4186,7 @@ Deno.test(
let timer: number | undefined = undefined;
let _controller;
const server = Deno.serve(
await using server = Deno.serve(
{
port: servePort,
onListen: onListen(resolve),
@ -4237,7 +4236,7 @@ Deno.test({
await assertRejects(
async () => {
const ac = new AbortController();
const server = Deno.serve({
await using server = Deno.serve({
path: "path/to/socket",
handler: (_req) => new Response("Hello, world"),
signal: ac.signal,
@ -4260,7 +4259,7 @@ Deno.test({
}, async () => {
const { promise, resolve } = Promise.withResolvers<{ hostname: string }>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (_) => new Response("ok"),
hostname: "0.0.0.0",
port: 0,
@ -4278,7 +4277,7 @@ Deno.test({
let cancelled = false;
const server = Deno.serve({
await using server = Deno.serve({
hostname: "0.0.0.0",
port: servePort,
onListen: () => resolve(),
@ -4305,7 +4304,7 @@ Deno.test({
}, async () => {
const { promise, resolve } = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
hostname: "0.0.0.0",
port: servePort,
onListen: () => resolve(),
@ -4335,7 +4334,7 @@ Deno.test(
const ac = new AbortController();
const listeningDeferred = Promise.withResolvers<void>();
const doneDeferred = Promise.withResolvers<void>();
const server = Deno.serve({
await using server = Deno.serve({
handler: (request) => {
const {
response,
@ -4379,3 +4378,46 @@ Deno.test(
await server.finished;
},
);
Deno.test({
name:
"req.body.getReader().read() throws the error with reasonable error message",
}, async () => {
const { promise, resolve, reject } = Promise.withResolvers<Error>();
const server = Deno.serve({ onListen, port: 0 }, async (req) => {
const reader = req.body!.getReader();
try {
while (true) {
const { done } = await reader.read();
if (done) break;
}
} catch (e) {
// deno-lint-ignore no-explicit-any
resolve(e as any);
}
reject(new Error("Should not reach here"));
server.shutdown();
return new Response();
});
async function onListen({ port }: { port: number }) {
const body = "a".repeat(1000);
const request = `POST / HTTP/1.1\r\n` +
`Host: 127.0.0.1:${port}\r\n` +
`Content-Length: 1000\r\n` +
"\r\n" + body;
const connection = await Deno.connect({ hostname: "127.0.0.1", port });
await connection.write(new TextEncoder().encode(request));
connection.close();
}
await server.finished;
const e = await promise;
assertIsError(
e,
Deno.errors.BadResource,
"Cannot read request body as underlying resource unavailable",
);
});
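The recurring edit in the hunks above swaps `const server = Deno.serve(...)` for `await using server = Deno.serve(...)`, leaning on explicit resource management: the returned `Deno.HttpServer` is disposed automatically when the enclosing test scope exits, rather than relying solely on manual shutdown bookkeeping. Below is a minimal sketch of that pattern, not taken from the repository; it assumes `assertEquals` from `@std/assert`, an ephemeral port, and that the server's `[Symbol.asyncDispose]()` performs the shutdown (which is what the one-line conversions above rely on).

import { assertEquals } from "@std/assert";

Deno.test("server is disposed when the scope exits", async () => {
  // `await using` invokes the server's [Symbol.asyncDispose]() at the end of
  // this block, shutting the listener down without an explicit AbortController.
  await using server = Deno.serve(
    { port: 0, onListen: () => {} },
    () => new Response("ok"),
  );
  const { port } = server.addr as Deno.NetAddr;
  const res = await fetch(`http://127.0.0.1:${port}/`);
  assertEquals(await res.text(), "ok");
});

Disposal is expected to await the server's shutdown, so tests that also call `ac.abort()` and `await server.finished` keep working unchanged; the `await using` form simply guarantees cleanup even when an assertion throws mid-test.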

View file

@ -1289,6 +1289,9 @@ impl SourceFile {
"md" => "markdown",
"html" => "html",
"css" => "css",
"scss" => "scss",
"sass" => "sass",
"less" => "less",
"yaml" => "yaml",
"sql" => "sql",
"svelte" => "svelte",

View file

@ -76,7 +76,7 @@ async function createDenoDtsFile() {
file.insertStatements(
0,
"// Copyright 2018-2024 the Deno authors. MIT license.\n\n",
"// Copyright 2018-2025 the Deno authors. MIT license.\n\n",
);
file.saveSync();