
refactor: move denort to separate crate (#27688)

This slightly degrades the performance of CJS export analysis on subsequent
runs because I changed it in this PR to no longer cache in the DENO_DIR
(denort now properly has no knowledge of the DENO_DIR). We'll have to change
it to embed this data in the binary instead, which will also allow us to get
rid of swc in denort (will do that in a follow-up PR).
David Sherret 2025-01-17 15:39:29 -05:00 committed by GitHub
parent 054075730c
commit 57dd66ec3d
112 changed files with 5696 additions and 5500 deletions
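As a reading aid before the diff: a minimal sketch of the new snapshot wiring, assuming only what the hunks in this commit show. The V8 snapshot is no longer built and embedded by cli/build.rs; both the deno and denort binaries now take it from the new deno_snapshots crate, and the hmr feature forwards to deno_snapshots/disable so JS sources can be loaded at runtime without a snapshot.

// Sketch only; mirrors the cli/js.rs hunk further down. CLI_SNAPSHOT is an
// Option so the "disable" feature (pulled in by `hmr`) can yield no snapshot.
pub fn deno_isolate_init() -> Option<&'static [u8]> {
  deno_snapshots::CLI_SNAPSHOT
}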

Cargo.lock (generated)

@ -838,18 +838,16 @@ checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
name = "cli_tests"
version = "0.0.0"
dependencies = [
"anyhow",
"bytes",
"chrono",
"deno_ast",
"deno_bench_util",
"deno_cache_dir",
"deno_core",
"deno_fetch",
"deno_lockfile",
"deno_semver",
"deno_terminal 0.2.0",
"deno_tls",
"deno_tower_lsp",
"deno_unsync",
"fastwebsockets",
"file_test_runner",
"flaky_test",
@ -866,7 +864,11 @@ dependencies = [
"pretty_assertions",
"regex",
"reqwest",
"rustls",
"rustls-pemfile",
"rustls-tokio-stream",
"serde",
"serde_json",
"sys_traits",
"test_server",
"tokio",
@ -1280,6 +1282,7 @@ dependencies = [
"deno_resolver",
"deno_runtime",
"deno_semver",
"deno_snapshots",
"deno_task_shell",
"deno_telemetry",
"deno_terminal 0.2.0",
@ -1291,7 +1294,6 @@ dependencies = [
"dprint-plugin-jupyter",
"dprint-plugin-markdown",
"dprint-plugin-typescript",
"env_logger",
"fancy-regex",
"faster-hex",
"flate2",
@ -1896,24 +1898,33 @@ dependencies = [
name = "deno_lib"
version = "0.2.0"
dependencies = [
"deno_cache_dir",
"capacity_builder 0.5.0",
"deno_config",
"deno_error",
"deno_fs",
"deno_media_type",
"deno_node",
"deno_npm",
"deno_path_util",
"deno_resolver",
"deno_runtime",
"deno_semver",
"deno_terminal 0.2.0",
"env_logger",
"faster-hex",
"indexmap 2.3.0",
"libsui",
"log",
"node_resolver",
"parking_lot",
"ring",
"serde",
"serde_json",
"sys_traits",
"test_server",
"thiserror 2.0.3",
"tokio",
"twox-hash",
"url",
]
@ -2026,7 +2037,6 @@ dependencies = [
"deno_fetch",
"deno_fs",
"deno_io",
"deno_media_type",
"deno_net",
"deno_package_json",
"deno_path_util",
@ -2040,7 +2050,7 @@ dependencies = [
"ecdsa",
"ed25519-dalek",
"elliptic-curve",
"errno 0.2.8",
"errno",
"faster-hex",
"h2 0.4.4",
"hkdf",
@ -2379,6 +2389,13 @@ dependencies = [
"url",
]
[[package]]
name = "deno_snapshots"
version = "0.1.0"
dependencies = [
"deno_runtime",
]
[[package]]
name = "deno_task_shell"
version = "0.20.2"
@ -2660,6 +2677,42 @@ dependencies = [
"v8_valueserializer",
]
[[package]]
name = "denort"
version = "2.1.5"
dependencies = [
"async-trait",
"deno_ast",
"deno_cache_dir",
"deno_config",
"deno_core",
"deno_error",
"deno_graph",
"deno_lib",
"deno_media_type",
"deno_npm",
"deno_package_json",
"deno_path_util",
"deno_resolver",
"deno_runtime",
"deno_semver",
"deno_snapshots",
"deno_terminal 0.2.0",
"import_map",
"indexmap 2.3.0",
"libsui",
"log",
"node_resolver",
"pretty_assertions",
"serde",
"sys_traits",
"test_server",
"tokio",
"tokio-util",
"twox-hash",
"url",
]
[[package]]
name = "der"
version = "0.7.9"
@ -3189,17 +3242,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "errno"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno"
version = "0.3.8"
@ -3210,16 +3252,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "error-code"
version = "3.2.0"
@ -5207,7 +5239,6 @@ dependencies = [
"async-trait",
"boxed_error",
"deno_error",
"deno_media_type",
"deno_package_json",
"deno_path_util",
"futures",
@ -6612,7 +6643,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
dependencies = [
"bitflags 2.6.0",
"errno 0.3.8",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",


@ -6,6 +6,8 @@ members = [
"bench_util",
"cli",
"cli/lib",
"cli/rt",
"cli/snapshot",
"ext/broadcast_channel",
"ext/cache",
"ext/canvas",
@ -100,6 +102,7 @@ deno_webstorage = { version = "0.181.0", path = "./ext/webstorage" }
deno_lib = { version = "0.2.0", path = "./cli/lib" }
deno_npm_cache = { version = "0.5.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.17.0", path = "./resolvers/deno" }
deno_snapshots = { version = "0.1.0", path = "./cli/snapshot" }
node_resolver = { version = "0.24.0", path = "./resolvers/node" }
aes = "=0.8.3"
@ -154,6 +157,7 @@ ipnet = "2.3"
jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3"
libc = "0.2.168"
libsui = "0.5.0"
libz-sys = { version = "1.1.20", default-features = false }
log = { version = "0.4.20", features = ["kv"] }
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases


@ -16,11 +16,6 @@ name = "deno"
path = "main.rs"
doc = false
[[bin]]
name = "denort"
path = "mainrt.rs"
doc = false
[[test]]
name = "integration"
path = "integration_tests_runner.rs"
@ -49,7 +44,7 @@ dhat-heap = ["dhat"]
upgrade = []
# A dev feature to disable creations and loading of snapshots in favor of
# loading JS sources at runtime.
hmr = ["deno_runtime/hmr"]
hmr = ["deno_runtime/hmr", "deno_snapshots/disable"]
# Vendor zlib as zlib-ng
__vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"]
@ -60,10 +55,12 @@ lazy-regex.workspace = true
serde.workspace = true
serde_json.workspace = true
zstd.workspace = true
glibc_version = "0.1.2"
flate2 = { workspace = true, features = ["default"] }
deno_error.workspace = true
[target.'cfg(unix)'.build-dependencies]
glibc_version = "0.1.2"
[target.'cfg(windows)'.build-dependencies]
winapi.workspace = true
winres.workspace = true
@ -86,10 +83,11 @@ deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_snapshots = { workspace = true }
deno_task_shell = "=0.20.2"
deno_telemetry.workspace = true
deno_terminal.workspace = true
libsui = "0.5.0"
libsui.workspace = true
node_resolver.workspace = true
anstream = "0.6.14"
@ -115,7 +113,6 @@ dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.3"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0"
faster-hex.workspace = true
# If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`.
@ -156,7 +153,6 @@ rustyline-derive = "=0.7.0"
serde.workspace = true
serde_repr.workspace = true
sha2.workspace = true
shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] }
sqlformat = "=0.3.2"
strsim = "0.11.1"
@ -185,6 +181,7 @@ winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "
[target.'cfg(unix)'.dependencies]
nix.workspace = true
shell-escape = "=0.1.5"
[dev-dependencies]
deno_bench_util.workspace = true


@ -31,6 +31,9 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_graph::GraphKind;
use deno_lib::args::CaData;
use deno_lib::args::UnstableConfig;
use deno_lib::version::DENO_VERSION_INFO;
use deno_path_util::normalize_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::SysDescriptor;
@ -546,15 +549,6 @@ impl Default for TypeCheckMode {
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum CaData {
/// The string is a file path
File(String),
/// This variant is not exposed as an option in the CLI, it is used internally
/// for standalone binaries.
Bytes(Vec<u8>),
}
// Info needed to run NPM lifecycle scripts
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct LifecycleScriptsConfig {
@ -582,19 +576,6 @@ fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
}
}
#[derive(
Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize,
)]
pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub npm_lazy_caching: bool,
pub features: Vec<String>, // --unstabe-kv --unstable-cron
}
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct InternalFlags {
/// Used when the language server is configured with an
@ -1484,14 +1465,15 @@ fn handle_repl_flags(flags: &mut Flags, repl_flags: ReplFlags) {
}
pub fn clap_root() -> Command {
debug_assert_eq!(DENO_VERSION_INFO.typescript, deno_snapshots::TS_VERSION);
let long_version = format!(
"{} ({}, {}, {})\nv8 {}\ntypescript {}",
crate::version::DENO_VERSION_INFO.deno,
crate::version::DENO_VERSION_INFO.release_channel.name(),
DENO_VERSION_INFO.deno,
DENO_VERSION_INFO.release_channel.name(),
env!("PROFILE"),
env!("TARGET"),
deno_core::v8::VERSION_STRING,
crate::version::DENO_VERSION_INFO.typescript
DENO_VERSION_INFO.typescript
);
run_args(Command::new("deno"), true)
@ -1507,7 +1489,7 @@ pub fn clap_root() -> Command {
)
.color(ColorChoice::Auto)
.term_width(800)
.version(crate::version::DENO_VERSION_INFO.deno)
.version(DENO_VERSION_INFO.deno)
.long_version(long_version)
.disable_version_flag(true)
.disable_help_flag(true)


@ -10,10 +10,6 @@ mod package_json;
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::io::BufReader;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::net::SocketAddr;
use std::num::NonZeroUsize;
use std::path::Path;
@ -58,8 +54,12 @@ use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::GraphKind;
pub use deno_json::check_warn_tsconfig;
use deno_lib::cache::DenoDirProvider;
use deno_lib::env::has_flag_env_var;
use deno_lib::args::has_flag_env_var;
use deno_lib::args::npm_pkg_req_ref_to_binary_command;
use deno_lib::args::CaData;
use deno_lib::args::NpmProcessStateKind;
use deno_lib::args::NPM_PROCESS_STATE;
use deno_lib::version::DENO_VERSION_INFO;
use deno_lib::worker::StorageKeyResolver;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::NpmRc;
@ -68,16 +68,10 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
use deno_path_util::normalize_path;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
use deno_runtime::deno_tls::rustls;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::rustls_pemfile;
use deno_runtime::deno_tls::webpki_roots;
use deno_runtime::inspector_server::InspectorServer;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::StackString;
use deno_telemetry::OtelConfig;
use deno_telemetry::OtelRuntimeConfig;
use deno_terminal::colors;
use dotenvy::from_filename;
pub use flags::*;
@ -88,15 +82,13 @@ pub use lockfile::CliLockfileReadFromPathOptions;
use once_cell::sync::Lazy;
pub use package_json::NpmInstallDepsProvider;
pub use package_json::PackageJsonDepValueParseWithLocationError;
use serde::Deserialize;
use serde::Serialize;
use sys_traits::EnvHomeDir;
use thiserror::Error;
use crate::cache::DenoDirProvider;
use crate::file_fetcher::CliFileFetcher;
use crate::sys::CliSys;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::version;
pub fn npm_registry_url() -> &'static Url {
static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
@ -608,147 +600,6 @@ pub fn create_default_npmrc() -> Arc<ResolvedNpmRc> {
})
}
#[derive(Error, Debug, Clone, deno_error::JsError)]
#[class(generic)]
pub enum RootCertStoreLoadError {
#[error(
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
)]
UnknownStore(String),
#[error("Unable to add pem file to certificate store: {0}")]
FailedAddPemFile(String),
#[error("Failed opening CA file: {0}")]
CaFileOpenError(String),
}
/// Create and populate a root cert store based on the passed options and
/// environment.
pub fn get_root_cert_store(
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
) -> Result<RootCertStore, RootCertStoreLoadError> {
let mut root_cert_store = RootCertStore::empty();
let ca_stores: Vec<String> = maybe_ca_stores
.or_else(|| {
let env_ca_store = env::var("DENO_TLS_CA_STORE").ok()?;
Some(
env_ca_store
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect(),
)
})
.unwrap_or_else(|| vec!["mozilla".to_string()]);
for store in ca_stores.iter() {
match store.as_str() {
"mozilla" => {
root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
}
"system" => {
let roots = load_native_certs().expect("could not load platform certs");
for root in roots {
if let Err(err) = root_cert_store
.add(rustls::pki_types::CertificateDer::from(root.0.clone()))
{
log::error!(
"{}",
colors::yellow(&format!(
"Unable to add system certificate to certificate store: {:?}",
err
))
);
let hex_encoded_root = faster_hex::hex_string(&root.0);
log::error!("{}", colors::gray(&hex_encoded_root));
}
}
}
_ => {
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
}
}
}
let ca_data =
maybe_ca_data.or_else(|| env::var("DENO_CERT").ok().map(CaData::File));
if let Some(ca_data) = ca_data {
let result = match ca_data {
CaData::File(ca_file) => {
let ca_file = if let Some(root) = &maybe_root_path {
root.join(&ca_file)
} else {
PathBuf::from(ca_file)
};
let certfile = std::fs::File::open(ca_file).map_err(|err| {
RootCertStoreLoadError::CaFileOpenError(err.to_string())
})?;
let mut reader = BufReader::new(certfile);
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
CaData::Bytes(data) => {
let mut reader = BufReader::new(Cursor::new(data));
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
};
match result {
Ok(certs) => {
root_cert_store.add_parsable_certificates(certs);
}
Err(e) => {
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
}
}
}
Ok(root_cert_store)
}
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
pub kind: NpmProcessStateKind,
pub local_node_modules_path: Option<String>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
Byonm,
}
static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| {
use deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
let fd = fd.parse::<usize>().ok()?;
let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// seek to beginning. after the file is written the position will be inherited by this subprocess,
// and also this file might have been read before
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state)
});
/// Overrides for the options below that when set will
/// use these values over the values derived from the
/// CLI flags or config file.
@ -771,7 +622,7 @@ pub struct CliOptions {
maybe_external_import_map: Option<(PathBuf, serde_json::Value)>,
overrides: CliOptionOverrides,
pub start_dir: Arc<WorkspaceDirectory>,
pub deno_dir_provider: Arc<DenoDirProvider<CliSys>>,
pub deno_dir_provider: Arc<DenoDirProvider>,
}
impl CliOptions {
@ -1294,7 +1145,7 @@ impl CliOptions {
Ok(Some(InspectorServer::new(
host,
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
)?))
}
@ -1884,7 +1735,7 @@ fn resolve_node_modules_folder(
cwd: &Path,
flags: &Flags,
workspace: &Workspace,
deno_dir_provider: &Arc<DenoDirProvider<CliSys>>,
deno_dir_provider: &Arc<DenoDirProvider>,
) -> Result<Option<PathBuf>, AnyError> {
fn resolve_from_root(root_folder: &FolderConfigs, cwd: &Path) -> PathBuf {
root_folder
@ -1993,15 +1844,6 @@ pub fn resolve_no_prompt(flags: &PermissionFlags) -> bool {
flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT")
}
pub fn npm_pkg_req_ref_to_binary_command(
req_ref: &NpmPackageReqReference,
) -> String {
req_ref
.sub_path()
.map(|s| s.to_string())
.unwrap_or_else(|| req_ref.req().name.to_string())
}
pub fn config_to_deno_graph_workspace_member(
config: &ConfigFile,
) -> Result<deno_graph::WorkspaceMember, AnyError> {
@ -2062,13 +1904,6 @@ pub enum NpmCachingStrategy {
Manual,
}
pub fn otel_runtime_config() -> OtelRuntimeConfig {
OtelRuntimeConfig {
runtime_name: Cow::Borrowed("deno"),
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
}
}
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;


@ -5,7 +5,6 @@ use std::path::PathBuf;
use deno_core::snapshot::*;
use deno_runtime::*;
mod shared;
mod ts {
use std::collections::HashMap;
@ -310,57 +309,6 @@ mod ts {
println!("cargo:rerun-if-changed={}", path.display());
}
}
pub(crate) fn version() -> String {
let file_text = std::fs::read_to_string("tsc/00_typescript.js").unwrap();
let version_text = " version = \"";
for line in file_text.lines() {
if let Some(index) = line.find(version_text) {
let remaining_line = &line[index + version_text.len()..];
return remaining_line[..remaining_line.find('"').unwrap()].to_string();
}
}
panic!("Could not find ts version.")
}
}
#[cfg(not(feature = "hmr"))]
fn create_cli_snapshot(snapshot_path: PathBuf) {
use deno_runtime::ops::bootstrap::SnapshotOptions;
let snapshot_options = SnapshotOptions {
ts_version: ts::version(),
v8_version: deno_core::v8::VERSION_STRING,
target: std::env::var("TARGET").unwrap(),
};
deno_runtime::snapshot::create_runtime_snapshot(
snapshot_path,
snapshot_options,
vec![],
);
}
fn git_commit_hash() -> String {
if let Ok(output) = std::process::Command::new("git")
.arg("rev-list")
.arg("-1")
.arg("HEAD")
.output()
{
if output.status.success() {
std::str::from_utf8(&output.stdout[..40])
.unwrap()
.to_string()
} else {
// When not in git repository
// (e.g. when the user install by `cargo install deno`)
"UNKNOWN".to_string()
}
} else {
// When there is no git command for some reason
"UNKNOWN".to_string()
}
}
fn main() {
@ -370,7 +318,7 @@ fn main() {
}
deno_napi::print_linker_flags("deno");
deno_napi::print_linker_flags("denort");
deno_webgpu::print_linker_flags("deno");
// Host snapshots won't work when cross compiling.
let target = env::var("TARGET").unwrap();
@ -389,51 +337,15 @@ fn main() {
}
println!("cargo:rerun-if-env-changed=DENO_CANARY");
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash());
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
println!(
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
&git_commit_hash()[..7]
);
let ts_version = ts::version();
debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes
println!("cargo:rustc-env=TS_VERSION={}", ts_version);
println!("cargo:rerun-if-env-changed=TS_VERSION");
println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap());
if cfg!(windows) {
// these dls load slowly, so delay loading them
let dlls = [
// webgpu
"d3dcompiler_47",
"OPENGL32",
// network related functions
"iphlpapi",
];
for dll in dlls {
println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll");
println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll");
}
// enable delay loading
println!("cargo:rustc-link-arg-bin=deno=delayimp.lib");
println!("cargo:rustc-link-arg-bin=denort=delayimp.lib");
}
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());
let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin");
ts::create_compiler_snapshot(compiler_snapshot_path, &c);
#[cfg(not(feature = "hmr"))]
{
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
create_cli_snapshot(cli_snapshot_path);
}
#[cfg(target_os = "windows")]
{
let mut res = winres::WindowsResource::new();


@ -9,14 +9,13 @@ use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::unsync::spawn_blocking;
use deno_lib::util::hash::FastInsecureHasher;
use deno_runtime::deno_webstorage::rusqlite;
use deno_runtime::deno_webstorage::rusqlite::Connection;
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
use deno_runtime::deno_webstorage::rusqlite::Params;
use once_cell::sync::OnceCell;
use super::FastInsecureHasher;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CacheDBHash(u64);

cli/cache/caches.rs

@ -3,7 +3,7 @@
use std::path::PathBuf;
use std::sync::Arc;
use deno_lib::cache::DenoDirProvider;
use deno_lib::version::DENO_VERSION_INFO;
use once_cell::sync::OnceCell;
use super::cache_db::CacheDB;
@ -14,10 +14,10 @@ use super::fast_check::FAST_CHECK_CACHE_DB;
use super::incremental::INCREMENTAL_CACHE_DB;
use super::module_info::MODULE_INFO_CACHE_DB;
use super::node::NODE_ANALYSIS_CACHE_DB;
use crate::sys::CliSys;
use crate::cache::DenoDirProvider;
pub struct Caches {
dir_provider: Arc<DenoDirProvider<CliSys>>,
dir_provider: Arc<DenoDirProvider>,
fmt_incremental_cache_db: OnceCell<CacheDB>,
lint_incremental_cache_db: OnceCell<CacheDB>,
dep_analysis_db: OnceCell<CacheDB>,
@ -28,7 +28,7 @@ pub struct Caches {
}
impl Caches {
pub fn new(dir: Arc<DenoDirProvider<CliSys>>) -> Self {
pub fn new(dir: Arc<DenoDirProvider>) -> Self {
Self {
dir_provider: dir,
fmt_incremental_cache_db: Default::default(),
@ -49,13 +49,9 @@ impl Caches {
cell
.get_or_init(|| {
if let Some(path) = path {
CacheDB::from_path(
config,
path,
crate::version::DENO_VERSION_INFO.deno,
)
CacheDB::from_path(config, path, DENO_VERSION_INFO.deno)
} else {
CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno)
CacheDB::in_memory(config, DENO_VERSION_INFO.deno)
}
})
.clone()


@ -1,7 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_runtime::code_cache;
@ -11,7 +9,6 @@ use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheDBHash;
use super::cache_db::CacheFailure;
use crate::worker::CliCodeCache;
pub static CODE_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
table_initializer: concat!(
@ -85,12 +82,6 @@ impl CodeCache {
}
}
impl CliCodeCache for CodeCache {
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> {
self
}
}
impl code_cache::CodeCache for CodeCache {
fn get_sync(
&self,


@ -6,18 +6,18 @@ use std::path::PathBuf;
use deno_cache_dir::DenoDirResolutionError;
use super::DiskCache;
use crate::sys::DenoLibSys;
use crate::sys::CliSys;
/// Lazily creates the deno dir which might be useful in scenarios
/// where functionality wants to continue if the DENO_DIR can't be created.
pub struct DenoDirProvider<TSys: DenoLibSys> {
sys: TSys,
pub struct DenoDirProvider {
sys: CliSys,
maybe_custom_root: Option<PathBuf>,
deno_dir: std::sync::OnceLock<Result<DenoDir<TSys>, DenoDirResolutionError>>,
deno_dir: std::sync::OnceLock<Result<DenoDir, DenoDirResolutionError>>,
}
impl<TSys: DenoLibSys> DenoDirProvider<TSys> {
pub fn new(sys: TSys, maybe_custom_root: Option<PathBuf>) -> Self {
impl DenoDirProvider {
pub fn new(sys: CliSys, maybe_custom_root: Option<PathBuf>) -> Self {
Self {
sys,
maybe_custom_root,
@ -25,9 +25,7 @@ impl<TSys: DenoLibSys> DenoDirProvider<TSys> {
}
}
pub fn get_or_create(
&self,
) -> Result<&DenoDir<TSys>, DenoDirResolutionError> {
pub fn get_or_create(&self) -> Result<&DenoDir, DenoDirResolutionError> {
self
.deno_dir
.get_or_init(|| {
@ -50,16 +48,16 @@ impl<TSys: DenoLibSys> DenoDirProvider<TSys> {
/// `DenoDir` serves as coordinator for multiple `DiskCache`s containing them
/// in single directory that can be controlled with `$DENO_DIR` env variable.
#[derive(Debug, Clone)]
pub struct DenoDir<TSys: DenoLibSys> {
pub struct DenoDir {
/// Example: /Users/rld/.deno/
pub root: PathBuf,
/// Used by TsCompiler to cache compiler output.
pub gen_cache: DiskCache<TSys>,
pub gen_cache: DiskCache,
}
impl<TSys: DenoLibSys> DenoDir<TSys> {
impl DenoDir {
pub fn new(
sys: TSys,
sys: CliSys,
maybe_custom_root: Option<PathBuf>,
) -> Result<Self, deno_cache_dir::DenoDirResolutionError> {
let root = deno_cache_dir::resolve_deno_dir(


@ -10,21 +10,21 @@ use std::str;
use deno_cache_dir::url_to_filename;
use deno_cache_dir::CACHE_PERM;
use deno_core::url::Host;
use deno_core::url::Url;
use deno_path_util::fs::atomic_write_file_with_retries;
use url::Host;
use url::Url;
use crate::sys::DenoLibSys;
use crate::sys::CliSys;
#[derive(Debug, Clone)]
pub struct DiskCache<TSys: DenoLibSys> {
sys: TSys,
pub struct DiskCache {
sys: CliSys,
pub location: PathBuf,
}
impl<TSys: DenoLibSys> DiskCache<TSys> {
impl DiskCache {
/// `location` must be an absolute path.
pub fn new(sys: TSys, location: &Path) -> Self {
pub fn new(sys: CliSys, location: &Path) -> Self {
assert!(location.is_absolute());
Self {
sys,

cli/cache/emit.rs

@ -6,25 +6,25 @@ use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::unsync::sync::AtomicFlag;
use deno_lib::cache::DiskCache;
use deno_lib::version::DENO_VERSION_INFO;
use crate::sys::CliSys;
use super::DiskCache;
/// The cache that stores previously emitted files.
#[derive(Debug)]
pub struct EmitCache {
disk_cache: DiskCache<CliSys>,
disk_cache: DiskCache,
emit_failed_flag: AtomicFlag,
file_serializer: EmitFileSerializer,
}
impl EmitCache {
pub fn new(disk_cache: DiskCache<CliSys>) -> Self {
pub fn new(disk_cache: DiskCache) -> Self {
Self {
disk_cache,
emit_failed_flag: Default::default(),
file_serializer: EmitFileSerializer {
cli_version: crate::version::DENO_VERSION_INFO.deno,
cli_version: DENO_VERSION_INFO.deno,
},
}
}
@ -148,7 +148,7 @@ impl EmitFileSerializer {
// it's ok to use an insecure hash here because
// if someone can change the emit source then they
// can also change the version hash
crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
deno_lib::util::hash::FastInsecureHasher::new_without_deno_version() // use cli_version property instead
.write(bytes)
// emit should not be re-used between cli versions
.write_str(self.cli_version)

cli/cache/mod.rs

@ -30,7 +30,8 @@ mod cache_db;
mod caches;
mod check;
mod code_cache;
mod common;
mod deno_dir;
mod disk_cache;
mod emit;
mod fast_check;
mod incremental;
@ -42,9 +43,11 @@ pub use cache_db::CacheDBHash;
pub use caches::Caches;
pub use check::TypeCheckCache;
pub use code_cache::CodeCache;
pub use common::FastInsecureHasher;
/// Permissions used to save a file in the disk caches.
pub use deno_cache_dir::CACHE_PERM;
pub use deno_dir::DenoDir;
pub use deno_dir::DenoDirProvider;
pub use disk_cache::DiskCache;
pub use emit::EmitCache;
pub use fast_check::FastCheckCache;
pub use incremental::IncrementalCache;


@ -20,9 +20,9 @@ use deno_error::JsErrorBox;
use deno_graph::MediaType;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_lib::util::hash::FastInsecureHasher;
use crate::cache::EmitCache;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::resolver::CliCjsTracker;


@ -11,8 +11,10 @@ use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::FeatureChecker;
use deno_error::JsErrorBox;
use deno_lib::cache::DenoDir;
use deno_lib::cache::DenoDirProvider;
use deno_lib::args::get_root_cert_store;
use deno_lib::args::CaData;
use deno_lib::loader::NpmModuleLoader;
use deno_lib::npm::create_npm_process_state_provider;
use deno_lib::npm::NpmRegistryReadPermissionChecker;
use deno_lib::npm::NpmRegistryReadPermissionCheckerMode;
use deno_lib::worker::LibMainWorkerFactory;
@ -42,8 +44,6 @@ use node_resolver::analyze::NodeCodeTranslator;
use once_cell::sync::OnceCell;
use crate::args::check_warn_tsconfig;
use crate::args::get_root_cert_store;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::Flags;
@ -51,6 +51,8 @@ use crate::args::NpmInstallDepsProvider;
use crate::args::TsConfigType;
use crate::cache::Caches;
use crate::cache::CodeCache;
use crate::cache::DenoDir;
use crate::cache::DenoDirProvider;
use crate::cache::EmitCache;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
@ -71,7 +73,6 @@ use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::create_npm_process_state_provider;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::NpmResolutionInstaller;
use crate::npm::CliByonmNpmResolverCreateOptions;
@ -91,7 +92,6 @@ use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::FoundPackageJsonDepFlag;
use crate::resolver::NpmModuleLoader;
use crate::standalone::binary::DenoCompileBinaryWriter;
use crate::sys::CliSys;
use crate::tools::check::TypeChecker;
@ -283,13 +283,11 @@ impl CliFactory {
})
}
pub fn deno_dir_provider(
&self,
) -> Result<&Arc<DenoDirProvider<CliSys>>, AnyError> {
pub fn deno_dir_provider(&self) -> Result<&Arc<DenoDirProvider>, AnyError> {
Ok(&self.cli_options()?.deno_dir_provider)
}
pub fn deno_dir(&self) -> Result<&DenoDir<CliSys>, AnyError> {
pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> {
Ok(self.deno_dir_provider()?.get_or_create()?)
}
@ -1031,7 +1029,6 @@ impl CliFactory {
self.cli_options()?,
self.deno_dir()?,
self.emitter()?,
self.file_fetcher()?,
self.http_client_provider(),
self.npm_resolver().await?,
self.workspace_resolver().await?.as_ref(),
@ -1103,8 +1100,8 @@ impl CliFactory {
node_resolver.clone(),
NpmModuleLoader::new(
self.cjs_tracker()?.clone(),
fs.clone(),
node_code_translator.clone(),
self.sys(),
),
npm_registry_permission_checker,
npm_req_resolver.clone(),
@ -1138,7 +1135,6 @@ impl CliFactory {
lib_main_worker_factory,
maybe_file_watcher_communicator,
cli_options.maybe_lockfile().cloned(),
node_resolver.clone(),
self.npm_installer_if_managed()?.cloned(),
npm_resolver.clone(),
self.sys(),
@ -1180,8 +1176,6 @@ impl CliFactory {
node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(),
deno_version: crate::version::DENO_VERSION_INFO.deno,
deno_user_agent: crate::version::DENO_VERSION_INFO.user_agent,
otel_config: self.cli_options()?.otel_config(),
startup_snapshot: crate::js::deno_isolate_init(),
})


@ -14,6 +14,7 @@ use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_lib::version::DENO_VERSION_INFO;
use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions;
@ -28,7 +29,6 @@ use http_body_util::BodyExt;
use thiserror::Error;
use crate::util::progress_bar::UpdateGuard;
use crate::version;
#[derive(Debug, Error)]
pub enum SendError {
@ -79,7 +79,7 @@ impl HttpClientProvider {
Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())),
Entry::Vacant(entry) => {
let client = create_http_client(
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
root_cert_store: match &self.root_cert_store_provider {
Some(provider) => Some(provider.get_or_try_init()?.clone()),
@ -481,7 +481,7 @@ mod test {
let client = HttpClient::new(
create_http_client(
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path().join("tls/RootCA.pem"),
@ -525,7 +525,7 @@ mod test {
let client = HttpClient::new(
create_http_client(
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions::default(),
)
.unwrap(),
@ -566,7 +566,7 @@ mod test {
let client = HttpClient::new(
create_http_client(
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
root_cert_store: Some(root_cert_store),
..Default::default()
@ -587,7 +587,7 @@ mod test {
.unwrap();
let client = HttpClient::new(
create_http_client(
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path()
@ -620,7 +620,7 @@ mod test {
let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap();
let client = HttpClient::new(
create_http_client(
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path()
@ -661,7 +661,7 @@ mod test {
.unwrap();
let client = HttpClient::new(
create_http_client(
version::DENO_VERSION_INFO.user_agent,
DENO_VERSION_INFO.user_agent,
CreateHttpClientOptions {
ca_certs: vec![std::fs::read(
test_util::testdata_path()


@ -1,18 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub fn main() {
let mut args = vec!["cargo", "test", "-p", "cli_tests", "--features", "run"];
if !cfg!(debug_assertions) {
args.push("--release");
}
args.push("--");
// If any args were passed to this process, pass them through to the child
let orig_args = std::env::args().skip(1).collect::<Vec<_>>();
let orig_args: Vec<&str> =
orig_args.iter().map(|x| x.as_ref()).collect::<Vec<_>>();
args.extend(orig_args);
test_util::spawn::exec_replace("cargo", &args).unwrap();
// this file exists to cause the executable to be built when running cargo test
}


@ -2,18 +2,7 @@
use log::debug;
#[cfg(not(feature = "hmr"))]
static CLI_SNAPSHOT: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
pub fn deno_isolate_init() -> Option<&'static [u8]> {
debug!("Deno isolate init with snapshots.");
#[cfg(not(feature = "hmr"))]
{
Some(CLI_SNAPSHOT)
}
#[cfg(feature = "hmr")]
{
None
}
deno_snapshots::CLI_SNAPSHOT
}


@ -14,23 +14,32 @@ description = "Shared code between the Deno CLI and denort"
path = "lib.rs"
[dependencies]
deno_cache_dir.workspace = true
capacity_builder.workspace = true
deno_config.workspace = true
deno_error.workspace = true
deno_fs = { workspace = true, features = ["sync_fs"] }
deno_media_type.workspace = true
deno_node = { workspace = true, features = ["sync_fs"] }
deno_npm.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime.workspace = true
deno_semver.workspace = true
deno_terminal.workspace = true
env_logger = "=0.10.0"
faster-hex.workspace = true
indexmap.workspace = true
libsui.workspace = true
log.workspace = true
node_resolver = { workspace = true, features = ["sync"] }
parking_lot.workspace = true
ring.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sys_traits = { workspace = true, features = ["getrandom"] }
thiserror.workspace = true
tokio.workspace = true
twox-hash.workspace = true
url.workspace = true
[dev-dependencies]

cli/lib/args.rs (new file)

@ -0,0 +1,199 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::io::BufReader;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::path::PathBuf;
use std::sync::LazyLock;
use deno_runtime::colors;
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
use deno_runtime::deno_tls::rustls;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::rustls_pemfile;
use deno_runtime::deno_tls::webpki_roots;
use deno_semver::npm::NpmPackageReqReference;
use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;
pub fn npm_pkg_req_ref_to_binary_command(
req_ref: &NpmPackageReqReference,
) -> String {
req_ref
.sub_path()
.map(|s| s.to_string())
.unwrap_or_else(|| req_ref.req().name.to_string())
}
pub fn has_trace_permissions_enabled() -> bool {
has_flag_env_var("DENO_TRACE_PERMISSIONS")
}
pub fn has_flag_env_var(name: &str) -> bool {
let value = std::env::var(name);
matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum CaData {
/// The string is a file path
File(String),
/// This variant is not exposed as an option in the CLI, it is used internally
/// for standalone binaries.
Bytes(Vec<u8>),
}
#[derive(Error, Debug, Clone, deno_error::JsError)]
#[class(generic)]
pub enum RootCertStoreLoadError {
#[error(
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
)]
UnknownStore(String),
#[error("Unable to add pem file to certificate store: {0}")]
FailedAddPemFile(String),
#[error("Failed opening CA file: {0}")]
CaFileOpenError(String),
}
/// Create and populate a root cert store based on the passed options and
/// environment.
pub fn get_root_cert_store(
maybe_root_path: Option<PathBuf>,
maybe_ca_stores: Option<Vec<String>>,
maybe_ca_data: Option<CaData>,
) -> Result<RootCertStore, RootCertStoreLoadError> {
let mut root_cert_store = RootCertStore::empty();
let ca_stores: Vec<String> = maybe_ca_stores
.or_else(|| {
let env_ca_store = std::env::var("DENO_TLS_CA_STORE").ok()?;
Some(
env_ca_store
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect(),
)
})
.unwrap_or_else(|| vec!["mozilla".to_string()]);
for store in ca_stores.iter() {
match store.as_str() {
"mozilla" => {
root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec());
}
"system" => {
let roots = load_native_certs().expect("could not load platform certs");
for root in roots {
if let Err(err) = root_cert_store
.add(rustls::pki_types::CertificateDer::from(root.0.clone()))
{
log::error!(
"{}",
colors::yellow(&format!(
"Unable to add system certificate to certificate store: {:?}",
err
))
);
let hex_encoded_root = faster_hex::hex_string(&root.0);
log::error!("{}", colors::gray(&hex_encoded_root));
}
}
}
_ => {
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
}
}
}
let ca_data =
maybe_ca_data.or_else(|| std::env::var("DENO_CERT").ok().map(CaData::File));
if let Some(ca_data) = ca_data {
let result = match ca_data {
CaData::File(ca_file) => {
let ca_file = if let Some(root) = &maybe_root_path {
root.join(&ca_file)
} else {
PathBuf::from(ca_file)
};
let certfile = std::fs::File::open(ca_file).map_err(|err| {
RootCertStoreLoadError::CaFileOpenError(err.to_string())
})?;
let mut reader = BufReader::new(certfile);
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
CaData::Bytes(data) => {
let mut reader = BufReader::new(Cursor::new(data));
rustls_pemfile::certs(&mut reader).collect::<Result<Vec<_>, _>>()
}
};
match result {
Ok(certs) => {
root_cert_store.add_parsable_certificates(certs);
}
Err(e) => {
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
}
}
}
Ok(root_cert_store)
}
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
pub kind: NpmProcessStateKind,
pub local_node_modules_path: Option<String>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
Byonm,
}
pub static NPM_PROCESS_STATE: LazyLock<Option<NpmProcessState>> =
LazyLock::new(|| {
use deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
let fd = fd.parse::<usize>().ok()?;
let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// seek to beginning. after the file is written the position will be inherited by this subprocess,
// and also this file might have been read before
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state)
});
#[derive(Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub npm_lazy_caching: bool,
pub features: Vec<String>, // --unstable-kv --unstable-cron
}
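A hedged usage sketch of the helpers that moved into deno_lib::args. The signatures are the ones shown in this file; the call site and the "ca.pem" path are made up for illustration.

use deno_lib::args::get_root_cert_store;
use deno_lib::args::has_flag_env_var;
use deno_lib::args::CaData;
use deno_lib::args::RootCertStoreLoadError;

fn load_certs_example() -> Result<(), RootCertStoreLoadError> {
  // Flag env vars count as set only when their value is exactly "1".
  let _no_prompt = has_flag_env_var("DENO_NO_PROMPT");
  // Falls back to the DENO_TLS_CA_STORE / DENO_CERT env vars when no stores
  // or CA data are passed explicitly; "ca.pem" is a placeholder path.
  let _store =
    get_root_cert_store(None, None, Some(CaData::File("ca.pem".to_string())))?;
  Ok(())
}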

cli/lib/build.rs (new file)

@ -0,0 +1,33 @@
// Copyright 2018-2025 the Deno authors. MIT license.
fn main() {
let commit_hash = git_commit_hash();
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", commit_hash);
println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH");
println!(
"cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}",
&commit_hash[..7]
);
}
fn git_commit_hash() -> String {
if let Ok(output) = std::process::Command::new("git")
.arg("rev-list")
.arg("-1")
.arg("HEAD")
.output()
{
if output.status.success() {
std::str::from_utf8(&output.stdout[..40])
.unwrap()
.to_string()
} else {
// When not in a git repository
// (e.g. when the user installs via `cargo install deno`)
"UNKNOWN".to_string()
}
} else {
// When there is no git command for some reason
"UNKNOWN".to_string()
}
}
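The build script above only exports environment variables. As a hedged illustration (the exact consumer is not part of this diff), code inside the deno_lib crate can read them at compile time:

// Assumed consumer inside deno_lib: the variables are set above via
// cargo:rustc-env, so env!() resolves them at compile time.
const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
const GIT_COMMIT_HASH_SHORT: &str = env!("GIT_COMMIT_HASH_SHORT");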


@ -1,8 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub use deno_dir::DenoDir;
pub use deno_dir::DenoDirProvider;
pub use disk_cache::DiskCache;
mod deno_dir;
mod disk_cache;

cli/lib/clippy.toml (new file)

@ -0,0 +1,48 @@
disallowed-methods = [
{ path = "std::env::current_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::env::set_current_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::env::temp_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::copy", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::create_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::DirBuilder::new", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::hard_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::OpenOptions::new", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_link", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read_to_string", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::read", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_dir", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::remove_file", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::rename", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::set_permissions", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::fs::write", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" },
{ path = "std::path::Path::exists", reason = "File system operations should be done using DenoLibSys" },
{ path = "url::Url::to_file_path", reason = "Use deno_path_util instead" },
{ path = "url::Url::from_file_path", reason = "Use deno_path_util instead" },
{ path = "url::Url::from_directory_path", reason = "Use deno_path_util instead" },
]


@ -1,10 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub fn has_trace_permissions_enabled() -> bool {
has_flag_env_var("DENO_TRACE_PERMISSIONS")
}
pub fn has_flag_env_var(name: &str) -> bool {
let value = std::env::var(name);
matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
}


@ -1,9 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod cache;
pub mod env;
pub mod args;
pub mod loader;
pub mod npm;
pub mod shared;
pub mod standalone;
pub mod sys;
pub mod util;
pub mod version;
pub mod worker;

cli/lib/loader.rs (new file)

@ -0,0 +1,213 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use deno_media_type::MediaType;
use deno_resolver::cjs::CjsTracker;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_runtime::deno_core::ModuleSourceCode;
use node_resolver::analyze::CjsCodeAnalyzer;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::InNpmPackageChecker;
use node_resolver::IsBuiltInNodeModuleChecker;
use node_resolver::NpmPackageFolderResolver;
use thiserror::Error;
use url::Url;
use crate::sys::DenoLibSys;
use crate::util::text_encoding::from_utf8_lossy_cow;
pub struct ModuleCodeStringSource {
pub code: ModuleSourceCode,
pub found_url: Url,
pub media_type: MediaType,
}
#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("{media_type} files are not supported in npm packages: {specifier}")]
pub struct NotSupportedKindInNpmError {
pub media_type: MediaType,
pub specifier: Url,
}
#[derive(Debug, Error, deno_error::JsError)]
pub enum NpmModuleLoadError {
#[class(inherit)]
#[error(transparent)]
UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
#[class(inherit)]
#[error(transparent)]
NotSupportedKindInNpm(#[from] NotSupportedKindInNpmError),
#[class(inherit)]
#[error(transparent)]
ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
#[class(inherit)]
#[error(transparent)]
TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
#[class(inherit)]
#[error("Unable to load {}{}", file_path.display(), maybe_referrer.as_ref().map(|r| format!(" imported from {}", r)).unwrap_or_default())]
UnableToLoad {
file_path: PathBuf,
maybe_referrer: Option<Url>,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error(
"{}",
format_dir_import_message(file_path, maybe_referrer, suggestion)
)]
DirImport {
file_path: PathBuf,
maybe_referrer: Option<Url>,
suggestion: Option<&'static str>,
#[source]
#[inherit]
source: std::io::Error,
},
}
fn format_dir_import_message(
file_path: &std::path::Path,
maybe_referrer: &Option<Url>,
suggestion: &Option<&'static str>,
) -> String {
// directory imports are not allowed when importing from an
// ES module, so provide the user with a helpful error message
let dir_path = file_path;
let mut msg = "Directory import ".to_string();
msg.push_str(&dir_path.to_string_lossy());
if let Some(referrer) = maybe_referrer {
msg.push_str(" is not supported resolving import from ");
msg.push_str(referrer.as_str());
if let Some(entrypoint_name) = suggestion {
msg.push_str("\nDid you mean to import ");
msg.push_str(entrypoint_name);
msg.push_str(" within the directory?");
}
}
msg
}
#[derive(Clone)]
pub struct NpmModuleLoader<
TCjsCodeAnalyzer: CjsCodeAnalyzer,
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: DenoLibSys,
> {
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
sys: TSys,
node_code_translator: Arc<
NodeCodeTranslator<
TCjsCodeAnalyzer,
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
>,
}
impl<
TCjsCodeAnalyzer: CjsCodeAnalyzer,
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: DenoLibSys,
>
NpmModuleLoader<
TCjsCodeAnalyzer,
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>
{
pub fn new(
cjs_tracker: Arc<CjsTracker<DenoInNpmPackageChecker, TSys>>,
node_code_translator: Arc<
NodeCodeTranslator<
TCjsCodeAnalyzer,
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
>,
sys: TSys,
) -> Self {
Self {
cjs_tracker,
node_code_translator,
sys,
}
}
pub async fn load(
&self,
specifier: &Url,
maybe_referrer: Option<&Url>,
) -> Result<ModuleCodeStringSource, NpmModuleLoadError> {
let file_path = deno_path_util::url_to_file_path(specifier)?;
let code = self.sys.fs_read(&file_path).map_err(|source| {
if self.sys.fs_is_dir_no_err(&file_path) {
let suggestion = ["index.mjs", "index.js", "index.cjs"]
.into_iter()
.find(|e| self.sys.fs_is_file_no_err(file_path.join(e)));
NpmModuleLoadError::DirImport {
file_path,
maybe_referrer: maybe_referrer.cloned(),
suggestion,
source,
}
} else {
NpmModuleLoadError::UnableToLoad {
file_path,
maybe_referrer: maybe_referrer.cloned(),
source,
}
}
})?;
let media_type = MediaType::from_specifier(specifier);
if media_type.is_emittable() {
return Err(NpmModuleLoadError::NotSupportedKindInNpm(
NotSupportedKindInNpmError {
media_type,
specifier: specifier.clone(),
},
));
}
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals
let code = from_utf8_lossy_cow(code);
ModuleSourceCode::String(
self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(code))
.await?
.into_owned()
.into(),
)
} else {
// esm and json code is untouched
ModuleSourceCode::Bytes(match code {
Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
Cow::Borrowed(bytes) => bytes.into(),
})
};
Ok(ModuleCodeStringSource {
code,
found_url: specifier.clone(),
media_type: MediaType::from_specifier(specifier),
})
}
}


@ -2,5 +2,79 @@
mod permission_checker;
use std::path::Path;
use std::sync::Arc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ManagedNpmResolverRc;
use deno_resolver::npm::NpmResolver;
use deno_runtime::deno_process::NpmProcessStateProvider;
use deno_runtime::deno_process::NpmProcessStateProviderRc;
pub use permission_checker::NpmRegistryReadPermissionChecker;
pub use permission_checker::NpmRegistryReadPermissionCheckerMode;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::sys::DenoLibSys;
pub fn create_npm_process_state_provider<TSys: DenoLibSys>(
npm_resolver: &NpmResolver<TSys>,
) -> NpmProcessStateProviderRc {
match npm_resolver {
NpmResolver::Byonm(byonm_npm_resolver) => {
Arc::new(ByonmNpmProcessStateProvider(byonm_npm_resolver.clone()))
}
NpmResolver::Managed(managed_npm_resolver) => {
Arc::new(ManagedNpmProcessStateProvider(managed_npm_resolver.clone()))
}
}
}
pub fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
#[derive(Debug)]
pub struct ManagedNpmProcessStateProvider<TSys: DenoLibSys>(
pub ManagedNpmResolverRc<TSys>,
);
impl<TSys: DenoLibSys> NpmProcessStateProvider
for ManagedNpmProcessStateProvider<TSys>
{
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.0.resolution().serialized_valid_snapshot(),
self.0.root_node_modules_path(),
)
}
}
#[derive(Debug)]
pub struct ByonmNpmProcessStateProvider<TSys: DenoLibSys>(
pub Arc<ByonmNpmResolver<TSys>>,
);
impl<TSys: DenoLibSys> NpmProcessStateProvider
for ByonmNpmProcessStateProvider<TSys>
{
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self
.0
.root_node_modules_path()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
}


@ -1,8 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.
/// This module is shared between the build script and the binaries. Use it sparingly.
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use thiserror::Error;
#[derive(Debug, Error)]
#[error("Unrecognized release channel: {0}")]
pub struct UnrecognizedReleaseChannelError(pub String);
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ReleaseChannel {
@ -50,13 +53,17 @@ impl ReleaseChannel {
// NOTE(bartlomieju): do not ever change these values, tools like `patchver`
// rely on them.
#[allow(unused)]
pub fn deserialize(str_: &str) -> Result<Self, AnyError> {
pub fn deserialize(
str_: &str,
) -> Result<Self, UnrecognizedReleaseChannelError> {
Ok(match str_ {
"stable" => Self::Stable,
"canary" => Self::Canary,
"rc" => Self::Rc,
"lts" => Self::Lts,
unknown => bail!("Unrecognized release channel: {}", unknown),
unknown => {
return Err(UnrecognizedReleaseChannelError(unknown.to_string()))
}
})
}
}
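A hypothetical caller, to show what the switch from bail!/AnyError to a typed error buys. The path deno_lib::shared follows the pub mod shared declaration in lib.rs above; the function name and the stable fallback are illustrative, not taken from the codebase.

use deno_lib::shared::ReleaseChannel;
use deno_lib::shared::UnrecognizedReleaseChannelError;

fn parse_channel(s: &str) -> ReleaseChannel {
  match ReleaseChannel::deserialize(s) {
    Ok(channel) => channel,
    Err(UnrecognizedReleaseChannelError(name)) => {
      eprintln!("Unrecognized release channel: {name}; falling back to stable");
      ReleaseChannel::Stable
    }
  }
}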


@ -0,0 +1,107 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use deno_config::workspace::PackageJsonDepResolution;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_semver::Version;
use indexmap::IndexMap;
use serde::Deserialize;
use serde::Serialize;
use url::Url;
use super::virtual_fs::FileSystemCaseSensitivity;
use crate::args::UnstableConfig;
pub const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
#[derive(Deserialize, Serialize)]
pub enum NodeModules {
Managed {
/// Relative path for the node_modules directory in the vfs.
node_modules_dir: Option<String>,
},
Byonm {
root_node_modules_dir: Option<String>,
},
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolverImportMap {
pub specifier: String,
pub json: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SerializedResolverWorkspaceJsrPackage {
pub relative_base: String,
pub name: String,
pub version: Option<Version>,
pub exports: IndexMap<String, String>,
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
pub package_jsons: BTreeMap<String, serde_json::Value>,
pub pkg_json_resolution: PackageJsonDepResolution,
}
// Note: Don't use hashmaps/hashsets. Ensure the serialization
// is deterministic.
#[derive(Deserialize, Serialize)]
pub struct Metadata {
pub argv: Vec<String>,
pub seed: Option<u64>,
pub code_cache_key: Option<u64>,
pub permissions: PermissionsOptions,
pub location: Option<Url>,
pub v8_flags: Vec<String>,
pub log_level: Option<log::Level>,
pub ca_stores: Option<Vec<String>>,
pub ca_data: Option<Vec<u8>>,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub env_vars_from_env_file: IndexMap<String, String>,
pub workspace_resolver: SerializedWorkspaceResolver,
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig,
pub otel_config: OtelConfig,
pub vfs_case_sensitivity: FileSystemCaseSensitivity,
}
pub struct SourceMapStore {
data: IndexMap<Cow<'static, str>, Cow<'static, [u8]>>,
}
impl SourceMapStore {
pub fn with_capacity(capacity: usize) -> Self {
Self {
data: IndexMap::with_capacity(capacity),
}
}
pub fn iter(&self) -> impl Iterator<Item = (&str, &[u8])> {
self.data.iter().map(|(k, v)| (k.as_ref(), v.as_ref()))
}
#[allow(clippy::len_without_is_empty)]
pub fn len(&self) -> usize {
self.data.len()
}
pub fn add(
&mut self,
specifier: Cow<'static, str>,
source_map: Cow<'static, [u8]>,
) {
self.data.insert(specifier, source_map);
}
pub fn get(&self, specifier: &str) -> Option<&[u8]> {
self.data.get(specifier).map(|v| v.as_ref())
}
}
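A brief usage sketch for the store (the specifier and source map bytes are illustrative):

let mut store = SourceMapStore::with_capacity(1);
store.add(
  Cow::Borrowed("file:///main.ts"),
  Cow::Borrowed(&b"{\"version\":3}"[..]),
);
assert_eq!(store.len(), 1);
assert_eq!(store.get("file:///main.ts"), Some(&b"{\"version\":3}"[..]));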

View file

@ -1,3 +1,4 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod binary;
pub mod virtual_fs;

View file

@ -1,9 +1,17 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::cmp::Ordering;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use deno_path_util::normalize_path;
use deno_path_util::strip_unc_prefix;
use deno_runtime::colors;
use deno_runtime::deno_core::anyhow::bail;
use deno_runtime::deno_core::anyhow::Context;
use deno_runtime::deno_core::error::AnyError;
use indexmap::IndexSet;
use serde::Deserialize;
use serde::Serialize;
@ -294,3 +302,474 @@ impl VfsEntry {
}
}
}
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
".deno_compile_node_modules";
#[derive(Debug)]
pub struct BuiltVfs {
pub root_path: WindowsSystemRootablePath,
pub case_sensitivity: FileSystemCaseSensitivity,
pub entries: VirtualDirectoryEntries,
pub files: Vec<Vec<u8>>,
}
#[derive(Debug)]
pub struct VfsBuilder {
executable_root: VirtualDirectory,
files: Vec<Vec<u8>>,
current_offset: u64,
file_offsets: HashMap<String, u64>,
/// The minimum root directory that should be included in the VFS.
min_root_dir: Option<WindowsSystemRootablePath>,
case_sensitivity: FileSystemCaseSensitivity,
}
impl Default for VfsBuilder {
fn default() -> Self {
Self::new()
}
}
impl VfsBuilder {
pub fn new() -> Self {
Self {
executable_root: VirtualDirectory {
name: "/".to_string(),
entries: Default::default(),
},
files: Vec::new(),
current_offset: 0,
file_offsets: Default::default(),
min_root_dir: Default::default(),
// This is not exactly correct because file systems on these OSes
// may be case-sensitive or not based on the directory, but this
// is a good enough approximation and limitation. In the future,
// we may want to store this information per directory instead
// depending on the feedback we get.
case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") {
FileSystemCaseSensitivity::Insensitive
} else {
FileSystemCaseSensitivity::Sensitive
},
}
}
pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity {
self.case_sensitivity
}
pub fn files_len(&self) -> usize {
self.files.len()
}
/// Add a directory that might be the minimum root directory
/// of the VFS.
///
/// For example, say the user has a deno.json and specifies an
/// import map in a parent directory. The import map won't be
/// included in the VFS, but its base will be, meaning we need to
/// tell the VFS builder to include the base of the import map
/// by calling this method.
pub fn add_possible_min_root_dir(&mut self, path: &Path) {
self.add_dir_raw(path);
match &self.min_root_dir {
Some(WindowsSystemRootablePath::WindowSystemRoot) => {
// already the root dir
}
Some(WindowsSystemRootablePath::Path(current_path)) => {
let mut common_components = Vec::new();
for (a, b) in current_path.components().zip(path.components()) {
if a != b {
break;
}
common_components.push(a);
}
if common_components.is_empty() {
if cfg!(windows) {
self.min_root_dir =
Some(WindowsSystemRootablePath::WindowSystemRoot);
} else {
self.min_root_dir =
Some(WindowsSystemRootablePath::Path(PathBuf::from("/")));
}
} else {
self.min_root_dir = Some(WindowsSystemRootablePath::Path(
common_components.iter().collect(),
));
}
}
None => {
self.min_root_dir =
Some(WindowsSystemRootablePath::Path(path.to_path_buf()));
}
}
}
pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
let target_path = self.resolve_target_path(path)?;
self.add_dir_recursive_not_symlink(&target_path)
}
fn add_dir_recursive_not_symlink(
&mut self,
path: &Path,
) -> Result<(), AnyError> {
self.add_dir_raw(path);
// ok, building fs implementation
#[allow(clippy::disallowed_methods)]
let read_dir = std::fs::read_dir(path)
.with_context(|| format!("Reading {}", path.display()))?;
let mut dir_entries =
read_dir.into_iter().collect::<Result<Vec<_>, _>>()?;
dir_entries.sort_by_cached_key(|entry| entry.file_name()); // determinism
for entry in dir_entries {
let file_type = entry.file_type()?;
let path = entry.path();
if file_type.is_dir() {
self.add_dir_recursive_not_symlink(&path)?;
} else if file_type.is_file() {
self.add_file_at_path_not_symlink(&path)?;
} else if file_type.is_symlink() {
match self.add_symlink(&path) {
Ok(target) => match target {
SymlinkTarget::File(target) => {
self.add_file_at_path_not_symlink(&target)?
}
SymlinkTarget::Dir(target) => {
self.add_dir_recursive_not_symlink(&target)?;
}
},
Err(err) => {
log::warn!(
"{} Failed resolving symlink. Ignoring.\n Path: {}\n Message: {:#}",
colors::yellow("Warning"),
path.display(),
err
);
}
}
}
}
Ok(())
}
fn add_dir_raw(&mut self, path: &Path) -> &mut VirtualDirectory {
log::debug!("Ensuring directory '{}'", path.display());
debug_assert!(path.is_absolute());
let mut current_dir = &mut self.executable_root;
for component in path.components() {
if matches!(component, std::path::Component::RootDir) {
continue;
}
let name = component.as_os_str().to_string_lossy();
let index = current_dir.entries.insert_or_modify(
&name,
self.case_sensitivity,
|| {
VfsEntry::Dir(VirtualDirectory {
name: name.to_string(),
entries: Default::default(),
})
},
|_| {
// ignore
},
);
match current_dir.entries.get_mut_by_index(index) {
Some(VfsEntry::Dir(dir)) => {
current_dir = dir;
}
_ => unreachable!(),
};
}
current_dir
}
pub fn get_system_root_dir_mut(&mut self) -> &mut VirtualDirectory {
&mut self.executable_root
}
pub fn get_dir_mut(&mut self, path: &Path) -> Option<&mut VirtualDirectory> {
debug_assert!(path.is_absolute());
let mut current_dir = &mut self.executable_root;
for component in path.components() {
if matches!(component, std::path::Component::RootDir) {
continue;
}
let name = component.as_os_str().to_string_lossy();
let entry = current_dir
.entries
.get_mut_by_name(&name, self.case_sensitivity)?;
match entry {
VfsEntry::Dir(dir) => {
current_dir = dir;
}
_ => unreachable!(),
};
}
Some(current_dir)
}
pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
// ok, building fs implementation
#[allow(clippy::disallowed_methods)]
let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?;
self.add_file_with_data(path, file_bytes, VfsFileSubDataKind::Raw)
}
fn add_file_at_path_not_symlink(
&mut self,
path: &Path,
) -> Result<(), AnyError> {
// ok, building fs implementation
#[allow(clippy::disallowed_methods)]
let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?;
self.add_file_with_data_raw(path, file_bytes, VfsFileSubDataKind::Raw)
}
pub fn add_file_with_data(
&mut self,
path: &Path,
data: Vec<u8>,
sub_data_kind: VfsFileSubDataKind,
) -> Result<(), AnyError> {
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
let metadata = std::fs::symlink_metadata(path).with_context(|| {
format!("Resolving target path for '{}'", path.display())
})?;
if metadata.is_symlink() {
let target = self.add_symlink(path)?.into_path_buf();
self.add_file_with_data_raw(&target, data, sub_data_kind)
} else {
self.add_file_with_data_raw(path, data, sub_data_kind)
}
}
pub fn add_file_with_data_raw(
&mut self,
path: &Path,
data: Vec<u8>,
sub_data_kind: VfsFileSubDataKind,
) -> Result<(), AnyError> {
log::debug!("Adding file '{}'", path.display());
let checksum = crate::util::checksum::gen(&[&data]);
let case_sensitivity = self.case_sensitivity;
let offset = if let Some(offset) = self.file_offsets.get(&checksum) {
// duplicate file, reuse an old offset
*offset
} else {
self.file_offsets.insert(checksum, self.current_offset);
self.current_offset
};
let dir = self.add_dir_raw(path.parent().unwrap());
let name = path.file_name().unwrap().to_string_lossy();
let offset_and_len = OffsetWithLength {
offset,
len: data.len() as u64,
};
dir.entries.insert_or_modify(
&name,
case_sensitivity,
|| {
VfsEntry::File(VirtualFile {
name: name.to_string(),
offset: offset_and_len,
module_graph_offset: offset_and_len,
})
},
|entry| match entry {
VfsEntry::File(virtual_file) => match sub_data_kind {
VfsFileSubDataKind::Raw => {
virtual_file.offset = offset_and_len;
}
VfsFileSubDataKind::ModuleGraph => {
virtual_file.module_graph_offset = offset_and_len;
}
},
VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(),
},
);
// new file, update the list of files
if self.current_offset == offset {
self.files.push(data);
self.current_offset += offset_and_len.len;
}
Ok(())
}
fn resolve_target_path(&mut self, path: &Path) -> Result<PathBuf, AnyError> {
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
let metadata = std::fs::symlink_metadata(path).with_context(|| {
format!("Resolving target path for '{}'", path.display())
})?;
if metadata.is_symlink() {
Ok(self.add_symlink(path)?.into_path_buf())
} else {
Ok(path.to_path_buf())
}
}
pub fn add_symlink(
&mut self,
path: &Path,
) -> Result<SymlinkTarget, AnyError> {
self.add_symlink_inner(path, &mut IndexSet::new())
}
fn add_symlink_inner(
&mut self,
path: &Path,
visited: &mut IndexSet<PathBuf>,
) -> Result<SymlinkTarget, AnyError> {
log::debug!("Adding symlink '{}'", path.display());
let target = strip_unc_prefix(
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
std::fs::read_link(path)
.with_context(|| format!("Reading symlink '{}'", path.display()))?,
);
let case_sensitivity = self.case_sensitivity;
let target = normalize_path(path.parent().unwrap().join(&target));
let dir = self.add_dir_raw(path.parent().unwrap());
let name = path.file_name().unwrap().to_string_lossy();
dir.entries.insert_or_modify(
&name,
case_sensitivity,
|| {
VfsEntry::Symlink(VirtualSymlink {
name: name.to_string(),
dest_parts: VirtualSymlinkParts::from_path(&target),
})
},
|_| {
// ignore previously inserted
},
);
// ok, fs implementation
#[allow(clippy::disallowed_methods)]
let target_metadata =
std::fs::symlink_metadata(&target).with_context(|| {
format!("Reading symlink target '{}'", target.display())
})?;
if target_metadata.is_symlink() {
if !visited.insert(target.clone()) {
// todo: probably don't error in this scenario
bail!(
"Circular symlink detected: {} -> {}",
visited
.iter()
.map(|p| p.display().to_string())
.collect::<Vec<_>>()
.join(" -> "),
target.display()
);
}
self.add_symlink_inner(&target, visited)
} else if target_metadata.is_dir() {
Ok(SymlinkTarget::Dir(target))
} else {
Ok(SymlinkTarget::File(target))
}
}
pub fn build(self) -> BuiltVfs {
fn strip_prefix_from_symlinks(
dir: &mut VirtualDirectory,
parts: &[String],
) {
for entry in dir.entries.iter_mut() {
match entry {
VfsEntry::Dir(dir) => {
strip_prefix_from_symlinks(dir, parts);
}
VfsEntry::File(_) => {}
VfsEntry::Symlink(symlink) => {
let parts = symlink
.dest_parts
.take_parts()
.into_iter()
.skip(parts.len())
.collect();
symlink.dest_parts.set_parts(parts);
}
}
}
}
let mut current_dir = self.executable_root;
let mut current_path = if cfg!(windows) {
WindowsSystemRootablePath::WindowSystemRoot
} else {
WindowsSystemRootablePath::Path(PathBuf::from("/"))
};
loop {
if current_dir.entries.len() != 1 {
break;
}
if self.min_root_dir.as_ref() == Some(&current_path) {
break;
}
match current_dir.entries.iter().next().unwrap() {
VfsEntry::Dir(dir) => {
if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME {
// special directory we want to maintain
break;
}
match current_dir.entries.remove(0) {
VfsEntry::Dir(dir) => {
current_path =
WindowsSystemRootablePath::Path(current_path.join(&dir.name));
current_dir = dir;
}
_ => unreachable!(),
};
}
VfsEntry::File(_) | VfsEntry::Symlink(_) => break,
}
}
if let WindowsSystemRootablePath::Path(path) = &current_path {
strip_prefix_from_symlinks(
&mut current_dir,
VirtualSymlinkParts::from_path(path).parts(),
);
}
BuiltVfs {
root_path: current_path,
case_sensitivity: self.case_sensitivity,
entries: current_dir.entries,
files: self.files,
}
}
}
#[derive(Debug)]
pub enum SymlinkTarget {
File(PathBuf),
Dir(PathBuf),
}
impl SymlinkTarget {
pub fn into_path_buf(self) -> PathBuf {
match self {
Self::File(path) => path,
Self::Dir(path) => path,
}
}
}
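A minimal sketch of driving the builder shown above (the directory argument and error handling are illustrative):

fn build_example_vfs(dir: &Path) -> Result<BuiltVfs, AnyError> {
  let mut builder = VfsBuilder::new();
  // Recursively walk the real file system and record it in the virtual one.
  builder.add_dir_recursive(dir)?;
  let vfs = builder.build();
  log::debug!(
    "built vfs with {} entries and {} file blobs",
    vfs.entries.len(),
    vfs.files.len()
  );
  Ok(vfs)
}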

View file

@ -2,44 +2,33 @@
use std::io::Write;
use deno_telemetry::OtelConfig;
use deno_telemetry::OtelConsoleConfig;
use deno_runtime::deno_telemetry;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_runtime::deno_telemetry::OtelConsoleConfig;
use super::draw_thread::DrawThread;
struct CliLogger {
struct CliLogger<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> {
otel_console_config: OtelConsoleConfig,
logger: env_logger::Logger,
on_log_start: FnOnLogStart,
on_log_end: FnOnLogEnd,
}
impl CliLogger {
pub fn new(
logger: env_logger::Logger,
otel_console_config: OtelConsoleConfig,
) -> Self {
Self {
logger,
otel_console_config,
}
}
impl<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> CliLogger<FnOnLogStart, FnOnLogEnd> {
pub fn filter(&self) -> log::LevelFilter {
self.logger.filter()
}
}
impl log::Log for CliLogger {
impl<FnOnLogStart: Fn() + Send + Sync, FnOnLogEnd: Fn() + Send + Sync> log::Log
for CliLogger<FnOnLogStart, FnOnLogEnd>
{
fn enabled(&self, metadata: &log::Metadata) -> bool {
self.logger.enabled(metadata)
}
fn log(&self, record: &log::Record) {
if self.enabled(record.metadata()) {
// it was considered to hold the draw thread's internal lock
// across logging, but if outputting to stderr blocks then that
// could potentially block other threads that access the draw
// thread's state
DrawThread::hide();
(self.on_log_start)();
match self.otel_console_config {
OtelConsoleConfig::Ignore => {
@ -54,7 +43,7 @@ impl log::Log for CliLogger {
}
}
DrawThread::show();
(self.on_log_end)();
}
}
@ -63,8 +52,20 @@ impl log::Log for CliLogger {
}
}
pub fn init(maybe_level: Option<log::Level>, otel_config: Option<OtelConfig>) {
let log_level = maybe_level.unwrap_or(log::Level::Info);
pub struct InitLoggingOptions<FnOnLogStart: Fn(), FnOnLogEnd: Fn()> {
pub on_log_start: FnOnLogStart,
pub on_log_end: FnOnLogEnd,
pub maybe_level: Option<log::Level>,
pub otel_config: Option<OtelConfig>,
}
pub fn init<
FOnLogStart: Fn() + Send + Sync + 'static,
FnOnLogEnd: Fn() + Send + Sync + 'static,
>(
options: InitLoggingOptions<FOnLogStart, FnOnLogEnd>,
) {
let log_level = options.maybe_level.unwrap_or(log::Level::Info);
let logger = env_logger::Builder::from_env(
env_logger::Env::new()
// Use `DENO_LOG` and `DENO_LOG_STYLE` instead of `RUST_` prefix
@ -117,12 +118,15 @@ pub fn init(maybe_level: Option<log::Level>, otel_config: Option<OtelConfig>) {
})
.build();
let cli_logger = CliLogger::new(
let cli_logger = CliLogger {
on_log_start: options.on_log_start,
on_log_end: options.on_log_end,
logger,
otel_config
otel_console_config: options
.otel_config
.map(|c| c.console)
.unwrap_or(OtelConsoleConfig::Ignore),
);
};
let max_level = cli_logger.filter();
let r = log::set_boxed_logger(Box::new(cli_logger));
if r.is_ok() {

View file

@ -1,3 +1,8 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod checksum;
pub mod hash;
pub mod logger;
pub mod result;
pub mod text_encoding;
pub mod v8;

View file

@ -4,10 +4,10 @@ use std::convert::Infallible;
use std::fmt::Debug;
use std::fmt::Display;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;
use deno_runtime::deno_core::error::AnyError;
use deno_runtime::deno_core::error::CoreError;
pub trait InfallibleResultExt<T> {
fn unwrap_infallible(self) -> T;

View file

@ -0,0 +1,45 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
#[inline(always)]
pub fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String {
match String::from_utf8_lossy(&bytes) {
Cow::Owned(code) => code,
// SAFETY: `String::from_utf8_lossy` guarantees that the result is valid
// UTF-8 if `Cow::Borrowed` is returned.
Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) },
}
}
#[inline(always)]
pub fn from_utf8_lossy_cow(bytes: Cow<[u8]>) -> Cow<str> {
match bytes {
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
Cow::Owned(bytes) => Cow::Owned(from_utf8_lossy_owned(bytes)),
}
}
/// Converts an `Arc<str>` to an `Arc<[u8]>`.
#[allow(dead_code)]
pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
let raw = Arc::into_raw(arc_str);
// SAFETY: This is safe because they have the same memory layout.
unsafe { Arc::from_raw(raw as *const [u8]) }
}
/// Converts an `Arc<u8>` to an `Arc<str>` if able.
#[allow(dead_code)]
pub fn arc_u8_to_arc_str(
arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
// Check that the string is valid UTF-8.
std::str::from_utf8(&arc_u8)?;
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
// standard library.
Ok(unsafe {
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
})
}
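A short usage sketch for these helpers:

let text = from_utf8_lossy_owned(vec![b'h', b'i']);
assert_eq!(text, "hi");
let arc: Arc<str> = Arc::from("hello");
let bytes: Arc<[u8]> = arc_str_to_bytes(arc);
assert_eq!(&*arc_u8_to_arc_str(bytes).unwrap(), "hello");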

14
cli/lib/util/v8.rs Normal file
View file

@ -0,0 +1,14 @@
// Copyright 2018-2025 the Deno authors. MIT license.
#[inline(always)]
pub fn construct_v8_flags(
default_v8_flags: &[String],
v8_flags: &[String],
env_v8_flags: Vec<String>,
) -> Vec<String> {
std::iter::once("UNUSED_BUT_NECESSARY_ARG0".to_owned())
.chain(default_v8_flags.iter().cloned())
.chain(env_v8_flags)
.chain(v8_flags.iter().cloned())
.collect::<Vec<_>>()
}
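Usage sketch (the flag values are illustrative):

let flags = construct_v8_flags(
  &["--stack-size=1024".to_string()], // defaults
  &["--expose-gc".to_string()],       // user-provided flags
  vec!["--max-old-space-size=512".to_string()], // flags from the environment
);
// The placeholder argv[0] always comes first, followed by defaults,
// environment flags, then user flags.
assert_eq!(flags[0], "UNUSED_BUT_NECESSARY_ARG0");
assert_eq!(flags.len(), 4);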

98
cli/lib/version.rs Normal file
View file

@ -0,0 +1,98 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use deno_runtime::deno_telemetry::OtelRuntimeConfig;
use crate::shared::ReleaseChannel;
pub fn otel_runtime_config() -> OtelRuntimeConfig {
OtelRuntimeConfig {
runtime_name: Cow::Borrowed("deno"),
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
}
}
const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
const TYPESCRIPT: &str = "5.6.2";
const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
// TODO(bartlomieju): ideally we could remove this const.
const IS_CANARY: bool = option_env!("DENO_CANARY").is_some();
// TODO(bartlomieju): this is temporary, to allow Homebrew to cut RC releases as well
const IS_RC: bool = option_env!("DENO_RC").is_some();
pub static DENO_VERSION_INFO: std::sync::LazyLock<DenoVersionInfo> =
std::sync::LazyLock::new(|| {
let release_channel = libsui::find_section("denover")
.and_then(|buf| std::str::from_utf8(buf).ok())
.and_then(|str_| ReleaseChannel::deserialize(str_).ok())
.unwrap_or({
if IS_CANARY {
ReleaseChannel::Canary
} else if IS_RC {
ReleaseChannel::Rc
} else {
ReleaseChannel::Stable
}
});
DenoVersionInfo {
deno: if release_channel == ReleaseChannel::Canary {
concat!(
env!("CARGO_PKG_VERSION"),
"+",
env!("GIT_COMMIT_HASH_SHORT")
)
} else {
env!("CARGO_PKG_VERSION")
},
release_channel,
git_hash: GIT_COMMIT_HASH,
// Keep in sync with `deno` field.
user_agent: if release_channel == ReleaseChannel::Canary {
concat!(
"Deno/",
env!("CARGO_PKG_VERSION"),
"+",
env!("GIT_COMMIT_HASH_SHORT")
)
} else {
concat!("Deno/", env!("CARGO_PKG_VERSION"))
},
typescript: TYPESCRIPT,
}
});
pub struct DenoVersionInfo {
/// Human-readable version of the current Deno binary.
///
/// For a stable release, a semver, e.g. `v1.46.2`.
/// For a canary release, a semver + 7-char git hash, e.g. `v1.46.3+asdfqwq`.
pub deno: &'static str,
pub release_channel: ReleaseChannel,
/// A full git hash.
pub git_hash: &'static str,
/// A user-agent header that will be used in HTTP client.
pub user_agent: &'static str,
pub typescript: &'static str,
}
impl DenoVersionInfo {
/// For a stable release, a semver, e.g. `v1.46.2`.
/// For a canary release, a full git hash, e.g. `9bdab6fb6b93eb43b1930f40987fa4997287f9c8`.
pub fn version_or_git_hash(&self) -> &'static str {
if self.release_channel == ReleaseChannel::Canary {
self.git_hash
} else {
CARGO_PKG_VERSION
}
}
}

View file

@ -1,5 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
@ -42,9 +43,10 @@ use deno_runtime::BootstrapOptions;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use node_resolver::errors::ResolvePkgJsonBinExportError;
use url::Url;
use crate::env::has_trace_permissions_enabled;
use crate::args::has_trace_permissions_enabled;
use crate::sys::DenoLibSys;
use crate::util::checksum;
@ -113,9 +115,9 @@ impl StorageKeyResolver {
}
}
// TODO(bartlomieju): this should be moved to some other place, added to avoid string
// duplication between worker setups and `deno info` output.
pub fn get_cache_storage_dir() -> PathBuf {
// ok because this won't ever be used by the js runtime
#[allow(clippy::disallowed_methods)]
// Note: we currently use temp_dir() to avoid managing storage size.
std::env::temp_dir().join("deno_cache")
}
@ -131,10 +133,31 @@ pub fn create_isolate_create_params() -> Option<v8::CreateParams> {
})
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveNpmBinaryEntrypointError {
#[class(inherit)]
#[error(transparent)]
ResolvePkgJsonBinExport(ResolvePkgJsonBinExportError),
#[class(generic)]
#[error("{original:#}\n\nFallback failed: {fallback:#}")]
Fallback {
fallback: ResolveNpmBinaryEntrypointFallbackError,
original: ResolvePkgJsonBinExportError,
},
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveNpmBinaryEntrypointFallbackError {
#[class(inherit)]
#[error(transparent)]
PackageSubpathResolve(node_resolver::errors::PackageSubpathResolveError),
#[class(generic)]
#[error("Cannot find module '{0}'")]
ModuleNotFound(Url),
}
pub struct LibMainWorkerOptions {
pub argv: Vec<String>,
pub deno_version: &'static str,
pub deno_user_agent: &'static str,
pub log_level: WorkerLogLevel,
pub enable_op_summary_metrics: bool,
pub enable_testing_features: bool,
@ -263,7 +286,7 @@ impl<TSys: DenoLibSys> LibWorkerFactorySharedState<TSys> {
main_module: args.main_module.clone(),
worker_id: args.worker_id,
bootstrap: BootstrapOptions {
deno_version: shared.options.deno_version.to_string(),
deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
args: shared.options.argv.clone(),
cpu_count: std::thread::available_parallelism()
.map(|p| p.get())
@ -278,7 +301,7 @@ impl<TSys: DenoLibSys> LibWorkerFactorySharedState<TSys> {
is_stdout_tty: deno_terminal::is_stdout_tty(),
is_stderr_tty: deno_terminal::is_stderr_tty(),
unstable_features,
user_agent: shared.options.deno_user_agent.to_string(),
user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(),
inspect: shared.options.is_inspecting,
has_node_modules_dir: shared.options.has_node_modules_dir,
argv0: shared.options.argv0.clone(),
@ -359,6 +382,21 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
}
}
pub fn create_main_worker(
&self,
mode: WorkerExecutionMode,
permissions: PermissionsContainer,
main_module: Url,
) -> Result<LibMainWorker, CoreError> {
self.create_custom_worker(
mode,
main_module,
permissions,
vec![],
Default::default(),
)
}
pub fn create_custom_worker(
&self,
mode: WorkerExecutionMode,
@ -420,7 +458,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
let options = WorkerOptions {
bootstrap: BootstrapOptions {
deno_version: shared.options.deno_version.to_string(),
deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
args: shared.options.argv.clone(),
cpu_count: std::thread::available_parallelism()
.map(|p| p.get())
@ -435,7 +473,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
is_stderr_tty: deno_terminal::is_stderr_tty(),
color_level: colors::get_color_level(),
unstable_features,
user_agent: shared.options.deno_user_agent.to_string(),
user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(),
inspect: shared.options.is_inspecting,
has_node_modules_dir: shared.options.has_node_modules_dir,
argv0: shared.options.argv0.clone(),
@ -476,6 +514,76 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
worker,
})
}
pub fn resolve_npm_binary_entrypoint(
&self,
package_folder: &Path,
sub_path: Option<&str>,
) -> Result<Url, ResolveNpmBinaryEntrypointError> {
match self
.shared
.node_resolver
.resolve_binary_export(package_folder, sub_path)
{
Ok(specifier) => Ok(specifier),
Err(original_err) => {
// if the binary entrypoint was not found, fall back to regular node resolution
let result =
self.resolve_binary_entrypoint_fallback(package_folder, sub_path);
match result {
Ok(Some(specifier)) => Ok(specifier),
Ok(None) => {
Err(ResolveNpmBinaryEntrypointError::ResolvePkgJsonBinExport(
original_err,
))
}
Err(fallback_err) => Err(ResolveNpmBinaryEntrypointError::Fallback {
original: original_err,
fallback: fallback_err,
}),
}
}
}
}
/// resolve the binary entrypoint using regular node resolution
fn resolve_binary_entrypoint_fallback(
&self,
package_folder: &Path,
sub_path: Option<&str>,
) -> Result<Option<Url>, ResolveNpmBinaryEntrypointFallbackError> {
// only fall back if the user specified a sub path
if sub_path.is_none() {
// it's confusing to users if the package doesn't have any binary
// entrypoint and we just execute the main script which will likely
// have blank output, so do not resolve the entrypoint in this case
return Ok(None);
}
let specifier = self
.shared
.node_resolver
.resolve_package_subpath_from_deno_module(
package_folder,
sub_path,
/* referrer */ None,
node_resolver::ResolutionMode::Import,
node_resolver::NodeResolutionKind::Execution,
)
.map_err(
ResolveNpmBinaryEntrypointFallbackError::PackageSubpathResolve,
)?;
if deno_path_util::url_to_file_path(&specifier)
.map(|p| self.shared.sys.fs_exists_no_err(p))
.unwrap_or(false)
{
Ok(Some(specifier))
} else {
Err(ResolveNpmBinaryEntrypointFallbackError::ModuleNotFound(
specifier,
))
}
}
}
pub struct LibMainWorker {
@ -536,6 +644,33 @@ impl LibMainWorker {
self.worker.evaluate_module(id).await
}
pub async fn run(&mut self) -> Result<i32, CoreError> {
log::debug!("main_module {}", self.main_module);
self.execute_main_module().await?;
self.worker.dispatch_load_event()?;
loop {
self
.worker
.run_event_loop(/* wait for inspector */ false)
.await?;
let web_continue = self.worker.dispatch_beforeunload_event()?;
if !web_continue {
let node_continue = self.worker.dispatch_process_beforeexit_event()?;
if !node_continue {
break;
}
}
}
self.worker.dispatch_unload_event()?;
self.worker.dispatch_process_exit_event()?;
Ok(self.worker.exit_code())
}
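// Illustrative sketch: with a configured `LibMainWorkerFactory` bound to
// `factory` (an assumption, not shown here), the worker above is typically
// driven as:
//
//   let mut worker =
//     factory.create_main_worker(mode, permissions, main_module)?;
//   let exit_code = worker.run().await?;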
#[inline]
pub async fn run_event_loop(
&mut self,

View file

@ -8,9 +8,9 @@ use std::time::SystemTime;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_lib::cache::DenoDir;
use deno_path_util::url_to_file_path;
use crate::cache::DenoDir;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::cache::LocalLspHttpCache;
@ -70,7 +70,7 @@ fn calculate_fs_version_in_cache(
#[derive(Debug, Clone)]
pub struct LspCache {
deno_dir: DenoDir<CliSys>,
deno_dir: DenoDir,
global: Arc<GlobalHttpCache>,
vendors_by_scope: BTreeMap<ModuleSpecifier, Option<Arc<LocalLspHttpCache>>>,
}
@ -121,7 +121,7 @@ impl LspCache {
.collect();
}
pub fn deno_dir(&self) -> &DenoDir<CliSys> {
pub fn deno_dir(&self) -> &DenoDir {
&self.deno_dir
}

View file

@ -41,7 +41,8 @@ use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_lib::env::has_flag_env_var;
use deno_lib::args::has_flag_env_var;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache;
@ -61,7 +62,6 @@ use crate::args::CliLockfileReadFromPathOptions;
use crate::args::ConfigFile;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::cache::FastInsecureHasher;
use crate::file_fetcher::CliFileFetcher;
use crate::lsp::logging::lsp_warn;
use crate::resolver::CliSloppyImportsResolver;

View file

@ -265,7 +265,7 @@ impl TsDiagnosticsStore {
}
pub fn should_send_diagnostic_batch_index_notifications() -> bool {
deno_lib::env::has_flag_env_var(
deno_lib::args::has_flag_env_var(
"DENO_DONT_USE_INTERNAL_LSP_DIAGNOSTIC_SYNC_FLAG",
)
}

View file

@ -27,7 +27,10 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
use deno_lib::env::has_flag_env_var;
use deno_lib::args::get_root_cert_store;
use deno_lib::args::has_flag_env_var;
use deno_lib::args::CaData;
use deno_lib::version::DENO_VERSION_INFO;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
@ -95,8 +98,6 @@ use super::urls;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::InternalFlags;
@ -703,7 +704,7 @@ impl Inner {
let version = format!(
"{} ({}, {})",
crate::version::DENO_VERSION_INFO.deno,
DENO_VERSION_INFO.deno,
env!("PROFILE"),
env!("TARGET")
);

View file

@ -39,6 +39,7 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;
use deno_lib::util::result::InfallibleResultExt;
use deno_lib::worker::create_isolate_create_params;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
@ -96,7 +97,6 @@ use crate::tsc::ResolveArgs;
use crate::tsc::MISSING_DEPENDENCY_SPECIFIER;
use crate::util::path::relative_specifier;
use crate::util::path::to_percent_decoded_str;
use crate::util::result::InfallibleResultExt;
use crate::util::v8::convert;
static BRACKET_ACCESSOR_RE: Lazy<Regex> =

View file

@ -17,16 +17,18 @@ mod node;
mod npm;
mod ops;
mod resolver;
mod shared;
mod standalone;
mod sys;
mod task_runner;
mod tools;
mod tsc;
mod util;
mod version;
mod worker;
pub mod sys {
#[allow(clippy::disallowed_types)] // ok, definition
pub type CliSys = sys_traits::impls::RealSys;
}
use std::env;
use std::future::Future;
use std::io::IsTerminal;
@ -40,18 +42,22 @@ use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::FutureExt;
use deno_core::unsync::JoinHandle;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_runtime::WorkerExecutionMode;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use deno_telemetry::OtelConfig;
use deno_terminal::colors;
use factory::CliFactory;
use standalone::MODULE_NOT_FOUND;
use standalone::UNSUPPORTED_SCHEME;
const MODULE_NOT_FOUND: &str = "Module not found";
const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
use self::npm::ResolveSnapshotError;
use self::util::draw_thread::DrawThread;
use crate::args::flags_from_vec;
use crate::args::DenoSubcommand;
use crate::args::Flags;
@ -201,7 +207,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
match result {
Ok(v) => Ok(v),
Err(script_err) => {
if let Some(worker::CreateCustomWorkerError::ResolvePkgFolderFromDenoReq(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_)))) = util::result::any_and_jserrorbox_downcast_ref::<worker::CreateCustomWorkerError>(&script_err) {
if let Some(worker::CreateCustomWorkerError::ResolvePkgFolderFromDenoReq(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_)))) = any_and_jserrorbox_downcast_ref::<worker::CreateCustomWorkerError>(&script_err) {
if flags.node_modules_dir.is_none() {
let mut flags = flags.deref().clone();
let watch = match &flags.subcommand {
@ -351,7 +357,7 @@ fn setup_panic_hook() {
eprintln!("var set and include the backtrace in your report.");
eprintln!();
eprintln!("Platform: {} {}", env::consts::OS, env::consts::ARCH);
eprintln!("Version: {}", version::DENO_VERSION_INFO.deno);
eprintln!("Version: {}", deno_lib::version::DENO_VERSION_INFO.deno);
eprintln!("Args: {:?}", env::args().collect::<Vec<_>>());
eprintln!();
orig_hook(panic_info);
@ -373,13 +379,11 @@ fn exit_for_error(error: AnyError) -> ! {
let mut error_code = 1;
if let Some(CoreError::Js(e)) =
util::result::any_and_jserrorbox_downcast_ref::<CoreError>(&error)
any_and_jserrorbox_downcast_ref::<CoreError>(&error)
{
error_string = format_js_error(e);
} else if let Some(e @ ResolveSnapshotError { .. }) =
util::result::any_and_jserrorbox_downcast_ref::<ResolveSnapshotError>(
&error,
)
any_and_jserrorbox_downcast_ref::<ResolveSnapshotError>(&error)
{
if let Some(e) = e.maybe_integrity_check_error() {
error_string = e.to_string();
@ -442,19 +446,19 @@ fn resolve_flags_and_init(
if err.kind() == clap::error::ErrorKind::DisplayVersion =>
{
// Ignore results to avoid BrokenPipe errors.
util::logger::init(None, None);
init_logging(None, None);
let _ = err.print();
deno_runtime::exit(0);
}
Err(err) => {
util::logger::init(None, None);
init_logging(None, None);
exit_for_error(AnyError::from(err))
}
};
let otel_config = flags.otel_config();
deno_telemetry::init(crate::args::otel_runtime_config(), &otel_config)?;
util::logger::init(flags.log_level, Some(otel_config));
deno_telemetry::init(deno_lib::version::otel_runtime_config(), &otel_config)?;
init_logging(flags.log_level, Some(otel_config));
// TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled {
@ -487,3 +491,19 @@ fn resolve_flags_and_init(
Ok(flags)
}
fn init_logging(
maybe_level: Option<log::Level>,
otel_config: Option<OtelConfig>,
) {
deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions {
maybe_level,
otel_config,
// it was considered to hold the draw thread's internal lock
// across logging, but if outputting to stderr blocks then that
// could potentially block other threads that access the draw
// thread's state
on_log_start: DrawThread::hide,
on_log_end: DrawThread::show,
})
}
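For illustration, a hedged sketch of how an embedder without a draw thread could call the relocated logger directly; the no-op hooks are the assumption here:

fn init_plain_logging(maybe_level: Option<log::Level>) {
  deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions {
    maybe_level,
    otel_config: None,
    // No draw thread to pause, so the hooks do nothing.
    on_log_start: || {},
    on_log_end: || {},
  })
}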

View file

@ -37,7 +37,11 @@ use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::Resolution;
use deno_graph::WasmModule;
use deno_lib::loader::ModuleCodeStringSource;
use deno_lib::loader::NotSupportedKindInNpmError;
use deno_lib::loader::NpmModuleLoadError;
use deno_lib::npm::NpmRegistryReadPermissionChecker;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lib::worker::CreateModuleLoaderResult;
use deno_lib::worker::ModuleLoaderFactory;
use deno_resolver::npm::DenoInNpmPackageChecker;
@ -45,6 +49,7 @@ use deno_runtime::code_cache;
use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::ops::require::UnableToGetCwdError;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::ClosestPkgJsonError;
@ -59,7 +64,6 @@ use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::TsTypeLib;
use crate::cache::CodeCache;
use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::graph_container::MainModuleGraphContainer;
@ -69,15 +73,13 @@ use crate::graph_util::enhance_graph_error;
use crate::graph_util::CreateGraphOptions;
use crate::graph_util::EnhanceGraphErrorMode;
use crate::graph_util::ModuleGraphBuilder;
use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::ModuleCodeStringSource;
use crate::resolver::NotSupportedKindInNpmError;
use crate::resolver::NpmModuleLoader;
use crate::sys::CliSys;
use crate::tools::check;
use crate::tools::check::CheckError;
@ -86,6 +88,14 @@ use crate::util::progress_bar::ProgressBar;
use crate::util::text_encoding::code_without_source_map;
use crate::util::text_encoding::source_map_from_code;
pub type CliNpmModuleLoader = deno_lib::loader::NpmModuleLoader<
CliCjsCodeAnalyzer,
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
CliNpmResolver,
CliSys,
>;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum PrepareModuleLoadError {
#[class(inherit)]
@ -246,7 +256,7 @@ struct SharedCliModuleLoaderState {
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: NpmModuleLoader,
npm_module_loader: CliNpmModuleLoader,
npm_registry_permission_checker:
Arc<NpmRegistryReadPermissionChecker<CliSys>>,
npm_req_resolver: Arc<CliNpmReqResolver>,
@ -308,7 +318,7 @@ impl CliModuleLoaderFactory {
module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: NpmModuleLoader,
npm_module_loader: CliNpmModuleLoader,
npm_registry_permission_checker: Arc<
NpmRegistryReadPermissionChecker<CliSys>,
>,
@ -427,7 +437,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
pub enum LoadCodeSourceError {
#[class(inherit)]
#[error(transparent)]
NpmModuleLoad(crate::resolver::NpmModuleLoadError),
NpmModuleLoad(NpmModuleLoadError),
#[class(inherit)]
#[error(transparent)]
LoadPreparedModule(#[from] LoadPreparedModuleError),

View file

@ -12,7 +12,6 @@ use deno_runtime::deno_fs;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
use node_resolver::analyze::CjsAnalysisExports;
use node_resolver::analyze::CjsCodeAnalysisError;
use node_resolver::analyze::CjsCodeAnalyzer;
use node_resolver::analyze::NodeCodeTranslator;
use serde::Deserialize;
@ -76,7 +75,7 @@ impl CliCjsCodeAnalyzer {
&self,
specifier: &ModuleSpecifier,
source: &str,
) -> Result<CliCjsAnalysis, CjsCodeAnalysisError> {
) -> Result<CliCjsAnalysis, JsErrorBox> {
let source_hash = CacheDBHash::from_hashable(source);
if let Some(analysis) =
self.cache.get_cjs_analysis(specifier.as_str(), source_hash)
@ -93,7 +92,9 @@ impl CliCjsCodeAnalyzer {
}
let cjs_tracker = self.cjs_tracker.clone();
let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?;
let is_maybe_cjs = cjs_tracker
.is_maybe_cjs(specifier, media_type)
.map_err(JsErrorBox::from_err)?;
let analysis = if is_maybe_cjs {
let maybe_parsed_source = self
.parsed_source_cache
@ -103,7 +104,7 @@ impl CliCjsCodeAnalyzer {
deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source: Arc<str> = source.into();
move || -> Result<_, CjsCodeAnalysisError> {
move || -> Result<_, JsErrorBox> {
let parsed_source = maybe_parsed_source
.map(Ok)
.unwrap_or_else(|| {
@ -118,11 +119,13 @@ impl CliCjsCodeAnalyzer {
})
.map_err(JsErrorBox::from_err)?;
let is_script = parsed_source.compute_is_script();
let is_cjs = cjs_tracker.is_cjs_with_known_is_script(
parsed_source.specifier(),
media_type,
is_script,
)?;
let is_cjs = cjs_tracker
.is_cjs_with_known_is_script(
parsed_source.specifier(),
media_type,
is_script,
)
.map_err(JsErrorBox::from_err)?;
if is_cjs {
let analysis = parsed_source.analyze_cjs();
Ok(CliCjsAnalysis::Cjs {
@ -154,7 +157,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
&self,
specifier: &ModuleSpecifier,
source: Option<Cow<'a, str>>,
) -> Result<ExtNodeCjsAnalysis<'a>, CjsCodeAnalysisError> {
) -> Result<ExtNodeCjsAnalysis<'a>, JsErrorBox> {
let source = match source {
Some(source) => source,
None => {

View file

@ -1,32 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_core::serde_json;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_runtime::deno_process::NpmProcessStateProvider;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::sys::CliSys;
pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliSys>;
pub type CliByonmNpmResolver = ByonmNpmResolver<CliSys>;
#[derive(Debug)]
pub struct CliByonmNpmProcessStateProvider(pub Arc<CliByonmNpmResolver>);
impl NpmProcessStateProvider for CliByonmNpmProcessStateProvider {
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self
.0
.root_node_modules_path()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
}

View file

@ -220,7 +220,7 @@ impl<'a> LifecycleScripts<'a> {
get_package_path,
);
let init_cwd = &self.config.initial_cwd;
let process_state = crate::npm::managed::npm_process_state(
let process_state = deno_lib::npm::npm_process_state(
snapshot.as_valid_serialized(),
Some(root_node_modules_dir_path),
);

View file

@ -9,6 +9,7 @@ use async_trait::async_trait;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_error::JsErrorBox;
use deno_lib::util::hash::FastInsecureHasher;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_resolver::npm::managed::NpmResolutionCell;
@ -17,7 +18,6 @@ use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
use super::common::NpmPackageFsInstaller;
use super::PackageCaching;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;
use crate::colors;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;

View file

@ -1,11 +1,9 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_npm::registry::NpmRegistryApi;
@ -13,14 +11,10 @@ use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::ManagedNpmResolverRc;
use deno_runtime::deno_process::NpmProcessStateProvider;
use thiserror::Error;
use super::CliNpmRegistryInfoProvider;
use crate::args::CliLockfile;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::sys::CliSys;
pub type CliManagedNpmResolverCreateOptions =
@ -207,27 +201,3 @@ async fn snapshot_from_lockfile(
.await?;
Ok(snapshot)
}
pub fn npm_process_state(
snapshot: ValidSerializedNpmResolutionSnapshot,
node_modules_path: Option<&Path>,
) -> String {
serde_json::to_string(&NpmProcessState {
kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
local_node_modules_path: node_modules_path
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
#[derive(Debug)]
pub struct CliManagedNpmProcessStateProvider(pub ManagedNpmResolverRc<CliSys>);
impl NpmProcessStateProvider for CliManagedNpmProcessStateProvider {
fn get_npm_process_state(&self) -> String {
npm_process_state(
self.0.resolution().serialized_valid_snapshot(),
self.0.root_node_modules_path(),
)
}
}

View file

@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod byonm;
pub mod installer;
mod managed;
@ -12,13 +11,12 @@ use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_runtime::deno_process::NpmProcessStateProviderRc;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use http::HeaderName;
use http::HeaderValue;
pub use self::byonm::CliByonmNpmResolverCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::NpmResolutionInitializer;
@ -37,6 +35,8 @@ pub type CliNpmResolver = deno_resolver::npm::NpmResolver<CliSys>;
pub type CliManagedNpmResolver = deno_resolver::npm::ManagedNpmResolver<CliSys>;
pub type CliNpmResolverCreateOptions =
deno_resolver::npm::NpmResolverCreateOptions<CliSys>;
pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliSys>;
#[derive(Debug)]
pub struct CliNpmCacheHttpClient {
@ -56,19 +56,6 @@ impl CliNpmCacheHttpClient {
}
}
pub fn create_npm_process_state_provider(
npm_resolver: &CliNpmResolver,
) -> NpmProcessStateProviderRc {
match npm_resolver {
CliNpmResolver::Byonm(byonm_npm_resolver) => Arc::new(
byonm::CliByonmNpmProcessStateProvider(byonm_npm_resolver.clone()),
),
CliNpmResolver::Managed(managed_npm_resolver) => Arc::new(
managed::CliManagedNpmProcessStateProvider(managed_npm_resolver.clone()),
),
}
}
#[async_trait::async_trait(?Send)]
impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient {
async fn download_with_retries_on_any_tokio_runtime(

View file

@ -1,16 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use async_trait::async_trait;
use dashmap::DashSet;
use deno_ast::MediaType;
use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError;
use deno_core::url::Url;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::source::ResolveError;
@ -22,23 +17,19 @@ use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::sloppy_imports::SloppyImportsCachedFs;
use deno_resolver::sloppy_imports::SloppyImportsResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use deno_semver::package::PackageReq;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use thiserror::Error;
use crate::args::NpmCachingStrategy;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver;
use crate::sys::CliSys;
use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_cow;
pub type CliCjsTracker =
deno_resolver::cjs::CjsTracker<DenoInNpmPackageChecker, CliSys>;
@ -61,150 +52,6 @@ pub type CliNpmReqResolver = deno_resolver::npm::NpmReqResolver<
CliSys,
>;
pub struct ModuleCodeStringSource {
pub code: ModuleSourceCode,
pub found_url: ModuleSpecifier,
pub media_type: MediaType,
}
#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("{media_type} files are not supported in npm packages: {specifier}")]
pub struct NotSupportedKindInNpmError {
pub media_type: MediaType,
pub specifier: Url,
}
// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone)
#[derive(Clone)]
pub struct NpmModuleLoader {
cjs_tracker: Arc<CliCjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>,
node_code_translator: Arc<CliNodeCodeTranslator>,
}
#[derive(Debug, Error, deno_error::JsError)]
pub enum NpmModuleLoadError {
#[class(inherit)]
#[error(transparent)]
NotSupportedKindInNpm(#[from] NotSupportedKindInNpmError),
#[class(inherit)]
#[error(transparent)]
ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
#[class(inherit)]
#[error(transparent)]
TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
#[class(inherit)]
#[error("{}", format_message(file_path, maybe_referrer))]
Fs {
file_path: PathBuf,
maybe_referrer: Option<ModuleSpecifier>,
#[source]
#[inherit]
source: deno_runtime::deno_io::fs::FsError,
},
}
fn format_message(
file_path: &std::path::Path,
maybe_referrer: &Option<ModuleSpecifier>,
) -> String {
if file_path.is_dir() {
// directory imports are not allowed when importing from an
// ES module, so provide the user with a helpful error message
let dir_path = file_path;
let mut msg = "Directory import ".to_string();
msg.push_str(&dir_path.to_string_lossy());
if let Some(referrer) = maybe_referrer {
msg.push_str(" is not supported resolving import from ");
msg.push_str(referrer.as_str());
let entrypoint_name = ["index.mjs", "index.js", "index.cjs"]
.iter()
.find(|e| dir_path.join(e).is_file());
if let Some(entrypoint_name) = entrypoint_name {
msg.push_str("\nDid you mean to import ");
msg.push_str(entrypoint_name);
msg.push_str(" within the directory?");
}
}
msg
} else {
let mut msg = "Unable to load ".to_string();
msg.push_str(&file_path.to_string_lossy());
if let Some(referrer) = maybe_referrer {
msg.push_str(" imported from ");
msg.push_str(referrer.as_str());
}
msg
}
}
impl NpmModuleLoader {
pub fn new(
cjs_tracker: Arc<CliCjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>,
node_code_translator: Arc<CliNodeCodeTranslator>,
) -> Self {
Self {
cjs_tracker,
node_code_translator,
fs,
}
}
pub async fn load(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, NpmModuleLoadError> {
let file_path = specifier.to_file_path().unwrap();
let code = self
.fs
.read_file_async(file_path.clone(), None)
.await
.map_err(|source| NpmModuleLoadError::Fs {
file_path,
maybe_referrer: maybe_referrer.cloned(),
source,
})?;
let media_type = MediaType::from_specifier(specifier);
if media_type.is_emittable() {
return Err(NpmModuleLoadError::NotSupportedKindInNpm(
NotSupportedKindInNpmError {
media_type,
specifier: specifier.clone(),
},
));
}
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals
let code = from_utf8_lossy_cow(code);
ModuleSourceCode::String(
self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(code))
.await?
.into_owned()
.into(),
)
} else {
// esm and json code is untouched
ModuleSourceCode::Bytes(match code {
Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
Cow::Borrowed(bytes) => bytes.into(),
})
};
Ok(ModuleCodeStringSource {
code,
found_url: specifier.clone(),
media_type: MediaType::from_specifier(specifier),
})
}
}
#[derive(Debug, Default)]
pub struct FoundPackageJsonDepFlag(AtomicFlag);

64
cli/rt/Cargo.toml Normal file
View file

@ -0,0 +1,64 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "denort"
version = "2.1.5"
authors.workspace = true
default-run = "denort"
edition.workspace = true
license.workspace = true
publish = false
repository.workspace = true
description = "Provides the denort executable"
[[bin]]
name = "denort"
path = "main.rs"
doc = false
[[test]]
name = "integration"
path = "integration_tests_runner.rs"
harness = false
[build-dependencies]
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
# todo(dsherret): remove deno_cache_dir
deno_cache_dir.workspace = true
deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_error.workspace = true
# todo(dsherret): remove deno_graph
deno_graph = { version = "=0.87.0" }
deno_lib.workspace = true
deno_media_type.workspace = true
deno_npm.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_snapshots.workspace = true
deno_terminal.workspace = true
libsui = "0.5.0"
node_resolver.workspace = true
async-trait.workspace = true
import_map = { version = "=0.21.0", features = ["ext"] }
indexmap.workspace = true
log = { workspace = true, features = ["serde"] }
serde.workspace = true
sys_traits = { workspace = true, features = ["getrandom", "filetime", "libc", "real", "strip_unc", "winapi"] }
tokio.workspace = true
tokio-util.workspace = true
twox-hash.workspace = true
url.workspace = true
[dev-dependencies]
pretty_assertions.workspace = true
sys_traits = { workspace = true, features = ["memory"] }
test_util.workspace = true

685
cli/rt/binary.rs Normal file
View file

@ -0,0 +1,685 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::ffi::OsString;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_error::JsErrorBox;
use deno_lib::standalone::binary::Metadata;
use deno_lib::standalone::binary::SourceMapStore;
use deno_lib::standalone::binary::MAGIC_BYTES;
use deno_lib::standalone::virtual_fs::VfsFileSubDataKind;
use deno_lib::standalone::virtual_fs::VirtualDirectory;
use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries;
use deno_media_type::MediaType;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use crate::file_system::FileBackedVfs;
use crate::file_system::VfsRoot;
pub struct StandaloneData {
pub metadata: Metadata,
pub modules: StandaloneModules,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub source_maps: SourceMapStore,
pub vfs: Arc<FileBackedVfs>,
}
/// This function will try to run this binary as a standalone binary
/// produced by `deno compile`. It determines if this is a standalone
/// binary by skipping over the trailer width at the end of the file,
/// then checking for the magic trailer string `d3n0l4nd`. If found,
/// the bundle is executed. If not, this function returns `Ok(None)`.
pub fn extract_standalone(
cli_args: Cow<Vec<OsString>>,
) -> Result<Option<StandaloneData>, AnyError> {
let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None);
};
let DeserializedDataSection {
mut metadata,
npm_snapshot,
remote_modules,
source_maps,
vfs_root_entries,
vfs_files_data,
} = match deserialize_binary_data_section(data)? {
Some(data_section) => data_section,
None => return Ok(None),
};
let root_path = {
let maybe_current_exe = std::env::current_exe().ok();
let current_exe_name = maybe_current_exe
.as_ref()
.and_then(|p| p.file_name())
.map(|p| p.to_string_lossy())
// should never happen
.unwrap_or_else(|| Cow::Borrowed("binary"));
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
};
let cli_args = cli_args.into_owned();
metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap());
}
let vfs = {
let fs_root = VfsRoot {
dir: VirtualDirectory {
// align the name of the directory with the root dir
name: root_path.file_name().unwrap().to_string_lossy().to_string(),
entries: vfs_root_entries,
},
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(
Cow::Borrowed(vfs_files_data),
fs_root,
metadata.vfs_case_sensitivity,
))
};
Ok(Some(StandaloneData {
metadata,
modules: StandaloneModules {
remote_modules,
vfs: vfs.clone(),
},
npm_snapshot,
root_path,
source_maps,
vfs,
}))
}
pub struct DeserializedDataSection {
pub metadata: Metadata,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub remote_modules: RemoteModulesStore,
pub source_maps: SourceMapStore,
pub vfs_root_entries: VirtualDirectoryEntries,
pub vfs_files_data: &'static [u8],
}
pub fn deserialize_binary_data_section(
data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
if input.len() < MAGIC_BYTES.len() {
bail!("Unexpected end of data. Could not find magic bytes.");
}
let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
if magic_bytes != MAGIC_BYTES {
return Ok((input, false));
}
Ok((input, true))
}
#[allow(clippy::type_complexity)]
fn read_source_map_entry(
input: &[u8],
) -> Result<(&[u8], (Cow<str>, &[u8])), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let (input, source_map) = read_bytes_with_u32_len(input)?;
Ok((input, (specifier, source_map)))
}
let (input, found) = read_magic_bytes(data)?;
if !found {
return Ok(None);
}
// 1. Metadata
let (input, data) =
read_bytes_with_u64_len(input).context("reading metadata")?;
let metadata: Metadata =
serde_json::from_slice(data).context("deserializing metadata")?;
// 2. Npm snapshot
let (input, data) =
read_bytes_with_u64_len(input).context("reading npm snapshot")?;
let npm_snapshot = if data.is_empty() {
None
} else {
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
};
// 3. Remote modules
let (input, remote_modules) =
RemoteModulesStore::build(input).context("deserializing remote modules")?;
// 4. VFS
let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
let vfs_root_entries: VirtualDirectoryEntries =
serde_json::from_slice(data).context("deserializing vfs data")?;
let (input, vfs_files_data) =
read_bytes_with_u64_len(input).context("reading vfs files data")?;
// 5. Source maps
let (mut input, source_map_data_len) = read_u32_as_usize(input)?;
let mut source_maps = SourceMapStore::with_capacity(source_map_data_len);
for _ in 0..source_map_data_len {
let (current_input, (specifier, source_map)) =
read_source_map_entry(input)?;
input = current_input;
source_maps.add(specifier, Cow::Borrowed(source_map));
}
// finally ensure we read the magic bytes at the end
let (_input, found) = read_magic_bytes(input)?;
if !found {
bail!("Could not find magic bytes at the end of the data.");
}
Ok(Some(DeserializedDataSection {
metadata,
npm_snapshot,
remote_modules,
source_maps,
vfs_root_entries,
vfs_files_data,
}))
}
pub struct StandaloneModules {
remote_modules: RemoteModulesStore,
vfs: Arc<FileBackedVfs>,
}
impl StandaloneModules {
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a Url,
) -> Result<Option<&'a Url>, JsErrorBox> {
if specifier.scheme() == "file" {
Ok(Some(specifier))
} else {
self.remote_modules.resolve_specifier(specifier)
}
}
pub fn has_file(&self, path: &Path) -> bool {
self.vfs.file_entry(path).is_ok()
}
pub fn read<'a>(
&'a self,
specifier: &'a Url,
kind: VfsFileSubDataKind,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?;
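      // Prefer the embedded virtual file system; fall back to the real file
      // system for files that were not bundled into the binary.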
let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry, kind)?,
Err(err) if err.kind() == ErrorKind::NotFound => {
match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes,
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
return Ok(None)
}
Err(err) => return Err(err.into()),
}
}
Err(err) => return Err(err.into()),
};
Ok(Some(DenoCompileModuleData {
media_type: MediaType::from_specifier(specifier),
specifier,
data: bytes,
}))
} else {
self.remote_modules.read(specifier).map(|maybe_entry| {
maybe_entry.map(|entry| DenoCompileModuleData {
media_type: entry.media_type,
specifier: entry.specifier,
data: match kind {
VfsFileSubDataKind::Raw => entry.data,
VfsFileSubDataKind::ModuleGraph => {
entry.transpiled_data.unwrap_or(entry.data)
}
},
})
})
}
}
}
pub struct DenoCompileModuleData<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
pub data: Cow<'static, [u8]>,
}
impl<'a> DenoCompileModuleData<'a> {
pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
match data {
Cow::Borrowed(d) => DenoCompileModuleSource::String(
// SAFETY: we know this is a valid utf8 string
unsafe { std::str::from_utf8_unchecked(d) },
),
Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
}
}
let (media_type, source) = match self.media_type {
MediaType::JavaScript
| MediaType::Jsx
| MediaType::Mjs
| MediaType::Cjs
| MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Tsx => {
(ModuleType::JavaScript, into_string_unsafe(self.data))
}
MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
MediaType::Wasm => {
(ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data))
}
// just assume javascript if we made it here
MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
ModuleType::JavaScript,
DenoCompileModuleSource::Bytes(self.data),
),
};
(self.specifier, media_type, source)
}
}
pub enum DenoCompileModuleSource {
String(&'static str),
Bytes(Cow<'static, [u8]>),
}
impl DenoCompileModuleSource {
pub fn into_for_v8(self) -> ModuleSourceCode {
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
ModuleSourceCode::Bytes(match data {
Cow::Borrowed(d) => d.into(),
Cow::Owned(d) => d.into_boxed_slice().into(),
})
}
match self {
// todo(https://github.com/denoland/deno_core/pull/943): store whether
// the string is ascii or not ahead of time so we can avoid the is_ascii()
// check in FastString::from_static
Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
Self::Bytes(b) => into_bytes(b),
}
}
}
pub struct RemoteModuleEntry<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
pub data: Cow<'static, [u8]>,
pub transpiled_data: Option<Cow<'static, [u8]>>,
}
enum RemoteModulesStoreSpecifierValue {
Data(usize),
Redirect(Url),
}
pub struct RemoteModulesStore {
specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
files_data: &'static [u8],
}
impl RemoteModulesStore {
fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> {
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let specifier = Url::parse(&specifier)?;
let (input, offset) = read_u64(input)?;
Ok((input, (specifier, offset)))
}
fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
let (input, from) = read_string_lossy(input)?;
let from = Url::parse(&from)?;
let (input, to) = read_string_lossy(input)?;
let to = Url::parse(&to)?;
Ok((input, (from, to)))
}
fn read_headers(
input: &[u8],
) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
{
let (input, specifiers_len) = read_u32_as_usize(input)?;
let (mut input, redirects_len) = read_u32_as_usize(input)?;
let mut specifiers =
HashMap::with_capacity(specifiers_len + redirects_len);
for _ in 0..specifiers_len {
let (current_input, (specifier, offset)) =
read_specifier(input).context("reading specifier")?;
input = current_input;
specifiers.insert(
specifier,
RemoteModulesStoreSpecifierValue::Data(offset as usize),
);
}
for _ in 0..redirects_len {
let (current_input, (from, to)) = read_redirect(input)?;
input = current_input;
specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
}
Ok((input, specifiers))
}
let (input, specifiers) = read_headers(input)?;
let (input, files_data) = read_bytes_with_u64_len(input)?;
Ok((
input,
Self {
specifiers,
files_data,
},
))
}
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a Url,
) -> Result<Option<&'a Url>, JsErrorBox> {
let mut count = 0;
let mut current = specifier;
loop {
if count > 10 {
return Err(JsErrorBox::generic(format!(
"Too many redirects resolving '{}'",
specifier
)));
}
match self.specifiers.get(current) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
current = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
return Ok(Some(current));
}
None => {
return Ok(None);
}
}
}
}
pub fn read<'a>(
&'a self,
original_specifier: &'a Url,
) -> Result<Option<RemoteModuleEntry<'a>>, AnyError> {
let mut count = 0;
let mut specifier = original_specifier;
loop {
if count > 10 {
bail!("Too many redirects resolving '{}'", original_specifier);
}
match self.specifiers.get(specifier) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
specifier = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
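          // Each module entry is laid out as: one media type byte, a
          // u32-length-prefixed data block, a one-byte "has transpiled data"
          // flag, and, when that flag is 1, a u32-length-prefixed block of
          // transpiled data.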
let input = &self.files_data[*offset..];
let (input, media_type_byte) = read_bytes(input, 1)?;
let media_type = deserialize_media_type(media_type_byte[0])?;
let (input, data) = read_bytes_with_u32_len(input)?;
check_has_len(input, 1)?;
let (input, has_transpiled) = (&input[1..], input[0]);
let (_, transpiled_data) = match has_transpiled {
0 => (input, None),
1 => {
let (input, data) = read_bytes_with_u32_len(input)?;
(input, Some(data))
}
value => bail!(
"Invalid transpiled data flag: {}. Compiled data is corrupt.",
value
),
};
return Ok(Some(RemoteModuleEntry {
specifier,
media_type,
data: Cow::Borrowed(data),
transpiled_data: transpiled_data.map(Cow::Borrowed),
}));
}
None => {
return Ok(None);
}
}
}
}
}
fn deserialize_npm_snapshot(
input: &[u8],
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
let (input, id) = read_string_lossy(input)?;
let id = NpmPackageId::from_serialized(&id)?;
Ok((input, id))
}
#[allow(clippy::needless_lifetimes)] // clippy bug
fn parse_root_package<'a>(
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
{
|input| {
let (input, req) = read_string_lossy(input)?;
let req = PackageReq::from_str(&req)?;
let (input, id) = read_u32_as_usize(input)?;
Ok((input, (req, id_to_npm_id(id)?)))
}
}
#[allow(clippy::needless_lifetimes)] // clippy bug
fn parse_package_dep<'a>(
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
{
|input| {
let (input, req) = read_string_lossy(input)?;
let (input, id) = read_u32_as_usize(input)?;
let req = StackString::from_cow(req);
Ok((input, (req, id_to_npm_id(id)?)))
}
}
fn parse_package<'a>(
input: &'a [u8],
id: NpmPackageId,
id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
let (input, deps_len) = read_u32_as_usize(input)?;
let (input, dependencies) =
parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
Ok((
input,
SerializedNpmResolutionSnapshotPackage {
id,
system: Default::default(),
dist: Default::default(),
dependencies,
optional_dependencies: Default::default(),
bin: None,
scripts: Default::default(),
deprecated: Default::default(),
},
))
}
let (input, packages_len) = read_u32_as_usize(input)?;
  // build a list that maps each package's serialized index to its npm package id
let (input, data_ids_to_npm_ids) =
parse_vec_n_times(input, packages_len, parse_id)
.context("deserializing id")?;
let data_id_to_npm_id = |id: usize| {
data_ids_to_npm_ids
.get(id)
.cloned()
.ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
};
let (input, root_packages_len) = read_u32_as_usize(input)?;
let (input, root_packages) = parse_hashmap_n_times(
input,
root_packages_len,
parse_root_package(&data_id_to_npm_id),
)
.context("deserializing root package")?;
let (input, packages) =
parse_vec_n_times_with_index(input, packages_len, |input, index| {
parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
})
.context("deserializing package")?;
if !input.is_empty() {
bail!("Unexpected data left over");
}
Ok(
SerializedNpmResolutionSnapshot {
packages,
root_packages,
}
    // this is ok because we have already verified that all the
    // identifiers found in the snapshot are valid via the
    // serialized index -> npm package id mapping
.into_valid_unsafe(),
)
}
fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
match value {
0 => Ok(MediaType::JavaScript),
1 => Ok(MediaType::Jsx),
2 => Ok(MediaType::Mjs),
3 => Ok(MediaType::Cjs),
4 => Ok(MediaType::TypeScript),
5 => Ok(MediaType::Mts),
6 => Ok(MediaType::Cts),
7 => Ok(MediaType::Dts),
8 => Ok(MediaType::Dmts),
9 => Ok(MediaType::Dcts),
10 => Ok(MediaType::Tsx),
11 => Ok(MediaType::Json),
12 => Ok(MediaType::Wasm),
13 => Ok(MediaType::Css),
14 => Ok(MediaType::SourceMap),
15 => Ok(MediaType::Unknown),
_ => bail!("Unknown media type value: {}", value),
}
}
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
mut input: &[u8],
times: usize,
parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
let mut results = HashMap::with_capacity(times);
for _ in 0..times {
let result = parse(input);
let (new_input, (key, value)) = result?;
results.insert(key, value);
input = new_input;
}
Ok((input, results))
}
fn parse_vec_n_times<TResult>(
input: &[u8],
times: usize,
parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
parse_vec_n_times_with_index(input, times, |input, _index| parse(input))
}
fn parse_vec_n_times_with_index<TResult>(
mut input: &[u8],
times: usize,
parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
let mut results = Vec::with_capacity(times);
for i in 0..times {
let result = parse(input, i);
let (new_input, result) = result?;
results.push(result);
input = new_input;
}
Ok((input, results))
}
fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u64(input)?;
let (input, data) = read_bytes(input, len as usize)?;
Ok((input, data))
}
fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u32_as_usize(input)?;
let (input, data) = read_bytes(input, len)?;
Ok((input, data))
}
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
check_has_len(input, len)?;
let (len_bytes, input) = input.split_at(len);
Ok((input, len_bytes))
}
#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> {
if input.len() < len {
bail!("Unexpected end of data.");
}
Ok(())
}
fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
let (input, data_bytes) = read_bytes_with_u32_len(input)?;
Ok((input, String::from_utf8_lossy(data_bytes)))
}
fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
let (input, len_bytes) = read_bytes(input, 4)?;
let len = u32::from_le_bytes(len_bytes.try_into()?);
Ok((input, len as usize))
}
fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
let (input, len_bytes) = read_bytes(input, 8)?;
let len = u64::from_le_bytes(len_bytes.try_into()?);
Ok((input, len))
}
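For orientation, here is a minimal sketch (not part of this commit; the helper name is illustrative) of the writer side these helpers assume: every length-prefixed field is written as a little-endian length followed by the raw bytes, which is exactly what `read_string_lossy`, `read_u32_as_usize`, and `read_u64` above undo.

fn append_string(out: &mut Vec<u8>, text: &str) {
  // u32 little-endian byte length, then the raw UTF-8 bytes.
  out.extend_from_slice(&(text.len() as u32).to_le_bytes());
  out.extend_from_slice(text.as_bytes());
}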

11
cli/rt/build.rs Normal file

@ -0,0 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license.
fn main() {
// Skip building from docs.rs.
if std::env::var_os("DOCS_RS").is_some() {
return;
}
deno_runtime::deno_napi::print_linker_flags("denort");
deno_runtime::deno_webgpu::print_linker_flags("denort");
}

View file

@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::BufReader;
use std::io::BufWriter;
@ -10,17 +9,15 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::sync::AtomicFlag;
use deno_lib::util::hash::FastInsecureHasher;
use deno_path_util::get_atomic_path;
use deno_runtime::code_cache::CodeCache;
use deno_runtime::code_cache::CodeCacheType;
use crate::cache::FastInsecureHasher;
use crate::worker::CliCodeCache;
use url::Url;
enum CodeCacheStrategy {
FirstRun(FirstRunCodeCacheStrategy),
@ -76,12 +73,27 @@ impl DenoCompileCodeCache {
}
}
}
pub fn for_deno_core(self: Arc<Self>) -> Arc<dyn CodeCache> {
self.clone()
}
pub fn enabled(&self) -> bool {
match &self.strategy {
CodeCacheStrategy::FirstRun(strategy) => {
!strategy.is_finished.is_raised()
}
CodeCacheStrategy::SubsequentRun(strategy) => {
!strategy.is_finished.is_raised()
}
}
}
}
impl CodeCache for DenoCompileCodeCache {
fn get_sync(
&self,
specifier: &ModuleSpecifier,
specifier: &Url,
code_cache_type: CodeCacheType,
source_hash: u64,
) -> Option<Vec<u8>> {
@ -106,7 +118,7 @@ impl CodeCache for DenoCompileCodeCache {
fn set_sync(
&self,
specifier: ModuleSpecifier,
specifier: Url,
code_cache_type: CodeCacheType,
source_hash: u64,
bytes: &[u8],
@ -152,23 +164,6 @@ impl CodeCache for DenoCompileCodeCache {
}
}
impl CliCodeCache for DenoCompileCodeCache {
fn enabled(&self) -> bool {
match &self.strategy {
CodeCacheStrategy::FirstRun(strategy) => {
!strategy.is_finished.is_raised()
}
CodeCacheStrategy::SubsequentRun(strategy) => {
!strategy.is_finished.is_raised()
}
}
}
fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache> {
self
}
}
type CodeCacheKey = (String, CodeCacheType);
struct FirstRunCodeCacheData {
@ -216,7 +211,7 @@ struct SubsequentRunCodeCacheStrategy {
impl SubsequentRunCodeCacheStrategy {
fn take_from_cache(
&self,
specifier: &ModuleSpecifier,
specifier: &Url,
code_cache_type: CodeCacheType,
source_hash: u64,
) -> Option<Vec<u8>> {
@ -395,8 +390,6 @@ fn deserialize_with_reader<T: Read>(
#[cfg(test)]
mod test {
use std::fs::File;
use test_util::TempDir;
use super::*;
@ -463,8 +456,8 @@ mod test {
fn code_cache() {
let temp_dir = TempDir::new();
let file_path = temp_dir.path().join("cache.bin").to_path_buf();
let url1 = ModuleSpecifier::parse("https://deno.land/example1.js").unwrap();
let url2 = ModuleSpecifier::parse("https://deno.land/example2.js").unwrap();
let url1 = Url::parse("https://deno.land/example1.js").unwrap();
let url2 = Url::parse("https://deno.land/example2.js").unwrap();
// first run
{
let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234);

1734
cli/rt/file_system.rs Normal file

File diff suppressed because it is too large

View file

@ -0,0 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub fn main() {
// this file exists to cause the executable to be built when running cargo test
}

View file

@ -1,46 +1,27 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Allow unused code warnings because we share
// code between the two bin targets.
#![allow(dead_code)]
#![allow(unused_imports)]
mod standalone;
mod args;
mod cache;
mod emit;
mod file_fetcher;
mod http_util;
mod js;
mod node;
mod npm;
mod resolver;
mod shared;
mod sys;
mod task_runner;
mod util;
mod version;
mod worker;
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::env::current_exe;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::error::JsError;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_lib::version::otel_runtime_config;
use deno_runtime::deno_telemetry::OtelConfig;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS;
use deno_terminal::colors;
use indexmap::IndexMap;
use standalone::DenoCompileFileSystem;
use crate::args::Flags;
use crate::util::result::any_and_jserrorbox_downcast_ref;
use self::binary::extract_standalone;
use self::file_system::DenoRtSys;
mod binary;
mod code_cache;
mod file_system;
mod node;
mod run;
pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
log::error!(
@ -87,27 +68,26 @@ fn load_env_vars(env_vars: &IndexMap<String, String>) {
fn main() {
deno_runtime::deno_permissions::mark_standalone();
let args: Vec<_> = env::args_os().collect();
let standalone = standalone::extract_standalone(Cow::Owned(args));
let standalone = extract_standalone(Cow::Owned(args));
let future = async move {
match standalone {
Ok(Some(data)) => {
deno_telemetry::init(
crate::args::otel_runtime_config(),
deno_runtime::deno_telemetry::init(
otel_runtime_config(),
&data.metadata.otel_config,
)?;
util::logger::init(
init_logging(
data.metadata.log_level,
Some(data.metadata.otel_config.clone()),
);
load_env_vars(&data.metadata.env_vars_from_env_file);
let fs = DenoCompileFileSystem::new(data.vfs.clone());
let sys = crate::sys::CliSys::DenoCompile(fs.clone());
let exit_code = standalone::run(Arc::new(fs), sys, data).await?;
let sys = DenoRtSys::new(data.vfs.clone());
let exit_code = run::run(Arc::new(sys.clone()), sys, data).await?;
deno_runtime::exit(exit_code);
}
Ok(None) => Ok(()),
Err(err) => {
util::logger::init(None, None);
init_logging(None, None);
Err(err)
}
}
@ -115,3 +95,15 @@ fn main() {
unwrap_or_exit(create_and_run_current_thread_with_maybe_metrics(future));
}
fn init_logging(
maybe_level: Option<log::Level>,
otel_config: Option<OtelConfig>,
) {
deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions {
maybe_level,
otel_config,
on_log_start: || {},
on_log_end: || {},
})
}

157
cli/rt/node.rs Normal file

@ -0,0 +1,157 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_lib::loader::NpmModuleLoader;
use deno_media_type::MediaType;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmReqResolver;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker;
use node_resolver::analyze::CjsAnalysis;
use node_resolver::analyze::CjsAnalysisExports;
use node_resolver::analyze::NodeCodeTranslator;
use crate::file_system::DenoRtSys;
pub type DenoRtCjsTracker =
deno_resolver::cjs::CjsTracker<DenoInNpmPackageChecker, DenoRtSys>;
pub type DenoRtNpmResolver = deno_resolver::npm::NpmResolver<DenoRtSys>;
pub type DenoRtNpmModuleLoader = NpmModuleLoader<
CjsCodeAnalyzer,
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
DenoRtNpmResolver,
DenoRtSys,
>;
pub type DenoRtNodeCodeTranslator = NodeCodeTranslator<
CjsCodeAnalyzer,
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
DenoRtNpmResolver,
DenoRtSys,
>;
pub type DenoRtNodeResolver = deno_runtime::deno_node::NodeResolver<
DenoInNpmPackageChecker,
DenoRtNpmResolver,
DenoRtSys,
>;
pub type DenoRtNpmReqResolver = NpmReqResolver<
DenoInNpmPackageChecker,
RealIsBuiltInNodeModuleChecker,
DenoRtNpmResolver,
DenoRtSys,
>;
pub struct CjsCodeAnalyzer {
cjs_tracker: Arc<DenoRtCjsTracker>,
fs: deno_fs::FileSystemRc,
}
impl CjsCodeAnalyzer {
pub fn new(
cjs_tracker: Arc<DenoRtCjsTracker>,
fs: deno_fs::FileSystemRc,
) -> Self {
Self { cjs_tracker, fs }
}
async fn inner_cjs_analysis<'a>(
&self,
specifier: &Url,
source: Cow<'a, str>,
) -> Result<CjsAnalysis<'a>, JsErrorBox> {
let media_type = MediaType::from_specifier(specifier);
if media_type == MediaType::Json {
return Ok(CjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],
reexports: vec![],
}));
}
let cjs_tracker = self.cjs_tracker.clone();
let is_maybe_cjs = cjs_tracker
.is_maybe_cjs(specifier, media_type)
.map_err(JsErrorBox::from_err)?;
let analysis = if is_maybe_cjs {
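      // Parsing and analyzing the source with swc is CPU-bound, so do the
      // work on a blocking thread instead of the current async task.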
let maybe_cjs = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source: Arc<str> = source.to_string().into();
move || -> Result<_, JsErrorBox> {
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
specifier,
text: source.clone(),
media_type,
capture_tokens: true,
scope_analysis: false,
maybe_syntax: None,
})
.map_err(JsErrorBox::from_err)?;
let is_script = parsed_source.compute_is_script();
let is_cjs = cjs_tracker
.is_cjs_with_known_is_script(
parsed_source.specifier(),
media_type,
is_script,
)
.map_err(JsErrorBox::from_err)?;
if is_cjs {
let analysis = parsed_source.analyze_cjs();
Ok(Some(CjsAnalysisExports {
exports: analysis.exports,
reexports: analysis.reexports,
}))
} else {
Ok(None)
}
}
})
.await
.unwrap()?;
match maybe_cjs {
Some(cjs) => CjsAnalysis::Cjs(cjs),
None => CjsAnalysis::Esm(source),
}
} else {
CjsAnalysis::Esm(source)
};
Ok(analysis)
}
}
#[async_trait::async_trait(?Send)]
impl node_resolver::analyze::CjsCodeAnalyzer for CjsCodeAnalyzer {
async fn analyze_cjs<'a>(
&self,
specifier: &Url,
source: Option<Cow<'a, str>>,
) -> Result<CjsAnalysis<'a>, JsErrorBox> {
let source = match source {
Some(source) => source,
None => {
if let Ok(path) = specifier.to_file_path() {
if let Ok(source_from_file) =
self.fs.read_text_file_lossy_async(path, None).await
{
source_from_file
} else {
return Ok(CjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],
reexports: vec![],
}));
}
} else {
return Ok(CjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],
reexports: vec![],
}));
}
}
};
self.inner_cjs_analysis(specifier, source).await
}
}

1005
cli/rt/run.rs Normal file

File diff suppressed because it is too large

20
cli/snapshot/Cargo.toml Normal file

@ -0,0 +1,20 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "deno_snapshots"
version = "0.1.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "v8 snapshot used by the Deno CLI"
[lib]
path = "lib.rs"
[features]
disable = []
[build-dependencies]
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources"] }

3
cli/snapshot/README.md Normal file

@ -0,0 +1,3 @@
# deno_snapshots
v8 snapshot used in the Deno CLI.

30
cli/snapshot/build.rs Normal file

@ -0,0 +1,30 @@
// Copyright 2018-2025 the Deno authors. MIT license.
#[cfg(not(feature = "disable"))]
mod shared;
fn main() {
#[cfg(not(feature = "disable"))]
{
let o = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
create_cli_snapshot(cli_snapshot_path);
}
}
#[cfg(not(feature = "disable"))]
fn create_cli_snapshot(snapshot_path: std::path::PathBuf) {
use deno_runtime::ops::bootstrap::SnapshotOptions;
let snapshot_options = SnapshotOptions {
ts_version: shared::TS_VERSION.to_string(),
v8_version: deno_runtime::deno_core::v8::VERSION_STRING,
target: std::env::var("TARGET").unwrap(),
};
deno_runtime::snapshot::create_runtime_snapshot(
snapshot_path,
snapshot_options,
vec![],
);
}

13
cli/snapshot/lib.rs Normal file

@ -0,0 +1,13 @@
// Copyright 2018-2025 the Deno authors. MIT license.
#[cfg(not(feature = "disable"))]
pub static CLI_SNAPSHOT: Option<&[u8]> = Some(include_bytes!(concat!(
env!("OUT_DIR"),
"/CLI_SNAPSHOT.bin"
)));
#[cfg(feature = "disable")]
pub static CLI_SNAPSHOT: Option<&[u8]> = None;
mod shared;
pub use shared::TS_VERSION;
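A minimal consumer sketch (hypothetical; the function name is not from this commit): code embedding the runtime can use the option directly and will simply get `None` when `deno_snapshots` is built with the `disable` feature.

fn startup_snapshot() -> Option<&'static [u8]> {
  // `Some(..)` unless the `disable` feature of `deno_snapshots` is enabled.
  deno_snapshots::CLI_SNAPSHOT
}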

3
cli/snapshot/shared.rs Normal file

@ -0,0 +1,3 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub static TS_VERSION: &str = "5.6.2";

View file

@ -1,111 +1,64 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::env;
use std::env::current_exe;
use std::ffi::OsString;
use std::fs;
use std::fs::File;
use std::future::Future;
use std::io::ErrorKind;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::io::Write;
use std::ops::Range;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::io::AllowStdIo;
use deno_core::futures::AsyncReadExt;
use deno_core::futures::AsyncSeekExt;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_graph::ModuleGraph;
use deno_lib::cache::DenoDir;
use deno_lib::standalone::virtual_fs::FileSystemCaseSensitivity;
use deno_lib::args::CaData;
use deno_lib::args::UnstableConfig;
use deno_lib::shared::ReleaseChannel;
use deno_lib::standalone::binary::Metadata;
use deno_lib::standalone::binary::NodeModules;
use deno_lib::standalone::binary::SerializedResolverWorkspaceJsrPackage;
use deno_lib::standalone::binary::SerializedWorkspaceResolver;
use deno_lib::standalone::binary::SerializedWorkspaceResolverImportMap;
use deno_lib::standalone::binary::SourceMapStore;
use deno_lib::standalone::virtual_fs::BuiltVfs;
use deno_lib::standalone::virtual_fs::VfsBuilder;
use deno_lib::standalone::virtual_fs::VfsEntry;
use deno_lib::standalone::virtual_fs::VfsFileSubDataKind;
use deno_lib::standalone::virtual_fs::VirtualDirectory;
use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries;
use deno_lib::standalone::virtual_fs::WindowsSystemRootablePath;
use deno_lib::standalone::virtual_fs::DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError;
use deno_telemetry::OtelConfig;
use indexmap::IndexMap;
use log::Level;
use serde::Deserialize;
use serde::Serialize;
use super::file_system::DenoCompileFileSystem;
use super::serialization::deserialize_binary_data_section;
use super::serialization::serialize_binary_data_section;
use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder;
use super::serialization::SourceMapStore;
use super::virtual_fs::output_vfs;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsRoot;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::CompileFlags;
use crate::args::NpmInstallDepsProvider;
use crate::args::PermissionFlags;
use crate::args::UnstableConfig;
use crate::cache::FastInsecureHasher;
use crate::cache::DenoDir;
use crate::emit::Emitter;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::shared::ReleaseChannel;
use crate::sys::CliSys;
use crate::util::archive;
use crate::util::fs::canonicalize_path;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
".deno_compile_node_modules";
/// A URL that can be designated as the base for relative URLs.
///
/// After creation, this URL may be used to get the key for a
@ -151,62 +104,6 @@ impl<'a> StandaloneRelativeFileBaseUrl<'a> {
}
}
#[derive(Deserialize, Serialize)]
pub enum NodeModules {
Managed {
/// Relative path for the node_modules directory in the vfs.
node_modules_dir: Option<String>,
},
Byonm {
root_node_modules_dir: Option<String>,
},
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolverImportMap {
pub specifier: String,
pub json: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SerializedResolverWorkspaceJsrPackage {
pub relative_base: String,
pub name: String,
pub version: Option<Version>,
pub exports: IndexMap<String, String>,
}
#[derive(Deserialize, Serialize)]
pub struct SerializedWorkspaceResolver {
pub import_map: Option<SerializedWorkspaceResolverImportMap>,
pub jsr_pkgs: Vec<SerializedResolverWorkspaceJsrPackage>,
pub package_jsons: BTreeMap<String, serde_json::Value>,
pub pkg_json_resolution: PackageJsonDepResolution,
}
// Note: Don't use hashmaps/hashsets. Ensure the serialization
// is deterministic.
#[derive(Deserialize, Serialize)]
pub struct Metadata {
pub argv: Vec<String>,
pub seed: Option<u64>,
pub code_cache_key: Option<u64>,
pub permissions: PermissionsOptions,
pub location: Option<Url>,
pub v8_flags: Vec<String>,
pub log_level: Option<Level>,
pub ca_stores: Option<Vec<String>>,
pub ca_data: Option<Vec<u8>>,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub env_vars_from_env_file: IndexMap<String, String>,
pub workspace_resolver: SerializedWorkspaceResolver,
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig,
pub otel_config: OtelConfig,
pub vfs_case_sensitivity: FileSystemCaseSensitivity,
}
#[allow(clippy::too_many_arguments)]
fn write_binary_bytes(
mut file_writer: File,
@ -261,146 +158,6 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
|| libsui::utils::is_macho(&data)
}
pub struct StandaloneData {
pub metadata: Metadata,
pub modules: StandaloneModules,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub source_maps: SourceMapStore,
pub vfs: Arc<FileBackedVfs>,
}
pub struct StandaloneModules {
remote_modules: RemoteModulesStore,
vfs: Arc<FileBackedVfs>,
}
impl StandaloneModules {
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<&'a ModuleSpecifier>, JsErrorBox> {
if specifier.scheme() == "file" {
Ok(Some(specifier))
} else {
self.remote_modules.resolve_specifier(specifier)
}
}
pub fn has_file(&self, path: &Path) -> bool {
self.vfs.file_entry(path).is_ok()
}
pub fn read<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
kind: VfsFileSubDataKind,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?;
let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry, kind)?,
Err(err) if err.kind() == ErrorKind::NotFound => {
match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes,
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
return Ok(None)
}
Err(err) => return Err(err.into()),
}
}
Err(err) => return Err(err.into()),
};
Ok(Some(DenoCompileModuleData {
media_type: MediaType::from_specifier(specifier),
specifier,
data: bytes,
}))
} else {
self.remote_modules.read(specifier).map(|maybe_entry| {
maybe_entry.map(|entry| DenoCompileModuleData {
media_type: entry.media_type,
specifier: entry.specifier,
data: match kind {
VfsFileSubDataKind::Raw => entry.data,
VfsFileSubDataKind::ModuleGraph => {
entry.transpiled_data.unwrap_or(entry.data)
}
},
})
})
}
}
}
/// This function will try to run this binary as a standalone binary
/// produced by `deno compile`. It determines if this is a standalone
/// binary by skipping over the trailer width at the end of the file,
/// then checking for the magic trailer string `d3n0l4nd`. If found,
/// the bundle is executed. If not, this function exits with `Ok(None)`.
pub fn extract_standalone(
cli_args: Cow<Vec<OsString>>,
) -> Result<Option<StandaloneData>, AnyError> {
let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None);
};
let DeserializedDataSection {
mut metadata,
npm_snapshot,
remote_modules,
source_maps,
vfs_root_entries,
vfs_files_data,
} = match deserialize_binary_data_section(data)? {
Some(data_section) => data_section,
None => return Ok(None),
};
let root_path = {
let maybe_current_exe = std::env::current_exe().ok();
let current_exe_name = maybe_current_exe
.as_ref()
.and_then(|p| p.file_name())
.map(|p| p.to_string_lossy())
// should never happen
.unwrap_or_else(|| Cow::Borrowed("binary"));
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
};
let cli_args = cli_args.into_owned();
metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap());
}
let vfs = {
let fs_root = VfsRoot {
dir: VirtualDirectory {
// align the name of the directory with the root dir
name: root_path.file_name().unwrap().to_string_lossy().to_string(),
entries: vfs_root_entries,
},
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(
Cow::Borrowed(vfs_files_data),
fs_root,
metadata.vfs_case_sensitivity,
))
};
Ok(Some(StandaloneData {
metadata,
modules: StandaloneModules {
remote_modules,
vfs: vfs.clone(),
},
npm_snapshot,
root_path,
source_maps,
vfs,
}))
}
pub struct WriteBinOptions<'a> {
pub writer: File,
pub display_output_filename: &'a str,
@ -413,9 +170,8 @@ pub struct WriteBinOptions<'a> {
pub struct DenoCompileBinaryWriter<'a> {
cjs_tracker: &'a CliCjsTracker,
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir<CliSys>,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
@ -427,9 +183,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
pub fn new(
cjs_tracker: &'a CliCjsTracker,
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir<CliSys>,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
@ -440,7 +195,6 @@ impl<'a> DenoCompileBinaryWriter<'a> {
cli_options,
deno_dir,
emitter,
file_fetcher,
http_client_provider,
npm_resolver,
workspace_resolver,
@ -496,19 +250,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
let target = compile_flags.resolve_target();
let binary_name = format!("denort-{target}.zip");
let binary_path_suffix =
match crate::version::DENO_VERSION_INFO.release_channel {
ReleaseChannel::Canary => {
format!(
"canary/{}/{}",
crate::version::DENO_VERSION_INFO.git_hash,
binary_name
)
}
_ => {
format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name)
}
};
let binary_path_suffix = match DENO_VERSION_INFO.release_channel {
ReleaseChannel::Canary => {
format!("canary/{}/{}", DENO_VERSION_INFO.git_hash, binary_name)
}
_ => {
format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name)
}
};
let download_directory = self.deno_dir.dl_folder_path();
let binary_path = download_directory.join(&binary_path_suffix);

View file

@ -1,884 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use std::time::Duration;
use std::time::SystemTime;
use deno_lib::standalone::virtual_fs::VfsFileSubDataKind;
use deno_runtime::deno_fs::AccessCheckCb;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::FsDirEntry;
use deno_runtime::deno_fs::FsFileType;
use deno_runtime::deno_fs::OpenOptions;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::File;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_io::fs::FsResult;
use deno_runtime::deno_io::fs::FsStat;
use sys_traits::boxed::BoxedFsDirEntry;
use sys_traits::boxed::BoxedFsMetadataValue;
use sys_traits::boxed::FsMetadataBoxed;
use sys_traits::boxed::FsReadDirBoxed;
use sys_traits::FsCopy;
use sys_traits::FsMetadata;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::FileBackedVfsDirEntry;
use super::virtual_fs::FileBackedVfsFile;
use super::virtual_fs::FileBackedVfsMetadata;
#[derive(Debug, Clone)]
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
impl DenoCompileFileSystem {
pub fn new(vfs: Arc<FileBackedVfs>) -> Self {
Self(vfs)
}
fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {
if self.0.is_path_within(path) {
Err(FsError::NotSupported)
} else {
Ok(())
}
}
fn copy_to_real_path(
&self,
oldpath: &Path,
newpath: &Path,
) -> std::io::Result<u64> {
let old_file = self.0.file_entry(oldpath)?;
let old_file_bytes =
self.0.read_file_all(old_file, VfsFileSubDataKind::Raw)?;
let len = old_file_bytes.len() as u64;
RealFs
.write_file_sync(
newpath,
OpenOptions {
read: false,
write: true,
create: true,
truncate: true,
append: false,
create_new: false,
mode: None,
},
None,
&old_file_bytes,
)
.map_err(|err| err.into_io_error())?;
Ok(len)
}
}
#[async_trait::async_trait(?Send)]
impl FileSystem for DenoCompileFileSystem {
fn cwd(&self) -> FsResult<PathBuf> {
RealFs.cwd()
}
fn tmp_dir(&self) -> FsResult<PathBuf> {
RealFs.tmp_dir()
}
fn chdir(&self, path: &Path) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.chdir(path)
}
fn umask(&self, mask: Option<u32>) -> FsResult<u32> {
RealFs.umask(mask)
}
fn open_sync(
&self,
path: &Path,
options: OpenOptions,
access_check: Option<AccessCheckCb>,
) -> FsResult<Rc<dyn File>> {
if self.0.is_path_within(path) {
Ok(Rc::new(self.0.open_file(path)?))
} else {
RealFs.open_sync(path, options, access_check)
}
}
async fn open_async<'a>(
&'a self,
path: PathBuf,
options: OpenOptions,
access_check: Option<AccessCheckCb<'a>>,
) -> FsResult<Rc<dyn File>> {
if self.0.is_path_within(&path) {
Ok(Rc::new(self.0.open_file(&path)?))
} else {
RealFs.open_async(path, options, access_check).await
}
}
fn mkdir_sync(
&self,
path: &Path,
recursive: bool,
mode: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.mkdir_sync(path, recursive, mode)
}
async fn mkdir_async(
&self,
path: PathBuf,
recursive: bool,
mode: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs.mkdir_async(path, recursive, mode).await
}
fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.chmod_sync(path, mode)
}
async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs.chmod_async(path, mode).await
}
fn chown_sync(
&self,
path: &Path,
uid: Option<u32>,
gid: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.chown_sync(path, uid, gid)
}
async fn chown_async(
&self,
path: PathBuf,
uid: Option<u32>,
gid: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs.chown_async(path, uid, gid).await
}
fn lchown_sync(
&self,
path: &Path,
uid: Option<u32>,
gid: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.lchown_sync(path, uid, gid)
}
async fn lchown_async(
&self,
path: PathBuf,
uid: Option<u32>,
gid: Option<u32>,
) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs.lchown_async(path, uid, gid).await
}
fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.remove_sync(path, recursive)
}
async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs.remove_async(path, recursive).await
}
fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
self.error_if_in_vfs(newpath)?;
if self.0.is_path_within(oldpath) {
self
.copy_to_real_path(oldpath, newpath)
.map(|_| ())
.map_err(FsError::Io)
} else {
RealFs.copy_file_sync(oldpath, newpath)
}
}
async fn copy_file_async(
&self,
oldpath: PathBuf,
newpath: PathBuf,
) -> FsResult<()> {
self.error_if_in_vfs(&newpath)?;
if self.0.is_path_within(&oldpath) {
let fs = self.clone();
tokio::task::spawn_blocking(move || {
fs.copy_to_real_path(&oldpath, &newpath)
.map(|_| ())
.map_err(FsError::Io)
})
.await?
} else {
RealFs.copy_file_async(oldpath, newpath).await
}
}
fn cp_sync(&self, from: &Path, to: &Path) -> FsResult<()> {
self.error_if_in_vfs(to)?;
RealFs.cp_sync(from, to)
}
async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> {
self.error_if_in_vfs(&to)?;
RealFs.cp_async(from, to).await
}
fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
if self.0.is_path_within(path) {
Ok(self.0.stat(path)?.as_fs_stat())
} else {
RealFs.stat_sync(path)
}
}
async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
if self.0.is_path_within(&path) {
Ok(self.0.stat(&path)?.as_fs_stat())
} else {
RealFs.stat_async(path).await
}
}
fn lstat_sync(&self, path: &Path) -> FsResult<FsStat> {
if self.0.is_path_within(path) {
Ok(self.0.lstat(path)?.as_fs_stat())
} else {
RealFs.lstat_sync(path)
}
}
async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
if self.0.is_path_within(&path) {
Ok(self.0.lstat(&path)?.as_fs_stat())
} else {
RealFs.lstat_async(path).await
}
}
fn realpath_sync(&self, path: &Path) -> FsResult<PathBuf> {
if self.0.is_path_within(path) {
Ok(self.0.canonicalize(path)?)
} else {
RealFs.realpath_sync(path)
}
}
async fn realpath_async(&self, path: PathBuf) -> FsResult<PathBuf> {
if self.0.is_path_within(&path) {
Ok(self.0.canonicalize(&path)?)
} else {
RealFs.realpath_async(path).await
}
}
fn read_dir_sync(&self, path: &Path) -> FsResult<Vec<FsDirEntry>> {
if self.0.is_path_within(path) {
Ok(self.0.read_dir(path)?)
} else {
RealFs.read_dir_sync(path)
}
}
async fn read_dir_async(&self, path: PathBuf) -> FsResult<Vec<FsDirEntry>> {
if self.0.is_path_within(&path) {
Ok(self.0.read_dir(&path)?)
} else {
RealFs.read_dir_async(path).await
}
}
fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
self.error_if_in_vfs(oldpath)?;
self.error_if_in_vfs(newpath)?;
RealFs.rename_sync(oldpath, newpath)
}
async fn rename_async(
&self,
oldpath: PathBuf,
newpath: PathBuf,
) -> FsResult<()> {
self.error_if_in_vfs(&oldpath)?;
self.error_if_in_vfs(&newpath)?;
RealFs.rename_async(oldpath, newpath).await
}
fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
self.error_if_in_vfs(oldpath)?;
self.error_if_in_vfs(newpath)?;
RealFs.link_sync(oldpath, newpath)
}
async fn link_async(
&self,
oldpath: PathBuf,
newpath: PathBuf,
) -> FsResult<()> {
self.error_if_in_vfs(&oldpath)?;
self.error_if_in_vfs(&newpath)?;
RealFs.link_async(oldpath, newpath).await
}
fn symlink_sync(
&self,
oldpath: &Path,
newpath: &Path,
file_type: Option<FsFileType>,
) -> FsResult<()> {
self.error_if_in_vfs(oldpath)?;
self.error_if_in_vfs(newpath)?;
RealFs.symlink_sync(oldpath, newpath, file_type)
}
async fn symlink_async(
&self,
oldpath: PathBuf,
newpath: PathBuf,
file_type: Option<FsFileType>,
) -> FsResult<()> {
self.error_if_in_vfs(&oldpath)?;
self.error_if_in_vfs(&newpath)?;
RealFs.symlink_async(oldpath, newpath, file_type).await
}
fn read_link_sync(&self, path: &Path) -> FsResult<PathBuf> {
if self.0.is_path_within(path) {
Ok(self.0.read_link(path)?)
} else {
RealFs.read_link_sync(path)
}
}
async fn read_link_async(&self, path: PathBuf) -> FsResult<PathBuf> {
if self.0.is_path_within(&path) {
Ok(self.0.read_link(&path)?)
} else {
RealFs.read_link_async(path).await
}
}
fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.truncate_sync(path, len)
}
async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs.truncate_async(path, len).await
}
fn utime_sync(
&self,
path: &Path,
atime_secs: i64,
atime_nanos: u32,
mtime_secs: i64,
mtime_nanos: u32,
) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.utime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
}
async fn utime_async(
&self,
path: PathBuf,
atime_secs: i64,
atime_nanos: u32,
mtime_secs: i64,
mtime_nanos: u32,
) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs
.utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
.await
}
fn lutime_sync(
&self,
path: &Path,
atime_secs: i64,
atime_nanos: u32,
mtime_secs: i64,
mtime_nanos: u32,
) -> FsResult<()> {
self.error_if_in_vfs(path)?;
RealFs.lutime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
}
async fn lutime_async(
&self,
path: PathBuf,
atime_secs: i64,
atime_nanos: u32,
mtime_secs: i64,
mtime_nanos: u32,
) -> FsResult<()> {
self.error_if_in_vfs(&path)?;
RealFs
.lutime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
.await
}
}
impl sys_traits::BaseFsHardLink for DenoCompileFileSystem {
#[inline]
fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
self.link_sync(src, dst).map_err(|err| err.into_io_error())
}
}
impl sys_traits::BaseFsRead for DenoCompileFileSystem {
#[inline]
fn base_fs_read(&self, path: &Path) -> std::io::Result<Cow<'static, [u8]>> {
self
.read_file_sync(path, None)
.map_err(|err| err.into_io_error())
}
}
impl sys_traits::FsMetadataValue for FileBackedVfsMetadata {
fn file_type(&self) -> sys_traits::FileType {
self.file_type
}
fn len(&self) -> u64 {
self.len
}
fn accessed(&self) -> std::io::Result<SystemTime> {
Err(not_supported("accessed time"))
}
fn created(&self) -> std::io::Result<SystemTime> {
Err(not_supported("created time"))
}
fn changed(&self) -> std::io::Result<SystemTime> {
Err(not_supported("changed time"))
}
fn modified(&self) -> std::io::Result<SystemTime> {
Err(not_supported("modified time"))
}
fn dev(&self) -> std::io::Result<u64> {
Ok(0)
}
fn ino(&self) -> std::io::Result<u64> {
Ok(0)
}
fn mode(&self) -> std::io::Result<u32> {
Ok(0)
}
fn nlink(&self) -> std::io::Result<u64> {
Ok(0)
}
fn uid(&self) -> std::io::Result<u32> {
Ok(0)
}
fn gid(&self) -> std::io::Result<u32> {
Ok(0)
}
fn rdev(&self) -> std::io::Result<u64> {
Ok(0)
}
fn blksize(&self) -> std::io::Result<u64> {
Ok(0)
}
fn blocks(&self) -> std::io::Result<u64> {
Ok(0)
}
fn is_block_device(&self) -> std::io::Result<bool> {
Ok(false)
}
fn is_char_device(&self) -> std::io::Result<bool> {
Ok(false)
}
fn is_fifo(&self) -> std::io::Result<bool> {
Ok(false)
}
fn is_socket(&self) -> std::io::Result<bool> {
Ok(false)
}
fn file_attributes(&self) -> std::io::Result<u32> {
Ok(0)
}
}
fn not_supported(name: &str) -> std::io::Error {
std::io::Error::new(
ErrorKind::Unsupported,
format!(
"{} is not supported for an embedded deno compile file",
name
),
)
}
impl sys_traits::FsDirEntry for FileBackedVfsDirEntry {
type Metadata = BoxedFsMetadataValue;
fn file_name(&self) -> Cow<std::ffi::OsStr> {
Cow::Borrowed(self.metadata.name.as_ref())
}
fn file_type(&self) -> std::io::Result<sys_traits::FileType> {
Ok(self.metadata.file_type)
}
fn metadata(&self) -> std::io::Result<Self::Metadata> {
Ok(BoxedFsMetadataValue(Box::new(self.metadata.clone())))
}
fn path(&self) -> Cow<Path> {
Cow::Owned(self.parent_path.join(&self.metadata.name))
}
}
impl sys_traits::BaseFsReadDir for DenoCompileFileSystem {
type ReadDirEntry = BoxedFsDirEntry;
fn base_fs_read_dir(
&self,
path: &Path,
) -> std::io::Result<
Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
> {
if self.0.is_path_within(path) {
let entries = self.0.read_dir_with_metadata(path)?;
Ok(Box::new(
entries.map(|entry| Ok(BoxedFsDirEntry::new(entry))),
))
} else {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.fs_read_dir_boxed(path)
}
}
}
impl sys_traits::BaseFsCanonicalize for DenoCompileFileSystem {
#[inline]
fn base_fs_canonicalize(&self, path: &Path) -> std::io::Result<PathBuf> {
self.realpath_sync(path).map_err(|err| err.into_io_error())
}
}
impl sys_traits::BaseFsMetadata for DenoCompileFileSystem {
type Metadata = BoxedFsMetadataValue;
#[inline]
fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
if self.0.is_path_within(path) {
Ok(BoxedFsMetadataValue::new(self.0.stat(path)?))
} else {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.fs_metadata_boxed(path)
}
}
#[inline]
fn base_fs_symlink_metadata(
&self,
path: &Path,
) -> std::io::Result<Self::Metadata> {
if self.0.is_path_within(path) {
Ok(BoxedFsMetadataValue::new(self.0.lstat(path)?))
} else {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.fs_symlink_metadata_boxed(path)
}
}
}
impl sys_traits::BaseFsCopy for DenoCompileFileSystem {
#[inline]
fn base_fs_copy(&self, from: &Path, to: &Path) -> std::io::Result<u64> {
self
.error_if_in_vfs(to)
.map_err(|err| err.into_io_error())?;
if self.0.is_path_within(from) {
self.copy_to_real_path(from, to)
} else {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.fs_copy(from, to)
}
}
}
impl sys_traits::BaseFsCloneFile for DenoCompileFileSystem {
fn base_fs_clone_file(
&self,
_from: &Path,
_to: &Path,
) -> std::io::Result<()> {
// will cause a fallback in the code that uses this
Err(not_supported("cloning files"))
}
}
impl sys_traits::BaseFsCreateDir for DenoCompileFileSystem {
#[inline]
fn base_fs_create_dir(
&self,
path: &Path,
options: &sys_traits::CreateDirOptions,
) -> std::io::Result<()> {
self
.mkdir_sync(path, options.recursive, options.mode)
.map_err(|err| err.into_io_error())
}
}
impl sys_traits::BaseFsRemoveFile for DenoCompileFileSystem {
#[inline]
fn base_fs_remove_file(&self, path: &Path) -> std::io::Result<()> {
self
.remove_sync(path, false)
.map_err(|err| err.into_io_error())
}
}
impl sys_traits::BaseFsRename for DenoCompileFileSystem {
#[inline]
fn base_fs_rename(&self, from: &Path, to: &Path) -> std::io::Result<()> {
self
.rename_sync(from, to)
.map_err(|err| err.into_io_error())
}
}
pub enum FsFileAdapter {
Real(sys_traits::impls::RealFsFile),
Vfs(FileBackedVfsFile),
}
impl sys_traits::FsFile for FsFileAdapter {}
impl sys_traits::FsFileAsRaw for FsFileAdapter {
#[cfg(windows)]
fn fs_file_as_raw_handle(&self) -> Option<std::os::windows::io::RawHandle> {
match self {
Self::Real(file) => file.fs_file_as_raw_handle(),
Self::Vfs(_) => None,
}
}
#[cfg(unix)]
fn fs_file_as_raw_fd(&self) -> Option<std::os::fd::RawFd> {
match self {
Self::Real(file) => file.fs_file_as_raw_fd(),
Self::Vfs(_) => None,
}
}
}
impl sys_traits::FsFileSyncData for FsFileAdapter {
fn fs_file_sync_data(&mut self) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_sync_data(),
Self::Vfs(_) => Ok(()),
}
}
}
impl sys_traits::FsFileSyncAll for FsFileAdapter {
fn fs_file_sync_all(&mut self) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_sync_all(),
Self::Vfs(_) => Ok(()),
}
}
}
impl sys_traits::FsFileSetPermissions for FsFileAdapter {
#[inline]
fn fs_file_set_permissions(&mut self, mode: u32) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_set_permissions(mode),
Self::Vfs(_) => Ok(()),
}
}
}
impl std::io::Read for FsFileAdapter {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
match self {
Self::Real(file) => file.read(buf),
Self::Vfs(file) => file.read_to_buf(buf),
}
}
}
impl std::io::Seek for FsFileAdapter {
fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
match self {
Self::Real(file) => file.seek(pos),
Self::Vfs(file) => file.seek(pos),
}
}
}
impl std::io::Write for FsFileAdapter {
#[inline]
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
match self {
Self::Real(file) => file.write(buf),
Self::Vfs(_) => Err(not_supported("writing files")),
}
}
#[inline]
fn flush(&mut self) -> std::io::Result<()> {
match self {
Self::Real(file) => file.flush(),
Self::Vfs(_) => Err(not_supported("writing files")),
}
}
}
impl sys_traits::FsFileSetLen for FsFileAdapter {
#[inline]
fn fs_file_set_len(&mut self, len: u64) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_set_len(len),
Self::Vfs(_) => Err(not_supported("setting file length")),
}
}
}
impl sys_traits::FsFileSetTimes for FsFileAdapter {
fn fs_file_set_times(
&mut self,
times: sys_traits::FsFileTimes,
) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_set_times(times),
Self::Vfs(_) => Err(not_supported("setting file times")),
}
}
}
impl sys_traits::FsFileLock for FsFileAdapter {
fn fs_file_lock(
&mut self,
mode: sys_traits::FsFileLockMode,
) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_lock(mode),
Self::Vfs(_) => Err(not_supported("locking files")),
}
}
fn fs_file_try_lock(
&mut self,
mode: sys_traits::FsFileLockMode,
) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_try_lock(mode),
Self::Vfs(_) => Err(not_supported("locking files")),
}
}
fn fs_file_unlock(&mut self) -> std::io::Result<()> {
match self {
Self::Real(file) => file.fs_file_unlock(),
Self::Vfs(_) => Err(not_supported("unlocking files")),
}
}
}
impl sys_traits::FsFileIsTerminal for FsFileAdapter {
#[inline]
fn fs_file_is_terminal(&self) -> bool {
match self {
Self::Real(file) => file.fs_file_is_terminal(),
Self::Vfs(_) => false,
}
}
}
impl sys_traits::BaseFsOpen for DenoCompileFileSystem {
type File = FsFileAdapter;
fn base_fs_open(
&self,
path: &Path,
options: &sys_traits::OpenOptions,
) -> std::io::Result<Self::File> {
if self.0.is_path_within(path) {
Ok(FsFileAdapter::Vfs(self.0.open_file(path)?))
} else {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
Ok(FsFileAdapter::Real(
sys_traits::impls::RealSys.base_fs_open(path, options)?,
))
}
}
}
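The open path above is the heart of the compiled-binary overlay: paths inside the embedded VFS get a virtual file handle, everything else falls through to the real filesystem. A minimal, std-only sketch of that dispatch shape (the OverlayFs and FileHandle names are hypothetical stand-ins, not the actual DenoCompileFileSystem API):

use std::fs::File;
use std::io::{Cursor, Read};
use std::path::Path;

// Stand-ins for RealFsFile / FileBackedVfsFile in the adapter above.
enum FileHandle {
  Real(File),
  Vfs(Cursor<Vec<u8>>),
}

impl Read for FileHandle {
  fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
    match self {
      FileHandle::Real(f) => f.read(buf),
      FileHandle::Vfs(c) => c.read(buf),
    }
  }
}

struct OverlayFs {
  // Everything under this prefix is served from memory.
  vfs_root: &'static str,
}

impl OverlayFs {
  fn open(&self, path: &Path) -> std::io::Result<FileHandle> {
    if path.starts_with(self.vfs_root) {
      // The real implementation would look the path up in the embedded VFS data.
      Ok(FileHandle::Vfs(Cursor::new(b"embedded contents".to_vec())))
    } else {
      Ok(FileHandle::Real(File::open(path)?))
    }
  }
}

fn main() -> std::io::Result<()> {
  let fs = OverlayFs { vfs_root: "/deno-compile" };
  let mut out = String::new();
  fs.open(Path::new("/deno-compile/main.ts"))?
    .read_to_string(&mut out)?;
  println!("{out}");
  Ok(())
}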
impl sys_traits::BaseFsSymlinkDir for DenoCompileFileSystem {
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
self
.symlink_sync(src, dst, Some(FsFileType::Directory))
.map_err(|err| err.into_io_error())
}
}
impl sys_traits::SystemRandom for DenoCompileFileSystem {
#[inline]
fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.sys_random(buf)
}
}
impl sys_traits::SystemTimeNow for DenoCompileFileSystem {
#[inline]
fn sys_time_now(&self) -> SystemTime {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.sys_time_now()
}
}
impl sys_traits::ThreadSleep for DenoCompileFileSystem {
#[inline]
fn thread_sleep(&self, dur: Duration) {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.thread_sleep(dur)
}
}
impl sys_traits::EnvCurrentDir for DenoCompileFileSystem {
fn env_current_dir(&self) -> std::io::Result<PathBuf> {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.env_current_dir()
}
}
impl sys_traits::BaseEnvVar for DenoCompileFileSystem {
fn base_env_var_os(
&self,
key: &std::ffi::OsStr,
) -> Option<std::ffi::OsString> {
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs
sys_traits::impls::RealSys.base_env_var_os(key)
}
}

File diff suppressed because it is too large


@ -1,37 +1,17 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;
use capacity_builder::BytesAppendable;
use deno_ast::swc::common::source_map;
use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_error::JsErrorBox;
use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries;
use deno_lib::standalone::binary::Metadata;
use deno_lib::standalone::binary::SourceMapStore;
use deno_lib::standalone::binary::MAGIC_BYTES;
use deno_lib::standalone::virtual_fs::BuiltVfs;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use indexmap::IndexMap;
use super::binary::Metadata;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::VfsBuilder;
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
/// Binary format:
/// * d3n0l4nd
@ -82,12 +62,12 @@ pub fn serialize_binary_data_section(
}
// 5. Source maps
{
builder.append_le(source_map_store.data.len() as u32);
for (specifier, source_map) in &source_map_store.data {
builder.append_le(source_map_store.len() as u32);
for (specifier, source_map) in source_map_store.iter() {
builder.append_le(specifier.len() as u32);
builder.append(specifier);
builder.append_le(source_map.len() as u32);
builder.append(source_map.as_ref());
builder.append(source_map);
}
}
@ -99,91 +79,6 @@ pub fn serialize_binary_data_section(
Ok(bytes)
}
pub struct DeserializedDataSection {
pub metadata: Metadata,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub remote_modules: RemoteModulesStore,
pub source_maps: SourceMapStore,
pub vfs_root_entries: VirtualDirectoryEntries,
pub vfs_files_data: &'static [u8],
}
pub fn deserialize_binary_data_section(
data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
if input.len() < MAGIC_BYTES.len() {
bail!("Unexpected end of data. Could not find magic bytes.");
}
let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
if magic_bytes != MAGIC_BYTES {
return Ok((input, false));
}
Ok((input, true))
}
#[allow(clippy::type_complexity)]
fn read_source_map_entry(
input: &[u8],
) -> Result<(&[u8], (Cow<str>, &[u8])), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let (input, source_map) = read_bytes_with_u32_len(input)?;
Ok((input, (specifier, source_map)))
}
let (input, found) = read_magic_bytes(data)?;
if !found {
return Ok(None);
}
// 1. Metadata
let (input, data) =
read_bytes_with_u64_len(input).context("reading metadata")?;
let metadata: Metadata =
serde_json::from_slice(data).context("deserializing metadata")?;
// 2. Npm snapshot
let (input, data) =
read_bytes_with_u64_len(input).context("reading npm snapshot")?;
let npm_snapshot = if data.is_empty() {
None
} else {
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
};
// 3. Remote modules
let (input, remote_modules) =
RemoteModulesStore::build(input).context("deserializing remote modules")?;
// 4. VFS
let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
let vfs_root_entries: VirtualDirectoryEntries =
serde_json::from_slice(data).context("deserializing vfs data")?;
let (input, vfs_files_data) =
read_bytes_with_u64_len(input).context("reading vfs files data")?;
// 5. Source maps
let (mut input, source_map_data_len) = read_u32_as_usize(input)?;
let mut source_maps = SourceMapStore::with_capacity(source_map_data_len);
for _ in 0..source_map_data_len {
let (current_input, (specifier, source_map)) =
read_source_map_entry(input)?;
input = current_input;
source_maps.add(specifier, Cow::Borrowed(source_map));
}
// finally ensure we read the magic bytes at the end
let (_input, found) = read_magic_bytes(input)?;
if !found {
bail!("Could not find magic bytes at the end of the data.");
}
Ok(Some(DeserializedDataSection {
metadata,
npm_snapshot,
remote_modules,
source_maps,
vfs_root_entries,
vfs_files_data,
}))
}
#[derive(Default)]
pub struct RemoteModulesStoreBuilder {
specifiers: Vec<(String, u64)>,
@ -272,249 +167,6 @@ impl RemoteModulesStoreBuilder {
}
}
pub enum DenoCompileModuleSource {
String(&'static str),
Bytes(Cow<'static, [u8]>),
}
impl DenoCompileModuleSource {
pub fn into_for_v8(self) -> ModuleSourceCode {
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
ModuleSourceCode::Bytes(match data {
Cow::Borrowed(d) => d.into(),
Cow::Owned(d) => d.into_boxed_slice().into(),
})
}
match self {
// todo(https://github.com/denoland/deno_core/pull/943): store whether
// the string is ascii or not ahead of time so we can avoid the is_ascii()
// check in FastString::from_static
Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
Self::Bytes(b) => into_bytes(b),
}
}
}
pub struct SourceMapStore {
data: IndexMap<Cow<'static, str>, Cow<'static, [u8]>>,
}
impl SourceMapStore {
pub fn with_capacity(capacity: usize) -> Self {
Self {
data: IndexMap::with_capacity(capacity),
}
}
pub fn add(
&mut self,
specifier: Cow<'static, str>,
source_map: Cow<'static, [u8]>,
) {
self.data.insert(specifier, source_map);
}
pub fn get(&self, specifier: &str) -> Option<&[u8]> {
self.data.get(specifier).map(|v| v.as_ref())
}
}
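A brief usage sketch of the store above (it relies only on the with_capacity/add/get methods shown here and the Cow import already in this file; the function name is illustrative):

fn source_map_store_example() {
  let mut store = SourceMapStore::with_capacity(1);
  store.add(
    Cow::Borrowed("file:///main.ts"),
    Cow::Borrowed(&b"{\"version\":3}"[..]),
  );
  assert!(store.get("file:///main.ts").is_some());
  assert!(store.get("file:///missing.ts").is_none());
}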
pub struct DenoCompileModuleData<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
pub data: Cow<'static, [u8]>,
}
impl<'a> DenoCompileModuleData<'a> {
pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
match data {
Cow::Borrowed(d) => DenoCompileModuleSource::String(
// SAFETY: we know this is a valid utf8 string
unsafe { std::str::from_utf8_unchecked(d) },
),
Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
}
}
let (media_type, source) = match self.media_type {
MediaType::JavaScript
| MediaType::Jsx
| MediaType::Mjs
| MediaType::Cjs
| MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Tsx => {
(ModuleType::JavaScript, into_string_unsafe(self.data))
}
MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
MediaType::Wasm => {
(ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data))
}
// just assume javascript if we made it here
MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
ModuleType::JavaScript,
DenoCompileModuleSource::Bytes(self.data),
),
};
(self.specifier, media_type, source)
}
}
pub struct RemoteModuleEntry<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
pub data: Cow<'static, [u8]>,
pub transpiled_data: Option<Cow<'static, [u8]>>,
}
enum RemoteModulesStoreSpecifierValue {
Data(usize),
Redirect(Url),
}
pub struct RemoteModulesStore {
specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
files_data: &'static [u8],
}
impl RemoteModulesStore {
fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> {
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let specifier = Url::parse(&specifier)?;
let (input, offset) = read_u64(input)?;
Ok((input, (specifier, offset)))
}
fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
let (input, from) = read_string_lossy(input)?;
let from = Url::parse(&from)?;
let (input, to) = read_string_lossy(input)?;
let to = Url::parse(&to)?;
Ok((input, (from, to)))
}
fn read_headers(
input: &[u8],
) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
{
let (input, specifiers_len) = read_u32_as_usize(input)?;
let (mut input, redirects_len) = read_u32_as_usize(input)?;
let mut specifiers =
HashMap::with_capacity(specifiers_len + redirects_len);
for _ in 0..specifiers_len {
let (current_input, (specifier, offset)) =
read_specifier(input).context("reading specifier")?;
input = current_input;
specifiers.insert(
specifier,
RemoteModulesStoreSpecifierValue::Data(offset as usize),
);
}
for _ in 0..redirects_len {
let (current_input, (from, to)) = read_redirect(input)?;
input = current_input;
specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
}
Ok((input, specifiers))
}
let (input, specifiers) = read_headers(input)?;
let (input, files_data) = read_bytes_with_u64_len(input)?;
Ok((
input,
Self {
specifiers,
files_data,
},
))
}
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a Url,
) -> Result<Option<&'a Url>, JsErrorBox> {
let mut count = 0;
let mut current = specifier;
loop {
if count > 10 {
return Err(JsErrorBox::generic(format!(
"Too many redirects resolving '{}'",
specifier
)));
}
match self.specifiers.get(current) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
current = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
return Ok(Some(current));
}
None => {
return Ok(None);
}
}
}
}
pub fn read<'a>(
&'a self,
original_specifier: &'a Url,
) -> Result<Option<RemoteModuleEntry<'a>>, AnyError> {
let mut count = 0;
let mut specifier = original_specifier;
loop {
if count > 10 {
bail!("Too many redirects resolving '{}'", original_specifier);
}
match self.specifiers.get(specifier) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
specifier = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
let input = &self.files_data[*offset..];
let (input, media_type_byte) = read_bytes(input, 1)?;
let media_type = deserialize_media_type(media_type_byte[0])?;
let (input, data) = read_bytes_with_u32_len(input)?;
check_has_len(input, 1)?;
let (input, has_transpiled) = (&input[1..], input[0]);
let (_, transpiled_data) = match has_transpiled {
0 => (input, None),
1 => {
let (input, data) = read_bytes_with_u32_len(input)?;
(input, Some(data))
}
value => bail!(
"Invalid transpiled data flag: {}. Compiled data is corrupt.",
value
),
};
return Ok(Some(RemoteModuleEntry {
specifier,
media_type,
data: Cow::Borrowed(data),
transpiled_data: transpiled_data.map(Cow::Borrowed),
}));
}
None => {
return Ok(None);
}
}
}
}
}
fn serialize_npm_snapshot(
mut snapshot: SerializedNpmResolutionSnapshot,
) -> Vec<u8> {
@ -563,106 +215,6 @@ fn serialize_npm_snapshot(
bytes
}
fn deserialize_npm_snapshot(
input: &[u8],
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
let (input, id) = read_string_lossy(input)?;
let id = NpmPackageId::from_serialized(&id)?;
Ok((input, id))
}
#[allow(clippy::needless_lifetimes)] // clippy bug
fn parse_root_package<'a>(
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
{
|input| {
let (input, req) = read_string_lossy(input)?;
let req = PackageReq::from_str(&req)?;
let (input, id) = read_u32_as_usize(input)?;
Ok((input, (req, id_to_npm_id(id)?)))
}
}
#[allow(clippy::needless_lifetimes)] // clippy bug
fn parse_package_dep<'a>(
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
{
|input| {
let (input, req) = read_string_lossy(input)?;
let (input, id) = read_u32_as_usize(input)?;
let req = StackString::from_cow(req);
Ok((input, (req, id_to_npm_id(id)?)))
}
}
fn parse_package<'a>(
input: &'a [u8],
id: NpmPackageId,
id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
let (input, deps_len) = read_u32_as_usize(input)?;
let (input, dependencies) =
parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
Ok((
input,
SerializedNpmResolutionSnapshotPackage {
id,
system: Default::default(),
dist: Default::default(),
dependencies,
optional_dependencies: Default::default(),
bin: None,
scripts: Default::default(),
deprecated: Default::default(),
},
))
}
let (input, packages_len) = read_u32_as_usize(input)?;
// get a hashmap of all the npm package ids to their serialized ids
let (input, data_ids_to_npm_ids) =
parse_vec_n_times(input, packages_len, parse_id)
.context("deserializing id")?;
let data_id_to_npm_id = |id: usize| {
data_ids_to_npm_ids
.get(id)
.cloned()
.ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
};
let (input, root_packages_len) = read_u32_as_usize(input)?;
let (input, root_packages) = parse_hashmap_n_times(
input,
root_packages_len,
parse_root_package(&data_id_to_npm_id),
)
.context("deserializing root package")?;
let (input, packages) =
parse_vec_n_times_with_index(input, packages_len, |input, index| {
parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
})
.context("deserializing package")?;
if !input.is_empty() {
bail!("Unexpected data left over");
}
Ok(
SerializedNpmResolutionSnapshot {
packages,
root_packages,
}
// this is ok because we have already verified that all the
// identifiers found in the snapshot are valid via the
// npm package id -> npm package id mapping
.into_valid_unsafe(),
)
}
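The key trick in the snapshot layout parsed above is integer indirection: every package id is written once up front, and root packages and dependency edges then refer to ids by index. Because each index is checked against that list during parsing, the final into_valid_unsafe() call is sound. A tiny illustrative sketch of the lookup (invented data, not the real snapshot types):

fn id_indirection_example() {
  // Written once up front (the package id section).
  let ids = vec!["chalk@5.3.0".to_string(), "ms@2.1.3".to_string()];
  // Later sections store indexes instead of repeating the ids.
  let id_for = |index: usize| {
    ids
      .get(index)
      .cloned()
      .ok_or_else(|| "Invalid npm package id".to_string())
  };
  assert_eq!(id_for(1).unwrap(), "ms@2.1.3");
  assert!(id_for(7).is_err());
}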
fn serialize_media_type(media_type: MediaType) -> u8 {
match media_type {
MediaType::JavaScript => 0,
@ -683,106 +235,3 @@ fn serialize_media_type(media_type: MediaType) -> u8 {
MediaType::Unknown => 15,
}
}
fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
match value {
0 => Ok(MediaType::JavaScript),
1 => Ok(MediaType::Jsx),
2 => Ok(MediaType::Mjs),
3 => Ok(MediaType::Cjs),
4 => Ok(MediaType::TypeScript),
5 => Ok(MediaType::Mts),
6 => Ok(MediaType::Cts),
7 => Ok(MediaType::Dts),
8 => Ok(MediaType::Dmts),
9 => Ok(MediaType::Dcts),
10 => Ok(MediaType::Tsx),
11 => Ok(MediaType::Json),
12 => Ok(MediaType::Wasm),
13 => Ok(MediaType::Css),
14 => Ok(MediaType::SourceMap),
15 => Ok(MediaType::Unknown),
_ => bail!("Unknown media type value: {}", value),
}
}
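Since serialize_media_type and deserialize_media_type are hand-maintained mirrors of each other, a round-trip check is a cheap guard against the two drifting apart. A hedged sketch (the test name is illustrative; it relies only on the functions shown above and deno_ast::MediaType):

#[test]
fn media_type_tags_round_trip() {
  for media_type in [
    MediaType::JavaScript,
    MediaType::Json,
    MediaType::Wasm,
    MediaType::Unknown,
  ] {
    let tag = serialize_media_type(media_type);
    assert_eq!(deserialize_media_type(tag).unwrap(), media_type);
  }
}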
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
mut input: &[u8],
times: usize,
parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
let mut results = HashMap::with_capacity(times);
for _ in 0..times {
let result = parse(input);
let (new_input, (key, value)) = result?;
results.insert(key, value);
input = new_input;
}
Ok((input, results))
}
fn parse_vec_n_times<TResult>(
input: &[u8],
times: usize,
parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
parse_vec_n_times_with_index(input, times, |input, _index| parse(input))
}
fn parse_vec_n_times_with_index<TResult>(
mut input: &[u8],
times: usize,
parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
let mut results = Vec::with_capacity(times);
for i in 0..times {
let result = parse(input, i);
let (new_input, result) = result?;
results.push(result);
input = new_input;
}
Ok((input, results))
}
fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u64(input)?;
let (input, data) = read_bytes(input, len as usize)?;
Ok((input, data))
}
fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u32_as_usize(input)?;
let (input, data) = read_bytes(input, len)?;
Ok((input, data))
}
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
check_has_len(input, len)?;
let (len_bytes, input) = input.split_at(len);
Ok((input, len_bytes))
}
#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> {
if input.len() < len {
bail!("Unexpected end of data.");
}
Ok(())
}
fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
let (input, data_bytes) = read_bytes_with_u32_len(input)?;
Ok((input, String::from_utf8_lossy(data_bytes)))
}
fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
let (input, len_bytes) = read_bytes(input, 4)?;
let len = u32::from_le_bytes(len_bytes.try_into()?);
Ok((input, len as usize))
}
fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
let (input, len_bytes) = read_bytes(input, 8)?;
let len = u64::from_le_bytes(len_bytes.try_into()?);
Ok((input, len))
}
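The helpers above are the whole framing vocabulary of the data section: a little-endian u32 or u64 length prefix followed by raw bytes. A hedged round-trip sketch that writes one source-map entry the way serialize_binary_data_section does and reads it back with the helpers defined above (roundtrip_one_entry is illustrative, not part of the real module):

fn roundtrip_one_entry() -> Result<(), AnyError> {
  let specifier = "file:///main.ts";
  let source_map: &[u8] = br#"{"version":3}"#;

  // Write: u32 little-endian length prefix, then the raw bytes, per field.
  let mut bytes = Vec::new();
  bytes.extend_from_slice(&(specifier.len() as u32).to_le_bytes());
  bytes.extend_from_slice(specifier.as_bytes());
  bytes.extend_from_slice(&(source_map.len() as u32).to_le_bytes());
  bytes.extend_from_slice(source_map);

  // Read it back with the helpers above.
  let (rest, parsed_specifier) = read_string_lossy(&bytes)?;
  let (rest, parsed_map) = read_bytes_with_u32_len(rest)?;
  assert_eq!(parsed_specifier, specifier);
  assert_eq!(parsed_map, source_map);
  assert!(rest.is_empty());
  Ok(())
}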

File diff suppressed because it is too large


@ -1,232 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// todo(dsherret): this should instead use conditional compilation and directly
// surface the underlying implementation.
//
// The problem at the moment is that there's no way to have conditional compilation
// for denort or the deno binary. We should extract denort into a separate binary.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use sys_traits::boxed::BoxedFsDirEntry;
use sys_traits::boxed::BoxedFsFile;
use sys_traits::boxed::BoxedFsMetadataValue;
use sys_traits::boxed::FsMetadataBoxed;
use sys_traits::boxed::FsOpenBoxed;
use sys_traits::boxed::FsReadDirBoxed;
use sys_traits::CreateDirOptions;
use crate::standalone::DenoCompileFileSystem;
#[derive(Debug, Clone)]
pub enum CliSys {
#[allow(dead_code)] // will be dead code for denort
#[allow(clippy::disallowed_types)] // ok because sys impl
Real(sys_traits::impls::RealSys),
#[allow(dead_code)] // will be dead code for deno
DenoCompile(DenoCompileFileSystem),
}
impl deno_lib::sys::DenoLibSys for CliSys {}
impl Default for CliSys {
fn default() -> Self {
Self::Real(sys_traits::impls::RealSys)
}
}
impl deno_runtime::deno_node::ExtNodeSys for CliSys {}
impl sys_traits::BaseFsCloneFile for CliSys {
fn base_fs_clone_file(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_clone_file(src, dst),
Self::DenoCompile(sys) => sys.base_fs_clone_file(src, dst),
}
}
}
impl sys_traits::BaseFsSymlinkDir for CliSys {
fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_symlink_dir(src, dst),
Self::DenoCompile(sys) => sys.base_fs_symlink_dir(src, dst),
}
}
}
impl sys_traits::BaseFsCopy for CliSys {
fn base_fs_copy(&self, src: &Path, dst: &Path) -> std::io::Result<u64> {
match self {
Self::Real(sys) => sys.base_fs_copy(src, dst),
Self::DenoCompile(sys) => sys.base_fs_copy(src, dst),
}
}
}
impl sys_traits::BaseFsHardLink for CliSys {
fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_hard_link(src, dst),
Self::DenoCompile(sys) => sys.base_fs_hard_link(src, dst),
}
}
}
impl sys_traits::BaseFsRead for CliSys {
fn base_fs_read(&self, p: &Path) -> std::io::Result<Cow<'static, [u8]>> {
match self {
Self::Real(sys) => sys.base_fs_read(p),
Self::DenoCompile(sys) => sys.base_fs_read(p),
}
}
}
impl sys_traits::BaseFsReadDir for CliSys {
type ReadDirEntry = BoxedFsDirEntry;
fn base_fs_read_dir(
&self,
p: &Path,
) -> std::io::Result<
Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>> + '_>,
> {
match self {
Self::Real(sys) => sys.fs_read_dir_boxed(p),
Self::DenoCompile(sys) => sys.fs_read_dir_boxed(p),
}
}
}
impl sys_traits::BaseFsCanonicalize for CliSys {
fn base_fs_canonicalize(&self, p: &Path) -> std::io::Result<PathBuf> {
match self {
Self::Real(sys) => sys.base_fs_canonicalize(p),
Self::DenoCompile(sys) => sys.base_fs_canonicalize(p),
}
}
}
impl sys_traits::BaseFsMetadata for CliSys {
type Metadata = BoxedFsMetadataValue;
fn base_fs_metadata(&self, path: &Path) -> std::io::Result<Self::Metadata> {
match self {
Self::Real(sys) => sys.fs_metadata_boxed(path),
Self::DenoCompile(sys) => sys.fs_metadata_boxed(path),
}
}
fn base_fs_symlink_metadata(
&self,
path: &Path,
) -> std::io::Result<Self::Metadata> {
match self {
Self::Real(sys) => sys.fs_symlink_metadata_boxed(path),
Self::DenoCompile(sys) => sys.fs_symlink_metadata_boxed(path),
}
}
}
impl sys_traits::BaseFsCreateDir for CliSys {
fn base_fs_create_dir(
&self,
p: &Path,
options: &CreateDirOptions,
) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_create_dir(p, options),
Self::DenoCompile(sys) => sys.base_fs_create_dir(p, options),
}
}
}
impl sys_traits::BaseFsOpen for CliSys {
type File = BoxedFsFile;
fn base_fs_open(
&self,
path: &Path,
options: &sys_traits::OpenOptions,
) -> std::io::Result<Self::File> {
match self {
Self::Real(sys) => sys.fs_open_boxed(path, options),
Self::DenoCompile(sys) => sys.fs_open_boxed(path, options),
}
}
}
impl sys_traits::BaseFsRemoveFile for CliSys {
fn base_fs_remove_file(&self, p: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_remove_file(p),
Self::DenoCompile(sys) => sys.base_fs_remove_file(p),
}
}
}
impl sys_traits::BaseFsRename for CliSys {
fn base_fs_rename(&self, old: &Path, new: &Path) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.base_fs_rename(old, new),
Self::DenoCompile(sys) => sys.base_fs_rename(old, new),
}
}
}
impl sys_traits::SystemRandom for CliSys {
fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> {
match self {
Self::Real(sys) => sys.sys_random(buf),
Self::DenoCompile(sys) => sys.sys_random(buf),
}
}
}
impl sys_traits::SystemTimeNow for CliSys {
fn sys_time_now(&self) -> std::time::SystemTime {
match self {
Self::Real(sys) => sys.sys_time_now(),
Self::DenoCompile(sys) => sys.sys_time_now(),
}
}
}
impl sys_traits::ThreadSleep for CliSys {
fn thread_sleep(&self, dur: std::time::Duration) {
match self {
Self::Real(sys) => sys.thread_sleep(dur),
Self::DenoCompile(sys) => sys.thread_sleep(dur),
}
}
}
impl sys_traits::EnvCurrentDir for CliSys {
fn env_current_dir(&self) -> std::io::Result<PathBuf> {
match self {
Self::Real(sys) => sys.env_current_dir(),
Self::DenoCompile(sys) => sys.env_current_dir(),
}
}
}
impl sys_traits::BaseEnvVar for CliSys {
fn base_env_var_os(
&self,
key: &std::ffi::OsStr,
) -> Option<std::ffi::OsString> {
match self {
Self::Real(sys) => sys.base_env_var_os(key),
Self::DenoCompile(sys) => sys.base_env_var_os(key),
}
}
}
impl sys_traits::EnvHomeDir for CliSys {
fn env_home_dir(&self) -> Option<PathBuf> {
#[allow(clippy::disallowed_types)] // ok because sys impl
sys_traits::impls::RealSys.env_home_dir()
}
}


@ -1,10 +1,10 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_lib::version::DENO_VERSION_INFO;
use serde::Serialize;
use super::*;
use crate::tools::test::TestFailureFormatOptions;
use crate::version;
pub trait BenchReporter {
fn report_group_summary(&mut self);
@ -31,11 +31,7 @@ impl Default for JsonReporterOutput {
fn default() -> Self {
Self {
version: JSON_SCHEMA_VERSION,
runtime: format!(
"{} {}",
version::DENO_VERSION_INFO.user_agent,
env!("TARGET")
),
runtime: format!("{} {}", DENO_VERSION_INFO.user_agent, env!("TARGET")),
cpu: mitata::cpu::name(),
benches: vec![],
}
@ -163,7 +159,7 @@ impl BenchReporter for ConsoleReporter {
"{}\n",
colors::gray(format!(
"Runtime | Deno {} ({})",
crate::version::DENO_VERSION_INFO.deno,
DENO_VERSION_INFO.deno,
env!("TARGET")
))
);


@ -13,6 +13,7 @@ use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleLoadError;
use deno_lib::util::hash::FastInsecureHasher;
use deno_semver::npm::NpmPackageNvReference;
use deno_terminal::colors;
use once_cell::sync::Lazy;
@ -28,7 +29,6 @@ use crate::args::TsTypeLib;
use crate::args::TypeCheckMode;
use crate::cache::CacheDBHash;
use crate::cache::Caches;
use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache;
use crate::factory::CliFactory;
use crate::graph_util::maybe_additional_sloppy_imports_message;


@ -4,8 +4,8 @@ use std::path::Path;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_lib::cache::DenoDir;
use crate::cache::DenoDir;
use crate::colors;
use crate::display;
use crate::sys::CliSys;


@ -18,6 +18,7 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_lib::args::CaData;
use deno_semver::npm::NpmPackageReqReference;
use log::Level;
use once_cell::sync::Lazy;
@ -26,7 +27,6 @@ use regex::RegexBuilder;
use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;
@ -657,6 +657,7 @@ fn is_in_path(dir: &Path) -> bool {
mod tests {
use std::process::Command;
use deno_lib::args::UnstableConfig;
use test_util::testdata_path;
use test_util::TempDir;
@ -664,7 +665,6 @@ mod tests {
use crate::args::ConfigFlag;
use crate::args::PermissionFlags;
use crate::args::UninstallFlagsGlobal;
use crate::args::UnstableConfig;
use crate::util::fs::canonicalize_path;
#[tokio::test]


@ -18,6 +18,7 @@ use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::CancelFuture;
use deno_core::CancelHandle;
use deno_lib::version::DENO_VERSION_INFO;
use jupyter_runtime::messaging;
use jupyter_runtime::ConnectionInfo;
use jupyter_runtime::ExecutionCount;
@ -679,10 +680,10 @@ fn kernel_info() -> messaging::KernelInfoReply {
status: ReplyStatus::Ok,
protocol_version: "5.3".to_string(),
implementation: "Deno kernel".to_string(),
implementation_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
implementation_version: DENO_VERSION_INFO.deno.to_string(),
language_info: messaging::LanguageInfo {
name: "typescript".to_string(),
version: crate::version::DENO_VERSION_INFO.typescript.to_string(),
version: DENO_VERSION_INFO.typescript.to_string(),
mimetype: "text/x.typescript".to_string(),
file_extension: ".ts".to_string(),
pygments_lexer: "typescript".to_string(),


@ -8,6 +8,7 @@ use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::unsync::spawn_blocking;
use deno_lib::version::DENO_VERSION_INFO;
use deno_runtime::WorkerExecutionMode;
use rustyline::error::ReadlineError;
@ -244,7 +245,7 @@ pub async fn run(
if !cli_options.is_quiet() {
let mut handle = io::stdout().lock();
writeln!(handle, "Deno {}", crate::version::DENO_VERSION_INFO.deno)?;
writeln!(handle, "Deno {}", DENO_VERSION_INFO.deno)?;
writeln!(handle, "exit using ctrl+d, ctrl+c, or close()")?;
if repl_flags.is_default_command {


@ -32,6 +32,7 @@ use deno_error::JsErrorBox;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeResolutionKind;
@ -402,18 +403,16 @@ impl ReplSession {
}
Err(err) => {
// handle a parsing diagnostic
match crate::util::result::any_and_jserrorbox_downcast_ref::<
deno_ast::ParseDiagnostic,
>(&err)
{
match any_and_jserrorbox_downcast_ref::<deno_ast::ParseDiagnostic>(
&err,
) {
Some(diagnostic) => {
Ok(EvaluationOutput::Error(format_diagnostic(diagnostic)))
}
None => {
match crate::util::result::any_and_jserrorbox_downcast_ref::<
ParseDiagnosticsError,
>(&err)
{
match any_and_jserrorbox_downcast_ref::<ParseDiagnosticsError>(
&err,
) {
Some(diagnostics) => Ok(EvaluationOutput::Error(
diagnostics
.0


@ -19,6 +19,8 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::unsync::spawn;
use deno_core::url::Url;
use deno_lib::shared::ReleaseChannel;
use deno_lib::version;
use deno_semver::SmallStackString;
use deno_semver::Version;
use once_cell::sync::Lazy;
@ -30,11 +32,9 @@ use crate::colors;
use crate::factory::CliFactory;
use crate::http_util::HttpClient;
use crate::http_util::HttpClientProvider;
use crate::shared::ReleaseChannel;
use crate::util::archive;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::version;
const RELEASE_URL: &str = "https://github.com/denoland/deno/releases";
const CANARY_URL: &str = "https://dl.deno.land/canary";


@ -29,6 +29,7 @@ use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_graph::ResolutionResolved;
use deno_lib::util::checksum;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lib::worker::create_isolate_create_params;
use deno_resolver::npm::managed::ResolvePkgFolderFromDenoModuleError;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
@ -44,7 +45,6 @@ use thiserror::Error;
use crate::args::TsConfig;
use crate::args::TypeCheckMode;
use crate::cache::FastInsecureHasher;
use crate::cache::ModuleInfoCache;
use crate::node::CliNodeResolver;
use crate::npm::CliNpmResolver;


@ -14,6 +14,7 @@ use deno_core::error::CoreError;
use deno_core::futures::Future;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_runtime::fmt_errors::format_js_error;
use log::info;
use notify::event::Event as NotifyEvent;
@ -82,13 +83,11 @@ where
{
let result = watch_future.await;
if let Err(err) = result {
let error_string =
match crate::util::result::any_and_jserrorbox_downcast_ref::<CoreError>(
&err,
) {
Some(CoreError::Js(e)) => format_js_error(e),
_ => format!("{err:?}"),
};
let error_string = match any_and_jserrorbox_downcast_ref::<CoreError>(&err)
{
Some(CoreError::Js(e)) => format_js_error(e),
_ => format!("{err:?}"),
};
log::error!(
"{}: {}",
colors::red_bold("error"),


@ -9,10 +9,8 @@ pub mod draw_thread;
pub mod extract;
pub mod file_watcher;
pub mod fs;
pub mod logger;
pub mod path;
pub mod progress_bar;
pub mod result;
pub mod retry;
pub mod sync;
pub mod text_encoding;


@ -1,8 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::ops::Range;
use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
@ -11,24 +9,6 @@ use deno_core::ModuleSourceCode;
static SOURCE_MAP_PREFIX: &[u8] =
b"//# sourceMappingURL=data:application/json;base64,";
#[inline(always)]
pub fn from_utf8_lossy_cow(bytes: Cow<[u8]>) -> Cow<str> {
match bytes {
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
Cow::Owned(bytes) => Cow::Owned(from_utf8_lossy_owned(bytes)),
}
}
#[inline(always)]
pub fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String {
match String::from_utf8_lossy(&bytes) {
Cow::Owned(code) => code,
// SAFETY: `String::from_utf8_lossy` returns `Cow::Borrowed` only when the
// input bytes are already valid UTF-8, so `from_utf8_unchecked` is safe here.
Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) },
}
}
pub fn source_map_from_code(code: &[u8]) -> Option<Vec<u8>> {
let range = find_source_map_range(code)?;
let source_map_range = &code[range];
@ -105,29 +85,6 @@ fn find_source_map_range(code: &[u8]) -> Option<Range<usize>> {
}
}
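For reference, the inline form these helpers handle is a trailing //# sourceMappingURL=data:application/json;base64,... comment. A hedged sketch of decoding one, using the base64 imports already present in this file (inline_source_map is a made-up helper, not this module's API):

fn inline_source_map(code: &str) -> Option<Vec<u8>> {
  // Find the last sourceMappingURL comment and decode its base64 payload.
  let idx = code.rfind("//# sourceMappingURL=data:application/json;base64,")?;
  let payload = code[idx..].split_once("base64,")?.1.trim_end();
  BASE64_STANDARD.decode(payload).ok()
}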
/// Converts an `Arc<str>` to an `Arc<[u8]>`.
#[allow(dead_code)]
pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
let raw = Arc::into_raw(arc_str);
// SAFETY: This is safe because they have the same memory layout.
unsafe { Arc::from_raw(raw as *const [u8]) }
}
/// Converts an `Arc<u8>` to an `Arc<str>` if able.
#[allow(dead_code)]
pub fn arc_u8_to_arc_str(
arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
// Check that the string is valid UTF-8.
std::str::from_utf8(&arc_u8)?;
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
// standard library.
Ok(unsafe {
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
})
}
#[cfg(test)]
mod tests {
use std::sync::Arc;


@ -1,5 +1,7 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_lib::util::v8::construct_v8_flags;
pub mod convert;
#[inline(always)]
@ -10,19 +12,6 @@ pub fn get_v8_flags_from_env() -> Vec<String> {
.unwrap_or_default()
}
#[inline(always)]
pub fn construct_v8_flags(
default_v8_flags: &[String],
v8_flags: &[String],
env_v8_flags: Vec<String>,
) -> Vec<String> {
std::iter::once("UNUSED_BUT_NECESSARY_ARG0".to_owned())
.chain(default_v8_flags.iter().cloned())
.chain(env_v8_flags)
.chain(v8_flags.iter().cloned())
.collect::<Vec<_>>()
}
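A hedged example of the ordering construct_v8_flags produces: the placeholder argv[0] first, then embedder defaults, then flags read from the environment, and finally explicit --v8-flags values (the flag values below are invented):

fn v8_flag_order_example() {
  let flags = construct_v8_flags(
    &["--stack-size=1024".to_string()],         // embedder defaults
    &["--max-old-space-size=4096".to_string()], // explicit --v8-flags
    vec!["--trace-gc".to_string()],             // from the environment
  );
  assert_eq!(
    flags,
    vec![
      "UNUSED_BUT_NECESSARY_ARG0".to_string(),
      "--stack-size=1024".to_string(),
      "--trace-gc".to_string(),
      "--max-old-space-size=4096".to_string(),
    ]
  );
}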
pub fn init_v8_flags(
default_v8_flags: &[String],
v8_flags: &[String],


@ -1,88 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use once_cell::sync::Lazy;
use crate::shared::ReleaseChannel;
const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
const TYPESCRIPT: &str = env!("TS_VERSION");
const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
// TODO(bartlomieju): ideally we could remove this const.
const IS_CANARY: bool = option_env!("DENO_CANARY").is_some();
// TODO(bartlomieju): this is temporary, to allow Homebrew to cut RC releases as well
const IS_RC: bool = option_env!("DENO_RC").is_some();
pub static DENO_VERSION_INFO: Lazy<DenoVersionInfo> = Lazy::new(|| {
let release_channel = libsui::find_section("denover")
.and_then(|buf| std::str::from_utf8(buf).ok())
.and_then(|str_| ReleaseChannel::deserialize(str_).ok())
.unwrap_or({
if IS_CANARY {
ReleaseChannel::Canary
} else if IS_RC {
ReleaseChannel::Rc
} else {
ReleaseChannel::Stable
}
});
DenoVersionInfo {
deno: if release_channel == ReleaseChannel::Canary {
concat!(
env!("CARGO_PKG_VERSION"),
"+",
env!("GIT_COMMIT_HASH_SHORT")
)
} else {
env!("CARGO_PKG_VERSION")
},
release_channel,
git_hash: GIT_COMMIT_HASH,
// Keep in sync with `deno` field.
user_agent: if release_channel == ReleaseChannel::Canary {
concat!(
"Deno/",
env!("CARGO_PKG_VERSION"),
"+",
env!("GIT_COMMIT_HASH_SHORT")
)
} else {
concat!("Deno/", env!("CARGO_PKG_VERSION"))
},
typescript: TYPESCRIPT,
}
});
pub struct DenoVersionInfo {
/// Human-readable version of the current Deno binary.
///
/// For stable release, a semver, e.g. `v1.46.2`.
/// For canary release, a semver + 7-char git hash, e.g. `v1.46.3+asdfqwq`.
pub deno: &'static str,
pub release_channel: ReleaseChannel,
/// A full git hash.
pub git_hash: &'static str,
/// A user-agent header that will be used in HTTP client.
pub user_agent: &'static str,
pub typescript: &'static str,
}
impl DenoVersionInfo {
/// For stable release, a semver, e.g. `v1.46.2`.
/// For canary release, a full git hash, e.g. `9bdab6fb6b93eb43b1930f40987fa4997287f9c8`.
pub fn version_or_git_hash(&self) -> &'static str {
if self.release_channel == ReleaseChannel::Canary {
self.git_hash
} else {
CARGO_PKG_VERSION
}
}
}
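The compile-time fallback in DENO_VERSION_INFO reduces to: prefer a channel embedded in the binary (the denover section read via libsui), otherwise fall back to build-time flags. A hedged, simplified sketch of just that fallback half (libsui omitted; names are illustrative):

use once_cell::sync::Lazy;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Channel {
  Canary,
  Rc,
  Stable,
}

// Resolved once, on first access, from compile-time environment flags.
static CHANNEL: Lazy<Channel> = Lazy::new(|| {
  if option_env!("DENO_CANARY").is_some() {
    Channel::Canary
  } else if option_env!("DENO_RC").is_some() {
    Channel::Rc
  } else {
    Channel::Stable
  }
});

fn main() {
  println!("release channel: {:?}", *CHANNEL);
}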


@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
@ -12,20 +11,16 @@ use deno_core::PollEventLoopOptions;
use deno_error::JsErrorBox;
use deno_lib::worker::LibMainWorker;
use deno_lib::worker::LibMainWorkerFactory;
use deno_runtime::code_cache;
use deno_lib::worker::ResolveNpmBinaryEntrypointError;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::worker::MainWorker;
use deno_runtime::WorkerExecutionMode;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::ResolvePkgJsonBinExportError;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use sys_traits::EnvCurrentDir;
use tokio::select;
use crate::args::CliLockfile;
use crate::args::NpmCachingStrategy;
use crate::node::CliNodeResolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmResolver;
@ -40,15 +35,6 @@ pub trait HmrRunner: Send + Sync {
async fn run(&mut self) -> Result<(), CoreError>;
}
pub trait CliCodeCache: code_cache::CodeCache {
/// Gets if the code cache is still enabled.
fn enabled(&self) -> bool {
true
}
fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache>;
}
#[async_trait::async_trait(?Send)]
pub trait CoverageCollector: Send + Sync {
async fn start_collecting(&mut self) -> Result<(), CoreError>;
@ -102,6 +88,8 @@ impl CliMainWorker {
log::debug!("main_module {}", self.worker.main_module());
// WARNING: Remember to update cli/lib/worker.rs to align with
// changes made here so that they affect deno_compile as well.
self.execute_main_module().await?;
self.worker.dispatch_load_event()?;
@ -295,29 +283,6 @@ impl CliMainWorker {
}
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveBinaryEntrypointError {
#[class(inherit)]
#[error(transparent)]
ResolvePkgJsonBinExport(ResolvePkgJsonBinExportError),
#[class(generic)]
#[error("{original:#}\n\nFallback failed: {fallback:#}")]
Fallback {
fallback: ResolveBinaryEntrypointFallbackError,
original: ResolvePkgJsonBinExportError,
},
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveBinaryEntrypointFallbackError {
#[class(inherit)]
#[error(transparent)]
PackageSubpathResolve(node_resolver::errors::PackageSubpathResolveError),
#[class(generic)]
#[error("Cannot find module '{0}'")]
ModuleNotFound(ModuleSpecifier),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CreateCustomWorkerError {
#[class(inherit)]
@ -336,7 +301,7 @@ pub enum CreateCustomWorkerError {
UrlParse(#[from] deno_core::url::ParseError),
#[class(inherit)]
#[error(transparent)]
ResolveBinaryEntrypoint(#[from] ResolveBinaryEntrypointError),
ResolveNpmBinaryEntrypoint(#[from] ResolveNpmBinaryEntrypointError),
#[class(inherit)]
#[error(transparent)]
NpmPackageReq(JsErrorBox),
@ -350,7 +315,6 @@ pub enum CreateCustomWorkerError {
pub struct CliMainWorkerFactory {
lib_main_worker_factory: LibMainWorkerFactory<CliSys>,
maybe_lockfile: Option<Arc<CliLockfile>>,
node_resolver: Arc<CliNodeResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: CliNpmResolver,
root_permissions: PermissionsContainer,
@ -366,7 +330,6 @@ impl CliMainWorkerFactory {
lib_main_worker_factory: LibMainWorkerFactory<CliSys>,
maybe_file_watcher_communicator: Option<Arc<WatcherCommunicator>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
node_resolver: Arc<CliNodeResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_resolver: CliNpmResolver,
sys: CliSys,
@ -376,7 +339,6 @@ impl CliMainWorkerFactory {
Self {
lib_main_worker_factory,
maybe_lockfile,
node_resolver,
npm_installer,
npm_resolver,
root_permissions,
@ -446,8 +408,11 @@ impl CliMainWorkerFactory {
package_ref.req(),
&referrer,
)?;
let main_module = self
.resolve_binary_entrypoint(&package_folder, package_ref.sub_path())?;
let main_module =
self.lib_main_worker_factory.resolve_npm_binary_entrypoint(
&package_folder,
package_ref.sub_path(),
)?;
if let Some(lockfile) = &self.maybe_lockfile {
// For npm binary commands, ensure that the lockfile gets updated
@ -494,71 +459,6 @@ impl CliMainWorkerFactory {
shared: self.shared.clone(),
})
}
fn resolve_binary_entrypoint(
&self,
package_folder: &Path,
sub_path: Option<&str>,
) -> Result<ModuleSpecifier, ResolveBinaryEntrypointError> {
match self
.node_resolver
.resolve_binary_export(package_folder, sub_path)
{
Ok(specifier) => Ok(specifier),
Err(original_err) => {
// if the binary entrypoint was not found, fallback to regular node resolution
let result =
self.resolve_binary_entrypoint_fallback(package_folder, sub_path);
match result {
Ok(Some(specifier)) => Ok(specifier),
Ok(None) => Err(
ResolveBinaryEntrypointError::ResolvePkgJsonBinExport(original_err),
),
Err(fallback_err) => Err(ResolveBinaryEntrypointError::Fallback {
original: original_err,
fallback: fallback_err,
}),
}
}
}
}
/// resolve the binary entrypoint using regular node resolution
fn resolve_binary_entrypoint_fallback(
&self,
package_folder: &Path,
sub_path: Option<&str>,
) -> Result<Option<ModuleSpecifier>, ResolveBinaryEntrypointFallbackError> {
// only fall back if the user specified a sub path
if sub_path.is_none() {
// it's confusing to users if the package doesn't have any binary
// entrypoint and we just execute the main script which will likely
// have blank output, so do not resolve the entrypoint in this case
return Ok(None);
}
let specifier = self
.node_resolver
.resolve_package_subpath_from_deno_module(
package_folder,
sub_path,
/* referrer */ None,
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.map_err(ResolveBinaryEntrypointFallbackError::PackageSubpathResolve)?;
if specifier
.to_file_path()
.map(|p| p.exists())
.unwrap_or(false)
{
Ok(Some(specifier))
} else {
Err(ResolveBinaryEntrypointFallbackError::ModuleNotFound(
specifier,
))
}
}
}
#[allow(clippy::print_stdout)]


@ -33,7 +33,6 @@ deno_error.workspace = true
deno_fetch.workspace = true
deno_fs.workspace = true
deno_io.workspace = true
deno_media_type.workspace = true
deno_net.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
@ -47,7 +46,6 @@ ecb.workspace = true
ecdsa = "0.16.9"
ed25519-dalek = { version = "2.1.1", features = ["digest", "pkcs8", "rand_core", "signature"] }
elliptic-curve.workspace = true
errno = "0.2.8"
faster-hex.workspace = true
h2.workspace = true
hkdf.workspace = true
@ -104,6 +102,9 @@ x25519-dalek = { version = "2.0.0", features = ["static_secrets"] }
x509-parser = "0.15.0"
yoke.workspace = true
[target.'cfg(unix)'.dependencies]
errno = "0.3.8"
[target.'cfg(windows)'.dependencies]
windows-sys.workspace = true
winapi = { workspace = true, features = ["consoleapi"] }


@ -23,11 +23,13 @@ libc.workspace = true
netif = "0.1.6"
once_cell.workspace = true
serde.workspace = true
signal-hook = "0.3.17"
signal-hook-registry = "1.4.0"
thiserror.workspace = true
tokio.workspace = true
[target.'cfg(windows)'.dependencies]
winapi = { workspace = true, features = ["commapi", "knownfolders", "mswsock", "objbase", "psapi", "shlobj", "tlhelp32", "winbase", "winerror", "winuser", "winsock2"] }
ntapi = "0.4.0"
[target.'cfg(unix)'.dependencies]
signal-hook = "0.3.17"
signal-hook-registry = "1.4.0"


@ -19,6 +19,25 @@ pub use wgpu_types;
pub const UNSTABLE_FEATURE_NAME: &str = "webgpu";
#[allow(clippy::print_stdout)]
pub fn print_linker_flags(name: &str) {
if cfg!(windows) {
// these DLLs load slowly, so delay loading them
let dlls = [
// webgpu
"d3dcompiler_47",
"OPENGL32",
// network related functions
"iphlpapi",
];
for dll in dlls {
println!("cargo:rustc-link-arg-bin={name}=/delayload:{dll}.dll");
}
// enable delay loading
println!("cargo:rustc-link-arg-bin={name}=delayimp.lib");
}
}
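A hedged usage sketch: a binary crate's build script calls this once per emitted binary so the /delayload arguments land on that binary's link line (the deno_webgpu crate path and the "deno" binary name are assumptions here):

// build.rs (sketch)
fn main() {
  // Emits /delayload link args on Windows; effectively a no-op elsewhere.
  deno_webgpu::print_linker_flags("deno");
}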
#[macro_use]
mod macros {
macro_rules! gfx_select {


@ -21,7 +21,6 @@ anyhow.workspace = true
async-trait.workspace = true
boxed_error.workspace = true
deno_error.workspace = true
deno_media_type.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
futures.workspace = true


@ -42,16 +42,6 @@ pub struct CjsAnalysisExports {
pub reexports: Vec<String>,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CjsCodeAnalysisError {
#[class(inherit)]
#[error(transparent)]
ClosestPkgJson(#[from] crate::errors::ClosestPkgJsonError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),
}
/// Code analyzer for CJS and ESM files.
#[async_trait::async_trait(?Send)]
pub trait CjsCodeAnalyzer {
@ -66,17 +56,17 @@ pub trait CjsCodeAnalyzer {
&self,
specifier: &Url,
maybe_source: Option<Cow<'a, str>>,
) -> Result<CjsAnalysis<'a>, CjsCodeAnalysisError>;
) -> Result<CjsAnalysis<'a>, JsErrorBox>;
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum TranslateCjsToEsmError {
#[class(inherit)]
#[error(transparent)]
CjsCodeAnalysis(#[from] CjsCodeAnalysisError),
CjsCodeAnalysis(JsErrorBox),
#[class(inherit)]
#[error(transparent)]
ExportAnalysis(#[from] JsErrorBox),
ExportAnalysis(JsErrorBox),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
@ -87,7 +77,7 @@ pub struct CjsAnalysisCouldNotLoadError {
reexport_specifier: Url,
referrer: Url,
#[source]
source: CjsCodeAnalysisError,
source: JsErrorBox,
}
pub struct NodeCodeTranslator<
@ -164,7 +154,8 @@ impl<
let analysis = self
.cjs_code_analyzer
.analyze_cjs(entry_specifier, source)
.await?;
.await
.map_err(TranslateCjsToEsmError::CjsCodeAnalysis)?;
let analysis = match analysis {
CjsAnalysis::Esm(source) => return Ok(source),


@ -35,6 +35,8 @@ ring.workspace = true
serde_json.workspace = true
sys_traits.workspace = true
tar.workspace = true
tempfile = "3.4.0"
thiserror.workspace = true
url.workspace = true
[dev-dependencies]
tempfile = "3.4.0"


@ -13,37 +13,31 @@ repository.workspace = true
path = "lib.rs"
[features]
run = []
upgrade = []
[[test]]
name = "integration_tests"
path = "integration/mod.rs"
required-features = ["run"]
[[test]]
name = "specs"
path = "specs/mod.rs"
required-features = ["run"]
harness = false
[[test]]
name = "node_compat_tests"
path = "node_compat/test_runner.rs"
required-features = ["run"]
[dev-dependencies]
anyhow.workspace = true
bytes.workspace = true
chrono = { workspace = true, features = ["now"] }
deno_ast.workspace = true
deno_bench_util.workspace = true
deno_cache_dir = { workspace = true }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting", "unsafe_use_unprotected_platform"] }
deno_fetch.workspace = true
deno_lockfile.workspace = true
deno_semver.workspace = true
deno_terminal.workspace = true
deno_tls.workspace = true
deno_unsync.workspace = true
fastwebsockets = { workspace = true, features = ["upgrade", "unstable-split"] }
file_test_runner = "0.7.3"
flaky_test = "=0.2.2"
@ -59,7 +53,11 @@ os_pipe.workspace = true
pretty_assertions.workspace = true
regex.workspace = true
reqwest.workspace = true
rustls.workspace = true
rustls-pemfile.workspace = true
rustls-tokio-stream.workspace = true
serde.workspace = true
serde_json.workspace = true
sys_traits = { workspace = true, features = ["real", "getrandom", "libc", "winapi"] }
test_util.workspace = true
tokio.workspace = true


@ -1,8 +1,8 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::serde_json::json;
use deno_core::url::Url;
use serde_json::json;
use test_util as util;
use url::Url;
use util::assert_contains;
use util::assert_not_contains;
use util::TestContext;


@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::serde_json;
use test_util as util;
use util::assert_not_contains;
use util::testdata_path;


@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::serde_json;
use test_util as util;
use test_util::TempDir;
use util::assert_contains;


@ -1,6 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::serde_json::json;
use serde_json::json;
use test_util as util;
use test_util::itest;
use util::assert_contains;


@ -4,12 +4,9 @@ use std::io::BufRead;
use std::process::ChildStderr;
use std::time::Duration;
use anyhow::anyhow;
use anyhow::Error as AnyError;
use bytes::Bytes;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::url;
use fastwebsockets::FragmentCollector;
use fastwebsockets::Frame;
use fastwebsockets::WebSocket;
@ -18,6 +15,7 @@ use hyper::upgrade::Upgraded;
use hyper::Request;
use hyper::Response;
use hyper_util::rt::TokioIo;
use serde_json::json;
use test_util as util;
use tokio::net::TcpStream;
use tokio::time::timeout;
@ -35,7 +33,7 @@ where
Fut::Output: Send + 'static,
{
fn execute(&self, fut: Fut) {
deno_core::unsync::spawn(fut);
deno_unsync::spawn(fut);
}
}
@ -742,7 +740,7 @@ async fn inspector_json() {
}
let resp = client.execute(req).await.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
let endpoint_list: Vec<deno_core::serde_json::Value> =
let endpoint_list: Vec<serde_json::Value> =
serde_json::from_str(&resp.text().await.unwrap()).unwrap();
let matching_endpoint = endpoint_list.iter().find(|e| {
e["webSocketDebuggerUrl"]
@ -775,7 +773,7 @@ async fn inspector_json_list() {
url.set_path("/json/list");
let resp = reqwest::get(url).await.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
let endpoint_list: Vec<deno_core::serde_json::Value> =
let endpoint_list: Vec<serde_json::Value> =
serde_json::from_str(&resp.text().await.unwrap()).unwrap();
let matching_endpoint = endpoint_list
.iter()

Some files were not shown because too many files have changed in this diff.