From 9efed4c7a3d32de62e9c9b5e0c6712ce97637abb Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 1 May 2023 14:35:23 -0400 Subject: [PATCH] refactor(cli): remove ProcState - add CliFactory (#18900) This removes `ProcState` and replaces it with a new `CliFactory` which initializes our "service structs" on demand. This isn't a performance improvement at the moment for `deno run`, but might unlock performance improvements in the future. --- cli/args/mod.rs | 4 +- cli/cache/caches.rs | 33 +- cli/factory.rs | 669 ++++++++++++++++++ cli/lsp/language_server.rs | 11 +- cli/lsp/testing/execution.rs | 40 +- cli/main.rs | 19 +- cli/module_loader.rs | 21 +- cli/proc_state.rs | 447 ------------ cli/standalone/binary.rs | 19 +- .../package_json/invalid_value/task.out | 2 +- .../task/both/package_json_selected.out | 2 +- cli/tests/testdata/task/npx/non_existent.out | 2 +- cli/tests/testdata/task/npx/on_own.out | 2 +- cli/tests/testdata/task/package_json/bin.out | 2 +- cli/tools/bench.rs | 47 +- cli/tools/bundle.rs | 95 +-- cli/tools/check.rs | 7 +- cli/tools/coverage/mod.rs | 17 +- cli/tools/doc.rs | 25 +- cli/tools/fmt.rs | 9 +- cli/tools/info.rs | 42 +- cli/tools/installer.rs | 7 +- cli/tools/lint.rs | 9 +- cli/tools/repl/mod.rs | 24 +- cli/tools/run.rs | 70 +- cli/tools/standalone.rs | 30 +- cli/tools/task.rs | 32 +- cli/tools/test.rs | 54 +- cli/tools/upgrade.rs | 7 +- cli/tools/vendor/mod.rs | 17 +- cli/watcher.rs | 99 +++ cli/worker.rs | 1 + 32 files changed, 1140 insertions(+), 725 deletions(-) create mode 100644 cli/factory.rs delete mode 100644 cli/proc_state.rs create mode 100644 cli/watcher.rs diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 4038fb0998..00476dce1c 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -753,7 +753,7 @@ impl CliOptions { return Ok(Some(state.snapshot.clone().into_valid()?)); } - if let Some(lockfile) = self.maybe_lock_file() { + if let Some(lockfile) = self.maybe_lockfile() { if !lockfile.lock().overwrite { return Ok(Some( snapshot_from_lockfile(lockfile.clone(), api) @@ -827,7 +827,7 @@ impl CliOptions { .map(|host| InspectorServer::new(host, version::get_user_agent())) } - pub fn maybe_lock_file(&self) -> Option>> { + pub fn maybe_lockfile(&self) -> Option>> { self.maybe_lockfile.clone() } diff --git a/cli/cache/caches.rs b/cli/cache/caches.rs index 0b60d0bece..62bec8a000 100644 --- a/cli/cache/caches.rs +++ b/cli/cache/caches.rs @@ -12,8 +12,8 @@ use super::node::NODE_ANALYSIS_CACHE_DB; use super::parsed_source::PARSED_SOURCE_CACHE_DB; use super::DenoDir; -#[derive(Default)] pub struct Caches { + dir: DenoDir, fmt_incremental_cache_db: OnceCell, lint_incremental_cache_db: OnceCell, dep_analysis_db: OnceCell, @@ -22,6 +22,17 @@ pub struct Caches { } impl Caches { + pub fn new(dir: DenoDir) -> Self { + Self { + dir, + fmt_incremental_cache_db: Default::default(), + lint_incremental_cache_db: Default::default(), + dep_analysis_db: Default::default(), + node_analysis_db: Default::default(), + type_checking_cache_db: Default::default(), + } + } + fn make_db( cell: &OnceCell, config: &'static CacheDBConfiguration, @@ -32,43 +43,43 @@ impl Caches { .clone() } - pub fn fmt_incremental_cache_db(&self, dir: &DenoDir) -> CacheDB { + pub fn fmt_incremental_cache_db(&self) -> CacheDB { Self::make_db( &self.fmt_incremental_cache_db, &INCREMENTAL_CACHE_DB, - dir.fmt_incremental_cache_db_file_path(), + self.dir.fmt_incremental_cache_db_file_path(), ) } - pub fn lint_incremental_cache_db(&self, dir: &DenoDir) -> CacheDB { + pub fn lint_incremental_cache_db(&self) -> 
CacheDB {
    Self::make_db(
      &self.lint_incremental_cache_db,
      &INCREMENTAL_CACHE_DB,
-      dir.lint_incremental_cache_db_file_path(),
+      self.dir.lint_incremental_cache_db_file_path(),
    )
  }

-  pub fn dep_analysis_db(&self, dir: &DenoDir) -> CacheDB {
+  pub fn dep_analysis_db(&self) -> CacheDB {
    Self::make_db(
      &self.dep_analysis_db,
      &PARSED_SOURCE_CACHE_DB,
-      dir.dep_analysis_db_file_path(),
+      self.dir.dep_analysis_db_file_path(),
    )
  }

-  pub fn node_analysis_db(&self, dir: &DenoDir) -> CacheDB {
+  pub fn node_analysis_db(&self) -> CacheDB {
    Self::make_db(
      &self.node_analysis_db,
      &NODE_ANALYSIS_CACHE_DB,
-      dir.node_analysis_db_file_path(),
+      self.dir.node_analysis_db_file_path(),
    )
  }

-  pub fn type_checking_cache_db(&self, dir: &DenoDir) -> CacheDB {
+  pub fn type_checking_cache_db(&self) -> CacheDB {
    Self::make_db(
      &self.type_checking_cache_db,
      &TYPE_CHECK_CACHE_DB,
-      dir.type_checking_cache_db_file_path(),
+      self.dir.type_checking_cache_db_file_path(),
    )
  }
}
diff --git a/cli/factory.rs b/cli/factory.rs
new file mode 100644
index 0000000000..69560cf544
--- /dev/null
+++ b/cli/factory.rs
@@ -0,0 +1,669 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use crate::args::CliOptions;
+use crate::args::DenoSubcommand;
+use crate::args::Flags;
+use crate::args::Lockfile;
+use crate::args::StorageKeyResolver;
+use crate::args::TsConfigType;
+use crate::cache::Caches;
+use crate::cache::DenoDir;
+use crate::cache::EmitCache;
+use crate::cache::HttpCache;
+use crate::cache::NodeAnalysisCache;
+use crate::cache::ParsedSourceCache;
+use crate::emit::Emitter;
+use crate::file_fetcher::FileFetcher;
+use crate::graph_util::ModuleGraphBuilder;
+use crate::graph_util::ModuleGraphContainer;
+use crate::http_util::HttpClient;
+use crate::module_loader::CjsResolutionStore;
+use crate::module_loader::CliModuleLoaderFactory;
+use crate::module_loader::ModuleLoadPreparer;
+use crate::module_loader::NpmModuleLoader;
+use crate::node::CliCjsEsmCodeAnalyzer;
+use crate::node::CliNodeCodeTranslator;
+use crate::npm::create_npm_fs_resolver;
+use crate::npm::CliNpmRegistryApi;
+use crate::npm::CliNpmResolver;
+use crate::npm::NpmCache;
+use crate::npm::NpmResolution;
+use crate::npm::PackageJsonDepsInstaller;
+use crate::resolver::CliGraphResolver;
+use crate::tools::check::TypeChecker;
+use crate::util::progress_bar::ProgressBar;
+use crate::util::progress_bar::ProgressBarStyle;
+use crate::watcher::FileWatcher;
+use crate::watcher::FileWatcherReporter;
+use crate::worker::CliMainWorkerFactory;
+use crate::worker::CliMainWorkerOptions;
+use crate::worker::HasNodeSpecifierChecker;
+
+use deno_core::error::AnyError;
+use deno_core::parking_lot::Mutex;
+
+use deno_runtime::deno_node;
+use deno_runtime::deno_node::analyze::NodeCodeTranslator;
+use deno_runtime::deno_node::NodeResolver;
+use deno_runtime::deno_tls::rustls::RootCertStore;
+use deno_runtime::deno_web::BlobStore;
+use deno_runtime::inspector_server::InspectorServer;
+use deno_semver::npm::NpmPackageReqReference;
+use import_map::ImportMap;
+use log::warn;
+use std::cell::RefCell;
+use std::future::Future;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+pub struct CliFactoryBuilder {
+  maybe_sender: Option<tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>>,
+}
+
+impl CliFactoryBuilder {
+  pub fn new() -> Self {
+    Self { maybe_sender: None }
+  }
+
+  pub fn with_watcher(
+    mut self,
+    sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
+  ) -> Self {
+    self.maybe_sender = Some(sender);
+    self
+  }
+
+  pub async fn build_from_flags(
+    self,
+    flags: Flags,
+  ) -> Result<CliFactory, AnyError> {
Ok(self.build_from_cli_options(Arc::new(CliOptions::from_flags(flags)?)))
+  }
+
+  pub fn build_from_cli_options(self, options: Arc<CliOptions>) -> CliFactory {
+    CliFactory {
+      maybe_sender: RefCell::new(self.maybe_sender),
+      options,
+      services: Default::default(),
+    }
+  }
+}
+
+struct Deferred<T>(once_cell::unsync::OnceCell<T>);
+
+impl<T> Default for Deferred<T> {
+  fn default() -> Self {
+    Self(once_cell::unsync::OnceCell::default())
+  }
+}
+
+impl<T> Deferred<T> {
+  pub fn get_or_try_init(
+    &self,
+    create: impl FnOnce() -> Result<T, AnyError>,
+  ) -> Result<&T, AnyError> {
+    self.0.get_or_try_init(create)
+  }
+
+  pub fn get_or_init(&self, create: impl FnOnce() -> T) -> &T {
+    self.0.get_or_init(create)
+  }
+
+  pub async fn get_or_try_init_async(
+    &self,
+    create: impl Future<Output = Result<T, AnyError>>,
+  ) -> Result<&T, AnyError> {
+    if self.0.get().is_none() {
+      // todo(dsherret): it would be more ideal if this enforced a
+      // single executor and then we could make some initialization
+      // concurrent
+      let val = create.await?;
+      _ = self.0.set(val);
+    }
+    Ok(self.0.get().unwrap())
+  }
+}
+
+#[derive(Default)]
+struct CliFactoryServices {
+  dir: Deferred<DenoDir>,
+  caches: Deferred<Arc<Caches>>,
+  file_fetcher: Deferred<Arc<FileFetcher>>,
+  http_client: Deferred<HttpClient>,
+  emit_cache: Deferred<EmitCache>,
+  emitter: Deferred<Arc<Emitter>>,
+  graph_container: Deferred<Arc<ModuleGraphContainer>>,
+  lockfile: Deferred<Option<Arc<Mutex<Lockfile>>>>,
+  maybe_import_map: Deferred<Option<Arc<ImportMap>>>,
+  maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
+  root_cert_store: Deferred<RootCertStore>,
+  blob_store: Deferred<BlobStore>,
+  parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
+  resolver: Deferred<Arc<CliGraphResolver>>,
+  file_watcher: Deferred<Arc<FileWatcher>>,
+  maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
+  module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>,
+  module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,
+  node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
+  node_fs: Deferred<Arc<dyn deno_node::NodeFs>>,
+  node_resolver: Deferred<Arc<NodeResolver>>,
+  npm_api: Deferred<Arc<CliNpmRegistryApi>>,
+  npm_cache: Deferred<Arc<NpmCache>>,
+  npm_resolver: Deferred<Arc<CliNpmResolver>>,
+  npm_resolution: Deferred<Arc<NpmResolution>>,
+  package_json_deps_installer: Deferred<Arc<PackageJsonDepsInstaller>>,
+  text_only_progress_bar: Deferred<ProgressBar>,
+  type_checker: Deferred<Arc<TypeChecker>>,
+  cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
+}
+
+pub struct CliFactory {
+  maybe_sender:
+    RefCell<Option<tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>>>,
+  options: Arc<CliOptions>,
+  services: CliFactoryServices,
+}
+
+impl CliFactory {
+  pub async fn from_flags(flags: Flags) -> Result<Self, AnyError> {
+    CliFactoryBuilder::new().build_from_flags(flags).await
+  }
+
+  pub fn from_cli_options(options: Arc<CliOptions>) -> Self {
+    CliFactoryBuilder::new().build_from_cli_options(options)
+  }
+
+  pub fn cli_options(&self) -> &Arc<CliOptions> {
+    &self.options
+  }
+
+  pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> {
+    self
+      .services
+      .dir
+      .get_or_try_init(|| self.options.resolve_deno_dir())
+  }
+
+  pub fn caches(&self) -> Result<&Arc<Caches>, AnyError> {
+    self.services.caches.get_or_try_init(|| {
+      let caches = Arc::new(Caches::new(self.deno_dir()?.clone()));
+      // Warm up the caches we know we'll likely need based on the CLI mode
+      match self.options.sub_command() {
+        DenoSubcommand::Run(_) => {
+          _ = caches.dep_analysis_db();
+          _ = caches.node_analysis_db();
+        }
+        DenoSubcommand::Check(_) => {
+          _ = caches.dep_analysis_db();
+          _ = caches.node_analysis_db();
+          _ = caches.type_checking_cache_db();
+        }
+        _ => {}
+      }
+      Ok(caches)
+    })
+  }
+
+  pub fn blob_store(&self) -> &BlobStore {
+    self.services.blob_store.get_or_init(BlobStore::default)
+  }
+
+  pub fn root_cert_store(&self) -> Result<&RootCertStore, AnyError> {
+    self
+      .services
+      .root_cert_store
+      .get_or_try_init(|| self.options.resolve_root_cert_store())
+  }
+
+  pub fn text_only_progress_bar(&self) -> &ProgressBar {
+    self
+      .services
+      .text_only_progress_bar
+      .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly))
+  }
+
+  pub fn http_client(&self) -> Result<&HttpClient,
AnyError> { + self.services.http_client.get_or_try_init(|| { + HttpClient::new( + Some(self.root_cert_store()?.clone()), + self.options.unsafely_ignore_certificate_errors().clone(), + ) + }) + } + + pub fn file_fetcher(&self) -> Result<&Arc, AnyError> { + self.services.file_fetcher.get_or_try_init(|| { + Ok(Arc::new(FileFetcher::new( + HttpCache::new(&self.deno_dir()?.deps_folder_path()), + self.options.cache_setting(), + !self.options.no_remote(), + self.http_client()?.clone(), + self.blob_store().clone(), + Some(self.text_only_progress_bar().clone()), + ))) + }) + } + + pub fn maybe_lockfile(&self) -> &Option>> { + self + .services + .lockfile + .get_or_init(|| self.options.maybe_lockfile()) + } + + pub fn npm_cache(&self) -> Result<&Arc, AnyError> { + self.services.npm_cache.get_or_try_init(|| { + Ok(Arc::new(NpmCache::new( + self.deno_dir()?.npm_folder_path(), + self.options.cache_setting(), + self.http_client()?.clone(), + self.text_only_progress_bar().clone(), + ))) + }) + } + + pub fn npm_api(&self) -> Result<&Arc, AnyError> { + self.services.npm_api.get_or_try_init(|| { + Ok(Arc::new(CliNpmRegistryApi::new( + CliNpmRegistryApi::default_url().to_owned(), + self.npm_cache()?.clone(), + self.http_client()?.clone(), + self.text_only_progress_bar().clone(), + ))) + }) + } + + pub async fn npm_resolution(&self) -> Result<&Arc, AnyError> { + self + .services + .npm_resolution + .get_or_try_init_async(async { + let npm_api = self.npm_api()?; + Ok(Arc::new(NpmResolution::from_serialized( + npm_api.clone(), + self + .options + .resolve_npm_resolution_snapshot(npm_api) + .await?, + self.maybe_lockfile().as_ref().cloned(), + ))) + }) + .await + } + + pub fn node_fs(&self) -> &Arc { + self + .services + .node_fs + .get_or_init(|| Arc::new(deno_node::RealFs)) + } + + pub async fn npm_resolver(&self) -> Result<&Arc, AnyError> { + self + .services + .npm_resolver + .get_or_try_init_async(async { + let npm_resolution = self.npm_resolution().await?; + let npm_fs_resolver = create_npm_fs_resolver( + self.node_fs().clone(), + self.npm_cache()?.clone(), + self.text_only_progress_bar(), + CliNpmRegistryApi::default_url().to_owned(), + npm_resolution.clone(), + self.options.node_modules_dir_path(), + ); + Ok(Arc::new(CliNpmResolver::new( + npm_resolution.clone(), + npm_fs_resolver, + self.maybe_lockfile().as_ref().cloned(), + ))) + }) + .await + } + + pub async fn package_json_deps_installer( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .package_json_deps_installer + .get_or_try_init_async(async { + let npm_api = self.npm_api()?; + let npm_resolution = self.npm_resolution().await?; + Ok(Arc::new(PackageJsonDepsInstaller::new( + npm_api.clone(), + npm_resolution.clone(), + self.options.maybe_package_json_deps(), + ))) + }) + .await + } + + pub async fn maybe_import_map( + &self, + ) -> Result<&Option>, AnyError> { + self + .services + .maybe_import_map + .get_or_try_init_async(async { + Ok( + self + .options + .resolve_import_map(self.file_fetcher()?) + .await? 
+ .map(Arc::new), + ) + }) + .await + } + + pub async fn resolver(&self) -> Result<&Arc, AnyError> { + self + .services + .resolver + .get_or_try_init_async(async { + Ok(Arc::new(CliGraphResolver::new( + self.options.to_maybe_jsx_import_source_config(), + self.maybe_import_map().await?.clone(), + self.options.no_npm(), + self.npm_api()?.clone(), + self.npm_resolution().await?.clone(), + self.package_json_deps_installer().await?.clone(), + ))) + }) + .await + } + + pub fn file_watcher(&self) -> Result<&Arc, AnyError> { + self.services.file_watcher.get_or_try_init(|| { + let watcher = FileWatcher::new( + self.options.clone(), + self.cjs_resolutions().clone(), + self.graph_container().clone(), + self.maybe_file_watcher_reporter().clone(), + self.parsed_source_cache()?.clone(), + ); + watcher.init_watcher(); + Ok(Arc::new(watcher)) + }) + } + + pub fn maybe_file_watcher_reporter(&self) -> &Option { + let maybe_sender = self.maybe_sender.borrow_mut().take(); + self + .services + .maybe_file_watcher_reporter + .get_or_init(|| maybe_sender.map(FileWatcherReporter::new)) + } + + pub fn emit_cache(&self) -> Result<&EmitCache, AnyError> { + self.services.emit_cache.get_or_try_init(|| { + Ok(EmitCache::new(self.deno_dir()?.gen_cache.clone())) + }) + } + + pub fn parsed_source_cache( + &self, + ) -> Result<&Arc, AnyError> { + self.services.parsed_source_cache.get_or_try_init(|| { + Ok(Arc::new(ParsedSourceCache::new( + self.caches()?.dep_analysis_db(), + ))) + }) + } + + pub fn emitter(&self) -> Result<&Arc, AnyError> { + self.services.emitter.get_or_try_init(|| { + let ts_config_result = self + .options + .resolve_ts_config_for_emit(TsConfigType::Emit)?; + if let Some(ignored_options) = ts_config_result.maybe_ignored_options { + warn!("{}", ignored_options); + } + let emit_options: deno_ast::EmitOptions = + ts_config_result.ts_config.into(); + Ok(Arc::new(Emitter::new( + self.emit_cache()?.clone(), + self.parsed_source_cache()?.clone(), + emit_options, + ))) + }) + } + + pub async fn node_resolver(&self) -> Result<&Arc, AnyError> { + self + .services + .node_resolver + .get_or_try_init_async(async { + Ok(Arc::new(NodeResolver::new( + self.node_fs().clone(), + self.npm_resolver().await?.clone(), + ))) + }) + .await + } + + pub async fn node_code_translator( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .node_code_translator + .get_or_try_init_async(async { + let caches = self.caches()?; + let node_analysis_cache = + NodeAnalysisCache::new(caches.node_analysis_db()); + let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); + + Ok(Arc::new(NodeCodeTranslator::new( + cjs_esm_analyzer, + self.node_fs().clone(), + self.node_resolver().await?.clone(), + self.npm_resolver().await?.clone(), + ))) + }) + .await + } + + pub async fn type_checker(&self) -> Result<&Arc, AnyError> { + self + .services + .type_checker + .get_or_try_init_async(async { + Ok(Arc::new(TypeChecker::new( + self.caches()?.clone(), + self.options.clone(), + self.node_resolver().await?.clone(), + self.npm_resolver().await?.clone(), + ))) + }) + .await + } + + pub async fn module_graph_builder( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .module_graph_builder + .get_or_try_init_async(async { + Ok(Arc::new(ModuleGraphBuilder::new( + self.options.clone(), + self.resolver().await?.clone(), + self.npm_resolver().await?.clone(), + self.parsed_source_cache()?.clone(), + self.maybe_lockfile().clone(), + self.emit_cache()?.clone(), + self.file_fetcher()?.clone(), + 
self.type_checker().await?.clone(), + ))) + }) + .await + } + + pub fn graph_container(&self) -> &Arc { + self.services.graph_container.get_or_init(Default::default) + } + + pub fn maybe_inspector_server(&self) -> &Option> { + self + .services + .maybe_inspector_server + .get_or_init(|| self.options.resolve_inspector_server().map(Arc::new)) + } + + pub async fn module_load_preparer( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .module_load_preparer + .get_or_try_init_async(async { + Ok(Arc::new(ModuleLoadPreparer::new( + self.options.clone(), + self.graph_container().clone(), + self.maybe_lockfile().clone(), + self.maybe_file_watcher_reporter().clone(), + self.module_graph_builder().await?.clone(), + self.parsed_source_cache()?.clone(), + self.text_only_progress_bar().clone(), + self.resolver().await?.clone(), + self.type_checker().await?.clone(), + ))) + }) + .await + } + + pub fn cjs_resolutions(&self) -> &Arc { + self.services.cjs_resolutions.get_or_init(Default::default) + } + + /// Gets a function that can be used to create a CliMainWorkerFactory + /// for a file watcher. + pub async fn create_cli_main_worker_factory_func( + &self, + ) -> Result CliMainWorkerFactory>, AnyError> { + let emitter = self.emitter()?.clone(); + let graph_container = self.graph_container().clone(); + let module_load_preparer = self.module_load_preparer().await?.clone(); + let parsed_source_cache = self.parsed_source_cache()?.clone(); + let resolver = self.resolver().await?.clone(); + let blob_store = self.blob_store().clone(); + let cjs_resolutions = self.cjs_resolutions().clone(); + let node_code_translator = self.node_code_translator().await?.clone(); + let options = self.cli_options().clone(); + let main_worker_options = self.create_cli_main_worker_options()?; + let node_fs = self.node_fs().clone(); + let root_cert_store = self.root_cert_store()?.clone(); + let node_resolver = self.node_resolver().await?.clone(); + let npm_resolver = self.npm_resolver().await?.clone(); + let maybe_inspector_server = self.maybe_inspector_server().clone(); + Ok(Arc::new(move || { + CliMainWorkerFactory::new( + StorageKeyResolver::from_options(&options), + npm_resolver.clone(), + node_resolver.clone(), + Box::new(CliHasNodeSpecifierChecker(graph_container.clone())), + blob_store.clone(), + Box::new(CliModuleLoaderFactory::new( + &options, + emitter.clone(), + graph_container.clone(), + module_load_preparer.clone(), + parsed_source_cache.clone(), + resolver.clone(), + NpmModuleLoader::new( + cjs_resolutions.clone(), + node_code_translator.clone(), + node_resolver.clone(), + ), + )), + root_cert_store.clone(), + node_fs.clone(), + maybe_inspector_server.clone(), + main_worker_options.clone(), + ) + })) + } + + pub async fn create_cli_main_worker_factory( + &self, + ) -> Result { + let node_resolver = self.node_resolver().await?; + Ok(CliMainWorkerFactory::new( + StorageKeyResolver::from_options(&self.options), + self.npm_resolver().await?.clone(), + node_resolver.clone(), + Box::new(CliHasNodeSpecifierChecker(self.graph_container().clone())), + self.blob_store().clone(), + Box::new(CliModuleLoaderFactory::new( + &self.options, + self.emitter()?.clone(), + self.graph_container().clone(), + self.module_load_preparer().await?.clone(), + self.parsed_source_cache()?.clone(), + self.resolver().await?.clone(), + NpmModuleLoader::new( + self.cjs_resolutions().clone(), + self.node_code_translator().await?.clone(), + node_resolver.clone(), + ), + )), + self.root_cert_store()?.clone(), + self.node_fs().clone(), + 
self.maybe_inspector_server().clone(), + self.create_cli_main_worker_options()?, + )) + } + + fn create_cli_main_worker_options( + &self, + ) -> Result { + Ok(CliMainWorkerOptions { + argv: self.options.argv().clone(), + debug: self + .options + .log_level() + .map(|l| l == log::Level::Debug) + .unwrap_or(false), + coverage_dir: self.options.coverage_dir(), + enable_testing_features: self.options.enable_testing_features(), + has_node_modules_dir: self.options.has_node_modules_dir(), + inspect_brk: self.options.inspect_brk().is_some(), + inspect_wait: self.options.inspect_wait().is_some(), + is_inspecting: self.options.is_inspecting(), + is_npm_main: self.options.is_npm_main(), + location: self.options.location_flag().clone(), + maybe_binary_npm_command_name: { + let mut maybe_binary_command_name = None; + if let DenoSubcommand::Run(flags) = self.options.sub_command() { + if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) { + // if the user ran a binary command, we'll need to set process.argv[0] + // to be the name of the binary command instead of deno + let binary_name = pkg_ref + .sub_path + .as_deref() + .unwrap_or(pkg_ref.req.name.as_str()); + maybe_binary_command_name = Some(binary_name.to_string()); + } + } + maybe_binary_command_name + }, + origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()), + seed: self.options.seed(), + unsafely_ignore_certificate_errors: self + .options + .unsafely_ignore_certificate_errors() + .clone(), + unstable: self.options.unstable(), + }) + } +} + +struct CliHasNodeSpecifierChecker(Arc); + +impl HasNodeSpecifierChecker for CliHasNodeSpecifierChecker { + fn has_node_specifier(&self) -> bool { + self.0.graph().has_node_specifier + } +} diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 288e453626..d49a2559ca 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -76,6 +76,7 @@ use crate::args::LintOptions; use crate::args::TsConfig; use crate::cache::DenoDir; use crate::cache::HttpCache; +use crate::factory::CliFactory; use crate::file_fetcher::FileFetcher; use crate::graph_util; use crate::http_util::HttpClient; @@ -85,7 +86,6 @@ use crate::npm::CliNpmRegistryApi; use crate::npm::CliNpmResolver; use crate::npm::NpmCache; use crate::npm::NpmResolution; -use crate::proc_state::ProcState; use crate::tools::fmt::format_file; use crate::tools::fmt::format_parsed_source; use crate::util::fs::remove_dir_all_if_exists; @@ -185,15 +185,14 @@ impl LanguageServer { .into_iter() .map(|d| (d.specifier().clone(), d)) .collect::>(); - // todo(dsherret): don't use ProcState here - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; - let mut inner_loader = ps.module_graph_builder.create_graph_loader(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let module_graph_builder = factory.module_graph_builder().await?; + let mut inner_loader = module_graph_builder.create_graph_loader(); let mut loader = crate::lsp::documents::OpenDocumentsGraphLoader { inner_loader: &mut inner_loader, open_docs: &open_docs, }; - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph_with_loader(roots.clone(), &mut loader) .await?; graph_util::graph_valid( diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index 5dfb310137..4834cd0c9c 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -6,11 +6,11 @@ use super::lsp_custom; use crate::args::flags_from_vec; use crate::args::DenoSubcommand; +use 
crate::factory::CliFactory; use crate::lsp::client::Client; use crate::lsp::client::TestingNotification; use crate::lsp::config; use crate::lsp::logging::lsp_log; -use crate::proc_state; use crate::tools::test; use crate::tools::test::FailFastTracker; use crate::tools::test::TestEventSender; @@ -218,16 +218,16 @@ impl TestRun { let args = self.get_args(); lsp_log!("Executing test run with arguments: {}", args.join(" ")); let flags = flags_from_vec(args.into_iter().map(String::from).collect())?; - let ps = proc_state::ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. let permissions = - Permissions::from_options(&ps.options.permissions_options())?; + Permissions::from_options(&factory.cli_options().permissions_options())?; test::check_specifiers( - &ps.options, - &ps.file_fetcher, - &ps.module_load_preparer, + factory.cli_options(), + factory.file_fetcher()?, + factory.module_load_preparer().await?, self .queue .iter() @@ -236,18 +236,19 @@ impl TestRun { ) .await?; - let (concurrent_jobs, fail_fast) = - if let DenoSubcommand::Test(test_flags) = ps.options.sub_command() { - ( - test_flags - .concurrent_jobs - .unwrap_or_else(|| NonZeroUsize::new(1).unwrap()) - .into(), - test_flags.fail_fast, - ) - } else { - unreachable!("Should always be Test subcommand."); - }; + let (concurrent_jobs, fail_fast) = if let DenoSubcommand::Test(test_flags) = + factory.cli_options().sub_command() + { + ( + test_flags + .concurrent_jobs + .unwrap_or_else(|| NonZeroUsize::new(1).unwrap()) + .into(), + test_flags.fail_fast, + ) + } else { + unreachable!("Should always be Test subcommand."); + }; let (sender, mut receiver) = mpsc::unbounded_channel::(); let sender = TestEventSender::new(sender); @@ -259,7 +260,8 @@ impl TestRun { let tests: Arc>> = Arc::new(RwLock::new(IndexMap::new())); let mut test_steps = IndexMap::new(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); let join_handles = queue.into_iter().map(move |specifier| { let specifier = specifier.clone(); diff --git a/cli/main.rs b/cli/main.rs index 02ac5891cd..85942cbd82 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -6,6 +6,7 @@ mod cache; mod deno_std; mod emit; mod errors; +mod factory; mod file_fetcher; mod graph_util; mod http_util; @@ -16,19 +17,18 @@ mod napi; mod node; mod npm; mod ops; -mod proc_state; mod resolver; mod standalone; mod tools; mod tsc; mod util; mod version; +mod watcher; mod worker; use crate::args::flags_from_vec; use crate::args::DenoSubcommand; use crate::args::Flags; -use crate::proc_state::ProcState; use crate::resolver::CliGraphResolver; use crate::util::display; use crate::util::v8::get_v8_flags_from_env; @@ -41,6 +41,7 @@ use deno_core::error::JsError; use deno_runtime::colors; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::run_local; +use factory::CliFactory; use std::env; use std::env::current_exe; use std::path::PathBuf; @@ -70,16 +71,20 @@ async fn run_subcommand(flags: Flags) -> Result { tools::run::eval_command(flags, eval_flags).await } DenoSubcommand::Cache(cache_flags) => { - let ps = ProcState::from_flags(flags).await?; - ps.module_load_preparer + let factory = CliFactory::from_flags(flags).await?; + let module_load_preparer = 
factory.module_load_preparer().await?; + let emitter = factory.emitter()?; + let graph_container = factory.graph_container(); + module_load_preparer .load_and_type_check_files(&cache_flags.files) .await?; - ps.emitter.cache_module_emits(&ps.graph_container.graph())?; + emitter.cache_module_emits(&graph_container.graph())?; Ok(0) } DenoSubcommand::Check(check_flags) => { - let ps = ProcState::from_flags(flags).await?; - ps.module_load_preparer + let factory = CliFactory::from_flags(flags).await?; + let module_load_preparer = factory.module_load_preparer().await?; + module_load_preparer .load_and_type_check_files(&check_flags.files) .await?; Ok(0) diff --git a/cli/module_loader.rs b/cli/module_loader.rs index d8a5b73c4d..0ed84a20f6 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -12,14 +12,13 @@ use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphContainer; use crate::node; use crate::node::CliNodeCodeTranslator; -use crate::proc_state::CjsResolutionStore; -use crate::proc_state::FileWatcherReporter; use crate::resolver::CliGraphResolver; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::source_map_from_code; +use crate::watcher::FileWatcherReporter; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; @@ -791,3 +790,21 @@ impl NpmModuleLoader { Ok(response.into_url()) } } + +/// Keeps track of what module specifiers were resolved as CJS. +#[derive(Default)] +pub struct CjsResolutionStore(Mutex>); + +impl CjsResolutionStore { + pub fn clear(&self) { + self.0.lock().clear(); + } + + pub fn contains(&self, specifier: &ModuleSpecifier) -> bool { + self.0.lock().contains(specifier) + } + + pub fn insert(&self, specifier: ModuleSpecifier) { + self.0.lock().insert(specifier); + } +} diff --git a/cli/proc_state.rs b/cli/proc_state.rs deleted file mode 100644 index 6c1a5e7c56..0000000000 --- a/cli/proc_state.rs +++ /dev/null @@ -1,447 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -use crate::args::CliOptions; -use crate::args::DenoSubcommand; -use crate::args::Flags; -use crate::args::Lockfile; -use crate::args::StorageKeyResolver; -use crate::args::TsConfigType; -use crate::cache::Caches; -use crate::cache::DenoDir; -use crate::cache::EmitCache; -use crate::cache::HttpCache; -use crate::cache::NodeAnalysisCache; -use crate::cache::ParsedSourceCache; -use crate::emit::Emitter; -use crate::file_fetcher::FileFetcher; -use crate::graph_util::ModuleGraphBuilder; -use crate::graph_util::ModuleGraphContainer; -use crate::http_util::HttpClient; -use crate::module_loader::CliModuleLoaderFactory; -use crate::module_loader::ModuleLoadPreparer; -use crate::module_loader::NpmModuleLoader; -use crate::node::CliCjsEsmCodeAnalyzer; -use crate::node::CliNodeCodeTranslator; -use crate::npm::create_npm_fs_resolver; -use crate::npm::CliNpmRegistryApi; -use crate::npm::CliNpmResolver; -use crate::npm::NpmCache; -use crate::npm::NpmResolution; -use crate::npm::PackageJsonDepsInstaller; -use crate::resolver::CliGraphResolver; -use crate::tools::check::TypeChecker; -use crate::util::progress_bar::ProgressBar; -use crate::util::progress_bar::ProgressBarStyle; -use crate::worker::CliMainWorkerFactory; -use crate::worker::CliMainWorkerOptions; -use crate::worker::HasNodeSpecifierChecker; - -use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; -use deno_core::ModuleSpecifier; - -use deno_runtime::deno_node; -use deno_runtime::deno_node::analyze::NodeCodeTranslator; -use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_tls::rustls::RootCertStore; -use deno_runtime::deno_web::BlobStore; -use deno_runtime::inspector_server::InspectorServer; -use deno_semver::npm::NpmPackageReqReference; -use import_map::ImportMap; -use log::warn; -use std::collections::HashSet; -use std::path::PathBuf; -use std::sync::Arc; - -/// This structure used to represent state of single "deno" program -/// that was shared by all created workers. It morphed into being the -/// "factory" for all objects, but is being slowly phased out. -pub struct ProcState { - pub dir: DenoDir, - pub caches: Arc, - pub file_fetcher: Arc, - pub http_client: HttpClient, - pub options: Arc, - pub emit_cache: EmitCache, - pub emitter: Arc, - pub graph_container: Arc, - pub lockfile: Option>>, - pub maybe_import_map: Option>, - pub maybe_inspector_server: Option>, - pub root_cert_store: RootCertStore, - pub blob_store: BlobStore, - pub parsed_source_cache: Arc, - pub resolver: Arc, - maybe_file_watcher_reporter: Option, - pub module_graph_builder: Arc, - pub module_load_preparer: Arc, - pub node_code_translator: Arc, - pub node_fs: Arc, - pub node_resolver: Arc, - pub npm_api: Arc, - pub npm_cache: Arc, - pub npm_resolver: Arc, - pub npm_resolution: Arc, - pub package_json_deps_installer: Arc, - pub cjs_resolutions: Arc, -} - -impl ProcState { - pub async fn from_cli_options( - options: Arc, - ) -> Result { - Self::build_with_sender(options, None).await - } - - pub async fn from_flags(flags: Flags) -> Result { - Self::from_cli_options(Arc::new(CliOptions::from_flags(flags)?)).await - } - - pub async fn from_flags_for_file_watcher( - flags: Flags, - files_to_watch_sender: tokio::sync::mpsc::UnboundedSender>, - ) -> Result { - // resolve the config each time - let cli_options = Arc::new(CliOptions::from_flags(flags)?); - let ps = - Self::build_with_sender(cli_options, Some(files_to_watch_sender.clone())) - .await?; - ps.init_watcher(); - Ok(ps) - } - - /// Reset all runtime state to its default. 
This should be used on file - /// watcher restarts. - pub fn reset_for_file_watcher(&self) { - self.cjs_resolutions.clear(); - self.parsed_source_cache.clear(); - self.graph_container.clear(); - - self.init_watcher(); - } - - // Add invariant files like the import map and explicit watch flag list to - // the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher. - fn init_watcher(&self) { - let files_to_watch_sender = match &self.maybe_file_watcher_reporter { - Some(reporter) => &reporter.sender, - None => return, - }; - if let Some(watch_paths) = self.options.watch_paths() { - files_to_watch_sender.send(watch_paths.clone()).unwrap(); - } - if let Ok(Some(import_map_path)) = self - .options - .resolve_import_map_specifier() - .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) - { - files_to_watch_sender.send(vec![import_map_path]).unwrap(); - } - } - - async fn build_with_sender( - cli_options: Arc, - maybe_sender: Option>>, - ) -> Result { - let dir = cli_options.resolve_deno_dir()?; - let caches = Arc::new(Caches::default()); - // Warm up the caches we know we'll likely need based on the CLI mode - match cli_options.sub_command() { - DenoSubcommand::Run(_) => { - _ = caches.dep_analysis_db(&dir); - _ = caches.node_analysis_db(&dir); - } - DenoSubcommand::Check(_) => { - _ = caches.dep_analysis_db(&dir); - _ = caches.node_analysis_db(&dir); - _ = caches.type_checking_cache_db(&dir); - } - _ => {} - } - let blob_store = BlobStore::default(); - let deps_cache_location = dir.deps_folder_path(); - let http_cache = HttpCache::new(&deps_cache_location); - let root_cert_store = cli_options.resolve_root_cert_store()?; - let cache_usage = cli_options.cache_setting(); - let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly); - let http_client = HttpClient::new( - Some(root_cert_store.clone()), - cli_options.unsafely_ignore_certificate_errors().clone(), - )?; - let file_fetcher = FileFetcher::new( - http_cache, - cache_usage, - !cli_options.no_remote(), - http_client.clone(), - blob_store.clone(), - Some(progress_bar.clone()), - ); - - let lockfile = cli_options.maybe_lock_file(); - - let npm_registry_url = CliNpmRegistryApi::default_url().to_owned(); - let npm_cache = Arc::new(NpmCache::new( - dir.npm_folder_path(), - cli_options.cache_setting(), - http_client.clone(), - progress_bar.clone(), - )); - let npm_api = Arc::new(CliNpmRegistryApi::new( - npm_registry_url.clone(), - npm_cache.clone(), - http_client.clone(), - progress_bar.clone(), - )); - let npm_snapshot = cli_options - .resolve_npm_resolution_snapshot(&npm_api) - .await?; - let npm_resolution = Arc::new(NpmResolution::from_serialized( - npm_api.clone(), - npm_snapshot, - lockfile.as_ref().cloned(), - )); - let node_fs = Arc::new(deno_node::RealFs); - let npm_fs_resolver = create_npm_fs_resolver( - node_fs.clone(), - npm_cache.clone(), - &progress_bar, - npm_registry_url, - npm_resolution.clone(), - cli_options.node_modules_dir_path(), - ); - let npm_resolver = Arc::new(CliNpmResolver::new( - npm_resolution.clone(), - npm_fs_resolver, - lockfile.as_ref().cloned(), - )); - let package_json_deps_installer = Arc::new(PackageJsonDepsInstaller::new( - npm_api.clone(), - npm_resolution.clone(), - cli_options.maybe_package_json_deps(), - )); - let maybe_import_map = cli_options - .resolve_import_map(&file_fetcher) - .await? 
- .map(Arc::new); - let maybe_inspector_server = - cli_options.resolve_inspector_server().map(Arc::new); - - let resolver = Arc::new(CliGraphResolver::new( - cli_options.to_maybe_jsx_import_source_config(), - maybe_import_map.clone(), - cli_options.no_npm(), - npm_api.clone(), - npm_resolution.clone(), - package_json_deps_installer.clone(), - )); - - let maybe_file_watcher_reporter = - maybe_sender.map(|sender| FileWatcherReporter { - sender, - file_paths: Arc::new(Mutex::new(vec![])), - }); - - let ts_config_result = - cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?; - if let Some(ignored_options) = ts_config_result.maybe_ignored_options { - warn!("{}", ignored_options); - } - let emit_cache = EmitCache::new(dir.gen_cache.clone()); - let parsed_source_cache = - Arc::new(ParsedSourceCache::new(caches.dep_analysis_db(&dir))); - let emit_options: deno_ast::EmitOptions = ts_config_result.ts_config.into(); - let emitter = Arc::new(Emitter::new( - emit_cache.clone(), - parsed_source_cache.clone(), - emit_options, - )); - let file_fetcher = Arc::new(file_fetcher); - let node_analysis_cache = - NodeAnalysisCache::new(caches.node_analysis_db(&dir)); - let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); - let node_resolver = - Arc::new(NodeResolver::new(node_fs.clone(), npm_resolver.clone())); - let node_code_translator = Arc::new(NodeCodeTranslator::new( - cjs_esm_analyzer, - node_fs.clone(), - node_resolver.clone(), - npm_resolver.clone(), - )); - let type_checker = Arc::new(TypeChecker::new( - dir.clone(), - caches.clone(), - cli_options.clone(), - node_resolver.clone(), - npm_resolver.clone(), - )); - let module_graph_builder = Arc::new(ModuleGraphBuilder::new( - cli_options.clone(), - resolver.clone(), - npm_resolver.clone(), - parsed_source_cache.clone(), - lockfile.clone(), - emit_cache.clone(), - file_fetcher.clone(), - type_checker.clone(), - )); - let graph_container: Arc = Default::default(); - let module_load_preparer = Arc::new(ModuleLoadPreparer::new( - cli_options.clone(), - graph_container.clone(), - lockfile.clone(), - maybe_file_watcher_reporter.clone(), - module_graph_builder.clone(), - parsed_source_cache.clone(), - progress_bar.clone(), - resolver.clone(), - type_checker, - )); - - Ok(ProcState { - dir, - caches, - options: cli_options, - emit_cache, - emitter, - file_fetcher, - http_client, - graph_container, - lockfile, - maybe_import_map, - maybe_inspector_server, - root_cert_store, - blob_store, - parsed_source_cache, - resolver, - maybe_file_watcher_reporter, - module_graph_builder, - node_code_translator, - node_fs, - node_resolver, - npm_api, - npm_cache, - npm_resolver, - npm_resolution, - package_json_deps_installer, - cjs_resolutions: Default::default(), - module_load_preparer, - }) - } - - // todo(dsherret): this is a transitory method as we separate out - // ProcState from more code - pub fn create_cli_main_worker_factory(&self) -> CliMainWorkerFactory { - CliMainWorkerFactory::new( - StorageKeyResolver::from_options(&self.options), - self.npm_resolver.clone(), - self.node_resolver.clone(), - Box::new(CliHasNodeSpecifierChecker(self.graph_container.clone())), - self.blob_store.clone(), - Box::new(CliModuleLoaderFactory::new( - &self.options, - self.emitter.clone(), - self.graph_container.clone(), - self.module_load_preparer.clone(), - self.parsed_source_cache.clone(), - self.resolver.clone(), - NpmModuleLoader::new( - self.cjs_resolutions.clone(), - self.node_code_translator.clone(), - self.node_resolver.clone(), - ), - )), - 
self.root_cert_store.clone(), - self.node_fs.clone(), - self.maybe_inspector_server.clone(), - CliMainWorkerOptions { - argv: self.options.argv().clone(), - debug: self - .options - .log_level() - .map(|l| l == log::Level::Debug) - .unwrap_or(false), - coverage_dir: self.options.coverage_dir(), - enable_testing_features: self.options.enable_testing_features(), - has_node_modules_dir: self.options.has_node_modules_dir(), - inspect_brk: self.options.inspect_brk().is_some(), - inspect_wait: self.options.inspect_wait().is_some(), - is_inspecting: self.options.is_inspecting(), - is_npm_main: self.options.is_npm_main(), - location: self.options.location_flag().clone(), - maybe_binary_npm_command_name: { - let mut maybe_binary_command_name = None; - if let DenoSubcommand::Run(flags) = self.options.sub_command() { - if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) - { - // if the user ran a binary command, we'll need to set process.argv[0] - // to be the name of the binary command instead of deno - let binary_name = pkg_ref - .sub_path - .as_deref() - .unwrap_or(pkg_ref.req.name.as_str()); - maybe_binary_command_name = Some(binary_name.to_string()); - } - } - maybe_binary_command_name - }, - origin_data_folder_path: Some(self.dir.origin_data_folder_path()), - seed: self.options.seed(), - unsafely_ignore_certificate_errors: self - .options - .unsafely_ignore_certificate_errors() - .clone(), - unstable: self.options.unstable(), - }, - ) - } -} - -struct CliHasNodeSpecifierChecker(Arc); - -impl HasNodeSpecifierChecker for CliHasNodeSpecifierChecker { - fn has_node_specifier(&self) -> bool { - self.0.graph().has_node_specifier - } -} - -/// Keeps track of what module specifiers were resolved as CJS. -#[derive(Default)] -pub struct CjsResolutionStore(Mutex>); - -impl CjsResolutionStore { - pub fn clear(&self) { - self.0.lock().clear(); - } - - pub fn contains(&self, specifier: &ModuleSpecifier) -> bool { - self.0.lock().contains(specifier) - } - - pub fn insert(&self, specifier: ModuleSpecifier) { - self.0.lock().insert(specifier); - } -} - -#[derive(Clone, Debug)] -pub struct FileWatcherReporter { - sender: tokio::sync::mpsc::UnboundedSender>, - file_paths: Arc>>, -} - -impl deno_graph::source::Reporter for FileWatcherReporter { - fn on_load( - &self, - specifier: &ModuleSpecifier, - modules_done: usize, - modules_total: usize, - ) { - let mut file_paths = self.file_paths.lock(); - if specifier.scheme() == "file" { - file_paths.push(specifier.to_file_path().unwrap()); - } - - if modules_done == modules_total { - self.sender.send(file_paths.drain(..).collect()).unwrap(); - } - } -} diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index bca0aff2b4..51d8db79e1 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -5,7 +5,6 @@ use std::io::Seek; use std::io::SeekFrom; use std::io::Write; use std::path::Path; -use std::sync::Arc; use deno_ast::ModuleSpecifier; use deno_core::anyhow::Context; @@ -150,17 +149,17 @@ fn u64_from_bytes(arr: &[u8]) -> Result { Ok(u64::from_be_bytes(*fixed_arr)) } -pub struct DenoCompileBinaryWriter { - file_fetcher: Arc, - client: HttpClient, - deno_dir: DenoDir, +pub struct DenoCompileBinaryWriter<'a> { + file_fetcher: &'a FileFetcher, + client: &'a HttpClient, + deno_dir: &'a DenoDir, } -impl DenoCompileBinaryWriter { +impl<'a> DenoCompileBinaryWriter<'a> { pub fn new( - file_fetcher: Arc, - client: HttpClient, - deno_dir: DenoDir, + file_fetcher: &'a FileFetcher, + client: &'a HttpClient, + deno_dir: &'a DenoDir, ) -> Self 
{ Self { file_fetcher, @@ -282,7 +281,7 @@ impl DenoCompileBinaryWriter { None => None, }; let maybe_import_map = cli_options - .resolve_import_map(&self.file_fetcher) + .resolve_import_map(self.file_fetcher) .await? .map(|import_map| (import_map.base_url().clone(), import_map.to_json())); let metadata = Metadata { diff --git a/cli/tests/testdata/package_json/invalid_value/task.out b/cli/tests/testdata/package_json/invalid_value/task.out index 914dc27c6b..823c50612f 100644 --- a/cli/tests/testdata/package_json/invalid_value/task.out +++ b/cli/tests/testdata/package_json/invalid_value/task.out @@ -1,6 +1,6 @@ Warning Ignoring dependency '@denotest/cjs-default-export' in package.json because its version requirement failed to parse: Invalid npm specifier version requirement. Unexpected character. invalid stuff that won't parse ~ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task test echo 1 1 diff --git a/cli/tests/testdata/task/both/package_json_selected.out b/cli/tests/testdata/task/both/package_json_selected.out index 06b735c9da..d317af4ed4 100644 --- a/cli/tests/testdata/task/both/package_json_selected.out +++ b/cli/tests/testdata/task/both/package_json_selected.out @@ -1,7 +1,7 @@ Download http://localhost:4545/npm/registry/@denotest/bin Download http://localhost:4545/npm/registry/@denotest/bin/1.0.0.tgz Initialize @denotest/bin@1.0.0 -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task bin cli-esm testing this out "asdf" testing this diff --git a/cli/tests/testdata/task/npx/non_existent.out b/cli/tests/testdata/task/npx/non_existent.out index b08d29ece6..81065bf743 100644 --- a/cli/tests/testdata/task/npx/non_existent.out +++ b/cli/tests/testdata/task/npx/non_existent.out @@ -1,3 +1,3 @@ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task non-existent npx this-command-should-not-exist-for-you npx: could not resolve command 'this-command-should-not-exist-for-you' diff --git a/cli/tests/testdata/task/npx/on_own.out b/cli/tests/testdata/task/npx/on_own.out index 80d8ed9db3..fc9673f7f6 100644 --- a/cli/tests/testdata/task/npx/on_own.out +++ b/cli/tests/testdata/task/npx/on_own.out @@ -1,3 +1,3 @@ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. 
Task on-own npx npx: missing command diff --git a/cli/tests/testdata/task/package_json/bin.out b/cli/tests/testdata/task/package_json/bin.out index fac6921156..6cfa06d433 100644 --- a/cli/tests/testdata/task/package_json/bin.out +++ b/cli/tests/testdata/task/package_json/bin.out @@ -3,7 +3,7 @@ Download http://localhost:4545/npm/registry/@denotest/bin/0.5.0.tgz Initialize @denotest/bin@0.5.0 Download http://localhost:4545/npm/registry/@denotest/bin/1.0.0.tgz Initialize @denotest/bin@1.0.0 -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task bin @denotest/bin hi && cli-esm testing this out && npx cli-cjs test "extra" hi testing diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 3f606cfa93..aa5bd044df 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -5,10 +5,10 @@ use crate::args::CliOptions; use crate::args::TypeCheckMode; use crate::colors; use crate::display::write_json_to_stdout; +use crate::factory::CliFactory; use crate::graph_util::graph_valid_with_cli_options; use crate::module_loader::ModuleLoadPreparer; use crate::ops; -use crate::proc_state::ProcState; use crate::tools::test::format_test_error; use crate::tools::test::TestFilter; use crate::util::file_watcher; @@ -635,12 +635,13 @@ pub async fn run_benchmarks( cli_options: CliOptions, bench_options: BenchOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); // Various bench files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; + Permissions::from_options(&cli_options.permissions_options())?; let specifiers = collect_specifiers(&bench_options.files, is_supported_bench_path)?; @@ -649,15 +650,20 @@ pub async fn run_benchmarks( return Err(generic_error("No bench modules found")); } - check_specifiers(&ps.options, &ps.module_load_preparer, specifiers.clone()) - .await?; + check_specifiers( + cli_options, + factory.module_load_preparer().await?, + specifiers.clone(), + ) + .await?; if bench_options.no_run { return Ok(()); } - let log_level = ps.options.log_level(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + let log_level = cli_options.log_level(); + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); bench_specifiers( worker_factory, &permissions, @@ -678,21 +684,25 @@ pub async fn run_benchmarks_with_watch( cli_options: CliOptions, bench_options: BenchOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let module_graph_builder = factory.module_graph_builder().await?; + let file_watcher = factory.file_watcher()?; + let module_load_preparer = factory.module_load_preparer().await?; // Various bench files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let no_check = ps.options.type_check_mode() == TypeCheckMode::None; + Permissions::from_options(&cli_options.permissions_options())?; + let no_check = cli_options.type_check_mode() == TypeCheckMode::None; let resolver = |changed: Option>| { let paths_to_watch = bench_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let bench_options = &bench_options; - let module_graph_builder = ps.module_graph_builder.clone(); - let cli_options = ps.options.clone(); + let module_graph_builder = module_graph_builder.clone(); + let cli_options = cli_options.clone(); async move { let bench_modules = @@ -797,15 +807,18 @@ pub async fn run_benchmarks_with_watch( }) }; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |modules_to_reload: Vec| { let permissions = &permissions; let bench_options = &bench_options; - ps.reset_for_file_watcher(); - let module_load_preparer = ps.module_load_preparer.clone(); - let cli_options = ps.options.clone(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + file_watcher.reset(); + let module_load_preparer = module_load_preparer.clone(); + let cli_options = cli_options.clone(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); async move { + let worker_factory = Arc::new(create_cli_main_worker_factory()); let specifiers = collect_specifiers(&bench_options.files, is_supported_bench_path)? 
.into_iter() @@ -836,7 +849,7 @@ pub async fn run_benchmarks_with_watch( } }; - let clear_screen = !ps.options.no_clear_screen(); + let clear_screen = !cli_options.no_clear_screen(); file_watcher::watch_func( resolver, operation, diff --git a/cli/tools/bundle.rs b/cli/tools/bundle.rs index 26d170d7e2..759882c833 100644 --- a/cli/tools/bundle.rs +++ b/cli/tools/bundle.rs @@ -13,8 +13,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::TsConfigType; use crate::args::TypeCheckMode; +use crate::factory::CliFactory; use crate::graph_util::error_for_any_npm_specifier; -use crate::proc_state::ProcState; use crate::util; use crate::util::display; use crate::util::file_watcher::ResolutionResult; @@ -40,9 +40,11 @@ pub async fn bundle( let module_specifier = &module_specifier; async move { log::debug!(">>>>> bundle START"); - let ps = ProcState::from_cli_options(cli_options).await?; - let graph = ps - .module_graph_builder + let factory = CliFactory::from_cli_options(cli_options); + let module_graph_builder = factory.module_graph_builder().await?; + let cli_options = factory.cli_options(); + + let graph = module_graph_builder .create_graph_and_maybe_check(vec![module_specifier.clone()]) .await?; @@ -58,15 +60,14 @@ pub async fn bundle( }) .collect(); - if let Ok(Some(import_map_path)) = ps - .options + if let Ok(Some(import_map_path)) = cli_options .resolve_import_map_specifier() .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) { paths_to_watch.push(import_map_path); } - Ok((paths_to_watch, graph, ps)) + Ok((paths_to_watch, graph, cli_options.clone())) } .map(move |result| match result { Ok((paths_to_watch, graph, ps)) => ResolutionResult::Restart { @@ -80,49 +81,50 @@ pub async fn bundle( }) }; - let operation = |(ps, graph): (ProcState, Arc)| { - let out_file = &bundle_flags.out_file; - async move { - // at the moment, we don't support npm specifiers in deno bundle, so show an error - error_for_any_npm_specifier(&graph)?; + let operation = + |(cli_options, graph): (Arc, Arc)| { + let out_file = &bundle_flags.out_file; + async move { + // at the moment, we don't support npm specifiers in deno bundle, so show an error + error_for_any_npm_specifier(&graph)?; - let bundle_output = bundle_module_graph(graph.as_ref(), &ps)?; - log::debug!(">>>>> bundle END"); + let bundle_output = bundle_module_graph(graph.as_ref(), &cli_options)?; + log::debug!(">>>>> bundle END"); - if let Some(out_file) = out_file { - let output_bytes = bundle_output.code.as_bytes(); - let output_len = output_bytes.len(); - util::fs::write_file(out_file, output_bytes, 0o644)?; - log::info!( - "{} {:?} ({})", - colors::green("Emit"), - out_file, - colors::gray(display::human_size(output_len as f64)) - ); - if let Some(bundle_map) = bundle_output.maybe_map { - let map_bytes = bundle_map.as_bytes(); - let map_len = map_bytes.len(); - let ext = if let Some(curr_ext) = out_file.extension() { - format!("{}.map", curr_ext.to_string_lossy()) - } else { - "map".to_string() - }; - let map_out_file = out_file.with_extension(ext); - util::fs::write_file(&map_out_file, map_bytes, 0o644)?; + if let Some(out_file) = out_file { + let output_bytes = bundle_output.code.as_bytes(); + let output_len = output_bytes.len(); + util::fs::write_file(out_file, output_bytes, 0o644)?; log::info!( "{} {:?} ({})", colors::green("Emit"), - map_out_file, - colors::gray(display::human_size(map_len as f64)) + out_file, + colors::gray(display::human_size(output_len as f64)) ); + if let Some(bundle_map) = bundle_output.maybe_map { + let 
map_bytes = bundle_map.as_bytes(); + let map_len = map_bytes.len(); + let ext = if let Some(curr_ext) = out_file.extension() { + format!("{}.map", curr_ext.to_string_lossy()) + } else { + "map".to_string() + }; + let map_out_file = out_file.with_extension(ext); + util::fs::write_file(&map_out_file, map_bytes, 0o644)?; + log::info!( + "{} {:?} ({})", + colors::green("Emit"), + map_out_file, + colors::gray(display::human_size(map_len as f64)) + ); + } + } else { + println!("{}", bundle_output.code); } - } else { - println!("{}", bundle_output.code); - } - Ok(()) - } - }; + Ok(()) + } + }; if cli_options.watch_paths().is_some() { util::file_watcher::watch_func( @@ -149,14 +151,13 @@ pub async fn bundle( fn bundle_module_graph( graph: &deno_graph::ModuleGraph, - ps: &ProcState, + cli_options: &CliOptions, ) -> Result { log::info!("{} {}", colors::green("Bundle"), graph.roots[0]); - let ts_config_result = ps - .options - .resolve_ts_config_for_emit(TsConfigType::Bundle)?; - if ps.options.type_check_mode() == TypeCheckMode::None { + let ts_config_result = + cli_options.resolve_ts_config_for_emit(TsConfigType::Bundle)?; + if cli_options.type_check_mode() == TypeCheckMode::None { if let Some(ignored_options) = ts_config_result.maybe_ignored_options { log::warn!("{}", ignored_options); } diff --git a/cli/tools/check.rs b/cli/tools/check.rs index 4fb6800fa0..4464802e6e 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -19,7 +19,6 @@ use crate::args::TsConfigType; use crate::args::TsTypeLib; use crate::args::TypeCheckMode; use crate::cache::Caches; -use crate::cache::DenoDir; use crate::cache::FastInsecureHasher; use crate::cache::TypeCheckCache; use crate::npm::CliNpmResolver; @@ -39,7 +38,6 @@ pub struct CheckOptions { } pub struct TypeChecker { - deno_dir: DenoDir, caches: Arc, cli_options: Arc, node_resolver: Arc, @@ -48,14 +46,12 @@ pub struct TypeChecker { impl TypeChecker { pub fn new( - deno_dir: DenoDir, caches: Arc, cli_options: Arc, node_resolver: Arc, npm_resolver: Arc, ) -> Self { Self { - deno_dir, caches, cli_options, node_resolver, @@ -95,8 +91,7 @@ impl TypeChecker { let ts_config = ts_config_result.ts_config; let type_check_mode = self.cli_options.type_check_mode(); let debug = self.cli_options.log_level() == Some(log::Level::Debug); - let cache = - TypeCheckCache::new(self.caches.type_checking_cache_db(&self.deno_dir)); + let cache = TypeCheckCache::new(self.caches.type_checking_cache_db()); let check_js = ts_config.get_check_js(); let check_hash = match get_check_hash(&graph, type_check_mode, &ts_config) { CheckHashResult::NoFiles => return Ok(()), diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index d3044a7163..223bac3167 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -4,7 +4,7 @@ use crate::args::CoverageFlags; use crate::args::FileFlags; use crate::args::Flags; use crate::colors; -use crate::proc_state::ProcState; +use crate::factory::CliFactory; use crate::tools::fmt::format_json; use crate::tools::test::is_supported_test_path; use crate::util::fs::FileCollector; @@ -623,8 +623,11 @@ pub async fn cover_files( return Err(generic_error("No matching coverage profiles found")); } - let ps = ProcState::from_flags(flags).await?; - let root_dir_url = ps.npm_resolver.root_dir_url(); + let factory = CliFactory::from_flags(flags).await?; + let root_dir_url = factory.npm_resolver().await?.root_dir_url(); + let file_fetcher = factory.file_fetcher()?; + let cli_options = factory.cli_options(); + let emitter = factory.emitter()?; let 
script_coverages = collect_coverages(coverage_flags.files)?; let script_coverages = filter_coverages( @@ -667,13 +670,13 @@ pub async fn cover_files( for script_coverage in script_coverages { let module_specifier = deno_core::resolve_url_or_path( &script_coverage.url, - ps.options.initial_cwd(), + cli_options.initial_cwd(), )?; let maybe_file = if module_specifier.scheme() == "file" { - ps.file_fetcher.get_source(&module_specifier) + file_fetcher.get_source(&module_specifier) } else { - ps.file_fetcher + file_fetcher .fetch_cached(&module_specifier, 10) .with_context(|| { format!("Failed to fetch \"{module_specifier}\" from cache.") @@ -700,7 +703,7 @@ pub async fn cover_files( | MediaType::Mts | MediaType::Cts | MediaType::Tsx => { - match ps.emitter.maybed_cached_emit(&file.specifier, &file.source) { + match emitter.maybed_cached_emit(&file.specifier, &file.source) { Some(code) => code.into(), None => { return Err(anyhow!( diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index a07ba175aa..2cb53cb6ab 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -6,9 +6,9 @@ use crate::args::Flags; use crate::colors; use crate::display::write_json_to_stdout; use crate::display::write_to_stdout_ignore_sigpipe; +use crate::factory::CliFactory; use crate::file_fetcher::File; use crate::graph_util::graph_lock_or_exit; -use crate::proc_state::ProcState; use crate::tsc::get_types_declaration_file_text; use deno_ast::MediaType; use deno_core::anyhow::bail; @@ -23,13 +23,14 @@ pub async fn print_docs( flags: Flags, doc_flags: DocFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); let mut doc_nodes = match doc_flags.source_file { DocSourceFileFlag::Builtin => { let source_file_specifier = ModuleSpecifier::parse("internal://lib.deno.d.ts").unwrap(); - let content = get_types_declaration_file_text(ps.options.unstable()); + let content = get_types_declaration_file_text(cli_options.unstable()); let mut loader = deno_graph::source::MemoryLoader::new( vec![( source_file_specifier.to_string(), @@ -61,13 +62,18 @@ pub async fn print_docs( doc_parser.parse_module(&source_file_specifier)?.definitions } DocSourceFileFlag::Path(source_file) => { + let file_fetcher = factory.file_fetcher()?; + let module_graph_builder = factory.module_graph_builder().await?; + let maybe_lockfile = factory.maybe_lockfile(); + let parsed_source_cache = factory.parsed_source_cache()?; + let module_specifier = - resolve_url_or_path(&source_file, ps.options.initial_cwd())?; + resolve_url_or_path(&source_file, cli_options.initial_cwd())?; // If the root module has external types, the module graph won't redirect it, // so instead create a dummy file which exports everything from the actual file being documented. let root_specifier = - resolve_path("./$deno$doc.ts", ps.options.initial_cwd()).unwrap(); + resolve_path("./$deno$doc.ts", cli_options.initial_cwd()).unwrap(); let root = File { local: PathBuf::from("./$deno$doc.ts"), maybe_types: None, @@ -78,21 +84,20 @@ pub async fn print_docs( }; // Save our fake file into file fetcher cache. 
- ps.file_fetcher.insert_cached(root); + file_fetcher.insert_cached(root); - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph(vec![root_specifier.clone()]) .await?; - if let Some(lockfile) = &ps.lockfile { + if let Some(lockfile) = maybe_lockfile { graph_lock_or_exit(&graph, &mut lockfile.lock()); } let doc_parser = doc::DocParser::new( graph, doc_flags.private, - ps.parsed_source_cache.as_capturing_parser(), + parsed_source_cache.as_capturing_parser(), ); doc_parser.parse_with_reexports(&root_specifier)? } diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 41accacba9..70d2bd6395 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -12,8 +12,8 @@ use crate::args::FilesConfig; use crate::args::FmtOptions; use crate::args::FmtOptionsConfig; use crate::args::ProseWrap; -use crate::cache::Caches; use crate::colors; +use crate::factory::CliFactory; use crate::util::diff::diff; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -101,11 +101,12 @@ pub async fn format( } } }; - let deno_dir = &cli_options.resolve_deno_dir()?; - let caches = Caches::default(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let caches = factory.caches()?; let operation = |(paths, fmt_options): (Vec, FmtOptionsConfig)| async { let incremental_cache = Arc::new(IncrementalCache::new( - caches.fmt_incremental_cache_db(deno_dir), + caches.fmt_incremental_cache_db(), &fmt_options, &paths, )); diff --git a/cli/tools/info.rs b/cli/tools/info.rs index a59f8a4c84..d491f55dc6 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -27,57 +27,61 @@ use deno_semver::npm::NpmPackageReqReference; use crate::args::Flags; use crate::args::InfoFlags; use crate::display; +use crate::factory::CliFactory; use crate::graph_util::graph_lock_or_exit; use crate::npm::CliNpmResolver; -use crate::proc_state::ProcState; use crate::util::checksum; pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); if let Some(specifier) = info_flags.file { - let specifier = resolve_url_or_path(&specifier, ps.options.initial_cwd())?; - let mut loader = ps.module_graph_builder.create_graph_loader(); + let module_graph_builder = factory.module_graph_builder().await?; + let npm_resolver = factory.npm_resolver().await?; + let maybe_lockfile = factory.maybe_lockfile(); + let specifier = resolve_url_or_path(&specifier, cli_options.initial_cwd())?; + let mut loader = module_graph_builder.create_graph_loader(); loader.enable_loading_cache_info(); // for displaying the cache information - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph_with_loader(vec![specifier], &mut loader) .await?; - if let Some(lockfile) = &ps.lockfile { + if let Some(lockfile) = maybe_lockfile { graph_lock_or_exit(&graph, &mut lockfile.lock()); } if info_flags.json { let mut json_graph = json!(graph); - add_npm_packages_to_json(&mut json_graph, &ps.npm_resolver); + add_npm_packages_to_json(&mut json_graph, npm_resolver); display::write_json_to_stdout(&json_graph)?; } else { let mut output = String::new(); - GraphDisplayContext::write(&graph, &ps.npm_resolver, &mut output)?; + GraphDisplayContext::write(&graph, npm_resolver, &mut output)?; display::write_to_stdout_ignore_sigpipe(output.as_bytes())?; } } else { // If it was just "deno info" 
print location of caches and exit print_cache_info( - &ps, + &factory, info_flags.json, - ps.options.location_flag().as_ref(), + cli_options.location_flag().as_ref(), )?; } Ok(()) } fn print_cache_info( - state: &ProcState, + factory: &CliFactory, json: bool, location: Option<&deno_core::url::Url>, ) -> Result<(), AnyError> { - let deno_dir = &state.dir.root_path_for_display(); - let modules_cache = &state.file_fetcher.get_http_cache_location(); - let npm_cache = &state.npm_cache.as_readonly().get_cache_location(); - let typescript_cache = &state.dir.gen_cache.location; - let registry_cache = &state.dir.registries_folder_path(); - let mut origin_dir = state.dir.origin_data_folder_path(); + let dir = factory.deno_dir()?; + let modules_cache = factory.file_fetcher()?.get_http_cache_location(); + let npm_cache = factory.npm_cache()?.as_readonly().get_cache_location(); + let typescript_cache = &dir.gen_cache.location; + let registry_cache = dir.registries_folder_path(); + let mut origin_dir = dir.origin_data_folder_path(); + let deno_dir = dir.root_path_for_display().to_string(); if let Some(location) = &location { origin_dir = @@ -88,7 +92,7 @@ fn print_cache_info( if json { let mut output = json!({ - "denoDir": deno_dir.to_string(), + "denoDir": deno_dir, "modulesCache": modules_cache, "npmCache": npm_cache, "typescriptCache": typescript_cache, diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index 461bb1a50a..fb83c3cab9 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -6,8 +6,8 @@ use crate::args::ConfigFlag; use crate::args::Flags; use crate::args::InstallFlags; use crate::args::TypeCheckMode; +use crate::factory::CliFactory; use crate::http_util::HttpClient; -use crate::proc_state::ProcState; use crate::util::fs::canonicalize_path_maybe_not_exists; use deno_core::anyhow::Context; @@ -233,9 +233,10 @@ pub async fn install_command( install_flags: InstallFlags, ) -> Result<(), AnyError> { // ensure the module is cached - ProcState::from_flags(flags.clone()) + CliFactory::from_flags(flags.clone()) + .await? + .module_load_preparer() .await? 
- .module_load_preparer .load_and_type_check_files(&[install_flags.module_url.clone()]) .await?; diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs index eae2f1032d..40c37ce773 100644 --- a/cli/tools/lint.rs +++ b/cli/tools/lint.rs @@ -11,8 +11,8 @@ use crate::args::FilesConfig; use crate::args::LintOptions; use crate::args::LintReporterKind; use crate::args::LintRulesConfig; -use crate::cache::Caches; use crate::colors; +use crate::factory::CliFactory; use crate::tools::fmt::run_parallelized; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -98,11 +98,12 @@ pub async fn lint( }; let has_error = Arc::new(AtomicBool::new(false)); - let deno_dir = cli_options.resolve_deno_dir()?; - let caches = Caches::default(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let caches = factory.caches()?; let operation = |paths: Vec| async { let incremental_cache = Arc::new(IncrementalCache::new( - caches.lint_incremental_cache_db(&deno_dir), + caches.lint_incremental_cache_db(), // use a hash of the rule names in order to bust the cache &{ // ensure this is stable by sorting it diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index f0faf74ec1..9f4b589196 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -4,8 +4,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::ReplFlags; use crate::colors; +use crate::factory::CliFactory; use crate::file_fetcher::FileFetcher; -use crate::proc_state::ProcState; use deno_core::error::AnyError; use deno_core::futures::StreamExt; use deno_runtime::permissions::Permissions; @@ -98,17 +98,17 @@ async fn read_eval_file( } pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let main_module = cli_options.resolve_main_module()?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); - let cli_options = ps.options.clone(); - let npm_resolver = ps.npm_resolver.clone(); - let resolver = ps.resolver.clone(); - let dir = ps.dir.clone(); - let file_fetcher = ps.file_fetcher.clone(); - let worker_factory = ps.create_cli_main_worker_factory(); + let npm_resolver = factory.npm_resolver().await?.clone(); + let resolver = factory.resolver().await?.clone(); + let dir = factory.deno_dir()?; + let file_fetcher = factory.file_fetcher()?; + let worker_factory = factory.create_cli_main_worker_factory().await?; let mut worker = worker_factory .create_main_worker(main_module, permissions) @@ -116,7 +116,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { worker.setup_repl().await?; let worker = worker.into_main_worker(); let mut repl_session = - ReplSession::initialize(&cli_options, npm_resolver, resolver, worker) + ReplSession::initialize(cli_options, npm_resolver, resolver, worker) .await?; let mut rustyline_channel = rustyline_channel(); @@ -130,7 +130,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { if let Some(eval_files) = repl_flags.eval_files { for eval_file in eval_files { - match read_eval_file(&cli_options, &file_fetcher, &eval_file).await { + match read_eval_file(cli_options, file_fetcher, &eval_file).await { Ok(eval_source) => { let output = repl_session 
.evaluate_line_and_get_output(&eval_source) diff --git a/cli/tools/run.rs b/cli/tools/run.rs index e1dc529bc2..c6e706285b 100644 --- a/cli/tools/run.rs +++ b/cli/tools/run.rs @@ -10,8 +10,9 @@ use deno_runtime::permissions::PermissionsContainer; use crate::args::EvalFlags; use crate::args::Flags; +use crate::factory::CliFactory; +use crate::factory::CliFactoryBuilder; use crate::file_fetcher::File; -use crate::proc_state::ProcState; use crate::util; pub async fn run_script(flags: Flags) -> Result { @@ -31,23 +32,25 @@ To grant permissions, set them before the script argument. For example: } // TODO(bartlomieju): actually I think it will also fail if there's an import - // map specified and bare specifier is used on the command line - this should - // probably call `ProcState::resolve` instead - let ps = ProcState::from_flags(flags).await?; + // map specified and bare specifier is used on the command line + let factory = CliFactory::from_flags(flags).await?; + let deno_dir = factory.deno_dir()?; + let http_client = factory.http_client()?; + let cli_options = factory.cli_options(); // Run a background task that checks for available upgrades. If an earlier // run of this background task found a new version of Deno. super::upgrade::check_for_upgrades( - ps.http_client.clone(), - ps.dir.upgrade_check_file_path(), + http_client.clone(), + deno_dir.upgrade_check_file_path(), ); - let main_module = ps.options.resolve_main_module()?; + let main_module = cli_options.resolve_main_module()?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); - let worker_factory = ps.create_cli_main_worker_factory(); + let worker_factory = factory.create_cli_main_worker_factory().await?; let mut worker = worker_factory .create_main_worker(main_module, permissions) .await?; @@ -57,11 +60,14 @@ To grant permissions, set them before the script argument. For example: } pub async fn run_from_stdin(flags: Flags) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let main_module = cli_options.resolve_main_module()?; + let file_fetcher = factory.file_fetcher()?; + let worker_factory = factory.create_cli_main_worker_factory().await?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); let mut source = Vec::new(); std::io::stdin().read_to_end(&mut source)?; @@ -76,9 +82,8 @@ pub async fn run_from_stdin(flags: Flags) -> Result { }; // Save our fake file into file fetcher cache // to allow module access by TS compiler - ps.file_fetcher.insert_cached(source_file); + file_fetcher.insert_cached(source_file); - let worker_factory = ps.create_cli_main_worker_factory(); let mut worker = worker_factory .create_main_worker(main_module, permissions) .await?; @@ -90,20 +95,26 @@ pub async fn run_from_stdin(flags: Flags) -> Result { // code properly. 
async fn run_with_watch(flags: Flags) -> Result { let (sender, receiver) = tokio::sync::mpsc::unbounded_channel(); - let ps = - ProcState::from_flags_for_file_watcher(flags, sender.clone()).await?; - let clear_screen = !ps.options.no_clear_screen(); - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactoryBuilder::new() + .with_watcher(sender.clone()) + .build_from_flags(flags) + .await?; + let file_watcher = factory.file_watcher()?; + let cli_options = factory.cli_options(); + let clear_screen = !cli_options.no_clear_screen(); + let main_module = cli_options.resolve_main_module()?; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |main_module: ModuleSpecifier| { - ps.reset_for_file_watcher(); + file_watcher.reset(); let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); - let worker_factory = ps.create_cli_main_worker_factory(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); Ok(async move { - let worker = worker_factory + let worker = create_cli_main_worker_factory() .create_main_worker(main_module, permissions) .await?; worker.run_for_watcher().await?; @@ -130,10 +141,14 @@ pub async fn eval_command( flags: Flags, eval_flags: EvalFlags, ) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let main_worker_factory = factory.create_cli_main_worker_factory().await?; + + let main_module = cli_options.resolve_main_module()?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); // Create a dummy source file. let source_code = if eval_flags.print { @@ -154,10 +169,9 @@ pub async fn eval_command( // Save our fake file into file fetcher cache // to allow module access by TS compiler. 
- ps.file_fetcher.insert_cached(file); + file_fetcher.insert_cached(file); - let mut worker = ps - .create_cli_main_worker_factory() + let mut worker = main_worker_factory .create_main_worker(main_module, permissions) .await?; let exit_code = worker.run().await?; diff --git a/cli/tools/standalone.rs b/cli/tools/standalone.rs index 94b1c01703..0e8d9ca733 100644 --- a/cli/tools/standalone.rs +++ b/cli/tools/standalone.rs @@ -2,11 +2,11 @@ use crate::args::CompileFlags; use crate::args::Flags; +use crate::factory::CliFactory; use crate::graph_util::error_for_any_npm_specifier; use crate::standalone::is_standalone_binary; use crate::standalone::DenoCompileBinaryWriter; use crate::util::path::path_has_trailing_slash; -use crate::ProcState; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::generic_error; @@ -23,30 +23,34 @@ pub async fn compile( flags: Flags, compile_flags: CompileFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; - let binary_writer = DenoCompileBinaryWriter::new( - ps.file_fetcher.clone(), - ps.http_client.clone(), - ps.dir.clone(), - ); - let module_specifier = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let http_client = factory.http_client()?; + let deno_dir = factory.deno_dir()?; + let module_graph_builder = factory.module_graph_builder().await?; + let parsed_source_cache = factory.parsed_source_cache()?; + + let binary_writer = + DenoCompileBinaryWriter::new(file_fetcher, http_client, deno_dir); + let module_specifier = cli_options.resolve_main_module()?; let module_roots = { let mut vec = Vec::with_capacity(compile_flags.include.len() + 1); vec.push(module_specifier.clone()); for side_module in &compile_flags.include { - vec.push(resolve_url_or_path(side_module, ps.options.initial_cwd())?); + vec.push(resolve_url_or_path(side_module, cli_options.initial_cwd())?); } vec }; let output_path = resolve_compile_executable_output_path( &compile_flags, - ps.options.initial_cwd(), + cli_options.initial_cwd(), ) .await?; let graph = Arc::try_unwrap( - ps.module_graph_builder + module_graph_builder .create_graph_and_maybe_check(module_roots) .await?, ) @@ -55,7 +59,7 @@ pub async fn compile( // at the moment, we don't support npm specifiers in deno_compile, so show an error error_for_any_npm_specifier(&graph)?; - let parser = ps.parsed_source_cache.as_capturing_parser(); + let parser = parsed_source_cache.as_capturing_parser(); let eszip = eszip::EszipV2::from_graph(graph, &parser, Default::default())?; log::info!( @@ -73,7 +77,7 @@ pub async fn compile( eszip, &module_specifier, &compile_flags, - &ps.options, + cli_options, ) .await .with_context(|| format!("Writing {}", output_path.display()))?; diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 5d34d39c75..6380d3822a 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -4,8 +4,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::TaskFlags; use crate::colors; +use crate::factory::CliFactory; use crate::npm::CliNpmResolver; -use crate::proc_state::ProcState; use crate::util::fs::canonicalize_path; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -26,9 +26,10 @@ pub async fn execute_script( flags: Flags, task_flags: TaskFlags, ) -> Result { - let ps = ProcState::from_flags(flags).await?; - let tasks_config = ps.options.resolve_tasks_config()?; - let maybe_package_json = 
ps.options.maybe_package_json(); + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let tasks_config = cli_options.resolve_tasks_config()?; + let maybe_package_json = cli_options.maybe_package_json(); let package_json_scripts = maybe_package_json .as_ref() .and_then(|p| p.scripts.clone()) @@ -43,7 +44,7 @@ pub async fn execute_script( }; if let Some(script) = tasks_config.get(task_name) { - let config_file_url = ps.options.maybe_config_file_specifier().unwrap(); + let config_file_url = cli_options.maybe_config_file_specifier().unwrap(); let config_file_path = if config_file_url.scheme() == "file" { config_file_url.to_file_path().unwrap() } else { @@ -53,7 +54,7 @@ pub async fn execute_script( Some(path) => canonicalize_path(&PathBuf::from(path))?, None => config_file_path.parent().unwrap().to_owned(), }; - let script = get_script_with_args(script, &ps.options); + let script = get_script_with_args(script, cli_options); output_task(task_name, &script); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{task_name}'."))?; @@ -63,7 +64,12 @@ pub async fn execute_script( .await; Ok(exit_code) } else if let Some(script) = package_json_scripts.get(task_name) { - if let Some(package_deps) = ps.package_json_deps_installer.package_deps() { + let package_json_deps_installer = + factory.package_json_deps_installer().await?; + let npm_resolver = factory.npm_resolver().await?; + let node_resolver = factory.node_resolver().await?; + + if let Some(package_deps) = package_json_deps_installer.package_deps() { for (key, value) in package_deps { if let Err(err) = value { log::info!( @@ -75,13 +81,14 @@ pub async fn execute_script( } } } - ps.package_json_deps_installer + + package_json_deps_installer .ensure_top_level_install() .await?; - ps.npm_resolver.resolve_pending().await?; + npm_resolver.resolve_pending().await?; log::info!( - "{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release.", + "{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. 
This will be fixed in an upcoming release.", colors::yellow("Warning"), ); @@ -95,12 +102,11 @@ pub async fn execute_script( .unwrap() .to_owned(), }; - let script = get_script_with_args(script, &ps.options); + let script = get_script_with_args(script, cli_options); output_task(task_name, &script); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{task_name}'."))?; - let npx_commands = - resolve_npm_commands(&ps.npm_resolver, &ps.node_resolver)?; + let npx_commands = resolve_npm_commands(npm_resolver, node_resolver)?; let env_vars = collect_env_vars(); let exit_code = deno_task_shell::execute(seq_list, env_vars, &cwd, npx_commands).await; diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 429bee71b9..847260352a 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -6,12 +6,12 @@ use crate::args::TestOptions; use crate::args::TypeCheckMode; use crate::colors; use crate::display; +use crate::factory::CliFactory; use crate::file_fetcher::File; use crate::file_fetcher::FileFetcher; use crate::graph_util::graph_valid_with_cli_options; use crate::module_loader::ModuleLoadPreparer; use crate::ops; -use crate::proc_state::ProcState; use crate::util::checksum; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -1629,16 +1629,19 @@ pub async fn run_tests( cli_options: CliOptions, test_options: TestOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let module_load_preparer = factory.module_load_preparer().await?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let log_level = ps.options.log_level(); + Permissions::from_options(&cli_options.permissions_options())?; + let log_level = cli_options.log_level(); let specifiers_with_mode = fetch_specifiers_with_test_mode( - &ps.file_fetcher, + file_fetcher, &test_options.files, &test_options.doc, ) @@ -1649,9 +1652,9 @@ pub async fn run_tests( } check_specifiers( - &ps.options, - &ps.file_fetcher, - &ps.module_load_preparer, + cli_options, + file_fetcher, + module_load_preparer, specifiers_with_mode.clone(), ) .await?; @@ -1660,7 +1663,8 @@ pub async fn run_tests( return Ok(()); } - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); test_specifiers( worker_factory, @@ -1692,22 +1696,27 @@ pub async fn run_tests_with_watch( cli_options: CliOptions, test_options: TestOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let module_graph_builder = factory.module_graph_builder().await?; + let module_load_preparer = factory.module_load_preparer().await?; + let file_fetcher = factory.file_fetcher()?; + let file_watcher = factory.file_watcher()?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let no_check = ps.options.type_check_mode() == TypeCheckMode::None; - let log_level = ps.options.log_level(); + Permissions::from_options(&cli_options.permissions_options())?; + let no_check = cli_options.type_check_mode() == TypeCheckMode::None; + let log_level = cli_options.log_level(); let resolver = |changed: Option>| { let paths_to_watch = test_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let test_options = &test_options; - let cli_options = ps.options.clone(); - let module_graph_builder = ps.module_graph_builder.clone(); + let cli_options = cli_options.clone(); + let module_graph_builder = module_graph_builder.clone(); async move { let test_modules = if test_options.doc { @@ -1815,16 +1824,19 @@ pub async fn run_tests_with_watch( }) }; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |modules_to_reload: Vec| { let permissions = &permissions; let test_options = &test_options; - ps.reset_for_file_watcher(); - let cli_options = ps.options.clone(); - let file_fetcher = ps.file_fetcher.clone(); - let module_load_preparer = ps.module_load_preparer.clone(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + file_watcher.reset(); + let cli_options = cli_options.clone(); + let file_fetcher = file_fetcher.clone(); + let module_load_preparer = module_load_preparer.clone(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); async move { + let worker_factory = Arc::new(create_cli_main_worker_factory()); let specifiers_with_mode = fetch_specifiers_with_test_mode( &file_fetcher, &test_options.files, @@ -1887,7 +1899,7 @@ pub async fn run_tests_with_watch( } }); - let clear_screen = !ps.options.no_clear_screen(); + let clear_screen = 
!cli_options.no_clear_screen(); file_watcher::watch_func( resolver, operation, diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index f16923bf83..c76d36777f 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -5,8 +5,8 @@ use crate::args::Flags; use crate::args::UpgradeFlags; use crate::colors; +use crate::factory::CliFactory; use crate::http_util::HttpClient; -use crate::proc_state::ProcState; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::time; @@ -263,7 +263,8 @@ pub async fn upgrade( flags: Flags, upgrade_flags: UpgradeFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let client = factory.http_client()?; let current_exe_path = std::env::current_exe()?; let metadata = fs::metadata(¤t_exe_path)?; let permissions = metadata.permissions(); @@ -285,8 +286,6 @@ pub async fn upgrade( ), current_exe_path.display()); } - let client = &ps.http_client; - let install_version = match upgrade_flags.version { Some(passed_version) => { let re_hash = lazy_regex::regex!("^[0-9a-f]{40}$"); diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs index 225c3e6a81..d478c2b57f 100644 --- a/cli/tools/vendor/mod.rs +++ b/cli/tools/vendor/mod.rs @@ -15,8 +15,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::FmtOptionsConfig; use crate::args::VendorFlags; +use crate::factory::CliFactory; use crate::graph_util::ModuleGraphBuilder; -use crate::proc_state::ProcState; use crate::tools::fmt::format_json; use crate::util::fs::canonicalize_path; use crate::util::fs::resolve_from_cwd; @@ -43,19 +43,20 @@ pub async fn vendor( let output_dir = resolve_from_cwd(&raw_output_dir)?; validate_output_dir(&output_dir, &vendor_flags)?; validate_options(&mut cli_options, &output_dir)?; - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); let graph = create_graph( - &ps.module_graph_builder, + factory.module_graph_builder().await?, &vendor_flags, - ps.options.initial_cwd(), + cli_options.initial_cwd(), ) .await?; let vendored_count = build::build( graph, - &ps.parsed_source_cache, + factory.parsed_source_cache()?, &output_dir, - ps.maybe_import_map.as_deref(), - ps.lockfile.clone(), + factory.maybe_import_map().await?.as_deref(), + factory.maybe_lockfile().clone(), &build::RealVendorEnvironment, )?; @@ -71,7 +72,7 @@ pub async fn vendor( ); if vendored_count > 0 { let import_map_path = raw_output_dir.join("import_map.json"); - if maybe_update_config_file(&output_dir, &ps.options) { + if maybe_update_config_file(&output_dir, cli_options) { log::info!( concat!( "\nUpdated your local Deno configuration file with a reference to the ", diff --git a/cli/watcher.rs b/cli/watcher.rs new file mode 100644 index 0000000000..f9c2c1b42d --- /dev/null +++ b/cli/watcher.rs @@ -0,0 +1,99 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +use crate::args::CliOptions; +use crate::cache::ParsedSourceCache; +use crate::graph_util::ModuleGraphContainer; +use crate::module_loader::CjsResolutionStore; + +use deno_core::parking_lot::Mutex; +use deno_core::ModuleSpecifier; + +use std::path::PathBuf; +use std::sync::Arc; + +pub struct FileWatcher { + cli_options: Arc, + cjs_resolutions: Arc, + graph_container: Arc, + maybe_reporter: Option, + parsed_source_cache: Arc, +} + +impl FileWatcher { + pub fn new( + cli_options: Arc, + cjs_resolutions: Arc, + graph_container: Arc, + maybe_reporter: Option, + parsed_source_cache: Arc, + ) -> Self { + Self { + cli_options, + cjs_resolutions, + parsed_source_cache, + graph_container, + maybe_reporter, + } + } + /// Reset all runtime state to its default. This should be used on file + /// watcher restarts. + pub fn reset(&self) { + self.cjs_resolutions.clear(); + self.parsed_source_cache.clear(); + self.graph_container.clear(); + + self.init_watcher(); + } + + // Add invariant files like the import map and explicit watch flag list to + // the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher. + pub fn init_watcher(&self) { + let files_to_watch_sender = match &self.maybe_reporter { + Some(reporter) => &reporter.sender, + None => return, + }; + if let Some(watch_paths) = self.cli_options.watch_paths() { + files_to_watch_sender.send(watch_paths.clone()).unwrap(); + } + if let Ok(Some(import_map_path)) = self + .cli_options + .resolve_import_map_specifier() + .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) + { + files_to_watch_sender.send(vec![import_map_path]).unwrap(); + } + } +} + +#[derive(Clone, Debug)] +pub struct FileWatcherReporter { + sender: tokio::sync::mpsc::UnboundedSender>, + file_paths: Arc>>, +} + +impl FileWatcherReporter { + pub fn new(sender: tokio::sync::mpsc::UnboundedSender>) -> Self { + Self { + sender, + file_paths: Default::default(), + } + } +} + +impl deno_graph::source::Reporter for FileWatcherReporter { + fn on_load( + &self, + specifier: &ModuleSpecifier, + modules_done: usize, + modules_total: usize, + ) { + let mut file_paths = self.file_paths.lock(); + if specifier.scheme() == "file" { + file_paths.push(specifier.to_file_path().unwrap()); + } + + if modules_done == modules_total { + self.sender.send(file_paths.drain(..).collect()).unwrap(); + } + } +} diff --git a/cli/worker.rs b/cli/worker.rs index 1beaa27baf..64ce284776 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -66,6 +66,7 @@ pub trait HasNodeSpecifierChecker: Send + Sync { fn has_node_specifier(&self) -> bool; } +#[derive(Clone)] pub struct CliMainWorkerOptions { pub argv: Vec, pub debug: bool,
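Editor's sketch (not part of the patch): the hunks above consistently swap field access on the old ProcState (`ps.file_fetcher`, `ps.npm_resolver`, `ps.options`) for accessor calls on the new factory (`factory.file_fetcher()?`, `factory.npm_resolver().await?`, `factory.cli_options()`), so each subcommand only constructs the services it actually touches. Below is a minimal, self-contained illustration of that on-demand construction pattern using only std and made-up stand-in types (`ExampleFactory`, unit `CliOptions`/`FileFetcher` structs); it is an assumption-level sketch of the general shape, not the real cli/factory.rs, whose accessors are fallible and in some cases async.

    use std::sync::{Arc, OnceLock};

    struct CliOptions;  // stand-in for the parsed CLI configuration
    struct FileFetcher; // stand-in for a lazily built service struct

    struct ExampleFactory {
      cli_options: Arc<CliOptions>,
      file_fetcher: OnceLock<Arc<FileFetcher>>,
    }

    impl ExampleFactory {
      fn new(cli_options: Arc<CliOptions>) -> Self {
        Self {
          cli_options,
          file_fetcher: OnceLock::new(),
        }
      }

      // Cheap accessor: the options are always available.
      fn cli_options(&self) -> &Arc<CliOptions> {
        &self.cli_options
      }

      // Built on first use, then shared by every later caller.
      fn file_fetcher(&self) -> Arc<FileFetcher> {
        self
          .file_fetcher
          .get_or_init(|| Arc::new(FileFetcher))
          .clone()
      }
    }

    fn main() {
      let factory = ExampleFactory::new(Arc::new(CliOptions));
      let _options = factory.cli_options();
      let fetcher_a = factory.file_fetcher();
      let fetcher_b = factory.file_fetcher(); // same instance, not rebuilt
      assert!(Arc::ptr_eq(&fetcher_a, &fetcher_b));
    }

The watch-mode hunks (run_with_watch, run_tests_with_watch) fit the same shape: instead of cloning a fully built worker factory up front, they call `factory.create_cli_main_worker_factory_func().await?` once and clone the returned constructor into each restart's `operation` closure, alongside `file_watcher.reset()` clearing the cached graph, CJS resolutions, and parsed sources.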