perf: analyze cjs exports and emit typescript in parallel (#23856)

parent fcb6a18b2b
commit a2dbcf9e0a

13 changed files with 398 additions and 153 deletions
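The change follows one pattern throughout: CPU-bound work (transpiling TypeScript, analyzing CommonJS exports) is moved onto a blocking thread and awaited, so independent modules can be processed at the same time instead of serially. Below is a minimal sketch of that offload pattern, not Deno's code: the diff itself uses deno_core::unsync::spawn_blocking, while this sketch assumes the tokio crate and uses an invented expensive_transform stand-in for deno_ast transpilation.

    use std::sync::Arc;

    // Stand-in for a CPU-bound step such as deno_ast transpilation.
    fn expensive_transform(source: &str) -> String {
      source.to_uppercase()
    }

    // Run the expensive step on the blocking thread pool so the async
    // caller is free to drive other emits concurrently.
    async fn emit_one(source: Arc<str>) -> Result<String, tokio::task::JoinError> {
      tokio::task::spawn_blocking(move || expensive_transform(&source)).await
    }

    #[tokio::main]
    async fn main() -> Result<(), tokio::task::JoinError> {
      let sources = [
        Arc::<str>::from("let a: number = 1;"),
        Arc::<str>::from("const b = 2;"),
      ];
      // Spawn every emit first, then await them: the work overlaps
      // instead of running one module after another.
      let handles: Vec<_> = sources.into_iter().map(|s| tokio::spawn(emit_one(s))).collect();
      for handle in handles {
        println!("{}", handle.await??);
      }
      Ok(())
    }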

Cargo.lock (generated) | 1

@@ -1661,6 +1661,7 @@ version = "0.89.0"
dependencies = [
 "aead-gcm-stream",
 "aes",
 "async-trait",
 "brotli 3.5.0",
 "bytes",
 "cbc",

cli/emit.rs | 177

@@ -17,8 +17,8 @@ use std::sync::Arc;
pub struct Emitter {
emit_cache: EmitCache,
parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions,
emit_options: deno_ast::EmitOptions,
transpile_and_emit_options:
Arc<(deno_ast::TranspileOptions, deno_ast::EmitOptions)>,
// cached hash of the transpile and emit options
transpile_and_emit_options_hash: u64,
}

@@ -39,16 +39,16 @@ impl Emitter {
Self {
emit_cache,
parsed_source_cache,
emit_options,
transpile_options,
transpile_and_emit_options: Arc::new((transpile_options, emit_options)),
transpile_and_emit_options_hash,
}
}

pub fn cache_module_emits(
pub async fn cache_module_emits(
&self,
graph: &ModuleGraph,
) -> Result<(), AnyError> {
// todo(dsherret): we could do this concurrently
for module in graph.modules() {
if let Module::Js(module) = module {
let is_emittable = matches!(

@@ -60,11 +60,13 @@ impl Emitter {
| MediaType::Tsx
);
if is_emittable {
self.emit_parsed_source(
&module.specifier,
module.media_type,
&module.source,
)?;
self
.emit_parsed_source(
&module.specifier,
module.media_type,
&module.source,
)
.await?;
}
}
}

@@ -81,42 +83,70 @@ impl Emitter {
self.emit_cache.get_emit_code(specifier, source_hash)
}

pub fn emit_parsed_source(
pub async fn emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source: &Arc<str>,
) -> Result<ModuleCodeString, AnyError> {
let source_hash = self.get_source_hash(source);
// Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => {
let parsed_source_cache = self.parsed_source_cache.clone();
let transpile_and_emit_options =
self.transpile_and_emit_options.clone();
let transpile_result = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source = source.clone();
move || -> Result<_, AnyError> {
EmitParsedSourceHelper::transpile(
&parsed_source_cache,
&specifier,
source.clone(),
media_type,
&transpile_and_emit_options.0,
&transpile_and_emit_options.1,
)
}
})
.await
.unwrap()?;
Ok(helper.post_emit_parsed_source(
specifier,
transpile_result,
source_hash,
))
}
}
}

if let Some(emit_code) =
self.emit_cache.get_emit_code(specifier, source_hash)
{
Ok(emit_code.into())
} else {
// nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned
let parsed_source = self.parsed_source_cache.remove_or_parse_module(
specifier,
source.clone(),
media_type,
)?;
let transpiled_source = match parsed_source
.transpile(&self.transpile_options, &self.emit_options)?
{
TranspileResult::Owned(source) => source,
TranspileResult::Cloned(source) => {
debug_assert!(false, "Transpile owned failed.");
source
}
};
debug_assert!(transpiled_source.source_map.is_none());
self.emit_cache.set_emit_code(
specifier,
source_hash,
&transpiled_source.text,
);
Ok(transpiled_source.text.into())
pub fn emit_parsed_source_sync(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source: &Arc<str>,
) -> Result<ModuleCodeString, AnyError> {
// Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => {
let transpile_result = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache,
specifier,
source.clone(),
media_type,
&self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1,
)?;
Ok(helper.post_emit_parsed_source(
specifier,
transpile_result,
source_hash,
))
}
}
}

@@ -134,10 +164,10 @@ impl Emitter {
let parsed_source = self
.parsed_source_cache
.remove_or_parse_module(specifier, source_arc, media_type)?;
let mut options = self.emit_options.clone();
let mut options = self.transpile_and_emit_options.1.clone();
options.source_map = SourceMapOption::None;
let transpiled_source = parsed_source
.transpile(&self.transpile_options, &options)?
.transpile(&self.transpile_and_emit_options.0, &options)?
.into_source();
Ok(transpiled_source.text)
}

@@ -152,3 +182,66 @@ impl Emitter {
.finish()
}
}

enum PreEmitResult {
Cached(ModuleCodeString),
NotCached { source_hash: u64 },
}

/// Helper to share code between async and sync emit_parsed_source methods.
struct EmitParsedSourceHelper<'a>(&'a Emitter);

impl<'a> EmitParsedSourceHelper<'a> {
pub fn pre_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
source: &Arc<str>,
) -> PreEmitResult {
let source_hash = self.0.get_source_hash(source);

if let Some(emit_code) =
self.0.emit_cache.get_emit_code(specifier, source_hash)
{
PreEmitResult::Cached(emit_code.into())
} else {
PreEmitResult::NotCached { source_hash }
}
}

pub fn transpile(
parsed_source_cache: &ParsedSourceCache,
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions,
) -> Result<TranspileResult, AnyError> {
// nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned
let parsed_source = parsed_source_cache
.remove_or_parse_module(specifier, source, media_type)?;
Ok(parsed_source.transpile(transpile_options, emit_options)?)
}

pub fn post_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
transpile_result: TranspileResult,
source_hash: u64,
) -> ModuleCodeString {
let transpiled_source = match transpile_result {
TranspileResult::Owned(source) => source,
TranspileResult::Cloned(source) => {
debug_assert!(false, "Transpile owned failed.");
source
}
};
debug_assert!(transpiled_source.source_map.is_none());
self.0.emit_cache.set_emit_code(
specifier,
source_hash,
&transpiled_source.text,
);
transpiled_source.text.into()
}
}
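The new EmitParsedSourceHelper above lets the async and sync entry points share the cheap steps (cache lookup before, cache write after) while only the expensive transpile differs between them. A simplified sketch of that split follows; the names here (SimpleCache, Helper, pre_emit, post_emit) are invented stand-ins for the real cache and helper types.

    use std::collections::HashMap;
    use std::sync::Mutex;

    // Hypothetical in-memory cache standing in for Deno's EmitCache.
    #[derive(Default)]
    struct SimpleCache {
      map: Mutex<HashMap<u64, String>>,
    }

    enum PreEmit {
      Cached(String),
      NotCached { source_hash: u64 },
    }

    struct Helper<'a>(&'a SimpleCache);

    impl<'a> Helper<'a> {
      // Cheap: check the cache first; both the sync and async paths start here.
      fn pre_emit(&self, source_hash: u64) -> PreEmit {
        match self.0.map.lock().unwrap().get(&source_hash) {
          Some(code) => PreEmit::Cached(code.clone()),
          None => PreEmit::NotCached { source_hash },
        }
      }
      // Cheap: store the freshly emitted code; both paths end here.
      fn post_emit(&self, source_hash: u64, code: String) -> String {
        self.0.map.lock().unwrap().insert(source_hash, code.clone());
        code
      }
    }

    fn main() {
      let cache = SimpleCache::default();
      let helper = Helper(&cache);
      let code = match helper.pre_emit(42) {
        PreEmit::Cached(code) => code,
        // Only this arm does expensive work; the async entry point would wrap
        // it in spawn_blocking, the sync one calls it inline.
        PreEmit::NotCached { source_hash } => helper.post_emit(source_hash, "emitted js".to_string()),
      };
      println!("{code}");
    }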

@@ -120,7 +120,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
main_graph_container
.load_and_type_check_files(&cache_flags.files)
.await?;
emitter.cache_module_emits(&main_graph_container.graph())
emitter.cache_module_emits(&main_graph_container.graph()).await
}),
DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
let factory = CliFactory::from_flags(flags)?;

@@ -275,7 +275,7 @@ impl CliModuleLoaderFactory {
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter {
let loader = Rc::new(CliModuleLoader {
let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
lib,
root_permissions,
dynamic_permissions,

@@ -283,7 +283,7 @@ impl CliModuleLoaderFactory {
emitter: self.shared.emitter.clone(),
parsed_source_cache: self.shared.parsed_source_cache.clone(),
shared: self.shared.clone(),
});
})));
ModuleLoaderAndSourceMapGetter {
module_loader: loader.clone(),
source_map_getter: Some(loader),

@@ -322,7 +322,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
}
}

struct CliModuleLoader<TGraphContainer: ModuleGraphContainer> {
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
lib: TsTypeLib,
/// The initial set of permissions used to resolve the static imports in the
/// worker. These are "allow all" for main worker, and parent thread

@@ -337,8 +337,10 @@ struct CliModuleLoader<TGraphContainer: ModuleGraphContainer> {
graph_container: TGraphContainer,
}

impl<TGraphContainer: ModuleGraphContainer> CliModuleLoader<TGraphContainer> {
fn load_sync(
impl<TGraphContainer: ModuleGraphContainer>
CliModuleLoaderInner<TGraphContainer>
{
async fn load_inner(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,

@@ -353,11 +355,12 @@ impl<TGraphContainer: ModuleGraphContainer> CliModuleLoader<TGraphContainer> {
let code_source = if let Some(result) = self
.shared
.npm_module_loader
.load_sync_if_in_npm_package(specifier, maybe_referrer, permissions)
.load_if_in_npm_package(specifier, maybe_referrer, permissions)
.await
{
result?
} else {
self.load_prepared_module(specifier, maybe_referrer)?
self.load_prepared_module(specifier, maybe_referrer).await?
};
let code = if self.shared.is_inspecting {
// we need the code with the source map in order for

@@ -574,27 +577,98 @@ impl<TGraphContainer: ModuleGraphContainer> CliModuleLoader<TGraphContainer> {
Ok(Some(timestamp))
}

fn load_prepared_module(
async fn load_prepared_module(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, AnyError> {
// Note: keep this in sync with the sync version below
let graph = self.graph_container.graph();
match self.load_prepared_module_or_defer_emit(
&graph,
specifier,
maybe_referrer,
) {
Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source),
Ok(CodeOrDeferredEmit::DeferredEmit {
specifier,
media_type,
source,
}) => {
let transpile_result = self
.emitter
.emit_parsed_source(specifier, media_type, source)
.await?;

// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);

Ok(ModuleCodeStringSource {
code: transpile_result,
found_url: specifier.clone(),
media_type,
})
}
Err(err) => Err(err),
}
}

fn load_prepared_module_sync(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, AnyError> {
// Note: keep this in sync with the async version above
let graph = self.graph_container.graph();
match self.load_prepared_module_or_defer_emit(
&graph,
specifier,
maybe_referrer,
) {
Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source),
Ok(CodeOrDeferredEmit::DeferredEmit {
specifier,
media_type,
source,
}) => {
let transpile_result = self
.emitter
.emit_parsed_source_sync(specifier, media_type, source)?;

// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);

Ok(ModuleCodeStringSource {
code: transpile_result,
found_url: specifier.clone(),
media_type,
})
}
Err(err) => Err(err),
}
}

fn load_prepared_module_or_defer_emit<'graph>(
&self,
graph: &'graph ModuleGraph,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<CodeOrDeferredEmit<'graph>, AnyError> {
if specifier.scheme() == "node" {
unreachable!(); // Node built-in modules should be handled internally.
}

let graph = self.graph_container.graph();
match graph.get(specifier) {
Some(deno_graph::Module::Json(JsonModule {
source,
media_type,
specifier,
..
})) => Ok(ModuleCodeStringSource {
})) => Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code: source.clone().into(),
found_url: specifier.clone(),
media_type: *media_type,
}),
})),
Some(deno_graph::Module::Js(JsModule {
source,
media_type,

@@ -615,10 +689,11 @@ impl<TGraphContainer: ModuleGraphContainer> CliModuleLoader<TGraphContainer> {
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx => {
// get emit text
self
.emitter
.emit_parsed_source(specifier, *media_type, source)?
return Ok(CodeOrDeferredEmit::DeferredEmit {
specifier,
media_type: *media_type,
source,
});
}
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {media_type} for {specifier}")

@@ -628,11 +703,11 @@ impl<TGraphContainer: ModuleGraphContainer> CliModuleLoader<TGraphContainer> {
// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);

Ok(ModuleCodeStringSource {
Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code,
found_url: specifier.clone(),
media_type: *media_type,
})
}))
}
Some(
deno_graph::Module::External(_)

@@ -650,6 +725,20 @@ impl<TGraphContainer: ModuleGraphContainer> CliModuleLoader<TGraphContainer> {
}
}

enum CodeOrDeferredEmit<'a> {
Code(ModuleCodeStringSource),
DeferredEmit {
specifier: &'a ModuleSpecifier,
media_type: MediaType,
source: &'a Arc<str>,
},
}

// todo(dsherret): this double Rc boxing is not ideal
struct CliModuleLoader<TGraphContainer: ModuleGraphContainer>(
Rc<CliModuleLoaderInner<TGraphContainer>>,
);

impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
for CliModuleLoader<TGraphContainer>
{

@@ -672,8 +761,8 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
Ok(())
}

let referrer = self.resolve_referrer(referrer)?;
let specifier = self.inner_resolve(specifier, &referrer, kind)?;
let referrer = self.0.resolve_referrer(referrer)?;
let specifier = self.0.inner_resolve(specifier, &referrer, kind)?;
ensure_not_jsr_non_jsr_remote_import(&specifier, &referrer)?;
Ok(specifier)
}

@@ -685,15 +774,22 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
is_dynamic: bool,
requested_module_type: RequestedModuleType,
) -> deno_core::ModuleLoadResponse {
// NOTE: this block is async only because of `deno_core` interface
// requirements; module was already loaded when constructing module graph
// during call to `prepare_load` so we can load it synchronously.
deno_core::ModuleLoadResponse::Sync(self.load_sync(
specifier,
maybe_referrer,
is_dynamic,
requested_module_type,
))
let inner = self.0.clone();
let specifier = specifier.clone();
let maybe_referrer = maybe_referrer.cloned();
deno_core::ModuleLoadResponse::Async(
async move {
inner
.load_inner(
&specifier,
maybe_referrer.as_ref(),
is_dynamic,
requested_module_type,
)
.await
}
.boxed_local(),
)
}

fn prepare_load(

@@ -702,22 +798,23 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
_maybe_referrer: Option<String>,
is_dynamic: bool,
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
if self.shared.node_resolver.in_npm_package(specifier) {
if self.0.shared.node_resolver.in_npm_package(specifier) {
return Box::pin(deno_core::futures::future::ready(Ok(())));
}

let specifier = specifier.clone();
let graph_container = self.graph_container.clone();
let module_load_preparer = self.shared.module_load_preparer.clone();

let root_permissions = if is_dynamic {
self.dynamic_permissions.clone()
} else {
self.root_permissions.clone()
};
let lib = self.lib;
let inner = self.0.clone();

async move {
let graph_container = inner.graph_container.clone();
let module_load_preparer = inner.shared.module_load_preparer.clone();

let root_permissions = if is_dynamic {
inner.dynamic_permissions.clone()
} else {
inner.root_permissions.clone()
};
let lib = inner.lib;
let mut update_permit = graph_container.acquire_update_permit().await;
let graph = update_permit.graph_mut();
module_load_preparer

@@ -740,9 +837,10 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
specifier: &ModuleSpecifier,
code_cache: &[u8],
) -> Pin<Box<dyn Future<Output = ()>>> {
if let Some(cache) = self.shared.code_cache.as_ref() {
if let Some(cache) = self.0.shared.code_cache.as_ref() {
let media_type = MediaType::from_specifier(specifier);
let code_hash = self
.0
.get_code_hash_or_timestamp(specifier, media_type)
.ok()
.flatten();

@@ -774,7 +872,7 @@ impl<TGraphContainer: ModuleGraphContainer> SourceMapGetter
"wasm" | "file" | "http" | "https" | "data" | "blob" => (),
_ => return None,
}
let source = self.load_prepared_module(&specifier, None).ok()?;
let source = self.0.load_prepared_module_sync(&specifier, None).ok()?;
source_map_from_code(&source.code)
}

@@ -783,7 +881,7 @@ impl<TGraphContainer: ModuleGraphContainer> SourceMapGetter
file_name: &str,
line_number: usize,
) -> Option<String> {
let graph = self.graph_container.graph();
let graph = self.0.graph_container.graph();
let code = match graph.get(&resolve_url(file_name).ok()?) {
Some(deno_graph::Module::Js(module)) => &module.source,
Some(deno_graph::Module::Json(module)) => &module.source,
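CliModuleLoader becomes a thin wrapper around Rc<CliModuleLoaderInner> because load() now returns an async response: the future has to own a clone of the loader's state rather than borrow from &self. A minimal sketch of that Rc-inner shape follows; the names (Inner, Loader) are invented and this is not the deno_core ModuleLoader trait itself, just the ownership pattern.

    use std::future::Future;
    use std::pin::Pin;
    use std::rc::Rc;

    struct Inner {
      name: String,
    }

    // Thin wrapper so a clone of the shared state can be moved into the future.
    struct Loader(Rc<Inner>);

    impl Loader {
      fn load(&self, specifier: &str) -> Pin<Box<dyn Future<Output = String>>> {
        let inner = self.0.clone();
        let specifier = specifier.to_string();
        // The async block owns `inner`, so it can outlive the `&self` borrow.
        Box::pin(async move { format!("{specifier} loaded by {}", inner.name) })
      }
    }

    #[tokio::main(flavor = "current_thread")]
    async fn main() {
      let loader = Loader(Rc::new(Inner { name: "cli".to_string() }));
      println!("{}", loader.load("file:///app.ts").await);
    }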

cli/node.rs | 59

@@ -1,5 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::sync::Arc;

use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;

@@ -56,7 +58,7 @@ impl CliCjsCodeAnalyzer {
Self { cache, fs }
}

fn inner_cjs_analysis(
async fn inner_cjs_analysis(
&self,
specifier: &ModuleSpecifier,
source: &str,

@@ -77,23 +79,32 @@ impl CliCjsCodeAnalyzer {
});
}

let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
specifier: specifier.clone(),
text_info: deno_ast::SourceTextInfo::new(source.into()),
media_type,
capture_tokens: true,
scope_analysis: false,
maybe_syntax: None,
})?;
let analysis = if parsed_source.is_script() {
let analysis = parsed_source.analyze_cjs();
CliCjsAnalysis::Cjs {
exports: analysis.exports,
reexports: analysis.reexports,
let analysis = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source: Arc<str> = source.into();
move || -> Result<_, deno_ast::ParseDiagnostic> {
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
specifier,
text_info: deno_ast::SourceTextInfo::new(source),
media_type,
capture_tokens: true,
scope_analysis: false,
maybe_syntax: None,
})?;
if parsed_source.is_script() {
let analysis = parsed_source.analyze_cjs();
Ok(CliCjsAnalysis::Cjs {
exports: analysis.exports,
reexports: analysis.reexports,
})
} else {
Ok(CliCjsAnalysis::Esm)
}
}
} else {
CliCjsAnalysis::Esm
};
})
.await
.unwrap()?;

self
.cache
.set_cjs_analysis(specifier.as_str(), &source_hash, &analysis);

@@ -102,19 +113,23 @@ impl CliCjsCodeAnalyzer {
}
}

#[async_trait::async_trait(?Send)]
impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
fn analyze_cjs(
async fn analyze_cjs(
&self,
specifier: &ModuleSpecifier,
source: Option<String>,
) -> Result<ExtNodeCjsAnalysis, AnyError> {
let source = match source {
Some(source) => source,
None => self
.fs
.read_text_file_sync(&specifier.to_file_path().unwrap(), None)?,
None => {
self
.fs
.read_text_file_async(specifier.to_file_path().unwrap(), None)
.await?
}
};
let analysis = self.inner_cjs_analysis(specifier, &source)?;
let analysis = self.inner_cjs_analysis(specifier, &source).await?;
match analysis {
CliCjsAnalysis::Esm => Ok(ExtNodeCjsAnalysis::Esm(source)),
CliCjsAnalysis::Cjs { exports, reexports } => {
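Both cli/node.rs and cli/emit.rs use the same spawn_blocking-with-Result shape: .await yields the join result, .unwrap() re-raises a panic from the blocking task, and the trailing ? propagates the closure's own error. A small self-contained sketch of that error layering, assuming the tokio crate; the integer parse here is only a stand-in for deno_ast::parse_program plus CJS analysis.

    use std::num::ParseIntError;

    async fn analyze(source: String) -> Result<i64, ParseIntError> {
      let value = tokio::task::spawn_blocking(move || -> Result<i64, ParseIntError> {
        // Stand-in for parsing + export analysis on a blocking thread.
        source.trim().parse::<i64>()
      })
      .await
      .unwrap()?; // unwrap: the task panicked; `?`: the closure returned Err
      Ok(value)
    }

    #[tokio::main(flavor = "current_thread")]
    async fn main() {
      println!("{:?}", analyze(" 42 ".to_string()).await);
    }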

@@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use dashmap::DashMap;
use dashmap::DashSet;
use deno_ast::MediaType;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;

@@ -8,7 +9,6 @@ use deno_core::error::AnyError;
use deno_core::futures::future;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::ModuleCodeString;
use deno_core::ModuleSpecifier;
use deno_graph::source::NpmPackageReqResolution;

@@ -34,7 +34,6 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use import_map::ImportMap;
use std::borrow::Cow;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;

@@ -272,20 +271,20 @@ impl NpmModuleLoader {
}
}

pub fn load_sync_if_in_npm_package(
pub async fn load_if_in_npm_package(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
permissions: &PermissionsContainer,
) -> Option<Result<ModuleCodeStringSource, AnyError>> {
if self.node_resolver.in_npm_package(specifier) {
Some(self.load_sync(specifier, maybe_referrer, permissions))
Some(self.load(specifier, maybe_referrer, permissions).await)
} else {
None
}
}

fn load_sync(
pub async fn load(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,

@@ -294,7 +293,8 @@ impl NpmModuleLoader {
let file_path = specifier.to_file_path().unwrap();
let code = self
.fs
.read_text_file_sync(&file_path, None)
.read_text_file_async(file_path.clone(), None)
.await
.map_err(AnyError::from)
.with_context(|| {
if file_path.is_dir() {

@@ -329,11 +329,10 @@ impl NpmModuleLoader {

let code = if self.cjs_resolutions.contains(specifier) {
// translate cjs to esm if it's cjs and inject node globals
self.node_code_translator.translate_cjs_to_esm(
specifier,
Some(code),
permissions,
)?
self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(code), permissions)
.await?
} else {
// esm and json code is untouched
code

@@ -348,15 +347,15 @@ impl NpmModuleLoader {

/// Keeps track of what module specifiers were resolved as CJS.
#[derive(Debug, Default)]
pub struct CjsResolutionStore(Mutex<HashSet<ModuleSpecifier>>);
pub struct CjsResolutionStore(DashSet<ModuleSpecifier>);

impl CjsResolutionStore {
pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
self.0.lock().contains(specifier)
self.0.contains(specifier)
}

pub fn insert(&self, specifier: ModuleSpecifier) {
self.0.lock().insert(specifier);
self.0.insert(specifier);
}
}
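CjsResolutionStore drops Mutex<HashSet<...>> in favor of DashSet so concurrent loads can record and query CJS resolutions without contending on one lock. A short sketch of the same shape using the dashmap crate; String stands in here for ModuleSpecifier.

    use dashmap::DashSet;

    /// Keeps track of which specifiers resolved as CJS; shared by reference.
    #[derive(Debug, Default)]
    struct CjsResolutions(DashSet<String>);

    impl CjsResolutions {
      fn insert(&self, specifier: String) {
        // &self is enough: DashSet handles interior mutability and sharding.
        self.0.insert(specifier);
      }
      fn contains(&self, specifier: &str) -> bool {
        self.0.contains(specifier)
      }
    }

    fn main() {
      let store = CjsResolutions::default();
      store.insert("file:///pkg/mod.cjs".to_string());
      assert!(store.contains("file:///pkg/mod.cjs"));
      println!("cjs? {}", store.contains("file:///pkg/mod.cjs"));
    }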

@@ -193,33 +193,33 @@ impl ModuleLoader for EmbeddedModuleLoader {
));
}

let permissions = if is_dynamic {
&self.dynamic_permissions
} else {
&self.root_permissions
};
if let Some(result) =
self.shared.npm_module_loader.load_sync_if_in_npm_package(
original_specifier,
maybe_referrer,
permissions,
)
{
return match result {
Ok(code_source) => deno_core::ModuleLoadResponse::Sync(Ok(
deno_core::ModuleSource::new_with_redirect(
if self.shared.node_resolver.in_npm_package(original_specifier) {
let npm_module_loader = self.shared.npm_module_loader.clone();
let original_specifier = original_specifier.clone();
let maybe_referrer = maybe_referrer.cloned();
let permissions = if is_dynamic {
self.dynamic_permissions.clone()
} else {
self.root_permissions.clone()
};
return deno_core::ModuleLoadResponse::Async(
async move {
let code_source = npm_module_loader
.load(&original_specifier, maybe_referrer.as_ref(), &permissions)
.await?;
Ok(deno_core::ModuleSource::new_with_redirect(
match code_source.media_type {
MediaType::Json => ModuleType::Json,
_ => ModuleType::JavaScript,
},
ModuleSourceCode::String(code_source.code),
original_specifier,
&original_specifier,
&code_source.found_url,
None,
),
)),
Err(err) => deno_core::ModuleLoadResponse::Sync(Err(err)),
};
))
}
.boxed_local(),
);
}

let Some(module) =

@@ -16,6 +16,7 @@ path = "lib.rs"
[dependencies]
aead-gcm-stream = "0.1"
aes.workspace = true
async-trait.workspace = true
brotli.workspace = true
bytes.workspace = true
cbc.workspace = true

@@ -36,6 +36,7 @@ pub struct CjsAnalysisExports {
}

/// Code analyzer for CJS and ESM files.
#[async_trait::async_trait(?Send)]
pub trait CjsCodeAnalyzer {
/// Analyzes CommonJs code for exports and reexports, which is
/// then used to determine the wrapper ESM module exports.

@@ -44,7 +45,7 @@ pub trait CjsCodeAnalyzer {
/// already has it. If the source is needed by the implementation,
/// then it can use the provided source, or otherwise load it if
/// necessary.
fn analyze_cjs(
async fn analyze_cjs(
&self,
specifier: &ModuleSpecifier,
maybe_source: Option<String>,

@@ -79,7 +80,7 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
/// For all discovered reexports the analysis will be performed recursively.
///
/// If successful a source code for equivalent ES module is returned.
pub fn translate_cjs_to_esm(
pub async fn translate_cjs_to_esm(
&self,
specifier: &ModuleSpecifier,
source: Option<String>,

@@ -88,7 +89,10 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
let mut temp_var_count = 0;
let mut handled_reexports: HashSet<ModuleSpecifier> = HashSet::default();

let analysis = self.cjs_code_analyzer.analyze_cjs(specifier, source)?;
let analysis = self
.cjs_code_analyzer
.analyze_cjs(specifier, source)
.await?;

let analysis = match analysis {
CjsAnalysis::Esm(source) => return Ok(source),

@@ -113,6 +117,7 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
reexports_to_handle.push_back((reexport, specifier.clone()));
}

// todo(dsherret): we could run this analysis concurrently in a FuturesOrdered
while let Some((reexport, referrer)) = reexports_to_handle.pop_front() {
// First, resolve the reexport specifier
let reexport_specifier = self.resolve(

@@ -133,6 +138,7 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
let analysis = self
.cjs_code_analyzer
.analyze_cjs(&reexport_specifier, None)
.await
.with_context(|| {
format!(
"Could not load '{}' ({}) referenced from {}",
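The CjsCodeAnalyzer trait method becomes async via #[async_trait(?Send)], which is what lets implementations such as CliCjsCodeAnalyzer await file reads and spawned analysis. A minimal sketch of that trait shape using the async-trait and tokio crates; the trait and type names here (Analyzer, ExportCounter) are invented.

    use async_trait::async_trait;

    // `?Send` matches the diff: the resulting futures may hold non-Send state.
    #[async_trait(?Send)]
    trait Analyzer {
      async fn analyze(&self, source: &str) -> usize;
    }

    struct ExportCounter;

    #[async_trait(?Send)]
    impl Analyzer for ExportCounter {
      async fn analyze(&self, source: &str) -> usize {
        // Stand-in for real CJS export analysis.
        source.matches("module.exports").count()
      }
    }

    #[tokio::main(flavor = "current_thread")]
    async fn main() {
      let n = ExportCounter.analyze("module.exports.a = 1;").await;
      println!("{n}");
    }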

@@ -929,19 +929,32 @@ async fn inspector_with_ts_files() {
.await;

// receive messages with sources from this test
let script1 = tester.recv().await;
assert_contains!(script1, "testdata/inspector/test.ts");
let mut scripts = vec![
tester.recv().await,
tester.recv().await,
tester.recv().await,
];
let script1 = scripts.remove(
scripts
.iter()
.position(|s| s.contains("testdata/inspector/test.ts"))
.unwrap(),
);
let script1_id = {
let v: serde_json::Value = serde_json::from_str(&script1).unwrap();
v["params"]["scriptId"].as_str().unwrap().to_string()
};
let script2 = tester.recv().await;
assert_contains!(script2, "testdata/inspector/foo.ts");
let script2 = scripts.remove(
scripts
.iter()
.position(|s| s.contains("testdata/inspector/foo.ts"))
.unwrap(),
);
let script2_id = {
let v: serde_json::Value = serde_json::from_str(&script2).unwrap();
v["params"]["scriptId"].as_str().unwrap().to_string()
};
let script3 = tester.recv().await;
let script3 = scripts.remove(0);
assert_contains!(script3, "testdata/inspector/bar.js");
let script3_id = {
let v: serde_json::Value = serde_json::from_str(&script3).unwrap();
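Because emits now run in parallel, the inspector notifications for the test's three scripts can arrive in any order, so the test collects them first and then picks each one out by content. A standalone sketch of that order-independent matching, with invented helper and message names:

    // Remove and return the first message containing `needle`.
    fn take_matching(messages: &mut Vec<String>, needle: &str) -> String {
      let index = messages
        .iter()
        .position(|message| message.contains(needle))
        .expect("expected a matching message");
      messages.remove(index)
    }

    fn main() {
      // Arrival order is no longer deterministic once work happens in parallel.
      let mut messages = vec!["parsed bar.js".to_string(), "parsed test.ts".to_string()];
      assert_eq!(take_matching(&mut messages, "test.ts"), "parsed test.ts");
      assert_eq!(take_matching(&mut messages, "bar.js"), "parsed bar.js");
      println!("ok");
    }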

@@ -1,5 +1,5 @@
{
"tempDir": true,
"args": "run -A --log-level=debug main.tsx",
"args": "run -A run_main_sorted_lines.ts",
"output": "main.out"
}

@@ -1,5 +1,5 @@
[WILDCARD]Resolved preact from file:///[WILDLINE]/preact@10.19.6/node_modules/preact/jsx-runtime/dist/jsxRuntime.mjs to [WILDLINE]node_modules[WILDCHAR].deno[WILDCHAR]preact@10.19.6[WILDCHAR]node_modules[WILDCHAR]preact
DEBUG RS - [WILDLINE] - Resolved preact from file:///[WILDLINE]/preact@10.19.6/node_modules/preact/hooks/dist/hooks.mjs to [WILDLINE]node_modules[WILDCHAR].deno[WILDCHAR]preact@10.19.6[WILDCHAR]node_modules[WILDCHAR]preact
[# ensure that preact is resolving to .deno/preact@10.19.6/node_modules/preact and not .deno/preact-render-to-string@6.4.0/node_modules/preact]
DEBUG RS - [WILDLINE] - Resolved preact from file:///[WILDLINE]/preact-render-to-string@6.4.0/node_modules/preact-render-to-string/dist/index.mjs to [WILDLINE]node_modules[WILDCHAR].deno[WILDCHAR]preact@10.19.6[WILDCHAR]node_modules[WILDCHAR]preact
[WILDCARD]
[WILDCARD]/preact-render-to-string@6.4.0/node_modules/preact-render-to-string/dist/index.mjs to [WILDLINE]node_modules[WILDCHAR].deno[WILDCHAR]preact@10.19.6[WILDCHAR]node_modules[WILDCHAR]preact
[WILDCARD]/preact@10.19.6/node_modules/preact/hooks/dist/hooks.mjs to [WILDLINE]node_modules[WILDCHAR].deno[WILDCHAR]preact@10.19.6[WILDCHAR]node_modules[WILDCHAR]preact
[WILDCARD]/preact@10.19.6/node_modules/preact/jsx-runtime/dist/jsxRuntime.mjs to [WILDLINE]node_modules[WILDCHAR].deno[WILDCHAR]preact@10.19.6[WILDCHAR]node_modules[WILDCHAR]preact
[WILDCARD]

@@ -0,0 +1,19 @@
const { success, stderr } = new Deno.Command(
Deno.execPath(),
{
args: ["run", "-A", "--log-level=debug", "main.tsx"],
},
).outputSync();
const stderrText = new TextDecoder().decode(stderr);
if (!success) {
console.error(stderrText);
throw new Error("Failed to run script.");
}

// create some stability with the output
const lines = stderrText.split("\n")
.filter((line) => line.includes("Resolved preact from"));
lines.sort();
for (const line of lines) {
console.error(line);
}