mirror of
https://github.com/denoland/deno.git
synced 2025-01-21 21:50:00 -05:00
refactor: new module graph used for no check (#7621)
This commit is contained in:
parent
7726cfb932
commit
c489589e2b
24 changed files with 1762 additions and 213 deletions
32
cli/ast.rs
32
cli/ast.rs
|
@ -1,6 +1,8 @@
|
|||
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::file_fetcher::TextDocument;
|
||||
use crate::media_type::MediaType;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use std::error::Error;
|
||||
|
@ -245,7 +247,7 @@ impl ParsedModule {
|
|||
pub fn transpile(
|
||||
self,
|
||||
options: &TranspileOptions,
|
||||
) -> Result<(String, Option<String>)> {
|
||||
) -> Result<(TextDocument, Option<TextDocument>)> {
|
||||
let program = Program::Module(self.module);
|
||||
|
||||
let jsx_pass = react::react(
|
||||
|
@ -295,7 +297,7 @@ impl ParsedModule {
|
|||
program.emit_with(&mut emitter)?;
|
||||
}
|
||||
let mut src = String::from_utf8(buf)?;
|
||||
let mut map: Option<String> = None;
|
||||
let mut map: Option<TextDocument> = None;
|
||||
{
|
||||
let mut buf = Vec::new();
|
||||
self
|
||||
|
@ -308,10 +310,10 @@ impl ParsedModule {
|
|||
let encoded_map = base64::encode(buf);
|
||||
src.push_str(&encoded_map);
|
||||
} else {
|
||||
map = Some(String::from_utf8(buf)?);
|
||||
map = Some(TextDocument::from(buf));
|
||||
}
|
||||
}
|
||||
Ok((src, map))
|
||||
Ok((src.into(), map))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -437,10 +439,14 @@ mod tests {
|
|||
let (code, maybe_map) = module
|
||||
.transpile(&TranspileOptions::default())
|
||||
.expect("could not strip types");
|
||||
assert!(code.starts_with("var D;\n(function(D) {\n"));
|
||||
assert!(
|
||||
code.contains("\n//# sourceMappingURL=data:application/json;base64,")
|
||||
);
|
||||
assert!(code
|
||||
.to_string()
|
||||
.unwrap()
|
||||
.starts_with("var D;\n(function(D) {\n"));
|
||||
assert!(code
|
||||
.to_string()
|
||||
.unwrap()
|
||||
.contains("\n//# sourceMappingURL=data:application/json;base64,"));
|
||||
assert!(maybe_map.is_none());
|
||||
}
|
||||
|
||||
|
@ -461,7 +467,10 @@ mod tests {
|
|||
let (code, _) = module
|
||||
.transpile(&TranspileOptions::default())
|
||||
.expect("could not strip types");
|
||||
assert!(code.contains("React.createElement(\"div\", null"));
|
||||
assert!(code
|
||||
.to_string()
|
||||
.unwrap()
|
||||
.contains("React.createElement(\"div\", null"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -492,6 +501,9 @@ mod tests {
|
|||
let (code, _) = module
|
||||
.transpile(&TranspileOptions::default())
|
||||
.expect("could not strip types");
|
||||
assert!(code.contains("_applyDecoratedDescriptor("));
|
||||
assert!(code
|
||||
.to_string()
|
||||
.unwrap()
|
||||
.contains("_applyDecoratedDescriptor("));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ use std::sync::Arc;
|
|||
use std::sync::Mutex;
|
||||
|
||||
/// Structure representing a text document.
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct TextDocument {
|
||||
bytes: Vec<u8>,
|
||||
charset: Cow<'static, str>,
|
||||
|
@ -73,6 +73,18 @@ impl From<Vec<u8>> for TextDocument {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<String> for TextDocument {
|
||||
fn from(s: String) -> Self {
|
||||
TextDocument::new(s.as_bytes().to_vec(), Option::<&str>::None)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for TextDocument {
|
||||
fn from(s: &str) -> Self {
|
||||
TextDocument::new(s.as_bytes().to_vec(), Option::<&str>::None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Structure representing local or remote file.
|
||||
///
|
||||
/// In case of remote file `url` might be different than originally requested URL, if so
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
use crate::deno_dir;
|
||||
use crate::file_fetcher::SourceFileFetcher;
|
||||
use crate::flags;
|
||||
use crate::graph::GraphBuilder;
|
||||
use crate::graph::TranspileOptions;
|
||||
use crate::http_cache;
|
||||
use crate::import_map::ImportMap;
|
||||
use crate::lockfile::Lockfile;
|
||||
|
@ -10,12 +12,17 @@ use crate::media_type::MediaType;
|
|||
use crate::module_graph::ModuleGraphFile;
|
||||
use crate::module_graph::ModuleGraphLoader;
|
||||
use crate::permissions::Permissions;
|
||||
use crate::specifier_handler::FetchHandler;
|
||||
use crate::tsc::CompiledModule;
|
||||
use crate::tsc::TargetLib;
|
||||
use crate::tsc::TsCompiler;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use std::cell::RefCell;
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::rc::Rc;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
|
@ -68,7 +75,7 @@ impl GlobalState {
|
|||
)?;
|
||||
|
||||
let lockfile = if let Some(filename) = &flags.lock {
|
||||
let lockfile = Lockfile::new(filename.to_string(), flags.lock_write)?;
|
||||
let lockfile = Lockfile::new(filename.clone(), flags.lock_write)?;
|
||||
Some(Mutex::new(lockfile))
|
||||
} else {
|
||||
None
|
||||
|
@ -113,54 +120,91 @@ impl GlobalState {
|
|||
) -> Result<(), AnyError> {
|
||||
let module_specifier = module_specifier.clone();
|
||||
|
||||
let mut module_graph_loader = ModuleGraphLoader::new(
|
||||
self.file_fetcher.clone(),
|
||||
maybe_import_map,
|
||||
permissions.clone(),
|
||||
is_dyn_import,
|
||||
false,
|
||||
);
|
||||
module_graph_loader
|
||||
.add_to_graph(&module_specifier, maybe_referrer)
|
||||
.await?;
|
||||
let module_graph = module_graph_loader.get_graph();
|
||||
if self.flags.no_check {
|
||||
debug!("Transpiling root: {}", module_specifier);
|
||||
let handler =
|
||||
Rc::new(RefCell::new(FetchHandler::new(&self.flags, &permissions)?));
|
||||
let mut builder = GraphBuilder::new(handler, maybe_import_map);
|
||||
builder.insert(&module_specifier).await?;
|
||||
let mut graph = builder.get_graph(&self.lockfile)?;
|
||||
|
||||
let out = self
|
||||
.file_fetcher
|
||||
.fetch_cached_source_file(&module_specifier, permissions.clone())
|
||||
.expect("Source file not found");
|
||||
// TODO(kitsonk) this needs to move, but CompilerConfig is way too
|
||||
// complicated to use here.
|
||||
let maybe_config = if let Some(path) = self.flags.config_path.clone() {
|
||||
let cwd = std::env::current_dir()?;
|
||||
let config_file = cwd.join(path);
|
||||
let config_path = config_file.canonicalize().map_err(|_| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
format!(
|
||||
"Could not find the config file: {}",
|
||||
config_file.to_string_lossy()
|
||||
),
|
||||
)
|
||||
})?;
|
||||
let config_str = fs::read_to_string(config_path)?;
|
||||
|
||||
let module_graph_files = module_graph.values().collect::<Vec<_>>();
|
||||
// Check integrity of every file in module graph
|
||||
if let Some(ref lockfile) = self.lockfile {
|
||||
let mut g = lockfile.lock().unwrap();
|
||||
Some(config_str)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
for graph_file in &module_graph_files {
|
||||
let check_passed =
|
||||
g.check_or_insert(&graph_file.url, &graph_file.source_code);
|
||||
let (stats, maybe_ignored_options) =
|
||||
graph.transpile(TranspileOptions {
|
||||
debug: self.flags.log_level == Some(log::Level::Debug),
|
||||
maybe_config,
|
||||
})?;
|
||||
|
||||
if !check_passed {
|
||||
eprintln!(
|
||||
"Subresource integrity check failed --lock={}\n{}",
|
||||
g.filename, graph_file.url
|
||||
);
|
||||
std::process::exit(10);
|
||||
debug!("{}", stats);
|
||||
if let Some(ignored_options) = maybe_ignored_options {
|
||||
println!("Some compiler options were ignored:\n {}", ignored_options);
|
||||
}
|
||||
} else {
|
||||
let mut module_graph_loader = ModuleGraphLoader::new(
|
||||
self.file_fetcher.clone(),
|
||||
maybe_import_map,
|
||||
permissions.clone(),
|
||||
is_dyn_import,
|
||||
false,
|
||||
);
|
||||
module_graph_loader
|
||||
.add_to_graph(&module_specifier, maybe_referrer)
|
||||
.await?;
|
||||
let module_graph = module_graph_loader.get_graph();
|
||||
|
||||
let out = self
|
||||
.file_fetcher
|
||||
.fetch_cached_source_file(&module_specifier, permissions.clone())
|
||||
.expect("Source file not found");
|
||||
|
||||
let module_graph_files = module_graph.values().collect::<Vec<_>>();
|
||||
// Check integrity of every file in module graph
|
||||
if let Some(ref lockfile) = self.lockfile {
|
||||
let mut g = lockfile.lock().unwrap();
|
||||
|
||||
for graph_file in &module_graph_files {
|
||||
let check_passed =
|
||||
g.check_or_insert(&graph_file.url, &graph_file.source_code);
|
||||
|
||||
if !check_passed {
|
||||
eprintln!(
|
||||
"Subresource integrity check failed --lock={}\n{}",
|
||||
g.filename, graph_file.url
|
||||
);
|
||||
std::process::exit(10);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we need to compile files.
|
||||
let should_compile = needs_compilation(
|
||||
self.ts_compiler.compile_js,
|
||||
out.media_type,
|
||||
&module_graph_files,
|
||||
);
|
||||
let allow_js = should_allow_js(&module_graph_files);
|
||||
// Check if we need to compile files.
|
||||
let should_compile = needs_compilation(
|
||||
self.ts_compiler.compile_js,
|
||||
out.media_type,
|
||||
&module_graph_files,
|
||||
);
|
||||
let allow_js = should_allow_js(&module_graph_files);
|
||||
|
||||
if should_compile {
|
||||
if self.flags.no_check {
|
||||
self.ts_compiler.transpile(&module_graph).await?;
|
||||
} else {
|
||||
if should_compile {
|
||||
self
|
||||
.ts_compiler
|
||||
.compile(self, &out, target_lib, permissions, &module_graph, allow_js)
|
||||
|
|
994
cli/graph.rs
Normal file
994
cli/graph.rs
Normal file
|
@ -0,0 +1,994 @@
|
|||
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::ast;
|
||||
use crate::ast::parse;
|
||||
use crate::ast::Location;
|
||||
use crate::ast::ParsedModule;
|
||||
use crate::file_fetcher::TextDocument;
|
||||
use crate::import_map::ImportMap;
|
||||
use crate::lockfile::Lockfile;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::specifier_handler::CachedModule;
|
||||
use crate::specifier_handler::DependencyMap;
|
||||
use crate::specifier_handler::EmitMap;
|
||||
use crate::specifier_handler::EmitType;
|
||||
use crate::specifier_handler::FetchFuture;
|
||||
use crate::specifier_handler::SpecifierHandler;
|
||||
use crate::tsc_config::json_merge;
|
||||
use crate::tsc_config::parse_config;
|
||||
use crate::tsc_config::IgnoredCompilerOptions;
|
||||
use crate::AnyError;
|
||||
|
||||
use deno_core::futures::stream::FuturesUnordered;
|
||||
use deno_core::futures::stream::StreamExt;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use regex::Regex;
|
||||
use serde::Deserialize;
|
||||
use serde::Deserializer;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::error::Error;
|
||||
use std::fmt;
|
||||
use std::rc::Rc;
|
||||
use std::result;
|
||||
use std::sync::Mutex;
|
||||
use std::time::Instant;
|
||||
use swc_ecmascript::dep_graph::DependencyKind;
|
||||
|
||||
type Result<V> = result::Result<V, AnyError>;
|
||||
|
||||
pub type BuildInfoMap = HashMap<EmitType, TextDocument>;
|
||||
|
||||
lazy_static! {
|
||||
/// Matched the `@deno-types` pragma.
|
||||
static ref DENO_TYPES_RE: Regex =
|
||||
Regex::new(r#"(?i)^\s*@deno-types\s*=\s*(?:["']([^"']+)["']|(\S+))"#)
|
||||
.unwrap();
|
||||
/// Matches a `/// <reference ... />` comment reference.
|
||||
static ref TRIPLE_SLASH_REFERENCE_RE: Regex =
|
||||
Regex::new(r"(?i)^/\s*<reference\s.*?/>").unwrap();
|
||||
/// Matches a path reference, which adds a dependency to a module
|
||||
static ref PATH_REFERENCE_RE: Regex =
|
||||
Regex::new(r#"(?i)\spath\s*=\s*["']([^"']*)["']"#).unwrap();
|
||||
/// Matches a types reference, which for JavaScript files indicates the
|
||||
/// location of types to use when type checking a program that includes it as
|
||||
/// a dependency.
|
||||
static ref TYPES_REFERENCE_RE: Regex =
|
||||
Regex::new(r#"(?i)\stypes\s*=\s*["']([^"']*)["']"#).unwrap();
|
||||
}
|
||||
|
||||
/// A group of errors that represent errors that can occur when interacting with
|
||||
/// a module graph.
|
||||
#[allow(unused)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum GraphError {
|
||||
/// A module using the HTTPS protocol is trying to import a module with an
|
||||
/// HTTP schema.
|
||||
InvalidDowngrade(ModuleSpecifier, Location),
|
||||
/// A remote module is trying to import a local module.
|
||||
InvalidLocalImport(ModuleSpecifier, Location),
|
||||
/// A remote module is trying to import a local module.
|
||||
InvalidSource(ModuleSpecifier, String),
|
||||
/// A module specifier could not be resolved for a given import.
|
||||
InvalidSpecifier(String, Location),
|
||||
/// An unexpected dependency was requested for a module.
|
||||
MissingDependency(ModuleSpecifier, String),
|
||||
/// An unexpected specifier was requested.
|
||||
MissingSpecifier(ModuleSpecifier),
|
||||
/// Snapshot data was not present in a situation where it was required.
|
||||
MissingSnapshotData,
|
||||
/// The current feature is not supported.
|
||||
NotSupported(String),
|
||||
}
|
||||
use GraphError::*;
|
||||
|
||||
impl fmt::Display for GraphError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
InvalidDowngrade(ref specifier, ref location) => write!(f, "Modules imported via https are not allowed to import http modules.\n Importing: {}\n at {}:{}:{}", specifier, location.filename, location.line, location.col),
|
||||
InvalidLocalImport(ref specifier, ref location) => write!(f, "Remote modules are not allowed to import local modules.\n Importing: {}\n at {}:{}:{}", specifier, location.filename, location.line, location.col),
|
||||
InvalidSource(ref specifier, ref lockfile) => write!(f, "The source code is invalid, as it does not match the expected hash in the lock file.\n Specifier: {}\n Lock file: {}", specifier, lockfile),
|
||||
InvalidSpecifier(ref specifier, ref location) => write!(f, "Unable to resolve dependency specifier.\n Specifier: {}\n at {}:{}:{}", specifier, location.filename, location.line, location.col),
|
||||
MissingDependency(ref referrer, specifier) => write!(
|
||||
f,
|
||||
"The graph is missing a dependency.\n Specifier: {} from {}",
|
||||
specifier, referrer
|
||||
),
|
||||
MissingSpecifier(ref specifier) => write!(
|
||||
f,
|
||||
"The graph is missing a specifier.\n Specifier: {}",
|
||||
specifier
|
||||
),
|
||||
MissingSnapshotData => write!(f, "Snapshot data was not supplied, but required."),
|
||||
NotSupported(ref msg) => write!(f, "{}", msg),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for GraphError {}
|
||||
|
||||
/// A trait, implemented by `Graph` that provides the interfaces that the
|
||||
/// compiler ops require to be able to retrieve information about the graph.
|
||||
pub trait ModuleProvider {
|
||||
/// Get the source for a given module specifier. If the module is not part
|
||||
/// of the graph, the result will be `None`.
|
||||
fn get_source(&self, specifier: &ModuleSpecifier) -> Option<String>;
|
||||
/// Given a string specifier and a referring module specifier, provide the
|
||||
/// resulting module specifier and media type for the module that is part of
|
||||
/// the graph.
|
||||
fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<(ModuleSpecifier, MediaType)>;
|
||||
}
|
||||
|
||||
/// An enum which represents the parsed out values of references in source code.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
enum TypeScriptReference {
|
||||
Path(String),
|
||||
Types(String),
|
||||
}
|
||||
|
||||
/// Determine if a comment contains a triple slash reference and optionally
|
||||
/// return its kind and value.
|
||||
fn parse_ts_reference(comment: &str) -> Option<TypeScriptReference> {
|
||||
if !TRIPLE_SLASH_REFERENCE_RE.is_match(comment) {
|
||||
None
|
||||
} else if let Some(captures) = PATH_REFERENCE_RE.captures(comment) {
|
||||
Some(TypeScriptReference::Path(
|
||||
captures.get(1).unwrap().as_str().to_string(),
|
||||
))
|
||||
} else if let Some(captures) = TYPES_REFERENCE_RE.captures(comment) {
|
||||
Some(TypeScriptReference::Types(
|
||||
captures.get(1).unwrap().as_str().to_string(),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Determine if a comment contains a `@deno-types` pragma and optionally return
|
||||
/// its value.
|
||||
fn parse_deno_types(comment: &str) -> Option<String> {
|
||||
if let Some(captures) = DENO_TYPES_RE.captures(comment) {
|
||||
if let Some(m) = captures.get(1) {
|
||||
Some(m.as_str().to_string())
|
||||
} else if let Some(m) = captures.get(2) {
|
||||
Some(m.as_str().to_string())
|
||||
} else {
|
||||
panic!("unreachable");
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A logical representation of a module within a graph.
|
||||
#[derive(Debug, Clone)]
|
||||
struct Module {
|
||||
dependencies: DependencyMap,
|
||||
emits: EmitMap,
|
||||
is_dirty: bool,
|
||||
is_hydrated: bool,
|
||||
is_parsed: bool,
|
||||
maybe_import_map: Option<Rc<RefCell<ImportMap>>>,
|
||||
maybe_parsed_module: Option<ParsedModule>,
|
||||
maybe_types: Option<(String, ModuleSpecifier)>,
|
||||
media_type: MediaType,
|
||||
specifier: ModuleSpecifier,
|
||||
source: TextDocument,
|
||||
}
|
||||
|
||||
impl Default for Module {
|
||||
fn default() -> Self {
|
||||
Module {
|
||||
dependencies: HashMap::new(),
|
||||
emits: HashMap::new(),
|
||||
is_dirty: false,
|
||||
is_hydrated: false,
|
||||
is_parsed: false,
|
||||
maybe_import_map: None,
|
||||
maybe_parsed_module: None,
|
||||
maybe_types: None,
|
||||
media_type: MediaType::Unknown,
|
||||
specifier: ModuleSpecifier::resolve_url("https://deno.land/x/").unwrap(),
|
||||
source: TextDocument::new(Vec::new(), Option::<&str>::None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Module {
|
||||
pub fn new(
|
||||
specifier: ModuleSpecifier,
|
||||
maybe_import_map: Option<Rc<RefCell<ImportMap>>>,
|
||||
) -> Self {
|
||||
Module {
|
||||
specifier,
|
||||
maybe_import_map,
|
||||
..Module::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hydrate(&mut self, cached_module: CachedModule) {
|
||||
self.media_type = cached_module.media_type;
|
||||
self.source = cached_module.source;
|
||||
if self.maybe_import_map.is_none() {
|
||||
if let Some(dependencies) = cached_module.maybe_dependencies {
|
||||
self.dependencies = dependencies;
|
||||
self.is_parsed = true;
|
||||
}
|
||||
}
|
||||
self.maybe_types = if let Some(ref specifier) = cached_module.maybe_types {
|
||||
Some((
|
||||
specifier.clone(),
|
||||
self
|
||||
.resolve_import(&specifier, None)
|
||||
.expect("could not resolve module"),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
self.is_dirty = false;
|
||||
self.emits = cached_module.emits;
|
||||
self.is_hydrated = true;
|
||||
}
|
||||
|
||||
pub fn parse(&mut self) -> Result<()> {
|
||||
let parsed_module =
|
||||
parse(&self.specifier, &self.source.to_str()?, &self.media_type)?;
|
||||
|
||||
// parse out any triple slash references
|
||||
for comment in parsed_module.get_leading_comments().iter() {
|
||||
if let Some(ts_reference) = parse_ts_reference(&comment.text) {
|
||||
let location: Location = parsed_module.get_location(&comment.span);
|
||||
match ts_reference {
|
||||
TypeScriptReference::Path(import) => {
|
||||
let specifier = self.resolve_import(&import, Some(location))?;
|
||||
let dep = self.dependencies.entry(import).or_default();
|
||||
dep.maybe_code = Some(specifier);
|
||||
}
|
||||
TypeScriptReference::Types(import) => {
|
||||
let specifier = self.resolve_import(&import, Some(location))?;
|
||||
if self.media_type == MediaType::JavaScript
|
||||
|| self.media_type == MediaType::JSX
|
||||
{
|
||||
// TODO(kitsonk) we need to specifically update the cache when
|
||||
// this value changes
|
||||
self.maybe_types = Some((import.clone(), specifier));
|
||||
} else {
|
||||
let dep = self.dependencies.entry(import).or_default();
|
||||
dep.maybe_type = Some(specifier);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse out all the syntactical dependencies for a module
|
||||
let dependencies = parsed_module.analyze_dependencies();
|
||||
for desc in dependencies.iter() {
|
||||
let location = Location {
|
||||
filename: self.specifier.to_string(),
|
||||
col: desc.col,
|
||||
line: desc.line,
|
||||
};
|
||||
let specifier =
|
||||
self.resolve_import(&desc.specifier, Some(location.clone()))?;
|
||||
|
||||
// Parse out any `@deno-types` pragmas and modify dependency
|
||||
let maybe_types_specifier = if !desc.leading_comments.is_empty() {
|
||||
let comment = desc.leading_comments.last().unwrap();
|
||||
if let Some(deno_types) = parse_deno_types(&comment.text).as_ref() {
|
||||
Some(self.resolve_import(deno_types, Some(location))?)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let dep = self
|
||||
.dependencies
|
||||
.entry(desc.specifier.to_string())
|
||||
.or_default();
|
||||
if desc.kind == DependencyKind::ExportType
|
||||
|| desc.kind == DependencyKind::ImportType
|
||||
{
|
||||
dep.maybe_type = Some(specifier);
|
||||
} else {
|
||||
dep.maybe_code = Some(specifier);
|
||||
}
|
||||
if let Some(types_specifier) = maybe_types_specifier {
|
||||
dep.maybe_type = Some(types_specifier);
|
||||
}
|
||||
}
|
||||
|
||||
self.maybe_parsed_module = Some(parsed_module);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn resolve_import(
|
||||
&self,
|
||||
specifier: &str,
|
||||
maybe_location: Option<Location>,
|
||||
) -> Result<ModuleSpecifier> {
|
||||
let maybe_resolve = if let Some(import_map) = self.maybe_import_map.clone()
|
||||
{
|
||||
import_map
|
||||
.borrow()
|
||||
.resolve(specifier, self.specifier.as_str())?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let specifier = if let Some(module_specifier) = maybe_resolve {
|
||||
module_specifier
|
||||
} else {
|
||||
ModuleSpecifier::resolve_import(specifier, self.specifier.as_str())?
|
||||
};
|
||||
|
||||
let referrer_scheme = self.specifier.as_url().scheme();
|
||||
let specifier_scheme = specifier.as_url().scheme();
|
||||
let location = maybe_location.unwrap_or(Location {
|
||||
filename: self.specifier.to_string(),
|
||||
line: 0,
|
||||
col: 0,
|
||||
});
|
||||
|
||||
// Disallow downgrades from HTTPS to HTTP
|
||||
if referrer_scheme == "https" && specifier_scheme == "http" {
|
||||
return Err(InvalidDowngrade(specifier.clone(), location).into());
|
||||
}
|
||||
|
||||
// Disallow a remote URL from trying to import a local URL
|
||||
if (referrer_scheme == "https" || referrer_scheme == "http")
|
||||
&& !(specifier_scheme == "https" || specifier_scheme == "http")
|
||||
{
|
||||
return Err(InvalidLocalImport(specifier.clone(), location).into());
|
||||
}
|
||||
|
||||
Ok(specifier)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct Stats(Vec<(String, u128)>);
|
||||
|
||||
impl<'de> Deserialize<'de> for Stats {
|
||||
fn deserialize<D>(deserializer: D) -> result::Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let items: Vec<(String, u128)> = Deserialize::deserialize(deserializer)?;
|
||||
Ok(Stats(items))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Stats {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
for (key, value) in self.0.clone() {
|
||||
write!(f, "{}: {}", key, value)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A structure which provides options when transpiling modules.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct TranspileOptions {
|
||||
/// If `true` then debug logging will be output from the isolate.
|
||||
pub debug: bool,
|
||||
/// A string of configuration data that augments the the default configuration
|
||||
/// passed to the TypeScript compiler. This is typically the contents of a
|
||||
/// user supplied `tsconfig.json`.
|
||||
pub maybe_config: Option<String>,
|
||||
}
|
||||
|
||||
/// The transpile options that are significant out of a user provided tsconfig
|
||||
/// file, that we want to deserialize out of the final config for a transpile.
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct TranspileConfigOptions {
|
||||
pub check_js: bool,
|
||||
pub emit_decorator_metadata: bool,
|
||||
pub jsx: String,
|
||||
pub jsx_factory: String,
|
||||
pub jsx_fragment_factory: String,
|
||||
}
|
||||
|
||||
/// A dependency graph of modules, were the modules that have been inserted via
|
||||
/// the builder will be loaded into the graph. Also provides an interface to
|
||||
/// be able to manipulate and handle the graph.
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Graph {
|
||||
build_info: BuildInfoMap,
|
||||
handler: Rc<RefCell<dyn SpecifierHandler>>,
|
||||
modules: HashMap<ModuleSpecifier, Module>,
|
||||
roots: Vec<ModuleSpecifier>,
|
||||
}
|
||||
|
||||
impl Graph {
|
||||
/// Create a new instance of a graph, ready to have modules loaded it.
|
||||
///
|
||||
/// The argument `handler` is an instance of a structure that implements the
|
||||
/// `SpecifierHandler` trait.
|
||||
///
|
||||
pub fn new(handler: Rc<RefCell<dyn SpecifierHandler>>) -> Self {
|
||||
Graph {
|
||||
build_info: HashMap::new(),
|
||||
handler,
|
||||
modules: HashMap::new(),
|
||||
roots: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the handler with any modules that are marked as _dirty_ and update
|
||||
/// any build info if present.
|
||||
fn flush(&mut self, emit_type: &EmitType) -> Result<()> {
|
||||
let mut handler = self.handler.borrow_mut();
|
||||
for (_, module) in self.modules.iter_mut() {
|
||||
if module.is_dirty {
|
||||
let (code, maybe_map) = module.emits.get(emit_type).unwrap();
|
||||
handler.set_cache(
|
||||
&module.specifier,
|
||||
&emit_type,
|
||||
code.clone(),
|
||||
maybe_map.clone(),
|
||||
)?;
|
||||
module.is_dirty = false;
|
||||
}
|
||||
}
|
||||
for root_specifier in self.roots.iter() {
|
||||
if let Some(build_info) = self.build_info.get(&emit_type) {
|
||||
handler.set_build_info(
|
||||
root_specifier,
|
||||
&emit_type,
|
||||
build_info.to_owned(),
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Verify the subresource integrity of the graph based upon the optional
|
||||
/// lockfile, updating the lockfile with any missing resources. This will
|
||||
/// error if any of the resources do not match their lock status.
|
||||
pub fn lock(&self, maybe_lockfile: &Option<Mutex<Lockfile>>) -> Result<()> {
|
||||
if let Some(lf) = maybe_lockfile {
|
||||
let mut lockfile = lf.lock().unwrap();
|
||||
for (ms, module) in self.modules.iter() {
|
||||
let specifier = module.specifier.to_string();
|
||||
let code = module.source.to_string()?;
|
||||
let valid = lockfile.check_or_insert(&specifier, &code);
|
||||
if !valid {
|
||||
return Err(
|
||||
InvalidSource(ms.clone(), lockfile.filename.clone()).into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Transpile (only transform) the graph, updating any emitted modules
|
||||
/// with the specifier handler. The result contains any performance stats
|
||||
/// from the compiler and optionally any user provided configuration compiler
|
||||
/// options that were ignored.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// - `options` - A structure of options which impact how the code is
|
||||
/// transpiled.
|
||||
///
|
||||
pub fn transpile(
|
||||
&mut self,
|
||||
options: TranspileOptions,
|
||||
) -> Result<(Stats, Option<IgnoredCompilerOptions>)> {
|
||||
let start = Instant::now();
|
||||
let emit_type = EmitType::Cli;
|
||||
let mut compiler_options = json!({
|
||||
"checkJs": false,
|
||||
"emitDecoratorMetadata": false,
|
||||
"jsx": "react",
|
||||
"jsxFactory": "React.createElement",
|
||||
"jsxFragmentFactory": "React.Fragment",
|
||||
});
|
||||
|
||||
let maybe_ignored_options = if let Some(config_text) = options.maybe_config
|
||||
{
|
||||
let (user_config, ignored_options) = parse_config(&config_text)?;
|
||||
json_merge(&mut compiler_options, &user_config);
|
||||
ignored_options
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let compiler_options: TranspileConfigOptions =
|
||||
serde_json::from_value(compiler_options)?;
|
||||
let check_js = compiler_options.check_js;
|
||||
let transform_jsx = compiler_options.jsx == "react";
|
||||
let emit_options = ast::TranspileOptions {
|
||||
emit_metadata: compiler_options.emit_decorator_metadata,
|
||||
inline_source_map: true,
|
||||
jsx_factory: compiler_options.jsx_factory,
|
||||
jsx_fragment_factory: compiler_options.jsx_fragment_factory,
|
||||
transform_jsx,
|
||||
};
|
||||
|
||||
let mut emit_count: u128 = 0;
|
||||
for (_, module) in self.modules.iter_mut() {
|
||||
// if the module is a Dts file we should skip it
|
||||
if module.media_type == MediaType::Dts {
|
||||
continue;
|
||||
}
|
||||
// skip modules that already have a valid emit
|
||||
if module.emits.contains_key(&emit_type) {
|
||||
continue;
|
||||
}
|
||||
// if we don't have check_js enabled, we won't touch non TypeScript
|
||||
// modules
|
||||
if !(check_js
|
||||
|| module.media_type == MediaType::TSX
|
||||
|| module.media_type == MediaType::TypeScript)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if module.maybe_parsed_module.is_none() {
|
||||
module.parse()?;
|
||||
}
|
||||
let parsed_module = module.maybe_parsed_module.clone().unwrap();
|
||||
let emit = parsed_module.transpile(&emit_options)?;
|
||||
emit_count += 1;
|
||||
module.emits.insert(emit_type.clone(), emit);
|
||||
module.is_dirty = true;
|
||||
}
|
||||
self.flush(&emit_type)?;
|
||||
|
||||
let stats = Stats(vec![
|
||||
("Files".to_string(), self.modules.len() as u128),
|
||||
("Emitted".to_string(), emit_count),
|
||||
("Total time".to_string(), start.elapsed().as_millis()),
|
||||
]);
|
||||
|
||||
Ok((stats, maybe_ignored_options))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ModuleProvider for Graph {
|
||||
fn get_source(&self, specifier: &ModuleSpecifier) -> Option<String> {
|
||||
if let Some(module) = self.modules.get(specifier) {
|
||||
if let Ok(source) = module.source.to_string() {
|
||||
Some(source)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<(ModuleSpecifier, MediaType)> {
|
||||
if !self.modules.contains_key(referrer) {
|
||||
return Err(MissingSpecifier(referrer.to_owned()).into());
|
||||
}
|
||||
let module = self.modules.get(referrer).unwrap();
|
||||
if !module.dependencies.contains_key(specifier) {
|
||||
return Err(
|
||||
MissingDependency(referrer.to_owned(), specifier.to_owned()).into(),
|
||||
);
|
||||
}
|
||||
let dependency = module.dependencies.get(specifier).unwrap();
|
||||
// If there is a @deno-types pragma that impacts the dependency, then the
|
||||
// maybe_type property will be set with that specifier, otherwise we use the
|
||||
// specifier that point to the runtime code.
|
||||
let resolved_specifier =
|
||||
if let Some(type_specifier) = dependency.maybe_type.clone() {
|
||||
type_specifier
|
||||
} else if let Some(code_specifier) = dependency.maybe_code.clone() {
|
||||
code_specifier
|
||||
} else {
|
||||
return Err(
|
||||
MissingDependency(referrer.to_owned(), specifier.to_owned()).into(),
|
||||
);
|
||||
};
|
||||
if !self.modules.contains_key(&resolved_specifier) {
|
||||
return Err(
|
||||
MissingDependency(referrer.to_owned(), resolved_specifier.to_string())
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
let dep_module = self.modules.get(&resolved_specifier).unwrap();
|
||||
// In the case that there is a X-TypeScript-Types or a triple-slash types,
|
||||
// then the `maybe_types` specifier will be populated and we should use that
|
||||
// instead.
|
||||
let result = if let Some((_, types)) = dep_module.maybe_types.clone() {
|
||||
if let Some(types_module) = self.modules.get(&types) {
|
||||
(types, types_module.media_type)
|
||||
} else {
|
||||
return Err(
|
||||
MissingDependency(referrer.to_owned(), types.to_string()).into(),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
(resolved_specifier, dep_module.media_type)
|
||||
};
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
/// A structure for building a dependency graph of modules.
|
||||
pub struct GraphBuilder {
|
||||
fetched: HashSet<ModuleSpecifier>,
|
||||
graph: Graph,
|
||||
maybe_import_map: Option<Rc<RefCell<ImportMap>>>,
|
||||
pending: FuturesUnordered<FetchFuture>,
|
||||
}
|
||||
|
||||
impl GraphBuilder {
|
||||
pub fn new(
|
||||
handler: Rc<RefCell<dyn SpecifierHandler>>,
|
||||
maybe_import_map: Option<ImportMap>,
|
||||
) -> Self {
|
||||
let internal_import_map = if let Some(import_map) = maybe_import_map {
|
||||
Some(Rc::new(RefCell::new(import_map)))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
GraphBuilder {
|
||||
graph: Graph::new(handler),
|
||||
fetched: HashSet::new(),
|
||||
maybe_import_map: internal_import_map,
|
||||
pending: FuturesUnordered::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Request a module to be fetched from the handler and queue up its future
|
||||
/// to be awaited to be resolved.
|
||||
fn fetch(&mut self, specifier: &ModuleSpecifier) -> Result<()> {
|
||||
if self.fetched.contains(&specifier) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.fetched.insert(specifier.clone());
|
||||
let future = self.graph.handler.borrow_mut().fetch(specifier.clone());
|
||||
self.pending.push(future);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Visit a module that has been fetched, hydrating the module, analyzing its
|
||||
/// dependencies if required, fetching those dependencies, and inserting the
|
||||
/// module into the graph.
|
||||
fn visit(&mut self, cached_module: CachedModule) -> Result<()> {
|
||||
let specifier = cached_module.specifier.clone();
|
||||
let mut module =
|
||||
Module::new(specifier.clone(), self.maybe_import_map.clone());
|
||||
module.hydrate(cached_module);
|
||||
if !module.is_parsed {
|
||||
let has_types = module.maybe_types.is_some();
|
||||
module.parse()?;
|
||||
if self.maybe_import_map.is_none() {
|
||||
let mut handler = self.graph.handler.borrow_mut();
|
||||
handler.set_deps(&specifier, module.dependencies.clone())?;
|
||||
if !has_types {
|
||||
if let Some((types, _)) = module.maybe_types.clone() {
|
||||
handler.set_types(&specifier, types)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (_, dep) in module.dependencies.iter() {
|
||||
if let Some(specifier) = dep.maybe_code.as_ref() {
|
||||
self.fetch(specifier)?;
|
||||
}
|
||||
if let Some(specifier) = dep.maybe_type.as_ref() {
|
||||
self.fetch(specifier)?;
|
||||
}
|
||||
}
|
||||
if let Some((_, specifier)) = module.maybe_types.as_ref() {
|
||||
self.fetch(specifier)?;
|
||||
}
|
||||
self.graph.modules.insert(specifier, module);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Insert a module into the graph based on a module specifier. The module
|
||||
/// and any dependencies will be fetched from the handler. The module will
|
||||
/// also be treated as a _root_ module in the graph.
|
||||
pub async fn insert(&mut self, specifier: &ModuleSpecifier) -> Result<()> {
|
||||
self.fetch(specifier)?;
|
||||
|
||||
loop {
|
||||
let cached_module = self.pending.next().await.unwrap()?;
|
||||
self.visit(cached_module)?;
|
||||
if self.pending.is_empty() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !self.graph.roots.contains(specifier) {
|
||||
self.graph.roots.push(specifier.clone());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Move out the graph from the builder to be utilized further. An optional
|
||||
/// lockfile can be provided, where if the sources in the graph do not match
|
||||
/// the expected lockfile, the method with error instead of returning the
|
||||
/// graph.
|
||||
pub fn get_graph(
|
||||
self,
|
||||
maybe_lockfile: &Option<Mutex<Lockfile>>,
|
||||
) -> Result<Graph> {
|
||||
self.graph.lock(maybe_lockfile)?;
|
||||
Ok(self.graph)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use crate::specifier_handler::tests::MockSpecifierHandler;
|
||||
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_graph_builder() {
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let fixtures = c.join("tests/module_graph");
|
||||
let handler = Rc::new(RefCell::new(MockSpecifierHandler {
|
||||
fixtures,
|
||||
..MockSpecifierHandler::default()
|
||||
}));
|
||||
let mut builder = GraphBuilder::new(handler, None);
|
||||
let specifier =
|
||||
ModuleSpecifier::resolve_url_or_path("https://deno.land/x/mod.ts")
|
||||
.expect("could not resolve module");
|
||||
builder
|
||||
.insert(&specifier)
|
||||
.await
|
||||
.expect("module not inserted");
|
||||
let graph = builder.get_graph(&None).expect("error getting graph");
|
||||
let actual = graph
|
||||
.resolve("./a.ts", &specifier)
|
||||
.expect("module to resolve");
|
||||
let expected = (
|
||||
ModuleSpecifier::resolve_url_or_path("https://deno.land/x/a.ts")
|
||||
.expect("unable to resolve"),
|
||||
MediaType::TypeScript,
|
||||
);
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_graph_builder_import_map() {
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let fixtures = c.join("tests/module_graph");
|
||||
let handler = Rc::new(RefCell::new(MockSpecifierHandler {
|
||||
fixtures,
|
||||
..MockSpecifierHandler::default()
|
||||
}));
|
||||
let import_map = ImportMap::from_json(
|
||||
"https://deno.land/x/import_map.ts",
|
||||
r#"{
|
||||
"imports": {
|
||||
"jquery": "./jquery.js",
|
||||
"lodash": "https://unpkg.com/lodash/index.js"
|
||||
}
|
||||
}"#,
|
||||
)
|
||||
.expect("could not load import map");
|
||||
let mut builder = GraphBuilder::new(handler, Some(import_map));
|
||||
let specifier =
|
||||
ModuleSpecifier::resolve_url_or_path("https://deno.land/x/import_map.ts")
|
||||
.expect("could not resolve module");
|
||||
builder
|
||||
.insert(&specifier)
|
||||
.await
|
||||
.expect("module not inserted");
|
||||
let graph = builder.get_graph(&None).expect("could not get graph");
|
||||
let actual_jquery = graph
|
||||
.resolve("jquery", &specifier)
|
||||
.expect("module to resolve");
|
||||
let expected_jquery = (
|
||||
ModuleSpecifier::resolve_url_or_path("https://deno.land/x/jquery.js")
|
||||
.expect("unable to resolve"),
|
||||
MediaType::JavaScript,
|
||||
);
|
||||
assert_eq!(actual_jquery, expected_jquery);
|
||||
let actual_lodash = graph
|
||||
.resolve("lodash", &specifier)
|
||||
.expect("module to resolve");
|
||||
let expected_lodash = (
|
||||
ModuleSpecifier::resolve_url_or_path("https://unpkg.com/lodash/index.js")
|
||||
.expect("unable to resolve"),
|
||||
MediaType::JavaScript,
|
||||
);
|
||||
assert_eq!(actual_lodash, expected_lodash);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_graph_transpile() {
|
||||
// This is a complex scenario of transpiling, where we have TypeScript
|
||||
// importing a JavaScript file (with type definitions) which imports
|
||||
// TypeScript, JavaScript, and JavaScript with type definitions.
|
||||
// For scenarios where we transpile, we only want the TypeScript files
|
||||
// to be actually emitted.
|
||||
//
|
||||
// This also exercises "@deno-types" and type references.
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let fixtures = c.join("tests/module_graph");
|
||||
let handler = Rc::new(RefCell::new(MockSpecifierHandler {
|
||||
fixtures,
|
||||
..MockSpecifierHandler::default()
|
||||
}));
|
||||
let mut builder = GraphBuilder::new(handler.clone(), None);
|
||||
let specifier =
|
||||
ModuleSpecifier::resolve_url_or_path("file:///tests/main.ts")
|
||||
.expect("could not resolve module");
|
||||
builder
|
||||
.insert(&specifier)
|
||||
.await
|
||||
.expect("module not inserted");
|
||||
let mut graph = builder.get_graph(&None).expect("could not get graph");
|
||||
let (stats, maybe_ignored_options) =
|
||||
graph.transpile(TranspileOptions::default()).unwrap();
|
||||
assert_eq!(stats.0.len(), 3);
|
||||
assert_eq!(maybe_ignored_options, None);
|
||||
let h = handler.borrow();
|
||||
assert_eq!(h.cache_calls.len(), 2);
|
||||
assert_eq!(h.cache_calls[0].1, EmitType::Cli);
|
||||
assert!(h.cache_calls[0]
|
||||
.2
|
||||
.to_string()
|
||||
.unwrap()
|
||||
.contains("# sourceMappingURL=data:application/json;base64,"));
|
||||
assert_eq!(h.cache_calls[0].3, None);
|
||||
assert_eq!(h.cache_calls[1].1, EmitType::Cli);
|
||||
assert!(h.cache_calls[1]
|
||||
.2
|
||||
.to_string()
|
||||
.unwrap()
|
||||
.contains("# sourceMappingURL=data:application/json;base64,"));
|
||||
assert_eq!(h.cache_calls[0].3, None);
|
||||
assert_eq!(h.deps_calls.len(), 7);
|
||||
assert_eq!(
|
||||
h.deps_calls[0].0,
|
||||
ModuleSpecifier::resolve_url_or_path("file:///tests/main.ts").unwrap()
|
||||
);
|
||||
assert_eq!(h.deps_calls[0].1.len(), 1);
|
||||
assert_eq!(
|
||||
h.deps_calls[1].0,
|
||||
ModuleSpecifier::resolve_url_or_path("https://deno.land/x/lib/mod.js")
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(h.deps_calls[1].1.len(), 3);
|
||||
assert_eq!(
|
||||
h.deps_calls[2].0,
|
||||
ModuleSpecifier::resolve_url_or_path("https://deno.land/x/lib/mod.d.ts")
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(h.deps_calls[2].1.len(), 3, "should have 3 dependencies");
|
||||
// sometimes the calls are not deterministic, and so checking the contents
|
||||
// can cause some failures
|
||||
assert_eq!(h.deps_calls[3].1.len(), 0, "should have no dependencies");
|
||||
assert_eq!(h.deps_calls[4].1.len(), 0, "should have no dependencies");
|
||||
assert_eq!(h.deps_calls[5].1.len(), 0, "should have no dependencies");
|
||||
assert_eq!(h.deps_calls[6].1.len(), 0, "should have no dependencies");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_graph_transpile_user_config() {
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let fixtures = c.join("tests/module_graph");
|
||||
let handler = Rc::new(RefCell::new(MockSpecifierHandler {
|
||||
fixtures,
|
||||
..MockSpecifierHandler::default()
|
||||
}));
|
||||
let mut builder = GraphBuilder::new(handler.clone(), None);
|
||||
let specifier =
|
||||
ModuleSpecifier::resolve_url_or_path("https://deno.land/x/transpile.tsx")
|
||||
.expect("could not resolve module");
|
||||
builder
|
||||
.insert(&specifier)
|
||||
.await
|
||||
.expect("module not inserted");
|
||||
let mut graph = builder.get_graph(&None).expect("could not get graph");
|
||||
let config = r#"{
|
||||
"compilerOptions": {
|
||||
"target": "es5",
|
||||
"jsx": "preserve"
|
||||
}
|
||||
}"#;
|
||||
let (_, maybe_ignored_options) = graph
|
||||
.transpile(TranspileOptions {
|
||||
debug: false,
|
||||
maybe_config: Some(config.to_string()),
|
||||
})
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
maybe_ignored_options,
|
||||
Some(IgnoredCompilerOptions(vec!["target".to_string()])),
|
||||
"the 'target' options should have been ignored"
|
||||
);
|
||||
let h = handler.borrow();
|
||||
assert_eq!(h.cache_calls.len(), 1, "only one file should be emitted");
|
||||
assert!(
|
||||
h.cache_calls[0]
|
||||
.2
|
||||
.to_string()
|
||||
.unwrap()
|
||||
.contains("<div>Hello world!</div>"),
|
||||
"jsx should have been preserved"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_graph_with_lockfile() {
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let fixtures = c.join("tests/module_graph");
|
||||
let lockfile_path = fixtures.join("lockfile.json");
|
||||
let lockfile =
|
||||
Lockfile::new(lockfile_path.to_string_lossy().to_string(), false)
|
||||
.expect("could not load lockfile");
|
||||
let maybe_lockfile = Some(Mutex::new(lockfile));
|
||||
let handler = Rc::new(RefCell::new(MockSpecifierHandler {
|
||||
fixtures,
|
||||
..MockSpecifierHandler::default()
|
||||
}));
|
||||
let mut builder = GraphBuilder::new(handler.clone(), None);
|
||||
let specifier =
|
||||
ModuleSpecifier::resolve_url_or_path("file:///tests/main.ts")
|
||||
.expect("could not resolve module");
|
||||
builder
|
||||
.insert(&specifier)
|
||||
.await
|
||||
.expect("module not inserted");
|
||||
builder
|
||||
.get_graph(&maybe_lockfile)
|
||||
.expect("could not get graph");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_graph_with_lockfile_fail() {
|
||||
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
|
||||
let fixtures = c.join("tests/module_graph");
|
||||
let lockfile_path = fixtures.join("lockfile_fail.json");
|
||||
let lockfile =
|
||||
Lockfile::new(lockfile_path.to_string_lossy().to_string(), false)
|
||||
.expect("could not load lockfile");
|
||||
let maybe_lockfile = Some(Mutex::new(lockfile));
|
||||
let handler = Rc::new(RefCell::new(MockSpecifierHandler {
|
||||
fixtures,
|
||||
..MockSpecifierHandler::default()
|
||||
}));
|
||||
let mut builder = GraphBuilder::new(handler.clone(), None);
|
||||
let specifier =
|
||||
ModuleSpecifier::resolve_url_or_path("file:///tests/main.ts")
|
||||
.expect("could not resolve module");
|
||||
builder
|
||||
.insert(&specifier)
|
||||
.await
|
||||
.expect("module not inserted");
|
||||
builder
|
||||
.get_graph(&maybe_lockfile)
|
||||
.expect_err("expected an error");
|
||||
}
|
||||
}
|
|
@ -5,6 +5,7 @@ use deno_core::serde_json::json;
|
|||
use std::collections::BTreeMap;
|
||||
use std::io::Result;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Lockfile {
|
||||
write: bool,
|
||||
map: BTreeMap<String, String>,
|
||||
|
|
|
@ -25,6 +25,7 @@ pub mod fmt_errors;
|
|||
mod fs;
|
||||
pub mod global_state;
|
||||
mod global_timer;
|
||||
mod graph;
|
||||
pub mod http_cache;
|
||||
mod http_util;
|
||||
mod import_map;
|
||||
|
@ -44,6 +45,7 @@ mod repl;
|
|||
pub mod resolve_addr;
|
||||
pub mod signal;
|
||||
pub mod source_maps;
|
||||
mod specifier_handler;
|
||||
pub mod state;
|
||||
mod test_runner;
|
||||
mod text_encoding;
|
||||
|
|
585
cli/specifier_handler.rs
Normal file
585
cli/specifier_handler.rs
Normal file
|
@ -0,0 +1,585 @@
|
|||
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::deno_dir::DenoDir;
|
||||
use crate::disk_cache::DiskCache;
|
||||
use crate::file_fetcher::SourceFileFetcher;
|
||||
use crate::file_fetcher::TextDocument;
|
||||
use crate::flags::Flags;
|
||||
use crate::http_cache::HttpCache;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::permissions::Permissions;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::Future;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::error::Error;
|
||||
use std::fmt;
|
||||
use std::pin::Pin;
|
||||
use std::result;
|
||||
|
||||
type Result<V> = result::Result<V, AnyError>;
|
||||
|
||||
pub type DependencyMap = HashMap<String, Dependency>;
|
||||
pub type EmitMap = HashMap<EmitType, (TextDocument, Option<TextDocument>)>;
|
||||
pub type FetchFuture =
|
||||
Pin<Box<(dyn Future<Output = Result<CachedModule>> + 'static)>>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CachedModule {
|
||||
pub emits: EmitMap,
|
||||
pub maybe_dependencies: Option<DependencyMap>,
|
||||
pub maybe_types: Option<String>,
|
||||
pub maybe_version: Option<String>,
|
||||
pub media_type: MediaType,
|
||||
pub source: TextDocument,
|
||||
pub specifier: ModuleSpecifier,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl Default for CachedModule {
|
||||
fn default() -> Self {
|
||||
CachedModule {
|
||||
emits: HashMap::new(),
|
||||
maybe_dependencies: None,
|
||||
maybe_types: None,
|
||||
maybe_version: None,
|
||||
media_type: MediaType::Unknown,
|
||||
source: TextDocument::new(Vec::new(), Option::<&str>::None),
|
||||
specifier: ModuleSpecifier::resolve_url("https://deno.land/x/mod.ts")
|
||||
.unwrap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An enum that represents the different types of emitted code that can be
|
||||
/// cached. Different types can utilise different configurations which can
|
||||
/// change the validity of the emitted code.
|
||||
#[allow(unused)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
pub enum EmitType {
|
||||
/// Code that was emitted for use by the CLI
|
||||
Cli,
|
||||
/// Code that was emitted for bundling purposes
|
||||
Bundle,
|
||||
/// Code that was emitted based on a request to the runtime APIs
|
||||
Runtime,
|
||||
}
|
||||
|
||||
impl Default for EmitType {
|
||||
fn default() -> Self {
|
||||
EmitType::Cli
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Dependency {
|
||||
/// The module specifier that resolves to the runtime code dependency for the
|
||||
/// module.
|
||||
pub maybe_code: Option<ModuleSpecifier>,
|
||||
/// The module specifier that resolves to the type only dependency for the
|
||||
/// module.
|
||||
pub maybe_type: Option<ModuleSpecifier>,
|
||||
}
|
||||
|
||||
pub trait SpecifierHandler {
|
||||
/// Instructs the handler to fetch a specifier or retrieve its value from the
|
||||
/// cache if there is a valid cached version.
|
||||
fn fetch(&mut self, specifier: ModuleSpecifier) -> FetchFuture;
|
||||
|
||||
/// Get the optional build info from the cache for a given module specifier.
|
||||
/// Because build infos are only associated with the "root" modules, they are
|
||||
/// not expected to be cached for each module, but are "lazily" checked when
|
||||
/// a root module is identified. The `emit_type` also indicates what form
|
||||
/// of the module the build info is valid for.
|
||||
fn get_build_info(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
emit_type: &EmitType,
|
||||
) -> Result<Option<TextDocument>>;
|
||||
|
||||
/// Set the emitted code (and maybe map) for a given module specifier. The
|
||||
/// cache type indicates what form the emit is related to.
|
||||
fn set_cache(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
emit_type: &EmitType,
|
||||
code: TextDocument,
|
||||
maybe_map: Option<TextDocument>,
|
||||
) -> Result<()>;
|
||||
|
||||
/// When parsed out of a JavaScript module source, the triple slash reference
|
||||
/// to the types should be stored in the cache.
|
||||
fn set_types(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
types: String,
|
||||
) -> Result<()>;
|
||||
|
||||
/// Set the build info for a module specifier, also providing the cache type.
|
||||
fn set_build_info(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
emit_type: &EmitType,
|
||||
build_info: TextDocument,
|
||||
) -> Result<()>;
|
||||
|
||||
/// Set the graph dependencies for a given module specifier.
|
||||
fn set_deps(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
dependencies: DependencyMap,
|
||||
) -> Result<()>;
|
||||
|
||||
/// Set the version of the source for a given module, which is used to help
|
||||
/// determine if a module needs to be re-type-checked.
|
||||
fn set_version(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
version: String,
|
||||
) -> Result<()>;
|
||||
}
|
||||
|
||||
impl fmt::Debug for dyn SpecifierHandler {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "SpecifierHandler {{ }}")
|
||||
}
|
||||
}
|
||||
|
||||
/// Errors that could be raised by a `SpecifierHandler` implementation.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum SpecifierHandlerError {
|
||||
/// An error representing an error the `EmitType` that was supplied to a
|
||||
/// method of an implementor of the `SpecifierHandler` trait.
|
||||
UnsupportedEmitType(EmitType),
|
||||
}
|
||||
use SpecifierHandlerError::*;
|
||||
|
||||
impl fmt::Display for SpecifierHandlerError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
UnsupportedEmitType(ref emit_type) => write!(
|
||||
f,
|
||||
"The emit type of \"{:?}\" is unsupported for this operation.",
|
||||
emit_type
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for SpecifierHandlerError {}
|
||||
|
||||
/// A representation of meta data for a compiled file.
|
||||
///
|
||||
/// *Note* this is currently just a copy of what is located in `tsc.rs` but will
|
||||
/// be refactored to be able to store dependencies and type information in the
|
||||
/// future.
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct CompiledFileMetadata {
|
||||
pub version_hash: String,
|
||||
}
|
||||
|
||||
impl CompiledFileMetadata {
|
||||
pub fn from_json_string(metadata_string: &str) -> Result<Self> {
|
||||
serde_json::from_str::<Self>(metadata_string).map_err(|e| e.into())
|
||||
}
|
||||
|
||||
pub fn to_json_string(&self) -> Result<String> {
|
||||
serde_json::to_string(self).map_err(|e| e.into())
|
||||
}
|
||||
}
|
||||
|
||||
/// An implementation of the `SpecifierHandler` trait that integrates with the
|
||||
/// existing `file_fetcher` interface, which will eventually be refactored to
|
||||
/// align it more to the `SpecifierHandler` trait.
|
||||
pub struct FetchHandler {
|
||||
disk_cache: DiskCache,
|
||||
file_fetcher: SourceFileFetcher,
|
||||
permissions: Permissions,
|
||||
}
|
||||
|
||||
impl FetchHandler {
|
||||
pub fn new(flags: &Flags, permissions: &Permissions) -> Result<Self> {
|
||||
let custom_root = env::var("DENO_DIR").map(String::into).ok();
|
||||
let deno_dir = DenoDir::new(custom_root)?;
|
||||
let deps_cache_location = deno_dir.root.join("deps");
|
||||
let http_cache = HttpCache::new(&deps_cache_location);
|
||||
let ca_file = flags.ca_file.clone().or_else(|| env::var("DENO_CERT").ok());
|
||||
|
||||
let file_fetcher = SourceFileFetcher::new(
|
||||
http_cache,
|
||||
!flags.reload,
|
||||
flags.cache_blocklist.clone(),
|
||||
flags.no_remote,
|
||||
flags.cached_only,
|
||||
ca_file.as_deref(),
|
||||
)?;
|
||||
let disk_cache = deno_dir.gen_cache;
|
||||
|
||||
Ok(FetchHandler {
|
||||
disk_cache,
|
||||
file_fetcher,
|
||||
permissions: permissions.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl SpecifierHandler for FetchHandler {
  fn fetch(&mut self, specifier: ModuleSpecifier) -> FetchFuture {
    let permissions = self.permissions.clone();
    let file_fetcher = self.file_fetcher.clone();
    let disk_cache = self.disk_cache.clone();

    async move {
      let source_file = file_fetcher
        .fetch_source_file(&specifier, None, permissions)
        .await?;
      let url = source_file.url;
      let filename = disk_cache.get_cache_filename_with_extension(&url, "meta");
      let maybe_version = if let Ok(bytes) = disk_cache.get(&filename) {
        if let Ok(metadata_string) = std::str::from_utf8(&bytes) {
          if let Ok(compiled_file_metadata) =
            CompiledFileMetadata::from_json_string(metadata_string)
          {
            Some(compiled_file_metadata.version_hash)
          } else {
            None
          }
        } else {
          None
        }
      } else {
        None
      };

      let filename =
        disk_cache.get_cache_filename_with_extension(&url, "js.map");
      let maybe_map: Option<TextDocument> =
        if let Ok(map) = disk_cache.get(&filename) {
          Some(map.into())
        } else {
          None
        };
      let mut emits = HashMap::new();
      let filename = disk_cache.get_cache_filename_with_extension(&url, "js");
      if let Ok(code) = disk_cache.get(&filename) {
        emits.insert(EmitType::Cli, (code.into(), maybe_map));
      };

      Ok(CachedModule {
        emits,
        maybe_dependencies: None,
        maybe_types: source_file.types_header,
        maybe_version,
        media_type: source_file.media_type,
        source: source_file.source_code,
        specifier,
      })
    }
    .boxed_local()
  }

  fn get_build_info(
    &self,
    specifier: &ModuleSpecifier,
    emit_type: &EmitType,
  ) -> Result<Option<TextDocument>> {
    if emit_type != &EmitType::Cli {
      return Err(UnsupportedEmitType(emit_type.clone()).into());
    }
    let filename = self
      .disk_cache
      .get_cache_filename_with_extension(specifier.as_url(), "buildinfo");
    if let Ok(build_info) = self.disk_cache.get(&filename) {
      return Ok(Some(build_info.into()));
    }

    Ok(None)
  }

  fn set_build_info(
    &mut self,
    specifier: &ModuleSpecifier,
    emit_type: &EmitType,
    build_info: TextDocument,
  ) -> Result<()> {
    if emit_type != &EmitType::Cli {
      return Err(UnsupportedEmitType(emit_type.clone()).into());
    }
    let filename = self
      .disk_cache
      .get_cache_filename_with_extension(specifier.as_url(), "buildinfo");
    self
      .disk_cache
      .set(&filename, build_info.as_bytes())
      .map_err(|e| e.into())
  }

  fn set_cache(
    &mut self,
    specifier: &ModuleSpecifier,
    emit_type: &EmitType,
    code: TextDocument,
    maybe_map: Option<TextDocument>,
  ) -> Result<()> {
    if emit_type != &EmitType::Cli {
      return Err(UnsupportedEmitType(emit_type.clone()).into());
    }
    let filename = self
      .disk_cache
      .get_cache_filename_with_extension(specifier.as_url(), "js");
    self.disk_cache.set(&filename, code.as_bytes())?;

    if let Some(map) = maybe_map {
      let filename = self
        .disk_cache
        .get_cache_filename_with_extension(specifier.as_url(), "js.map");
      self.disk_cache.set(&filename, map.as_bytes())?;
    }

    Ok(())
  }

  fn set_deps(
    &mut self,
    _specifier: &ModuleSpecifier,
    _dependencies: DependencyMap,
  ) -> Result<()> {
    // file_fetcher doesn't have the concept of caching dependencies
    Ok(())
  }

  fn set_types(
    &mut self,
    _specifier: &ModuleSpecifier,
    _types: String,
  ) -> Result<()> {
    // file_fetcher doesn't have the concept of caching types
    Ok(())
  }

  fn set_version(
    &mut self,
    specifier: &ModuleSpecifier,
    version_hash: String,
  ) -> Result<()> {
    let compiled_file_metadata = CompiledFileMetadata { version_hash };
    let filename = self
      .disk_cache
      .get_cache_filename_with_extension(specifier.as_url(), "meta");

    self
      .disk_cache
      .set(
        &filename,
        compiled_file_metadata.to_json_string()?.as_bytes(),
      )
      .map_err(|e| e.into())
  }
}

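As a hedged illustration (not part of the diff) of how the build-info methods round trip through the disk cache, assuming a `handler` built as in the sketch above and a `specifier` for an already-fetched module:

// Hypothetical: build info is stored under the "buildinfo" cache extension.
let build_info = TextDocument::from("{\"program\":{}}");
handler.set_build_info(&specifier, &EmitType::Cli, build_info.clone())?;
let cached = handler.get_build_info(&specifier, &EmitType::Cli)?;
assert_eq!(cached, Some(build_info));
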
#[cfg(test)]
pub mod tests {
  use super::*;

  use deno_core::futures::future;
  use std::fs;
  use std::path::PathBuf;
  use tempfile::TempDir;

  /// This is a testing mock for `SpecifierHandler` that uses a special file
  /// system renaming scheme to mock local and remote modules, as well as
  /// providing "spies" for the critical methods for testing purposes.
  #[derive(Debug, Default)]
  pub struct MockSpecifierHandler {
    pub fixtures: PathBuf,
    pub build_info: HashMap<ModuleSpecifier, TextDocument>,
    pub build_info_calls: Vec<(ModuleSpecifier, EmitType, TextDocument)>,
    pub cache_calls: Vec<(
      ModuleSpecifier,
      EmitType,
      TextDocument,
      Option<TextDocument>,
    )>,
    pub deps_calls: Vec<(ModuleSpecifier, DependencyMap)>,
    pub types_calls: Vec<(ModuleSpecifier, String)>,
    pub version_calls: Vec<(ModuleSpecifier, String)>,
  }

  impl MockSpecifierHandler {
    fn get_cache(&self, specifier: ModuleSpecifier) -> Result<CachedModule> {
      let specifier_text = specifier
        .to_string()
        .replace(":///", "_")
        .replace("://", "_")
        .replace("/", "-");
      let specifier_path = self.fixtures.join(specifier_text);
      let media_type =
        match specifier_path.extension().unwrap().to_str().unwrap() {
          "ts" => {
            if specifier_path.to_string_lossy().ends_with(".d.ts") {
              MediaType::Dts
            } else {
              MediaType::TypeScript
            }
          }
          "tsx" => MediaType::TSX,
          "js" => MediaType::JavaScript,
          "jsx" => MediaType::JSX,
          _ => MediaType::Unknown,
        };
      let source =
        TextDocument::new(fs::read(specifier_path)?, Option::<&str>::None);

      Ok(CachedModule {
        source,
        specifier,
        media_type,
        ..CachedModule::default()
      })
    }
  }

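To make the fixture-naming scheme above concrete, here is an illustrative example (not part of the diff) of how a remote specifier flattens into one of the fixture file names added later in this commit:

// Illustrative only: mirrors the replace chain used in `get_cache`.
let specifier = "https://deno.land/x/lib/mod.js".to_string();
let fixture_name = specifier
  .replace(":///", "_")
  .replace("://", "_")
  .replace("/", "-");
assert_eq!(fixture_name, "https_deno.land-x-lib-mod.js");
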
  impl SpecifierHandler for MockSpecifierHandler {
    fn fetch(&mut self, specifier: ModuleSpecifier) -> FetchFuture {
      Box::pin(future::ready(self.get_cache(specifier)))
    }
    fn get_build_info(
      &self,
      specifier: &ModuleSpecifier,
      _cache_type: &EmitType,
    ) -> Result<Option<TextDocument>> {
      Ok(self.build_info.get(specifier).cloned())
    }
    fn set_cache(
      &mut self,
      specifier: &ModuleSpecifier,
      cache_type: &EmitType,
      code: TextDocument,
      maybe_map: Option<TextDocument>,
    ) -> Result<()> {
      self.cache_calls.push((
        specifier.clone(),
        cache_type.clone(),
        code,
        maybe_map,
      ));
      Ok(())
    }
    fn set_types(
      &mut self,
      specifier: &ModuleSpecifier,
      types: String,
    ) -> Result<()> {
      self.types_calls.push((specifier.clone(), types));
      Ok(())
    }
    fn set_build_info(
      &mut self,
      specifier: &ModuleSpecifier,
      cache_type: &EmitType,
      build_info: TextDocument,
    ) -> Result<()> {
      self
        .build_info
        .insert(specifier.clone(), build_info.clone());
      self.build_info_calls.push((
        specifier.clone(),
        cache_type.clone(),
        build_info,
      ));
      Ok(())
    }
    fn set_deps(
      &mut self,
      specifier: &ModuleSpecifier,
      dependencies: DependencyMap,
    ) -> Result<()> {
      self.deps_calls.push((specifier.clone(), dependencies));
      Ok(())
    }
    fn set_version(
      &mut self,
      specifier: &ModuleSpecifier,
      version: String,
    ) -> Result<()> {
      self.version_calls.push((specifier.clone(), version));
      Ok(())
    }
  }

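A brief sketch (not part of the diff) of how the "spy" vectors can be inspected in a test; it only exercises `set_version`, which needs no fixtures directory:

// Hypothetical: the mock records each call so tests can assert on it later.
let mut handler = MockSpecifierHandler::default();
let specifier =
  ModuleSpecifier::resolve_url_or_path("https://deno.land/x/a.ts").unwrap();
handler
  .set_version(&specifier, "abc123".to_string())
  .unwrap();
assert_eq!(handler.version_calls.len(), 1);
assert_eq!(handler.version_calls[0].1, "abc123");
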
  fn setup() -> (TempDir, FetchHandler) {
    let temp_dir = TempDir::new().expect("could not setup");
    let deno_dir = DenoDir::new(Some(temp_dir.path().to_path_buf()))
      .expect("could not setup");

    let file_fetcher = SourceFileFetcher::new(
      HttpCache::new(&temp_dir.path().to_path_buf().join("deps")),
      true,
      Vec::new(),
      false,
      false,
      None,
    )
    .expect("could not setup");
    let disk_cache = deno_dir.gen_cache;

    let fetch_handler = FetchHandler {
      disk_cache,
      file_fetcher,
      permissions: Permissions::allow_all(),
    };

    (temp_dir, fetch_handler)
  }

  #[tokio::test]
  async fn test_fetch_handler_fetch() {
    let _http_server_guard = test_util::http_server();
    let (_, mut file_fetcher) = setup();
    let specifier = ModuleSpecifier::resolve_url_or_path(
      "http://localhost:4545/cli/tests/subdir/mod2.ts",
    )
    .unwrap();
    let cached_module: CachedModule =
      file_fetcher.fetch(specifier.clone()).await.unwrap();
    assert_eq!(cached_module.emits.len(), 0);
    assert!(cached_module.maybe_dependencies.is_none());
    assert_eq!(cached_module.media_type, MediaType::TypeScript);
    assert_eq!(
      cached_module.source.to_str().unwrap(),
      "export { printHello } from \"./print_hello.ts\";\n"
    );
    assert_eq!(cached_module.specifier, specifier);
  }

  #[tokio::test]
  async fn test_fetch_handler_set_cache() {
    let _http_server_guard = test_util::http_server();
    let (_, mut file_fetcher) = setup();
    let specifier = ModuleSpecifier::resolve_url_or_path(
      "http://localhost:4545/cli/tests/subdir/mod2.ts",
    )
    .unwrap();
    let cached_module: CachedModule =
      file_fetcher.fetch(specifier.clone()).await.unwrap();
    assert_eq!(cached_module.emits.len(), 0);
    let code = TextDocument::from("some code");
    file_fetcher
      .set_cache(&specifier, &EmitType::Cli, code, None)
      .expect("could not set cache");
    let cached_module: CachedModule =
      file_fetcher.fetch(specifier.clone()).await.unwrap();
    assert_eq!(cached_module.emits.len(), 1);
    let actual_emit = cached_module.emits.get(&EmitType::Cli).unwrap();
    assert_eq!(actual_emit.0.to_str().unwrap(), "some code");
    assert_eq!(actual_emit.1, None);
  }
}

4
cli/tests/module_graph/file_tests-main.ts
Normal file
@@ -0,0 +1,4 @@
// @deno-types="https://deno.land/x/lib/mod.d.ts"
import * as lib from "https://deno.land/x/lib/mod.js";

console.log(lib);

1
cli/tests/module_graph/https_deno.land-x-a.ts
Normal file
@@ -0,0 +1 @@
export const a = "hello";

4
cli/tests/module_graph/https_deno.land-x-import_map.ts
Normal file
@@ -0,0 +1,4 @@
import * as $ from "jquery";
import * as _ from "lodash";

console.log($, _);

3
cli/tests/module_graph/https_deno.land-x-jquery.js
Normal file
@@ -0,0 +1,3 @@
const $ = {};

export default $;

1
cli/tests/module_graph/https_deno.land-x-lib-a.ts
Normal file
@@ -0,0 +1 @@
export const a: string[] = [];

1
cli/tests/module_graph/https_deno.land-x-lib-b.js
Normal file
@@ -0,0 +1 @@
export const b = [];

1
cli/tests/module_graph/https_deno.land-x-lib-c.d.ts
vendored
Normal file
@@ -0,0 +1 @@
export const c: string[];

3
cli/tests/module_graph/https_deno.land-x-lib-c.js
Normal file
@@ -0,0 +1,3 @@
/// <reference types="./c.d.ts" />

export const c = [];

9
cli/tests/module_graph/https_deno.land-x-lib-mod.d.ts
vendored
Normal file
@@ -0,0 +1,9 @@
export * as a from "./a.ts";
export * as b from "./b.js";
export * as c from "./c.js";

export interface A {
  a: string;
}

export const mod: A[];

5
cli/tests/module_graph/https_deno.land-x-lib-mod.js
Normal file
@@ -0,0 +1,5 @@
export * as a from "./a.ts";
export * as b from "./b.js";
export * as c from "./c.js";

export const mod = [];

3
cli/tests/module_graph/https_deno.land-x-mod.ts
Normal file
@@ -0,0 +1,3 @@
import * as a from "./a.ts";

console.log(a);

5
cli/tests/module_graph/https_deno.land-x-transpile.tsx
Normal file
@@ -0,0 +1,5 @@
export default class A {
  render() {
    return (<div>Hello world!</div>);
  }
}

3
cli/tests/module_graph/https_unpkg.com-lodash-index.js
Normal file
@@ -0,0 +1,3 @@
const _ = {};

export default _;

8
cli/tests/module_graph/lockfile.json
Normal file
@@ -0,0 +1,8 @@
{
  "https://deno.land/x/lib/a.ts": "4437fee72a750d9540a9575ea6426761d0aa1beedfa308fb1bc38701d97011b8",
  "https://deno.land/x/lib/b.js": "093cc4164ca7a9adb11597ad291e021634f0b2d8c048137f7e9fb0d709499028",
  "https://deno.land/x/lib/c.d.ts": "a95647377477cc663559f5e857bf318c584622ed1295a8ccb0c091d06bee0456",
  "https://deno.land/x/lib/c.js": "4ff934f4b3b06f320c3130326376d9f2435e2ecedd582940ca90938137d004e1",
  "https://deno.land/x/lib/mod.d.ts": "e54b994fbf63cb7f01076ea54f2ed67b185f2a48e8ff71d74bd9c8180a338eb7",
  "https://deno.land/x/lib/mod.js": "3f6fcb8ef83ed6c66e71774d5079d14d22a6948dc6e5358ac30e0ab55e1a6404"
}

8
cli/tests/module_graph/lockfile_fail.json
Normal file
@@ -0,0 +1,8 @@
{
  "https://deno.land/x/lib/a.ts": "4437fee72a750d9540a9575ea6426761d0aa1beedfa308fb1bc38701d97011b8",
  "https://deno.land/x/lib/b.js": "093cc4164ca7a9adb11597ad291e021634f0b2d8c048137f7e9fb0d709499028",
  "https://deno.land/x/lib/c.d.ts": "a95647377477cc663559f5e857bf318c584622ed1295a8ccb0c091d06bee0456",
  "https://deno.land/x/lib/c.js": "4ff934f4b3b06f320c3130326376d9f2435e2ecedd582940ca90938137d004e1",
  "https://deno.land/x/lib/mod.d.ts": "e54b994fbf63cb7f01076ea54f2ed67b185f2a48e8ff71d74bd9c8180a338eb7",
  "https://deno.land/x/lib/mod.js": "3f6fcb8ef83fd6c66e71774d5079d14d22a6948dc6e5358ac30e0ab55e1a6404"
}

159
cli/tsc.rs
@@ -2,7 +2,6 @@

use crate::ast::parse;
use crate::ast::Location;
use crate::ast::TranspileOptions;
use crate::colors;
use crate::diagnostics::Diagnostics;
use crate::disk_cache::DiskCache;
@@ -338,13 +337,6 @@ impl CompiledFileMetadata {
  }
}

#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
struct TranspileSourceFile {
  pub source_code: String,
  pub file_name: String,
}

/// Emit a SHA256 hash based on source code, deno version and TS config.
/// Used to check if a recompilation for source code is needed.
fn source_code_version_hash(
@@ -420,16 +412,6 @@ struct CompileResponse {
  stats: Option<Vec<Stat>>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct TranspileTsOptions {
  check_js: bool,
  emit_decorator_metadata: bool,
  jsx: String,
  jsx_factory: String,
  jsx_fragment_factory: String,
}

// TODO(bartlomieju): possible deduplicate once TS refactor is stabilized
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -788,80 +770,6 @@ impl TsCompiler {
    Ok(output)
  }

  pub async fn transpile(
    &self,
    module_graph: &ModuleGraph,
  ) -> Result<(), AnyError> {
    let mut source_files: Vec<TranspileSourceFile> = Vec::new();
    for (_, value) in module_graph.iter() {
      let url = Url::parse(&value.url).expect("Filename is not a valid url");
      if !value.url.ends_with(".d.ts")
        && (!self.use_disk_cache || !self.has_compiled_source(&url))
      {
        source_files.push(TranspileSourceFile {
          source_code: value.source_code.clone(),
          file_name: value.url.clone(),
        });
      }
    }
    if source_files.is_empty() {
      return Ok(());
    }

    let mut emit_map = HashMap::new();

    let mut compiler_options = json!({
      "checkJs": false,
      "emitDecoratorMetadata": false,
      "jsx": "react",
      "jsxFactory": "React.createElement",
      "jsxFragmentFactory": "React.Fragment",
    });

    let compiler_config = self.config.clone();

    tsc_config::json_merge(&mut compiler_options, &compiler_config.options);

    warn_ignored_options(
      compiler_config.maybe_ignored_options,
      compiler_config.path.as_ref().unwrap(),
    );

    let compiler_options: TranspileTsOptions =
      serde_json::from_value(compiler_options)?;

    let transpile_options = TranspileOptions {
      emit_metadata: compiler_options.emit_decorator_metadata,
      inline_source_map: true,
      jsx_factory: compiler_options.jsx_factory,
      jsx_fragment_factory: compiler_options.jsx_fragment_factory,
      transform_jsx: compiler_options.jsx == "react",
    };
    let media_type = MediaType::TypeScript;
    for source_file in source_files {
      let specifier =
        ModuleSpecifier::resolve_url_or_path(&source_file.file_name)?;
      let parsed_module =
        parse(&specifier, &source_file.source_code, &media_type)?;
      let (stripped_source, _) = parsed_module.transpile(&transpile_options)?;

      // TODO(bartlomieju): this is superfluous, just to make caching function happy
      let emitted_filename = PathBuf::from(&source_file.file_name)
        .with_extension("js")
        .to_string_lossy()
        .to_string();
      let emitted_source = EmittedSource {
        filename: source_file.file_name.to_string(),
        contents: stripped_source,
      };

      emit_map.insert(emitted_filename, emitted_source);
    }

    self.cache_emitted_files(emit_map)?;
    Ok(())
  }

  /// Get associated `CompiledFileMetadata` for given module if it exists.
  fn get_metadata(&self, url: &Url) -> Option<CompiledFileMetadata> {
    // Try to load cached version:
@@ -1742,73 +1650,6 @@ mod tests {
      .starts_with("//# sourceMappingURL=data:application/json;base64"));
  }

  #[tokio::test]
  async fn test_transpile() {
    let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
      .parent()
      .unwrap()
      .join("cli/tests/002_hello.ts");
    let specifier =
      ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();
    let out = SourceFile {
      url: specifier.as_url().clone(),
      filename: PathBuf::from(p.to_str().unwrap().to_string()),
      media_type: MediaType::TypeScript,
      source_code: include_bytes!("./tests/002_hello.ts").to_vec().into(),
      types_header: None,
    };
    let dir =
      deno_dir::DenoDir::new(Some(test_util::new_deno_dir().path().to_owned()))
        .unwrap();
    let http_cache = http_cache::HttpCache::new(&dir.root.join("deps"));
    let mock_state = GlobalState::mock(
      vec![String::from("deno"), String::from("hello.ts")],
      None,
    );
    let file_fetcher = SourceFileFetcher::new(
      http_cache,
      true,
      mock_state.flags.cache_blocklist.clone(),
      false,
      false,
      None,
    )
    .unwrap();

    let mut module_graph_loader = ModuleGraphLoader::new(
      file_fetcher.clone(),
      None,
      Permissions::allow_all(),
      false,
      false,
    );
    module_graph_loader
      .add_to_graph(&specifier, None)
      .await
      .expect("Failed to create graph");
    let module_graph = module_graph_loader.get_graph();

    let ts_compiler = TsCompiler::new(
      file_fetcher,
      mock_state.flags.clone(),
      dir.gen_cache.clone(),
    )
    .unwrap();

    let result = ts_compiler.transpile(&module_graph).await;
    assert!(result.is_ok());
    let compiled_file = ts_compiler.get_compiled_module(&out.url).unwrap();
    let source_code = compiled_file.code;
    assert!(source_code
      .as_bytes()
      .starts_with(b"console.log(\"Hello World\");"));
    let mut lines: Vec<String> =
      source_code.split('\n').map(|s| s.to_string()).collect();
    let last_line = lines.pop().unwrap();
    assert!(last_line
      .starts_with("//# sourceMappingURL=data:application/json;base64"));
  }

  #[tokio::test]
  async fn test_bundle() {
    let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))

@@ -16,9 +16,8 @@ impl fmt::Display for IgnoredCompilerOptions {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    let mut codes = self.0.clone();
    codes.sort();
    write!(f, "{}", codes.join(", "))?;

    Ok(())
    write!(f, "{}", codes.join(", "))
  }
}

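For illustration only (not part of the diff), assuming the tuple field of `IgnoredCompilerOptions` wraps a `Vec<String>` of option names and is constructible in this scope, the `Display` impl yields the sorted, comma-joined list:

// Hypothetical: ignored compiler option names are sorted and joined.
let ignored =
  IgnoredCompilerOptions(vec!["target".to_string(), "module".to_string()]);
assert_eq!(ignored.to_string(), "module, target");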