Mirror of https://github.com/denoland/deno.git, synced 2025-03-03 17:34:47 -05:00
fix(cli): don't store blob and data urls in the module cache (#18261)
Commit: b4c61c146a
Parent: 8b596cbae1
7 changed files with 49 additions and 84 deletions
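In short: data: and blob: specifiers are now resolved on every fetch instead of being copied into the module cache, because their contents either live inside the specifier itself (data:) or inside the in-memory blob store (blob:). The sketch below is not part of the diff; it only illustrates the resulting routing, with the scheme names taken from the hunks that follow and everything else invented for the example.

// Illustrative sketch only (not Deno's real FileFetcher): how specifiers are
// routed after this change, keyed purely on their scheme.
fn describe_fetch(scheme: &str) -> &'static str {
  match scheme {
    // Local files are read straight from disk on every fetch.
    "file" => "read from disk",
    // data: URLs are decoded from the specifier; nothing is written to the
    // HTTP cache and nothing is inserted into the in-memory cache.
    "data" => "decode the specifier in place",
    // blob: URLs resolve against the in-memory blob store only.
    "blob" => "look up in the blob store",
    // Remote modules keep using the HTTP cache as before.
    _ => "fetch remotely, HTTP cache applies",
  }
}

fn main() {
  for scheme in ["file", "data", "blob", "https"] {
    println!("{scheme}: {}", describe_fetch(scheme));
  }
}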
@@ -50,10 +50,6 @@ pub const SUPPORTED_SCHEMES: [&str; 5] =
 /// A structure representing a source file.
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct File {
-  /// The path to the local version of the source file. For local files this
-  /// will be the direct path to that file. For remote files, it will be the
-  /// path to the file in the HTTP cache.
-  pub local: PathBuf,
   /// For remote files, if there was an `X-TypeScript-Type` header, the parsed
   /// out value of that header.
   pub maybe_types: Option<String>,
@@ -90,13 +86,12 @@ fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
   let local = specifier.to_file_path().map_err(|_| {
     uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
   })?;
-  let bytes = fs::read(&local)?;
+  let bytes = fs::read(local)?;
   let charset = text_encoding::detect_charset(&bytes).to_string();
   let source = get_source_from_bytes(bytes, Some(charset))?;
   let media_type = MediaType::from_specifier(specifier);
 
   Ok(File {
-    local,
     maybe_types: None,
     media_type,
     source: source.into(),
@@ -218,13 +213,6 @@ impl FileFetcher {
     bytes: Vec<u8>,
     headers: &HashMap<String, String>,
   ) -> Result<File, AnyError> {
-    let local =
-      self
-        .http_cache
-        .get_cache_filename(specifier)
-        .ok_or_else(|| {
-          generic_error("Cannot convert specifier to cached filename.")
-        })?;
     let maybe_content_type = headers.get("content-type");
     let (media_type, maybe_charset) =
       map_content_type(specifier, maybe_content_type);
@@ -238,7 +226,6 @@ impl FileFetcher {
     };
 
     Ok(File {
-      local,
       maybe_types,
       media_type,
       source: source.into(),
@@ -290,39 +277,12 @@ impl FileFetcher {
     specifier: &ModuleSpecifier,
   ) -> Result<File, AnyError> {
     debug!("FileFetcher::fetch_data_url() - specifier: {}", specifier);
-    match self.fetch_cached(specifier, 0) {
-      Ok(Some(file)) => return Ok(file),
-      Ok(None) => {}
-      Err(err) => return Err(err),
-    }
-
-    if self.cache_setting == CacheSetting::Only {
-      return Err(custom_error(
-        "NotCached",
-        format!(
-          "Specifier not found in cache: \"{specifier}\", --cached-only is specified."
-        ),
-      ));
-    }
-
     let (source, content_type) = get_source_from_data_url(specifier)?;
     let (media_type, _) = map_content_type(specifier, Some(&content_type));
-
-    let local =
-      self
-        .http_cache
-        .get_cache_filename(specifier)
-        .ok_or_else(|| {
-          generic_error("Cannot convert specifier to cached filename.")
-        })?;
     let mut headers = HashMap::new();
     headers.insert("content-type".to_string(), content_type);
-    self
-      .http_cache
-      .set(specifier, headers.clone(), source.as_bytes())?;
 
     Ok(File {
-      local,
       maybe_types: None,
       media_type,
       source: source.into(),
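The deletions above rely on data: URLs being self-describing: the media type and the payload are both embedded in the specifier, so persisting them to the HTTP cache adds nothing. A rough std-only sketch of that split follows; it is not the real get_source_from_data_url (base64 payloads and percent-decoding are deliberately ignored here).

// Hedged sketch: split a plain-text data: URL into (content_type, source).
// Real data URLs may be base64- and percent-encoded; both are skipped here.
fn split_data_url(specifier: &str) -> Option<(String, String)> {
  let rest = specifier.strip_prefix("data:")?;
  let (meta, payload) = rest.split_once(',')?;
  let content_type = if meta.is_empty() {
    // Default media type for data URLs with no explicit type.
    "text/plain;charset=US-ASCII".to_string()
  } else {
    meta.to_string()
  };
  Some((content_type, payload.to_string()))
}

fn main() {
  let url = "data:application/typescript,export const a = 1;";
  let (content_type, source) = split_data_url(url).unwrap();
  println!("{content_type} -> {source}");
}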
@@ -337,21 +297,6 @@ impl FileFetcher {
     specifier: &ModuleSpecifier,
   ) -> Result<File, AnyError> {
     debug!("FileFetcher::fetch_blob_url() - specifier: {}", specifier);
-    match self.fetch_cached(specifier, 0) {
-      Ok(Some(file)) => return Ok(file),
-      Ok(None) => {}
-      Err(err) => return Err(err),
-    }
-
-    if self.cache_setting == CacheSetting::Only {
-      return Err(custom_error(
-        "NotCached",
-        format!(
-          "Specifier not found in cache: \"{specifier}\", --cached-only is specified."
-        ),
-      ));
-    }
-
     let blob = {
       let blob_store = self.blob_store.borrow();
       blob_store
@@ -370,22 +315,10 @@ impl FileFetcher {
     let (media_type, maybe_charset) =
       map_content_type(specifier, Some(&content_type));
     let source = get_source_from_bytes(bytes, maybe_charset)?;
-
-    let local =
-      self
-        .http_cache
-        .get_cache_filename(specifier)
-        .ok_or_else(|| {
-          generic_error("Cannot convert specifier to cached filename.")
-        })?;
     let mut headers = HashMap::new();
     headers.insert("content-type".to_string(), content_type);
-    self
-      .http_cache
-      .set(specifier, headers.clone(), source.as_bytes())?;
 
     Ok(File {
-      local,
       maybe_types: None,
       media_type,
       source: source.into(),
@@ -562,17 +495,9 @@ impl FileFetcher {
       // disk changing effecting things like workers and dynamic imports.
       fetch_local(specifier)
     } else if scheme == "data" {
-      let result = self.fetch_data_url(specifier);
-      if let Ok(file) = &result {
-        self.cache.insert(specifier.clone(), file.clone());
-      }
-      result
+      self.fetch_data_url(specifier)
     } else if scheme == "blob" {
-      let result = self.fetch_blob_url(specifier).await;
-      if let Ok(file) = &result {
-        self.cache.insert(specifier.clone(), file.clone());
-      }
-      result
+      self.fetch_blob_url(specifier).await
     } else if !self.allow_remote {
       Err(custom_error(
         "NoRemote",
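For clarity, here is the before/after shape of this hunk reduced to a toy: previously a successful data:/blob: fetch was also copied into the fetcher's in-memory cache, now the result is simply returned and re-resolved on each fetch. Everything in the sketch is a stand-in (a String instead of File, a HashMap instead of the real cache).

use std::collections::HashMap;

// Stand-in for fetch_data_url / fetch_blob_url.
fn decode(specifier: &str) -> Result<String, String> {
  Ok(format!("decoded {specifier}"))
}

// Before: the Ok result was also inserted into an in-memory cache.
fn fetch_before(
  cache: &mut HashMap<String, String>,
  specifier: &str,
) -> Result<String, String> {
  let result = decode(specifier);
  if let Ok(file) = &result {
    cache.insert(specifier.to_string(), file.clone());
  }
  result
}

// After: the result is returned as-is and re-decoded on every fetch.
fn fetch_after(specifier: &str) -> Result<String, String> {
  decode(specifier)
}

fn main() {
  let mut cache = HashMap::new();
  let _ = fetch_before(&mut cache, "data:,a");
  let _ = fetch_after("data:,b");
  println!("entries cached by the old path: {}", cache.len());
}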
@@ -1037,7 +962,6 @@ mod tests {
       ModuleSpecifier::from_file_path(local.as_os_str().to_str().unwrap())
         .unwrap();
     let file = File {
-      local,
       maybe_types: None,
       media_type: MediaType::TypeScript,
       source: "some source code".into(),
@@ -136,6 +136,7 @@ impl ProcState {
   /// Reset all runtime state to its default. This should be used on file
   /// watcher restarts.
   pub fn reset_for_file_watcher(&mut self) {
+    self.blob_store.clear();
     self.0 = Arc::new(Inner {
       dir: self.dir.clone(),
       options: self.options.clone(),
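With the module cache out of the picture, the in-memory blob store is the only place a blob: URL can resolve from, so the watcher reset now clears it explicitly. This is the piece that makes the new watcher test below pass: after a restart, a blob: specifier stored during the previous run can no longer be imported. The clear() method called here is added to BlobStore in the last hunk of this commit.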
@@ -1121,6 +1121,46 @@ fn test_watch_unload_handler_error_on_drop() {
   check_alive_then_kill(child);
 }
 
+#[test]
+fn run_watch_blob_urls_reset() {
+  let _g = util::http_server();
+  let t = TempDir::new();
+  let file_to_watch = t.path().join("file_to_watch.js");
+  let file_content = r#"
+    const prevUrl = localStorage.getItem("url");
+    if (prevUrl == null) {
+      console.log("first run, storing blob url");
+      const url = URL.createObjectURL(
+        new Blob(["export {}"], { type: "application/javascript" }),
+      );
+      await import(url); // this shouldn't insert into the fs module cache
+      localStorage.setItem("url", url);
+    } else {
+      await import(prevUrl)
+        .then(() => console.log("importing old blob url incorrectly works"))
+        .catch(() => console.log("importing old blob url correctly failed"));
+    }
+  "#;
+  write(&file_to_watch, file_content).unwrap();
+  let mut child = util::deno_cmd()
+    .current_dir(util::testdata_path())
+    .arg("run")
+    .arg("--watch")
+    .arg(&file_to_watch)
+    .env("NO_COLOR", "1")
+    .stdout(std::process::Stdio::piped())
+    .stderr(std::process::Stdio::piped())
+    .spawn()
+    .unwrap();
+  let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
+  wait_contains("first run, storing blob url", &mut stdout_lines);
+  wait_contains("finished", &mut stderr_lines);
+  write(&file_to_watch, file_content).unwrap();
+  wait_contains("importing old blob url correctly failed", &mut stdout_lines);
+  wait_contains("finished", &mut stderr_lines);
+  check_alive_then_kill(child);
+}
+
 // Regression test for https://github.com/denoland/deno/issues/15465.
 #[test]
 fn run_watch_reload_once() {
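The new test runs the watcher twice over the same file: the first pass stores a freshly created blob URL in localStorage, the forced restart then tries to import it again and must fail, which only holds if the blob store (rather than a cached copy) backed the first import. To poke at this by hand outside the harness, roughly the same invocation can be spawned with std::process::Command; the deno binary on PATH and the file name are assumptions, while the arguments mirror the test above.

use std::process::{Command, Stdio};
use std::thread::sleep;
use std::time::Duration;

fn main() -> std::io::Result<()> {
  // Assumes `deno` is on PATH and file_to_watch.js holds the script from the
  // test above; only the arguments mirror the test, the rest is a sketch.
  let mut child = Command::new("deno")
    .arg("run")
    .arg("--watch")
    .arg("file_to_watch.js")
    .env("NO_COLOR", "1")
    .stdout(Stdio::piped())
    .stderr(Stdio::piped())
    .spawn()?;

  // Touch file_to_watch.js by hand during this window to force a restart; the
  // harness instead drives this with wait_contains/check_alive_then_kill.
  sleep(Duration::from_secs(30));
  child.kill()?;
  Ok(())
}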
@@ -17,7 +17,6 @@ use deno_core::resolve_path;
 use deno_core::resolve_url_or_path;
 use deno_doc as doc;
 use deno_graph::ModuleSpecifier;
-use std::path::PathBuf;
 
 pub async fn print_docs(
   flags: Flags,
@@ -69,7 +68,6 @@ pub async fn print_docs(
   let root_specifier =
     resolve_path("./$deno$doc.ts", ps.options.initial_cwd()).unwrap();
   let root = File {
-    local: PathBuf::from("./$deno$doc.ts"),
     maybe_types: None,
     media_type: MediaType::TypeScript,
     source: format!("export * from \"{module_specifier}\";").into(),
@@ -72,7 +72,6 @@ pub async fn run_from_stdin(flags: Flags) -> Result<i32, AnyError> {
   std::io::stdin().read_to_end(&mut source)?;
   // Create a dummy source file.
   let source_file = File {
-    local: main_module.clone().to_file_path().unwrap(),
     maybe_types: None,
     media_type: MediaType::TypeScript,
     source: String::from_utf8(source)?.into(),
@@ -144,7 +143,6 @@ pub async fn eval_command(
   .into_bytes();
 
   let file = File {
-    local: main_module.clone().to_file_path().unwrap(),
     maybe_types: None,
     media_type: MediaType::Unknown,
     source: String::from_utf8(source_code)?.into(),
@@ -1000,7 +1000,6 @@ fn extract_files_from_regex_blocks(
       .unwrap_or(file_specifier);
 
     Some(File {
-      local: file_specifier.to_file_path().unwrap(),
       maybe_types: None,
       media_type: file_media_type,
       source: file_source.into(),
@@ -79,6 +79,11 @@ impl BlobStore {
     let mut blob_store = self.object_urls.lock();
     blob_store.remove(url);
   }
+
+  pub fn clear(&self) {
+    self.parts.lock().clear();
+    self.object_urls.lock().clear();
+  }
 }
 
 #[derive(Debug)]
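The new clear() empties both the stored blob parts and the object-URL map, and it is what reset_for_file_watcher calls into. Below is a self-contained stand-in, not the real BlobStore: the field types are invented for the example, and a std Mutex (with unwrap) replaces whatever lock the real maps use.

use std::collections::HashMap;
use std::sync::Mutex;

// Stand-in for the real BlobStore: object URLs point at stored blob parts.
#[derive(Default)]
struct BlobStoreSketch {
  parts: Mutex<HashMap<u64, Vec<u8>>>,
  object_urls: Mutex<HashMap<String, u64>>,
}

impl BlobStoreSketch {
  fn insert_object_url(&self, url: &str, part_id: u64, bytes: Vec<u8>) {
    self.parts.lock().unwrap().insert(part_id, bytes);
    self.object_urls.lock().unwrap().insert(url.to_string(), part_id);
  }

  // Same idea as the clear() added above: both maps are emptied at once.
  fn clear(&self) {
    self.parts.lock().unwrap().clear();
    self.object_urls.lock().unwrap().clear();
  }
}

fn main() {
  let store = BlobStoreSketch::default();
  store.insert_object_url("blob:null/123", 1, b"export {}".to_vec());
  // Simulate a file-watcher restart.
  store.clear();
  println!(
    "object urls after restart: {}",
    store.object_urls.lock().unwrap().len()
  );
}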