Mirror of https://github.com/denoland/deno.git, synced 2025-01-21 21:50:00 -05:00
BREAKING: remove support for JSON imports (#5037)
This commit removes support for importing JSON files as modules. The change is motivated by security concerns; browsers have rolled back their support for JSON modules as well.
Parent: 96fd0f4692
Commit: de2c042482
11 changed files with 15 additions and 137 deletions
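For code that previously relied on JSON imports, the equivalent behavior can be recovered by reading and parsing the file at runtime. Below is a minimal sketch, not part of this diff; the ./config.json path and the example URL are hypothetical, and the script needs --allow-read / --allow-net permissions.

// Hypothetical migration away from `import config from "./config.json";`.
// Run with: deno run --allow-read --allow-net example.ts
const config = JSON.parse(await Deno.readTextFile("./config.json"));
console.log(config);

// Remote JSON can be fetched and parsed the same way.
const data = await (await fetch("https://example.com/data.json")).json();
console.log(data);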
@@ -1,50 +0,0 @@
-// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
-use crate::compilers::CompiledModule;
-use crate::file_fetcher::SourceFile;
-use deno_core::ErrBox;
-use regex::Regex;
-
-// From https://github.com/mathiasbynens/mothereff.in/blob/master/js-variables/eff.js
-static JS_RESERVED_WORDS: &str = r"^(?:do|if|in|for|let|new|try|var|case|else|enum|eval|false|null|this|true|void|with|await|break|catch|class|const|super|throw|while|yield|delete|export|import|public|return|static|switch|typeof|default|extends|finally|package|private|continue|debugger|function|arguments|interface|protected|implements|instanceof)$";
-
-pub struct JsonCompiler {}
-
-impl JsonCompiler {
-  pub async fn compile(
-    &self,
-    source_file: &SourceFile,
-  ) -> Result<CompiledModule, ErrBox> {
-    let maybe_json_value = serde_json::from_slice(&source_file.source_code);
-    if let Err(err) = maybe_json_value {
-      return Err(ErrBox::from(err));
-    }
-
-    let mut code = format!(
-      "export default {};\n",
-      std::str::from_utf8(&source_file.source_code).unwrap()
-    );
-
-    if let serde_json::Value::Object(m) = maybe_json_value.unwrap() {
-      // Best effort variable name exports
-      // Actual all allowed JS variable names are way tricker.
-      // We only handle a subset of alphanumeric names.
-      let js_var_regex = Regex::new(r"^[a-zA-Z_$][0-9a-zA-Z_$]*$").unwrap();
-      // Also avoid collision with reserved words.
-      let reserved_words = Regex::new(JS_RESERVED_WORDS).unwrap();
-      for (key, value) in m.iter() {
-        if js_var_regex.is_match(&key) && !reserved_words.is_match(&key) {
-          code.push_str(&format!(
-            "export const {} = {};\n",
-            key,
-            value.to_string()
-          ));
-        }
-      }
-    }
-
-    Ok(CompiledModule {
-      code,
-      name: source_file.url.to_string(),
-    })
-  }
-}
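For context, the removed JsonCompiler turned a JSON source into an ES module string: a default export of the whole value, plus best-effort named exports for top-level object keys that look like valid, non-reserved JavaScript identifiers. The following is a rough TypeScript sketch of that transformation; jsonToModule and its abbreviated reserved-word list are illustrative assumptions, not code from this repository.

// Illustrative sketch of what the removed JsonCompiler generated; not repository code.
const JS_VAR = /^[a-zA-Z_$][0-9a-zA-Z_$]*$/;
// Abbreviated reserved-word list; the original compiler checked the full set.
const RESERVED = new Set([
  "do", "if", "in", "for", "let", "new", "class", "function", "default", "import", "export",
]);

function jsonToModule(source: string): string {
  const value = JSON.parse(source); // invalid JSON was reported as a compile error
  let code = `export default ${source};\n`;
  if (value !== null && typeof value === "object" && !Array.isArray(value)) {
    for (const [key, val] of Object.entries(value)) {
      // Only plausible, non-reserved identifiers became named exports;
      // everything else stayed reachable through the default export.
      if (JS_VAR.test(key) && !RESERVED.has(key)) {
        code += `export const ${key} = ${JSON.stringify(val)};\n`;
      }
    }
  }
  return code;
}

// Example: jsonToModule('{"a":1,"with space":2}') yields
//   export default {"a":1,"with space":2};
//   export const a = 1;

The deleted test files later in this diff (an output with keys $var, "with space", and function) exercised exactly these identifier and reserved-word rules.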
@@ -5,12 +5,10 @@ use futures::Future;
 
 mod compiler_worker;
 mod js;
-mod json;
 mod ts;
 mod wasm;
 
 pub use js::JsCompiler;
-pub use json::JsonCompiler;
 pub use ts::runtime_compile;
 pub use ts::runtime_transpile;
 pub use ts::TargetLib;
@@ -1,7 +1,6 @@
 // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
 use crate::compilers::CompiledModule;
 use crate::compilers::JsCompiler;
-use crate::compilers::JsonCompiler;
 use crate::compilers::TargetLib;
 use crate::compilers::TsCompiler;
 use crate::compilers::WasmCompiler;
@@ -36,7 +35,6 @@ pub struct GlobalStateInner {
   pub dir: deno_dir::DenoDir,
   pub file_fetcher: SourceFileFetcher,
   pub js_compiler: JsCompiler,
-  pub json_compiler: JsonCompiler,
   pub ts_compiler: TsCompiler,
   pub wasm_compiler: WasmCompiler,
   pub lockfile: Option<Mutex<Lockfile>>,
@@ -88,7 +86,6 @@ impl GlobalState {
       file_fetcher,
       ts_compiler,
       js_compiler: JsCompiler {},
-      json_compiler: JsonCompiler {},
       wasm_compiler: WasmCompiler::default(),
       lockfile,
       compiler_starts: AtomicUsize::new(0),
@@ -118,8 +115,9 @@ impl GlobalState {
     let compile_lock = self.compile_lock.lock().await;
 
     let compiled_module = match out.media_type {
-      msg::MediaType::Unknown => state1.js_compiler.compile(out).await,
-      msg::MediaType::Json => state1.json_compiler.compile(&out).await,
+      msg::MediaType::Json | msg::MediaType::Unknown => {
+        state1.js_compiler.compile(out).await
+      }
       msg::MediaType::Wasm => {
         state1.wasm_compiler.compile(state1.clone(), &out).await
       }
@@ -83,8 +83,6 @@ function getMediaType(filename: string): MediaType {
       return MediaType.JavaScript;
     case "jsx":
       return MediaType.JSX;
-    case "json":
-      return MediaType.Json;
     case "ts":
       return MediaType.TypeScript;
     case "tsx":
@@ -34,11 +34,6 @@ function getExtension(fileName: string, mediaType: MediaType): ts.Extension {
       return fileName.endsWith(".d.ts") ? ts.Extension.Dts : ts.Extension.Ts;
     case MediaType.TSX:
       return ts.Extension.Tsx;
-    case MediaType.Json:
-      // we internally compile JSON, so what gets provided to the TypeScript
-      // compiler is an ES module, but in order to get TypeScript to handle it
-      // properly we have to pretend it is TS.
-      return ts.Extension.Ts;
     case MediaType.Wasm:
       // Custom marker for Wasm type.
       return ts.Extension.Js;
@@ -48,13 +48,6 @@ function cache(
   const sf = SourceFile.get(moduleId);
 
   if (sf) {
-    // NOTE: If it's a `.json` file we don't want to write it to disk.
-    // JSON files are loaded and used by TS compiler to check types, but we don't want
-    // to emit them to disk because output file is the same as input file.
-    if (sf.mediaType === MediaType.Json) {
-      return;
-    }
-
     // NOTE: JavaScript files are only cached to disk if `checkJs`
     // option in on
     if (sf.mediaType === MediaType.JavaScript && !checkJs) {
@@ -65,10 +58,7 @@ function cache(
   if (emittedFileName.endsWith(".map")) {
     // Source Map
    compilerOps.cache(".map", moduleId, contents);
-  } else if (
-    emittedFileName.endsWith(".js") ||
-    emittedFileName.endsWith(".json")
-  ) {
+  } else if (emittedFileName.endsWith(".js")) {
     // Compiled JavaScript
     compilerOps.cache(".js", moduleId, contents);
   } else {
@@ -149,14 +149,6 @@ fn op_fetch_source_files(
         .map_err(|e| OpError::other(e.to_string()))?
         .code
     }
-    msg::MediaType::Json => {
-      global_state
-        .json_compiler
-        .compile(&file)
-        .await
-        .map_err(|e| OpError::other(e.to_string()))?
-        .code
-    }
     _ => String::from_utf8(file.source_code)
       .map_err(|_| OpError::invalid_utf8())?,
   };
@@ -1 +1,9 @@
-{"foo":{"bar":true,"baz":["qat",1]}}
+[WILDCARD]
+error: Uncaught TypeError: Cannot resolve extension for "[WILDCARD]config.json" with mediaType "Json".
+    at getExtension ($deno$/compiler/sourcefile.ts:[WILDCARD])
+    at new SourceFile ($deno$/compiler/sourcefile.ts:[WILDCARD])
+    at processImports ($deno$/compiler/imports.ts:[WILDCARD])
+    at async Object.processImports ($deno$/compiler/imports.ts:[WILDCARD])
+    at async compile ([WILDCARD]compiler.ts:[WILDCARD])
+    at async tsCompilerOnMessage ([WILDCARD]compiler.ts:[WILDCARD])
+    at async workerMessageRecvCallback ($deno$/runtime_worker.ts:[WILDCARD])
@@ -1,7 +0,0 @@
-import j1, { $var } from "./subdir/json_1.json";
-import j2 from "./subdir/json_2.json";
-console.log($var);
-console.log($var.a);
-console.log(j1);
-console.log(j1["with space"]);
-console.log(j2);
@@ -1,9 +0,0 @@
-{ a: 123, b: [ 1, 2, 3 ], c: null }
-123
-{
-  $var: { a: 123, b: [ 1, 2, 3 ], c: null },
-  with space: "invalid variable name",
-  function: "reserved word"
-}
-invalid variable name
-just a string
@@ -395,38 +395,6 @@ fn bundle_single_module() {
   assert_eq!(output.stderr, b"");
 }
 
-#[test]
-fn bundle_json() {
-  let json_modules = util::root_path().join("cli/tests/020_json_modules.ts");
-  assert!(json_modules.is_file());
-  let t = TempDir::new().expect("tempdir fail");
-  let bundle = t.path().join("020_json_modules.bundle.js");
-  let mut deno = util::deno_cmd()
-    .current_dir(util::root_path())
-    .arg("bundle")
-    .arg(json_modules)
-    .arg(&bundle)
-    .spawn()
-    .expect("failed to spawn script");
-  let status = deno.wait().expect("failed to wait for the child process");
-  assert!(status.success());
-  assert!(bundle.is_file());
-
-  let output = util::deno_cmd()
-    .current_dir(util::root_path())
-    .arg("run")
-    .arg("--reload")
-    .arg(&bundle)
-    .output()
-    .expect("failed to spawn script");
-  // check the output of the the bundle program.
-  assert!(std::str::from_utf8(&output.stdout)
-    .unwrap()
-    .trim()
-    .ends_with("{\"foo\":{\"bar\":true,\"baz\":[\"qat\",1]}}"));
-  assert_eq!(output.stderr, b"");
-}
-
 #[test]
 fn bundle_tla() {
   // First we have to generate a bundle of some module that has exports.
@@ -927,7 +895,9 @@ itest_ignore!(_019_media_types {
 
 itest!(_020_json_modules {
   args: "run --reload 020_json_modules.ts",
+  check_stderr: true,
   output: "020_json_modules.ts.out",
+  exit_code: 1,
 });
 
 itest!(_021_mjs_modules {
@@ -1127,11 +1097,6 @@ itest_ignore!(_049_info_flag_script_jsx {
   http_server: true,
 });
 
-itest!(_050_more_jsons {
-  args: "run --reload 050_more_jsons.ts",
-  output: "050_more_jsons.ts.out",
-});
-
 itest!(_051_wasm_import {
   args: "run --reload --allow-net --allow-read 051_wasm_import.ts",
   output: "051_wasm_import.ts.out",