
Merge remote-tracking branch 'upstream/main' into check-workspace-member-compiler-options

Commit c7f28998d9 by Nayeem Rahman, 2024-12-17 16:30:12 +00:00
77 changed files with 1442 additions and 2542 deletions

@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 30;
const cacheVersion = 31;
const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-24.04-xl";

@ -184,8 +184,8 @@ jobs:
~/.cargo/registry/index
~/.cargo/registry/cache
~/.cargo/git/db
key: '30-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '30-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
key: '31-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '31-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
if: '!(matrix.skip)'
- uses: dsherret/rust-toolchain-file@v1
if: '!(matrix.skip)'
@ -379,7 +379,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '30-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '31-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache
@ -689,7 +689,7 @@ jobs:
!./target/*/gn_root
!./target/*/*.zip
!./target/*/*.tar.gz
key: '30-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '31-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-24.04

Cargo.lock

@ -596,9 +596,9 @@ dependencies = [
[[package]]
name = "boxed_error"
version = "0.2.2"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69aae56aaf59d1994b902ed5c0c79024012bdc2426741def75a635999a030e7e"
checksum = "17d4f95e880cfd28c4ca5a006cf7f6af52b4bcb7b5866f573b2faa126fb7affb"
dependencies = [
"quote",
"syn 2.0.87",
@ -670,9 +670,9 @@ checksum = "1bf2a5fb3207c12b5d208ebc145f967fea5cac41a021c37417ccc31ba40f39ee"
[[package]]
name = "capacity_builder"
version = "0.1.0"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2c0f637033edd76ceb881faaee372868a383f0ed7a4a59e8fdf90db2502f3d3"
checksum = "58ec49028cb308564429cd8fac4ef21290067a0afe8f5955330a8d487d0d790c"
dependencies = [
"itoa",
]
@ -1193,9 +1193,9 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
[[package]]
name = "data-url"
version = "0.3.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41b319d1b62ffbd002e057f36bebd1f42b9f97927c9577461d855f3513c4289f"
checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a"
[[package]]
name = "debug-ignore"
@ -1221,8 +1221,10 @@ dependencies = [
"async-trait",
"base64 0.21.7",
"bincode",
"boxed_error",
"bytes",
"cache_control",
"capacity_builder",
"chrono",
"clap",
"clap_complete",
@ -1237,6 +1239,7 @@ dependencies = [
"deno_config",
"deno_core",
"deno_doc",
"deno_error",
"deno_graph",
"deno_lint",
"deno_lockfile",
@ -1388,7 +1391,7 @@ dependencies = [
[[package]]
name = "deno_bench_util"
version = "0.176.0"
version = "0.177.0"
dependencies = [
"bencher",
"deno_core",
@ -1397,7 +1400,7 @@ dependencies = [
[[package]]
name = "deno_broadcast_channel"
version = "0.176.0"
version = "0.177.0"
dependencies = [
"async-trait",
"deno_core",
@ -1408,7 +1411,7 @@ dependencies = [
[[package]]
name = "deno_cache"
version = "0.114.0"
version = "0.115.0"
dependencies = [
"async-trait",
"deno_core",
@ -1421,13 +1424,21 @@ dependencies = [
[[package]]
name = "deno_cache_dir"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cca43605c8cbce6c6787e0daf227864487c07c2b31d438c0bf43d1b38da94b7f"
checksum = "54df1c5177ace01d92b872584ab9af8290681bb150fd9b423c37a494ad5ddbdc"
dependencies = [
"async-trait",
"base32",
"base64 0.21.7",
"boxed_error",
"cache_control",
"chrono",
"data-url",
"deno_error",
"deno_media_type",
"deno_path_util",
"http 1.1.0",
"indexmap 2.3.0",
"log",
"once_cell",
@ -1441,7 +1452,7 @@ dependencies = [
[[package]]
name = "deno_canvas"
version = "0.51.0"
version = "0.52.0"
dependencies = [
"deno_core",
"deno_webgpu",
@ -1475,7 +1486,7 @@ dependencies = [
[[package]]
name = "deno_console"
version = "0.182.0"
version = "0.183.0"
dependencies = [
"deno_core",
]
@ -1524,7 +1535,7 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695"
[[package]]
name = "deno_cron"
version = "0.62.0"
version = "0.63.0"
dependencies = [
"anyhow",
"async-trait",
@ -1537,7 +1548,7 @@ dependencies = [
[[package]]
name = "deno_crypto"
version = "0.196.0"
version = "0.197.0"
dependencies = [
"aes",
"aes-gcm",
@ -1611,6 +1622,7 @@ dependencies = [
"libc",
"serde",
"serde_json",
"url",
]
[[package]]
@ -1626,7 +1638,7 @@ dependencies = [
[[package]]
name = "deno_fetch"
version = "0.206.0"
version = "0.207.0"
dependencies = [
"base64 0.21.7",
"bytes",
@ -1661,7 +1673,7 @@ dependencies = [
[[package]]
name = "deno_ffi"
version = "0.169.0"
version = "0.170.0"
dependencies = [
"deno_core",
"deno_permissions",
@ -1681,7 +1693,7 @@ dependencies = [
[[package]]
name = "deno_fs"
version = "0.92.0"
version = "0.93.0"
dependencies = [
"async-trait",
"base32",
@ -1734,7 +1746,7 @@ dependencies = [
[[package]]
name = "deno_http"
version = "0.180.0"
version = "0.181.0"
dependencies = [
"async-compression",
"async-trait",
@ -1773,7 +1785,7 @@ dependencies = [
[[package]]
name = "deno_io"
version = "0.92.0"
version = "0.93.0"
dependencies = [
"async-trait",
"deno_core",
@ -1794,7 +1806,7 @@ dependencies = [
[[package]]
name = "deno_kv"
version = "0.90.0"
version = "0.91.0"
dependencies = [
"anyhow",
"async-trait",
@ -1856,9 +1868,9 @@ dependencies = [
[[package]]
name = "deno_media_type"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcf552fbdedbe81c89705349d7d2485c7051382b000dfddbdbf7fc25931cf83"
checksum = "eaa135b8a9febc9a51c16258e294e268a1276750780d69e46edb31cced2826e4"
dependencies = [
"data-url",
"serde",
@ -1867,7 +1879,7 @@ dependencies = [
[[package]]
name = "deno_napi"
version = "0.113.0"
version = "0.114.0"
dependencies = [
"deno_core",
"deno_permissions",
@ -1895,7 +1907,7 @@ dependencies = [
[[package]]
name = "deno_net"
version = "0.174.0"
version = "0.175.0"
dependencies = [
"deno_core",
"deno_permissions",
@ -1912,7 +1924,7 @@ dependencies = [
[[package]]
name = "deno_node"
version = "0.119.0"
version = "0.120.0"
dependencies = [
"aead-gcm-stream",
"aes",
@ -2023,7 +2035,7 @@ dependencies = [
[[package]]
name = "deno_npm_cache"
version = "0.2.0"
version = "0.3.0"
dependencies = [
"anyhow",
"async-trait",
@ -2084,18 +2096,19 @@ dependencies = [
[[package]]
name = "deno_path_util"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff25f6e08e7a0214bbacdd6f7195c7f1ebcd850c87a624e4ff06326b68b42d99"
checksum = "b02c7d341e1b2cf089daff0f4fb2b4be8f3b5511b1d96040b3f7ed63a66c737b"
dependencies = [
"deno_error",
"percent-encoding",
"thiserror 1.0.64",
"thiserror 2.0.3",
"url",
]
[[package]]
name = "deno_permissions"
version = "0.42.0"
version = "0.43.0"
dependencies = [
"capacity_builder",
"deno_core",
@ -2114,7 +2127,7 @@ dependencies = [
[[package]]
name = "deno_resolver"
version = "0.14.0"
version = "0.15.0"
dependencies = [
"anyhow",
"base32",
@ -2133,7 +2146,7 @@ dependencies = [
[[package]]
name = "deno_runtime"
version = "0.191.0"
version = "0.192.0"
dependencies = [
"color-print",
"deno_ast",
@ -2234,7 +2247,7 @@ dependencies = [
[[package]]
name = "deno_telemetry"
version = "0.4.0"
version = "0.5.0"
dependencies = [
"async-trait",
"deno_core",
@ -2275,7 +2288,7 @@ dependencies = [
[[package]]
name = "deno_tls"
version = "0.169.0"
version = "0.170.0"
dependencies = [
"deno_core",
"deno_native_certs",
@ -2325,7 +2338,7 @@ dependencies = [
[[package]]
name = "deno_url"
version = "0.182.0"
version = "0.183.0"
dependencies = [
"deno_bench_util",
"deno_console",
@ -2337,7 +2350,7 @@ dependencies = [
[[package]]
name = "deno_web"
version = "0.213.0"
version = "0.214.0"
dependencies = [
"async-trait",
"base64-simd 0.8.0",
@ -2359,7 +2372,7 @@ dependencies = [
[[package]]
name = "deno_webgpu"
version = "0.149.0"
version = "0.150.0"
dependencies = [
"deno_core",
"raw-window-handle",
@ -2372,7 +2385,7 @@ dependencies = [
[[package]]
name = "deno_webidl"
version = "0.182.0"
version = "0.183.0"
dependencies = [
"deno_bench_util",
"deno_core",
@ -2380,7 +2393,7 @@ dependencies = [
[[package]]
name = "deno_websocket"
version = "0.187.0"
version = "0.188.0"
dependencies = [
"bytes",
"deno_core",
@ -2402,7 +2415,7 @@ dependencies = [
[[package]]
name = "deno_webstorage"
version = "0.177.0"
version = "0.178.0"
dependencies = [
"deno_core",
"deno_web",
@ -4916,7 +4929,7 @@ dependencies = [
[[package]]
name = "napi_sym"
version = "0.112.0"
version = "0.113.0"
dependencies = [
"quote",
"serde",
@ -4971,7 +4984,7 @@ dependencies = [
[[package]]
name = "node_resolver"
version = "0.21.0"
version = "0.22.0"
dependencies = [
"anyhow",
"async-trait",

@ -50,18 +50,18 @@ repository = "https://github.com/denoland/deno"
deno_ast = { version = "=0.44.0", features = ["transpiling"] }
deno_core = { version = "0.326.0" }
deno_bench_util = { version = "0.176.0", path = "./bench_util" }
deno_bench_util = { version = "0.177.0", path = "./bench_util" }
# TODO(nayeemrmn): Use proper version when https://github.com/denoland/deno_config/pull/143 lands!
deno_config = { git = "https://github.com/denoland/deno_config.git", rev = "0d588fb1831bf4d33d3279a1a75db6138d81a75b", features = ["workspace", "sync"] }
deno_lockfile = "=0.23.2"
deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
deno_npm = "=0.26.0"
deno_path_util = "=0.2.1"
deno_permissions = { version = "0.42.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.191.0", path = "./runtime" }
deno_path_util = "=0.2.2"
deno_permissions = { version = "0.43.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.192.0", path = "./runtime" }
deno_semver = "=0.6.1"
deno_terminal = "0.2.0"
napi_sym = { version = "0.112.0", path = "./ext/napi/sym" }
napi_sym = { version = "0.113.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.4"
@ -70,34 +70,34 @@ denokv_remote = "0.8.4"
denokv_sqlite = { default-features = false, version = "0.8.4" }
# exts
deno_broadcast_channel = { version = "0.176.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.114.0", path = "./ext/cache" }
deno_canvas = { version = "0.51.0", path = "./ext/canvas" }
deno_console = { version = "0.182.0", path = "./ext/console" }
deno_cron = { version = "0.62.0", path = "./ext/cron" }
deno_crypto = { version = "0.196.0", path = "./ext/crypto" }
deno_fetch = { version = "0.206.0", path = "./ext/fetch" }
deno_ffi = { version = "0.169.0", path = "./ext/ffi" }
deno_fs = { version = "0.92.0", path = "./ext/fs" }
deno_http = { version = "0.180.0", path = "./ext/http" }
deno_io = { version = "0.92.0", path = "./ext/io" }
deno_kv = { version = "0.90.0", path = "./ext/kv" }
deno_napi = { version = "0.113.0", path = "./ext/napi" }
deno_net = { version = "0.174.0", path = "./ext/net" }
deno_node = { version = "0.119.0", path = "./ext/node" }
deno_telemetry = { version = "0.4.0", path = "./ext/telemetry" }
deno_tls = { version = "0.169.0", path = "./ext/tls" }
deno_url = { version = "0.182.0", path = "./ext/url" }
deno_web = { version = "0.213.0", path = "./ext/web" }
deno_webgpu = { version = "0.149.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.182.0", path = "./ext/webidl" }
deno_websocket = { version = "0.187.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.177.0", path = "./ext/webstorage" }
deno_broadcast_channel = { version = "0.177.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.115.0", path = "./ext/cache" }
deno_canvas = { version = "0.52.0", path = "./ext/canvas" }
deno_console = { version = "0.183.0", path = "./ext/console" }
deno_cron = { version = "0.63.0", path = "./ext/cron" }
deno_crypto = { version = "0.197.0", path = "./ext/crypto" }
deno_fetch = { version = "0.207.0", path = "./ext/fetch" }
deno_ffi = { version = "0.170.0", path = "./ext/ffi" }
deno_fs = { version = "0.93.0", path = "./ext/fs" }
deno_http = { version = "0.181.0", path = "./ext/http" }
deno_io = { version = "0.93.0", path = "./ext/io" }
deno_kv = { version = "0.91.0", path = "./ext/kv" }
deno_napi = { version = "0.114.0", path = "./ext/napi" }
deno_net = { version = "0.175.0", path = "./ext/net" }
deno_node = { version = "0.120.0", path = "./ext/node" }
deno_telemetry = { version = "0.5.0", path = "./ext/telemetry" }
deno_tls = { version = "0.170.0", path = "./ext/tls" }
deno_url = { version = "0.183.0", path = "./ext/url" }
deno_web = { version = "0.214.0", path = "./ext/web" }
deno_webgpu = { version = "0.150.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.183.0", path = "./ext/webidl" }
deno_websocket = { version = "0.188.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.178.0", path = "./ext/webstorage" }
# resolvers
deno_npm_cache = { version = "0.2.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.14.0", path = "./resolvers/deno" }
node_resolver = { version = "0.21.0", path = "./resolvers/node" }
deno_npm_cache = { version = "0.3.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.15.0", path = "./resolvers/deno" }
node_resolver = { version = "0.22.0", path = "./resolvers/node" }
aes = "=0.8.3"
anyhow = "1.0.57"
@ -105,11 +105,11 @@ async-trait = "0.1.73"
base32 = "=0.5.1"
base64 = "0.21.7"
bencher = "0.1"
boxed_error = "0.2.2"
boxed_error = "0.2.3"
brotli = "6.0.0"
bytes = "1.4.0"
cache_control = "=0.2.0"
capacity_builder = "0.1.0"
capacity_builder = "0.1.3"
cbc = { version = "=0.1.2", features = ["alloc"] }
# Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
# Instead use util::time::utc_now()
@ -118,8 +118,9 @@ color-print = "0.3.5"
console_static_text = "=0.8.1"
dashmap = "5.5.3"
data-encoding = "2.3.3"
data-url = "=0.3.0"
deno_cache_dir = "=0.14.0"
data-url = "=0.3.1"
deno_cache_dir = "=0.15.0"
deno_error = "=0.5.2"
deno_package_json = { version = "0.2.1", default-features = false }
deno_unsync = "0.4.2"
dlopen2 = "0.6.1"

@ -2,7 +2,7 @@
[package]
name = "deno_bench_util"
version = "0.176.0"
version = "0.177.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@ -73,6 +73,7 @@ deno_cache_dir.workspace = true
deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] }
deno_error.workspace = true
deno_graph = { version = "=0.86.3" }
deno_lint = { version = "=0.68.2", features = ["docs"] }
deno_lockfile.workspace = true
@ -93,8 +94,10 @@ anstream = "0.6.14"
async-trait.workspace = true
base64.workspace = true
bincode = "=1.3.3"
boxed_error.workspace = true
bytes.workspace = true
cache_control.workspace = true
capacity_builder.workspace = true
chrono = { workspace = true, features = ["now"] }
clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
clap_complete = "=4.5.24"

@ -4,21 +4,21 @@ use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
pub async fn resolve_import_map_value_from_specifier(
specifier: &Url,
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
) -> Result<serde_json::Value, AnyError> {
if specifier.scheme() == "data" {
let data_url_text =
deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
Ok(serde_json::from_str(&data_url_text)?)
} else {
let file = file_fetcher
.fetch_bypass_permissions(specifier)
.await?
.into_text_decoded()?;
let file = TextDecodedFile::decode(
file_fetcher.fetch_bypass_permissions(specifier).await?,
)?;
Ok(serde_json::from_str(&file.source)?)
}
}

@ -9,6 +9,7 @@ mod package_json;
use deno_ast::MediaType;
use deno_ast::SourceMapOption;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::CreateResolverOptions;
use deno_config::workspace::FolderConfigs;
@ -23,11 +24,11 @@ use deno_config::workspace::WorkspaceLintConfig;
use deno_config::workspace::WorkspaceResolver;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::NpmRc;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::NpmCacheSetting;
use deno_path_util::normalize_path;
use deno_semver::npm::NpmPackageReqReference;
use deno_telemetry::OtelConfig;
@ -85,7 +86,7 @@ use thiserror::Error;
use crate::cache;
use crate::cache::DenoDirProvider;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::version;
@ -217,52 +218,6 @@ pub fn ts_config_to_transpile_and_emit_options(
))
}
/// Indicates how cached source files should be handled.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum CacheSetting {
/// Only the cached files should be used. Any files not in the cache will
/// error. This is the equivalent of `--cached-only` in the CLI.
Only,
/// No cached source files should be used, and all files should be reloaded.
/// This is the equivalent of `--reload` in the CLI.
ReloadAll,
/// Only some cached resources should be used. This is the equivalent of
/// `--reload=jsr:@std/http/file-server` or
/// `--reload=jsr:@std/http/file-server,jsr:@std/assert/assert-equals`.
ReloadSome(Vec<String>),
/// The usability of a cached value is determined by analyzing the cached
/// headers and other metadata associated with a cached response, reloading
/// any cached "non-fresh" cached responses.
RespectHeaders,
/// The cached source files should be used for local modules. This is the
/// default behavior of the CLI.
Use,
}
impl CacheSetting {
pub fn as_npm_cache_setting(&self) -> NpmCacheSetting {
match self {
CacheSetting::Only => NpmCacheSetting::Only,
CacheSetting::ReloadAll => NpmCacheSetting::ReloadAll,
CacheSetting::ReloadSome(values) => {
if values.iter().any(|v| v == "npm:") {
NpmCacheSetting::ReloadAll
} else {
NpmCacheSetting::ReloadSome {
npm_package_names: values
.iter()
.filter_map(|v| v.strip_prefix("npm:"))
.map(|n| n.to_string())
.collect(),
}
}
}
CacheSetting::RespectHeaders => unreachable!(), // not supported
CacheSetting::Use => NpmCacheSetting::Use,
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct BenchOptions {
pub files: FilePatterns,
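
The hunk above deletes the CLI's local CacheSetting enum together with its as_npm_cache_setting helper; judging from the deno_cache_dir::file_fetcher::CacheSetting import added earlier in this file and the npm/managed hunk later in this diff, the type now comes from deno_cache_dir and the npm-side conversion from deno_npm_cache. A minimal sketch of how a caller might bridge the two after this change, assuming the upstream enum keeps the variants documented above:

// Sketch only: the helper name below is hypothetical; the real call site in
// npm/managed invokes NpmCacheSetting::from_cache_setting directly.
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_npm_cache::NpmCacheSetting;

fn npm_cache_setting(cache_setting: &CacheSetting) -> NpmCacheSetting {
  // replaces the deleted CacheSetting::as_npm_cache_setting method
  NpmCacheSetting::from_cache_setting(cache_setting)
}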
@ -1083,7 +1038,7 @@ impl CliOptions {
pub async fn create_workspace_resolver(
&self,
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
pkg_json_dep_resolution: PackageJsonDepResolution,
) -> Result<WorkspaceResolver, AnyError> {
let overrode_no_import_map: bool = self
@ -1430,9 +1385,7 @@ impl CliOptions {
Ok(result)
}
pub fn resolve_deno_lint_config(
&self,
) -> Result<deno_lint::linter::LintConfig, AnyError> {
pub fn resolve_deno_lint_config(&self) -> Result<DenoLintConfig, AnyError> {
let ts_config_result =
self.resolve_ts_config_for_emit(TsConfigType::Emit)?;
@ -1441,7 +1394,7 @@ impl CliOptions {
ts_config_result.ts_config,
)?;
Ok(deno_lint::linter::LintConfig {
Ok(DenoLintConfig {
default_jsx_factory: (!transpile_options.jsx_automatic)
.then(|| transpile_options.jsx_factory.clone()),
default_jsx_fragment_factory: (!transpile_options.jsx_automatic)

@ -1,369 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_core::ModuleSpecifier;
use log::debug;
use log::error;
use std::borrow::Cow;
use std::fmt;
use std::net::IpAddr;
use std::net::Ipv4Addr;
use std::net::Ipv6Addr;
use std::net::SocketAddr;
use std::str::FromStr;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AuthTokenData {
Bearer(String),
Basic { username: String, password: String },
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AuthToken {
host: AuthDomain,
token: AuthTokenData,
}
impl fmt::Display for AuthToken {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self.token {
AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"),
AuthTokenData::Basic { username, password } => {
let credentials = format!("{username}:{password}");
write!(f, "Basic {}", BASE64_STANDARD.encode(credentials))
}
}
}
}
/// A structure which contains bearer tokens that can be used when sending
/// requests to websites, intended to authorize access to private resources
/// such as remote modules.
#[derive(Debug, Clone)]
pub struct AuthTokens(Vec<AuthToken>);
/// An authorization domain, either an exact or suffix match.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AuthDomain {
Ip(IpAddr),
IpPort(SocketAddr),
/// Suffix match, no dot. May include a port.
Suffix(Cow<'static, str>),
}
impl<T: ToString> From<T> for AuthDomain {
fn from(value: T) -> Self {
let s = value.to_string().to_lowercase();
if let Ok(ip) = SocketAddr::from_str(&s) {
return AuthDomain::IpPort(ip);
};
if s.starts_with('[') && s.ends_with(']') {
if let Ok(ip) = Ipv6Addr::from_str(&s[1..s.len() - 1]) {
return AuthDomain::Ip(ip.into());
}
} else if let Ok(ip) = Ipv4Addr::from_str(&s) {
return AuthDomain::Ip(ip.into());
}
if let Some(s) = s.strip_prefix('.') {
AuthDomain::Suffix(Cow::Owned(s.to_owned()))
} else {
AuthDomain::Suffix(Cow::Owned(s))
}
}
}
impl AuthDomain {
pub fn matches(&self, specifier: &ModuleSpecifier) -> bool {
let Some(host) = specifier.host_str() else {
return false;
};
match *self {
Self::Ip(ip) => {
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
return false;
};
ip == parsed && specifier.port().is_none()
}
Self::IpPort(ip) => {
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
return false;
};
ip.ip() == parsed && specifier.port() == Some(ip.port())
}
Self::Suffix(ref suffix) => {
let hostname = if let Some(port) = specifier.port() {
Cow::Owned(format!("{}:{}", host, port))
} else {
Cow::Borrowed(host)
};
if suffix.len() == hostname.len() {
return suffix == &hostname;
}
// If it's a suffix match, ensure a dot
if hostname.ends_with(suffix.as_ref())
&& hostname.ends_with(&format!(".{suffix}"))
{
return true;
}
false
}
}
}
}
impl AuthTokens {
/// Create a new set of tokens based on the provided string. It is intended
/// that the string be the value of an environment variable and the string is
/// parsed for token values. The string is expected to be a semi-colon
/// separated string, where each value is `{token}@{hostname}`.
pub fn new(maybe_tokens_str: Option<String>) -> Self {
let mut tokens = Vec::new();
if let Some(tokens_str) = maybe_tokens_str {
for token_str in tokens_str.trim().split(';') {
if token_str.contains('@') {
let mut iter = token_str.rsplitn(2, '@');
let host = AuthDomain::from(iter.next().unwrap());
let token = iter.next().unwrap();
if token.contains(':') {
let mut iter = token.rsplitn(2, ':');
let password = iter.next().unwrap().to_owned();
let username = iter.next().unwrap().to_owned();
tokens.push(AuthToken {
host,
token: AuthTokenData::Basic { username, password },
});
} else {
tokens.push(AuthToken {
host,
token: AuthTokenData::Bearer(token.to_string()),
});
}
} else {
error!("Badly formed auth token discarded.");
}
}
debug!("Parsed {} auth token(s).", tokens.len());
}
Self(tokens)
}
/// Attempt to match the provided specifier to the tokens in the set. The
/// matching occurs from the right of the hostname plus port, irrespective of
/// scheme. For example `https://www.deno.land:8080/` would match a token
/// with a host value of `deno.land:8080` but not match `www.deno.land`. The
/// matching is case insensitive.
pub fn get(&self, specifier: &ModuleSpecifier) -> Option<AuthToken> {
self.0.iter().find_map(|t| {
if t.host.matches(specifier) {
Some(t.clone())
} else {
None
}
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use deno_core::resolve_url;
#[test]
fn test_auth_token() {
let auth_tokens = AuthTokens::new(Some("abc123@deno.land".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123"
);
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture =
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
}
#[test]
fn test_auth_tokens_multiple() {
let auth_tokens =
AuthTokens::new(Some("abc123@deno.land;def456@example.com".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_space() {
let auth_tokens = AuthTokens::new(Some(
" abc123@deno.land;def456@example.com\t".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_newline() {
let auth_tokens = AuthTokens::new(Some(
"\nabc123@deno.land;def456@example.com\n".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_port() {
let auth_tokens =
AuthTokens::new(Some("abc123@deno.land:8080".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture = resolve_url("http://deno.land:8080/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
}
#[test]
fn test_auth_tokens_contain_at() {
let auth_tokens = AuthTokens::new(Some("abc@123@deno.land".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc@123".to_string()
);
}
#[test]
fn test_auth_token_basic() {
let auth_tokens = AuthTokens::new(Some("abc:123@deno.land".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Basic YWJjOjEyMw=="
);
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Basic YWJjOjEyMw==".to_string()
);
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture =
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
}
#[test]
fn test_parse_ip() {
let ip = AuthDomain::from("[2001:db8:a::123]");
assert_eq!("Ip(2001:db8:a::123)", format!("{ip:?}"));
let ip = AuthDomain::from("[2001:db8:a::123]:8080");
assert_eq!("IpPort([2001:db8:a::123]:8080)", format!("{ip:?}"));
let ip = AuthDomain::from("1.1.1.1");
assert_eq!("Ip(1.1.1.1)", format!("{ip:?}"));
}
#[test]
fn test_case_insensitive() {
let domain = AuthDomain::from("EXAMPLE.com");
assert!(
domain.matches(&ModuleSpecifier::parse("http://example.com").unwrap())
);
assert!(
domain.matches(&ModuleSpecifier::parse("http://example.COM").unwrap())
);
}
#[test]
fn test_matches() {
let candidates = [
"example.com",
"www.example.com",
"1.1.1.1",
"[2001:db8:a::123]",
// These will never match
"example.com.evil.com",
"1.1.1.1.evil.com",
"notexample.com",
"www.notexample.com",
];
let domains = [
("example.com", vec!["example.com", "www.example.com"]),
(".example.com", vec!["example.com", "www.example.com"]),
("www.example.com", vec!["www.example.com"]),
("1.1.1.1", vec!["1.1.1.1"]),
("[2001:db8:a::123]", vec!["[2001:db8:a::123]"]),
];
let url = |c: &str| ModuleSpecifier::parse(&format!("http://{c}")).unwrap();
let url_port =
|c: &str| ModuleSpecifier::parse(&format!("http://{c}:8080")).unwrap();
// Generate each candidate with and without a port
let candidates = candidates
.into_iter()
.flat_map(|c| [url(c), url_port(c)])
.collect::<Vec<_>>();
for (domain, expected_domain) in domains {
// Test without a port -- all candidates return without a port
let auth_domain = AuthDomain::from(domain);
let actual = candidates
.iter()
.filter(|c| auth_domain.matches(c))
.cloned()
.collect::<Vec<_>>();
let expected = expected_domain.iter().map(|u| url(u)).collect::<Vec<_>>();
assert_eq!(actual, expected);
// Test with a port, all candidates return with a port
let auth_domain = AuthDomain::from(&format!("{domain}:8080"));
let actual = candidates
.iter()
.filter(|c| auth_domain.matches(c))
.cloned()
.collect::<Vec<_>>();
let expected = expected_domain
.iter()
.map(|u| url_port(u))
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
}
}

cli/cache/mod.rs

@ -1,18 +1,19 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::jsr_url;
use crate::args::CacheSetting;
use crate::errors::get_error_class_name;
use crate::file_fetcher::CliFetchNoFollowErrorKind;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind;
use deno_cache_dir::file_fetcher::FileOrRedirect;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::FutureExt;
use deno_core::ModuleSpecifier;
@ -190,7 +191,7 @@ pub struct FetchCacherOptions {
/// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@ -202,7 +203,7 @@ pub struct FetchCacher {
impl FetchCacher {
pub fn new(
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@ -320,18 +321,18 @@ impl Loader for FetchCacher {
LoaderCacheSetting::Only => Some(CacheSetting::Only),
};
file_fetcher
.fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions {
specifier: &specifier,
permissions: if is_statically_analyzable {
FetchPermissionsOptionRef::StaticContainer(&permissions)
} else {
FetchPermissionsOptionRef::DynamicContainer(&permissions)
},
maybe_auth: None,
maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(),
},
.fetch_no_follow(
&specifier,
FetchPermissionsOptionRef::Restricted(&permissions,
if is_statically_analyzable {
deno_runtime::deno_permissions::CheckSpecifierKind::Static
} else {
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic
}),
FetchNoFollowOptions {
maybe_auth: None,
maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(),
maybe_checksum: options.maybe_checksum.as_ref(),
})
.await
@ -348,7 +349,7 @@ impl Loader for FetchCacher {
(None, None) => None,
};
Ok(Some(LoadResponse::Module {
specifier: file.specifier,
specifier: file.url,
maybe_headers,
content: file.source,
}))
@ -361,18 +362,46 @@ impl Loader for FetchCacher {
}
})
.unwrap_or_else(|err| {
if let Some(io_err) = err.downcast_ref::<std::io::Error>() {
if io_err.kind() == std::io::ErrorKind::NotFound {
return Ok(None);
} else {
return Err(err);
}
}
let error_class_name = get_error_class_name(&err);
match error_class_name {
"NotFound" => Ok(None),
"NotCached" if options.cache_setting == LoaderCacheSetting::Only => Ok(None),
_ => Err(err),
let err = err.into_kind();
match err {
CliFetchNoFollowErrorKind::FetchNoFollow(err) => {
let err = err.into_kind();
match err {
FetchNoFollowErrorKind::NotFound(_) => Ok(None),
FetchNoFollowErrorKind::UrlToFilePath { .. } |
FetchNoFollowErrorKind::ReadingBlobUrl { .. } |
FetchNoFollowErrorKind::ReadingFile { .. } |
FetchNoFollowErrorKind::FetchingRemote { .. } |
FetchNoFollowErrorKind::ClientError { .. } |
FetchNoFollowErrorKind::NoRemote { .. } |
FetchNoFollowErrorKind::DataUrlDecode { .. } |
FetchNoFollowErrorKind::RedirectResolution { .. } |
FetchNoFollowErrorKind::CacheRead { .. } |
FetchNoFollowErrorKind::CacheSave { .. } |
FetchNoFollowErrorKind::UnsupportedScheme { .. } |
FetchNoFollowErrorKind::RedirectHeaderParse { .. } |
FetchNoFollowErrorKind::InvalidHeader { .. } => Err(AnyError::from(err)),
FetchNoFollowErrorKind::NotCached { .. } => {
if options.cache_setting == LoaderCacheSetting::Only {
Ok(None)
} else {
Err(AnyError::from(err))
}
},
FetchNoFollowErrorKind::ChecksumIntegrity(err) => {
// convert to the equivalent deno_graph error so that it
// enhances it if this is passed to deno_graph
Err(
deno_graph::source::ChecksumIntegrityError {
actual: err.actual,
expected: err.expected,
}
.into(),
)
}
}
},
CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(AnyError::from(permission_check_error)),
}
})
}

@ -23,8 +23,7 @@ use crate::cache::ModuleInfoCache;
use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::file_fetcher::File;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_container::MainModuleGraphContainer;
use crate::graph_util::FileWatcherReporter;
use crate::graph_util::ModuleGraphBuilder;
@ -72,6 +71,7 @@ use std::collections::HashSet;
use std::path::PathBuf;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::file_fetcher::File;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::glob::FilePatterns;
use deno_config::workspace::PackageJsonDepResolution;
@ -198,7 +198,7 @@ struct CliFactoryServices {
emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
file_fetcher: Deferred<Arc<FileFetcher>>,
file_fetcher: Deferred<Arc<CliFileFetcher>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>,
@ -363,16 +363,17 @@ impl CliFactory {
})
}
pub fn file_fetcher(&self) -> Result<&Arc<FileFetcher>, AnyError> {
pub fn file_fetcher(&self) -> Result<&Arc<CliFileFetcher>, AnyError> {
self.services.file_fetcher.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(FileFetcher::new(
Ok(Arc::new(CliFileFetcher::new(
self.http_cache()?.clone(),
cli_options.cache_setting(),
!cli_options.no_remote(),
self.http_client_provider().clone(),
self.blob_store().clone(),
Some(self.text_only_progress_bar().clone()),
!cli_options.no_remote(),
cli_options.cache_setting(),
log::Level::Info,
)))
})
}
@ -1173,7 +1174,7 @@ impl WorkspaceFilesFactory {
collect_specifiers: fn(
FilePatterns,
Arc<CliOptions>,
Arc<FileFetcher>,
Arc<CliFileFetcher>,
T,
) -> std::pin::Pin<
Box<
@ -1238,7 +1239,7 @@ impl WorkspaceFilesFactory {
let file = file_fetcher.fetch(s, root_permissions).await?;
let snippet_files = extract_doc_files(file)?;
for snippet_file in snippet_files {
doc_snippet_specifiers.push(snippet_file.specifier.clone());
doc_snippet_specifiers.push(snippet_file.url.clone());
file_fetcher.insert_memory_files(snippet_file);
}
}

File diff suppressed because it is too large.

@ -13,7 +13,7 @@ use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CliResolver;
@ -429,7 +429,7 @@ pub struct ModuleGraphBuilder {
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@ -448,7 +448,7 @@ impl ModuleGraphBuilder {
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,

File diff suppressed because it is too large.

@ -1,7 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::jsr_url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use dashmap::DashMap;
use deno_core::serde_json;
use deno_graph::packages::JsrPackageInfo;
@ -19,11 +19,11 @@ pub struct JsrFetchResolver {
/// It can be large and we don't want to store it.
info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
}
impl JsrFetchResolver {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
Self {
nv_by_req: Default::default(),
info_by_nv: Default::default(),

@ -63,7 +63,7 @@ use crate::args::ConfigFile;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::cache::FastInsecureHasher;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::lsp::logging::lsp_warn;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
@ -1218,7 +1218,7 @@ impl ConfigData {
specified_config: Option<&Path>,
scope: &ModuleSpecifier,
settings: &Settings,
file_fetcher: &Arc<FileFetcher>,
file_fetcher: &Arc<CliFileFetcher>,
// sync requirement is because the lsp requires sync
cached_deno_config_fs: &(dyn DenoConfigFs + Sync),
deno_json_cache: &(dyn DenoJsonCache + Sync),
@ -1313,7 +1313,7 @@ impl ConfigData {
member_dir: Arc<WorkspaceDirectory>,
scope: Arc<ModuleSpecifier>,
settings: &Settings,
file_fetcher: Option<&Arc<FileFetcher>>,
file_fetcher: Option<&Arc<CliFileFetcher>>,
) -> Self {
let (settings, workspace_folder) = settings.get_for_specifier(&scope);
let mut watched_files = HashMap::with_capacity(10);
@ -1835,7 +1835,7 @@ impl ConfigTree {
&mut self,
settings: &Settings,
workspace_files: &IndexSet<ModuleSpecifier>,
file_fetcher: &Arc<FileFetcher>,
file_fetcher: &Arc<CliFileFetcher>,
) {
lsp_log!("Refreshing configuration tree...");
// since we're resolving a workspace multiple times in different

@ -45,6 +45,7 @@ use deno_graph::source::ResolveError;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_fs;
@ -834,7 +835,7 @@ fn generate_lint_diagnostics(
lint_rule_provider.resolve_lint_rules(Default::default(), None)
},
fix: false,
deno_lint_config: deno_lint::linter::LintConfig {
deno_lint_config: DenoLintConfig {
default_jsx_factory: None,
default_jsx_fragment_factory: None,
},

@ -2,7 +2,8 @@
use crate::args::jsr_api_url;
use crate::args::jsr_url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::jsr::partial_jsr_package_version_info_from_slice;
use crate::jsr::JsrFetchResolver;
use dashmap::DashMap;
@ -267,7 +268,7 @@ fn read_cached_url(
#[derive(Debug)]
pub struct CliJsrSearchApi {
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
resolver: JsrFetchResolver,
search_cache: DashMap<String, Arc<Vec<String>>>,
versions_cache: DashMap<String, Arc<Vec<Version>>>,
@ -275,7 +276,7 @@ pub struct CliJsrSearchApi {
}
impl CliJsrSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
let resolver = JsrFetchResolver::new(file_fetcher.clone());
Self {
file_fetcher,
@ -309,10 +310,8 @@ impl PackageSearchApi for CliJsrSearchApi {
let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move {
file_fetcher
.fetch_bypass_permissions(&search_url)
.await?
.into_text_decoded()
let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
TextDecodedFile::decode(file)
})
.await??;
let names = Arc::new(parse_jsr_search_response(&file.source)?);

@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_core::anyhow::anyhow;
@ -95,13 +96,12 @@ use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::has_flag_env_var;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::InternalFlags;
use crate::args::UnstableFmtOptions;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_util;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::ConfigWatchedFileType;
@ -958,15 +958,15 @@ impl Inner {
}
async fn refresh_config_tree(&mut self) {
let mut file_fetcher = FileFetcher::new(
let file_fetcher = CliFileFetcher::new(
self.cache.global().clone(),
CacheSetting::RespectHeaders,
true,
self.http_client_provider.clone(),
Default::default(),
None,
true,
CacheSetting::RespectHeaders,
super::logging::lsp_log_level(),
);
file_fetcher.set_download_log_level(super::logging::lsp_log_level());
let file_fetcher = Arc::new(file_fetcher);
self
.config

@ -11,21 +11,22 @@ use serde::Deserialize;
use std::sync::Arc;
use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::npm::NpmFetchResolver;
use super::search::PackageSearchApi;
#[derive(Debug)]
pub struct CliNpmSearchApi {
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
resolver: NpmFetchResolver,
search_cache: DashMap<String, Arc<Vec<String>>>,
versions_cache: DashMap<String, Arc<Vec<Version>>>,
}
impl CliNpmSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
let resolver = NpmFetchResolver::new(
file_fetcher.clone(),
Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
@ -57,10 +58,8 @@ impl PackageSearchApi for CliNpmSearchApi {
.append_pair("text", &format!("{} boost-exact:false", query));
let file_fetcher = self.file_fetcher.clone();
let file = deno_core::unsync::spawn(async move {
file_fetcher
.fetch_bypass_permissions(&search_url)
.await?
.into_text_decoded()
let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
TextDecodedFile::decode(file)
})
.await??;
let names = Arc::new(parse_npm_search_response(&file.source)?);

@ -12,14 +12,15 @@ use super::path_to_regex::StringOrNumber;
use super::path_to_regex::StringOrVec;
use super::path_to_regex::Token;
use crate::args::CacheSetting;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::http_util::HttpClientProvider;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
@ -418,7 +419,7 @@ enum VariableItems {
pub struct ModuleRegistry {
origins: HashMap<String, Vec<RegistryConfiguration>>,
pub location: PathBuf,
pub file_fetcher: Arc<FileFetcher>,
pub file_fetcher: Arc<CliFileFetcher>,
http_cache: Arc<GlobalHttpCache>,
}
@ -432,15 +433,15 @@ impl ModuleRegistry {
location.clone(),
crate::cache::RealDenoCacheEnv,
));
let mut file_fetcher = FileFetcher::new(
let file_fetcher = CliFileFetcher::new(
http_cache.clone(),
CacheSetting::RespectHeaders,
true,
http_client_provider,
Default::default(),
None,
true,
CacheSetting::RespectHeaders,
super::logging::lsp_log_level(),
);
file_fetcher.set_download_log_level(super::logging::lsp_log_level());
Self {
origins: HashMap::new(),
@ -479,13 +480,15 @@ impl ModuleRegistry {
let specifier = specifier.clone();
async move {
file_fetcher
.fetch_with_options(FetchOptions {
specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None,
})
.fetch_with_options(
&specifier,
FetchPermissionsOptionRef::AllowAll,
FetchOptions {
maybe_auth: None,
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None,
}
)
.await
}
}).await?;
@ -500,7 +503,7 @@ impl ModuleRegistry {
);
self.http_cache.set(specifier, headers_map, &[])?;
}
let file = fetch_result?.into_text_decoded()?;
let file = TextDecodedFile::decode(fetch_result?)?;
let config: RegistryConfigurationJson = serde_json::from_str(&file.source)?;
validate_config(&config)?;
Ok(config.registries)
@ -584,12 +587,11 @@ impl ModuleRegistry {
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn({
async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&endpoint)
.await
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
}
})
.await
@ -983,12 +985,11 @@ impl ModuleRegistry {
let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
})
.await
.ok()??;
@ -1049,7 +1050,7 @@ impl ModuleRegistry {
let file_fetcher = self.file_fetcher.clone();
let specifier = specifier.clone();
async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await
.map_err(|err| {
@ -1058,9 +1059,8 @@ impl ModuleRegistry {
specifier, err
);
})
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
}
})
.await
@ -1095,7 +1095,7 @@ impl ModuleRegistry {
let file_fetcher = self.file_fetcher.clone();
let specifier = specifier.clone();
async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await
.map_err(|err| {
@ -1104,9 +1104,8 @@ impl ModuleRegistry {
specifier, err
);
})
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
}
})
.await

@ -2,6 +2,7 @@
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::npm::NpmCacheDir;
use deno_cache_dir::HttpCache;
use deno_config::deno_json::JsxImportSourceConfig;
@ -39,7 +40,6 @@ use std::sync::Arc;
use super::cache::LspCache;
use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider;
use crate::cache::DenoCacheEnvFsAdapter;

@ -5516,7 +5516,6 @@ impl TscRequest {
mod tests {
use super::*;
use crate::cache::HttpCache;
use crate::http_util::HeadersMap;
use crate::lsp::cache::LspCache;
use crate::lsp::config::Config;
use crate::lsp::config::WorkspaceSettings;
@ -5953,7 +5952,7 @@ mod tests {
.global()
.set(
&specifier_dep,
HeadersMap::default(),
Default::default(),
b"export const b = \"b\";\n",
)
.unwrap();
@ -5992,7 +5991,7 @@ mod tests {
.global()
.set(
&specifier_dep,
HeadersMap::default(),
Default::default(),
b"export const b = \"b\";\n\nexport const a = \"b\";\n",
)
.unwrap();

@ -1,7 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
mod args;
mod auth_tokens;
mod cache;
mod cdp;
mod emit;

@ -8,7 +8,6 @@
mod standalone;
mod args;
mod auth_tokens;
mod cache;
mod emit;
mod errors;

@ -20,6 +20,7 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::NpmCacheSetting;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs::FileSystem;
@ -70,7 +71,7 @@ pub struct CliManagedNpmResolverCreateOptions {
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
pub npm_cache_dir: Arc<NpmCacheDir>,
pub cache_setting: crate::args::CacheSetting,
pub cache_setting: deno_cache_dir::file_fetcher::CacheSetting,
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo,
@ -203,7 +204,7 @@ fn create_cache(
) -> Arc<CliNpmCache> {
Arc::new(CliNpmCache::new(
options.npm_cache_dir.clone(),
options.cache_setting.as_npm_cache_setting(),
NpmCacheSetting::from_cache_setting(&options.cache_setting),
env,
options.npmrc.clone(),
))

@ -28,7 +28,7 @@ use managed::create_managed_in_npm_pkg_checker;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::hard_link_dir_recursive;
@ -115,14 +115,14 @@ impl deno_npm_cache::NpmCacheEnv for CliNpmCacheEnv {
.download_with_progress_and_retries(url, maybe_auth_header, &guard)
.await
.map_err(|err| {
use crate::http_util::DownloadError::*;
let status_code = match &err {
use crate::http_util::DownloadErrorKind::*;
let status_code = match err.as_kind() {
Fetch { .. }
| UrlParse { .. }
| HttpParse { .. }
| Json { .. }
| ToStr { .. }
| NoRedirectHeader { .. }
| RedirectHeaderParse { .. }
| TooManyRedirects => None,
BadResponse(bad_response_error) => {
Some(bad_response_error.status_code)
@ -232,13 +232,13 @@ pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
pub struct NpmFetchResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
}
impl NpmFetchResolver {
pub fn new(
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
) -> Self {
Self {

@ -71,7 +71,7 @@ use crate::args::UnstableConfig;
use crate::cache::DenoDir;
use crate::cache::FastInsecureHasher;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
@ -390,7 +390,7 @@ pub struct DenoCompileBinaryWriter<'a> {
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
@ -404,7 +404,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,

@ -9,6 +9,7 @@ use binary::StandaloneData;
use binary::StandaloneModules;
use code_cache::DenoCompileCodeCache;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionError;
@ -64,7 +65,6 @@ use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::npm_pkg_req_ref_to_binary_command;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::cache::Caches;

@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;
@ -42,50 +43,46 @@ pub fn serialize_binary_data_section(
remote_modules: &RemoteModulesStoreBuilder,
vfs: &BuiltVfs,
) -> Result<Vec<u8>, AnyError> {
fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
bytes.extend_from_slice(data);
}
let metadata = serde_json::to_string(metadata)?;
let npm_snapshot =
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
let remote_modules_len = Cell::new(0_u64);
let serialized_vfs = serde_json::to_string(&vfs.root)?;
let mut bytes = Vec::new();
bytes.extend_from_slice(MAGIC_BYTES);
// 1. Metadata
{
let metadata = serde_json::to_string(metadata)?;
write_bytes_with_len(&mut bytes, metadata.as_bytes());
}
// 2. Npm snapshot
{
let npm_snapshot =
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
write_bytes_with_len(&mut bytes, &npm_snapshot);
}
// 3. Remote modules
{
let update_index = bytes.len();
bytes.extend_from_slice(&(0_u64).to_le_bytes());
let start_index = bytes.len();
remote_modules.write(&mut bytes)?;
let length = bytes.len() - start_index;
let length_bytes = (length as u64).to_le_bytes();
bytes[update_index..update_index + length_bytes.len()]
.copy_from_slice(&length_bytes);
}
// 4. VFS
{
let serialized_vfs = serde_json::to_string(&vfs.root)?;
write_bytes_with_len(&mut bytes, serialized_vfs.as_bytes());
let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
for file in &vfs.files {
bytes.extend_from_slice(file);
let bytes = capacity_builder::BytesBuilder::build(|builder| {
builder.append(MAGIC_BYTES);
// 1. Metadata
{
builder.append_le(metadata.len() as u64);
builder.append(&metadata);
}
// 2. Npm snapshot
{
builder.append_le(npm_snapshot.len() as u64);
builder.append(&npm_snapshot);
}
// 3. Remote modules
{
builder.append_le(remote_modules_len.get()); // this will be properly initialized on the second pass
let start_index = builder.len();
remote_modules.write(builder);
remote_modules_len.set((builder.len() - start_index) as u64);
}
// 4. VFS
{
builder.append_le(serialized_vfs.len() as u64);
builder.append(&serialized_vfs);
let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
builder.append_le(vfs_bytes_len);
for file in &vfs.files {
builder.append(file);
}
}
}
// write the magic bytes at the end so we can use it
// to make sure we've deserialized correctly
bytes.extend_from_slice(MAGIC_BYTES);
// write the magic bytes at the end so we can use it
// to make sure we've deserialized correctly
builder.append(MAGIC_BYTES);
})?;
Ok(bytes)
}
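
Side note (illustration, not part of this diff): the rewrite above swaps hand-rolled Vec<u8> writes for capacity_builder::BytesBuilder::build, which evaluates the closure twice — a first pass to measure lengths (filling the remote_modules_len Cell) and a second pass to write into a correctly sized buffer — while keeping the same layout: MAGIC_BYTES at both ends around four length-prefixed sections (metadata JSON, npm snapshot, remote modules, serialized VFS plus file contents). Below is a minimal std-only sketch of that little-endian length-prefixed framing; the helper names are hypothetical and are not part of the commit.

// Standalone sketch: each section is a little-endian u64 length followed by the
// raw bytes, so a reader can slice out sections without parsing their contents.
fn write_section(out: &mut Vec<u8>, data: &[u8]) {
  out.extend_from_slice(&(data.len() as u64).to_le_bytes());
  out.extend_from_slice(data);
}

fn read_section(input: &[u8]) -> Option<(&[u8], &[u8])> {
  let len_bytes: [u8; 8] = input.get(..8)?.try_into().ok()?;
  let len = u64::from_le_bytes(len_bytes) as usize;
  let rest = &input[8..];
  let section = rest.get(..len)?;
  Some((section, &rest[len..]))
}

fn main() {
  let mut buf = Vec::new();
  write_section(&mut buf, b"metadata json");
  write_section(&mut buf, b"npm snapshot bytes");
  let (metadata, rest) = read_section(&buf).unwrap();
  let (snapshot, _) = read_section(rest).unwrap();
  assert_eq!(metadata, b"metadata json");
  assert_eq!(snapshot, b"npm snapshot bytes");
}

The same builder is threaded into RemoteModulesStoreBuilder::write in the hunk below, which is why that method no longer needs to return a Result.
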
@ -191,26 +188,25 @@ impl RemoteModulesStoreBuilder {
}
}
fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
fn write<'a>(&'a self, builder: &mut capacity_builder::BytesBuilder<'a>) {
builder.append_le(self.specifiers.len() as u32);
builder.append_le(self.redirects.len() as u32);
for (specifier, offset) in &self.specifiers {
writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
writer.write_all(specifier.as_bytes())?;
writer.write_all(&offset.to_le_bytes())?;
builder.append_le(specifier.len() as u32);
builder.append(specifier.as_bytes());
builder.append_le(*offset);
}
for (from, to) in &self.redirects {
writer.write_all(&(from.len() as u32).to_le_bytes())?;
writer.write_all(from.as_bytes())?;
writer.write_all(&(to.len() as u32).to_le_bytes())?;
writer.write_all(to.as_bytes())?;
builder.append_le(from.len() as u32);
builder.append(from);
builder.append_le(to.len() as u32);
builder.append(to);
}
for (media_type, data) in &self.data {
writer.write_all(&[serialize_media_type(*media_type)])?;
writer.write_all(&(data.len() as u64).to_le_bytes())?;
writer.write_all(data)?;
builder.append(serialize_media_type(*media_type));
builder.append_le(data.len() as u64);
builder.append(data);
}
Ok(())
}
}

View file

@ -6,6 +6,7 @@ use crate::args::FileFlags;
use crate::args::Flags;
use crate::cdp;
use crate::factory::CliFactory;
use crate::file_fetcher::TextDecodedFile;
use crate::tools::fmt::format_json;
use crate::tools::test::is_supported_test_path;
use crate::util::text_encoding::source_map_from_code;
@ -559,6 +560,12 @@ pub fn cover_files(
},
None => None,
};
let get_message = |specifier: &ModuleSpecifier| -> String {
format!(
"Failed to fetch \"{}\" from cache. Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
specifier,
)
};
for script_coverage in script_coverages {
let module_specifier = deno_core::resolve_url_or_path(
@ -566,21 +573,14 @@ pub fn cover_files(
cli_options.initial_cwd(),
)?;
let maybe_file = if module_specifier.scheme() == "file" {
file_fetcher.get_source(&module_specifier)
} else {
file_fetcher
.fetch_cached(&module_specifier, 10)
.with_context(|| {
format!("Failed to fetch \"{module_specifier}\" from cache.")
})?
let maybe_file_result = file_fetcher
.get_cached_source_or_local(&module_specifier)
.map_err(AnyError::from);
let file = match maybe_file_result {
Ok(Some(file)) => TextDecodedFile::decode(file)?,
Ok(None) => return Err(anyhow!("{}", get_message(&module_specifier))),
Err(err) => return Err(err).context(get_message(&module_specifier)),
};
let file = maybe_file.ok_or_else(|| {
anyhow!("Failed to fetch \"{}\" from cache.
Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
module_specifier
)
})?.into_text_decoded()?;
let original_source = file.source.clone();
// Check if file was transpiled
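
Side note (illustration, not part of this diff): the coverage path above now fetches both local files and cached remote files through a single get_cached_source_or_local call and decodes the result with TextDecodedFile::decode, so a cache miss (Ok(None)) and a fetch error surface the same "run `deno test --coverage`" guidance. A standalone sketch of that Result<Option<_>> handling follows; it uses the anyhow crate directly (the real code goes through deno_core's re-exports) and a hypothetical lookup closure standing in for the file fetcher.

use anyhow::{anyhow, Context, Error};

// Sketch: normalize a cache lookup that can miss (Ok(None)) or fail (Err) into
// a single error carrying the same remediation hint.
fn load_source(
  specifier: &str,
  lookup: impl Fn(&str) -> Result<Option<String>, Error>,
) -> Result<String, Error> {
  let message = || {
    format!(
      "Failed to fetch \"{specifier}\" from cache. Before generating coverage report, \
       run `deno test --coverage` to ensure consistent state."
    )
  };
  match lookup(specifier) {
    Ok(Some(source)) => Ok(source),
    Ok(None) => Err(anyhow!("{}", message())),
    Err(err) => Err(err).context(message()),
  }
}

fn main() {
  let miss = load_source("file:///main.ts", |_| Ok(None));
  assert!(miss.unwrap_err().to_string().contains("deno test --coverage"));
}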

View file

@ -3,7 +3,6 @@
use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;
@ -13,13 +12,14 @@ use crate::args::TypeCheckMode;
use crate::args::UninstallFlags;
use crate::args::UninstallKind;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_container::ModuleGraphContainer;
use crate::http_util::HttpClientProvider;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
@ -361,18 +361,18 @@ async fn install_global(
let cli_options = factory.cli_options()?;
let http_client = factory.http_client_provider();
let deps_http_cache = factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new(
let deps_file_fetcher = CliFileFetcher::new(
deps_http_cache.clone(),
CacheSetting::ReloadAll,
true,
http_client.clone(),
Default::default(),
None,
true,
CacheSetting::ReloadAll,
log::Level::Trace,
);
let npmrc = factory.cli_options().unwrap().npmrc();
deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver = Arc::new(NpmFetchResolver::new(

View file

@ -20,7 +20,7 @@ use deno_core::unsync::future::LocalFutureExt;
use deno_core::unsync::future::SharedLocal;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::LintConfig;
use deno_lint::linter::LintConfig as DenoLintConfig;
use log::debug;
use reporters::create_reporter;
use reporters::LintReporter;
@ -29,7 +29,6 @@ use std::collections::HashSet;
use std::fs;
use std::io::stdin;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
@ -47,6 +46,7 @@ use crate::graph_util::ModuleGraphCreator;
use crate::tools::fmt::run_parallelized;
use crate::util::display;
use crate::util::file_watcher;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
use crate::util::path::is_script_ext;
use crate::util::sync::AtomicFlag;
@ -69,136 +69,139 @@ pub async fn lint(
flags: Arc<Flags>,
lint_flags: LintFlags,
) -> Result<(), AnyError> {
if let Some(watch_flags) = &lint_flags.watch {
if lint_flags.watch.is_some() {
if lint_flags.is_stdin() {
return Err(generic_error(
"Lint watch on standard input is not supported.",
));
}
file_watcher::watch_func(
flags,
file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| {
let lint_flags = lint_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let lint_config = cli_options.resolve_deno_lint_config()?;
let mut paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
for paths_with_options in &mut paths_with_options_batches {
_ = watcher_communicator
.watch_paths(paths_with_options.paths.clone());
let files = std::mem::take(&mut paths_with_options.paths);
paths_with_options.paths = if let Some(paths) = &changed_paths {
// lint all files on any changed (https://github.com/denoland/deno/issues/12446)
files
.iter()
.any(|path| {
canonicalize_path(path)
.map(|p| paths.contains(&p))
.unwrap_or(false)
})
.then_some(files)
.unwrap_or_else(|| [].to_vec())
} else {
files
};
}
return lint_with_watch(flags, lint_flags).await;
}
let mut linter = WorkspaceLinter::new(
factory.caches()?.clone(),
factory.lint_rule_provider().await?,
factory.module_graph_creator().await?.clone(),
cli_options.start_dir.clone(),
&cli_options.resolve_workspace_lint_options(&lint_flags)?,
);
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
lint_config.clone(),
paths_with_options.dir,
paths_with_options.paths,
)
.await?;
}
linter.finish();
Ok(())
})
},
)
.await?;
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let lint_rule_provider = factory.lint_rule_provider().await?;
let is_stdin = lint_flags.is_stdin();
let deno_lint_config = cli_options.resolve_deno_lint_config()?;
let workspace_lint_options =
cli_options.resolve_workspace_lint_options(&lint_flags)?;
let success = if is_stdin {
lint_stdin(
cli_options,
lint_rule_provider,
workspace_lint_options,
lint_flags,
deno_lint_config,
)?
} else {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let is_stdin = lint_flags.is_stdin();
let deno_lint_config = cli_options.resolve_deno_lint_config()?;
let workspace_lint_options =
cli_options.resolve_workspace_lint_options(&lint_flags)?;
let success = if is_stdin {
let start_dir = &cli_options.start_dir;
let reporter_lock = Arc::new(Mutex::new(create_reporter(
workspace_lint_options.reporter_kind,
)));
let lint_config = start_dir
.to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
let lint_options = LintOptions::resolve(lint_config, &lint_flags);
let lint_rules = factory
.lint_rule_provider()
.await?
.resolve_lint_rules_err_empty(
lint_options.rules,
start_dir.maybe_deno_json().map(|c| c.as_ref()),
)?;
let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
if let Some(ext) = cli_options.ext_flag() {
file_path.set_extension(ext);
}
let r = lint_stdin(&file_path, lint_rules, deno_lint_config);
let success = handle_lint_result(
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
);
reporter_lock.lock().close(1);
success
} else {
let mut linter = WorkspaceLinter::new(
factory.caches()?.clone(),
factory.lint_rule_provider().await?,
factory.module_graph_creator().await?.clone(),
cli_options.start_dir.clone(),
&workspace_lint_options,
);
let paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
deno_lint_config.clone(),
paths_with_options.dir,
paths_with_options.paths,
)
.await?;
}
linter.finish()
};
if !success {
deno_runtime::exit(1);
let mut linter = WorkspaceLinter::new(
factory.caches()?.clone(),
lint_rule_provider,
factory.module_graph_creator().await?.clone(),
cli_options.start_dir.clone(),
&workspace_lint_options,
);
let paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
deno_lint_config.clone(),
paths_with_options.dir,
paths_with_options.paths,
)
.await?;
}
linter.finish()
};
if !success {
deno_runtime::exit(1);
}
Ok(())
}
async fn lint_with_watch_inner(
flags: Arc<Flags>,
lint_flags: LintFlags,
watcher_communicator: Arc<WatcherCommunicator>,
changed_paths: Option<Vec<PathBuf>>,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let lint_config = cli_options.resolve_deno_lint_config()?;
let mut paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
for paths_with_options in &mut paths_with_options_batches {
_ = watcher_communicator.watch_paths(paths_with_options.paths.clone());
let files = std::mem::take(&mut paths_with_options.paths);
paths_with_options.paths = if let Some(paths) = &changed_paths {
// lint all files on any changed (https://github.com/denoland/deno/issues/12446)
files
.iter()
.any(|path| {
canonicalize_path(path)
.map(|p| paths.contains(&p))
.unwrap_or(false)
})
.then_some(files)
.unwrap_or_else(|| [].to_vec())
} else {
files
};
}
let mut linter = WorkspaceLinter::new(
factory.caches()?.clone(),
factory.lint_rule_provider().await?,
factory.module_graph_creator().await?.clone(),
cli_options.start_dir.clone(),
&cli_options.resolve_workspace_lint_options(&lint_flags)?,
);
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
lint_config.clone(),
paths_with_options.dir,
paths_with_options.paths,
)
.await?;
}
linter.finish();
Ok(())
}
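
Side note (illustration, not part of this diff): lint_with_watch_inner above is one watch iteration, extracted from the old inline closure. When a change notification arrives, a path batch is re-linted only if at least one of its files (after canonicalization) is among the changed paths; otherwise nothing is linted for that batch. A standalone sketch of that filtering step, using std::fs::canonicalize in place of the crate's canonicalize_path helper:

use std::path::PathBuf;

// Sketch: decide which files to re-lint for one batch. `changed` is None on the
// first run (lint everything) and Some(paths) on later watcher events.
fn files_to_relint(files: Vec<PathBuf>, changed: Option<&[PathBuf]>) -> Vec<PathBuf> {
  match changed {
    None => files,
    Some(changed) => {
      let any_changed = files.iter().any(|path| {
        std::fs::canonicalize(path)
          .map(|p| changed.contains(&p))
          .unwrap_or(false)
      });
      if any_changed {
        // lint the whole batch (https://github.com/denoland/deno/issues/12446)
        files
      } else {
        Vec::new()
      }
    }
  }
}

fn main() {
  let batch = vec![PathBuf::from("src/a.ts"), PathBuf::from("src/b.ts")];
  // With no change info, everything is linted.
  assert_eq!(files_to_relint(batch, None).len(), 2);
}
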
async fn lint_with_watch(
flags: Arc<Flags>,
lint_flags: LintFlags,
) -> Result<(), AnyError> {
let watch_flags = lint_flags.watch.as_ref().unwrap();
file_watcher::watch_func(
flags,
file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| {
let lint_flags = lint_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(lint_with_watch_inner(
flags,
lint_flags,
watcher_communicator,
changed_paths,
))
},
)
.await
}
struct PathsWithOptions {
dir: Arc<WorkspaceDirectory>,
paths: Vec<PathBuf>,
@ -269,7 +272,7 @@ impl WorkspaceLinter {
&mut self,
cli_options: &Arc<CliOptions>,
lint_options: LintOptions,
lint_config: LintConfig,
lint_config: DenoLintConfig,
member_dir: Arc<WorkspaceDirectory>,
paths: Vec<PathBuf>,
) -> Result<(), AnyError> {
@ -294,112 +297,63 @@ impl WorkspaceLinter {
deno_lint_config: lint_config,
}));
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let mut futures = Vec::with_capacity(2);
if linter.has_package_rules() {
if self.workspace_module_graph.is_none() {
let module_graph_creator = self.module_graph_creator.clone();
let packages = self.workspace_dir.jsr_packages_for_publish();
self.workspace_module_graph = Some(
async move {
module_graph_creator
.create_and_validate_publish_graph(&packages, true)
.await
.map(Rc::new)
.map_err(Rc::new)
}
.boxed_local()
.shared_local(),
);
}
let workspace_module_graph_future =
self.workspace_module_graph.as_ref().unwrap().clone();
let publish_config = member_dir.maybe_package_config();
if let Some(publish_config) = publish_config {
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let linter = linter.clone();
let path_urls = paths
.iter()
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
.collect::<HashSet<_>>();
futures.push(
async move {
let graph = workspace_module_graph_future
.await
.map_err(|err| anyhow!("{:#}", err))?;
let export_urls =
publish_config.config_file.resolve_export_value_urls()?;
if !export_urls.iter().any(|url| path_urls.contains(url)) {
return Ok(()); // entrypoint is not specified, so skip
}
let diagnostics = linter.lint_package(&graph, &export_urls);
if !diagnostics.is_empty() {
has_error.raise();
let mut reporter = reporter_lock.lock();
for diagnostic in &diagnostics {
reporter.visit_diagnostic(diagnostic);
}
}
Ok(())
}
.boxed_local(),
);
if let Some(fut) = self.run_package_rules(&linter, &member_dir, &paths) {
futures.push(fut);
}
}
futures.push({
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let maybe_incremental_cache = maybe_incremental_cache.clone();
let linter = linter.clone();
let cli_options = cli_options.clone();
async move {
run_parallelized(paths, {
move |file_path| {
let file_text =
deno_ast::strip_bom(fs::read_to_string(&file_path)?);
let maybe_incremental_cache_ = maybe_incremental_cache.clone();
let linter = linter.clone();
let cli_options = cli_options.clone();
let fut = async move {
let operation = move |file_path: PathBuf| {
let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
// don't bother rechecking this file if it didn't have any diagnostics before
if let Some(incremental_cache) = &maybe_incremental_cache {
if incremental_cache.is_file_same(&file_path, &file_text) {
return Ok(());
}
}
let r = linter.lint_file(
&file_path,
file_text,
cli_options.ext_flag().as_deref(),
);
if let Ok((file_source, file_diagnostics)) = &r {
if let Some(incremental_cache) = &maybe_incremental_cache {
if file_diagnostics.is_empty() {
// update the incremental cache if there were no diagnostics
incremental_cache.update_file(
&file_path,
// ensure the returned text is used here as it may have been modified via --fix
file_source.text(),
)
}
}
}
let success = handle_lint_result(
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
);
if !success {
has_error.raise();
}
Ok(())
// don't bother rechecking this file if it didn't have any diagnostics before
if let Some(incremental_cache) = &maybe_incremental_cache_ {
if incremental_cache.is_file_same(&file_path, &file_text) {
return Ok(());
}
})
.await
}
.boxed_local()
});
}
let r = linter.lint_file(
&file_path,
file_text,
cli_options.ext_flag().as_deref(),
);
if let Ok((file_source, file_diagnostics)) = &r {
if let Some(incremental_cache) = &maybe_incremental_cache_ {
if file_diagnostics.is_empty() {
// update the incremental cache if there were no diagnostics
incremental_cache.update_file(
&file_path,
// ensure the returned text is used here as it may have been modified via --fix
file_source.text(),
)
}
}
}
let success = handle_lint_result(
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
);
if !success {
has_error.raise();
}
Ok(())
};
run_parallelized(paths, operation).await
}
.boxed_local();
futures.push(fut);
if lint_options.fix {
// run sequentially when using `--fix` to lower the chances of weird
@ -419,6 +373,63 @@ impl WorkspaceLinter {
Ok(())
}
fn run_package_rules(
&mut self,
linter: &Arc<CliLinter>,
member_dir: &WorkspaceDirectory,
paths: &[PathBuf],
) -> Option<LocalBoxFuture<Result<(), AnyError>>> {
if self.workspace_module_graph.is_none() {
let module_graph_creator = self.module_graph_creator.clone();
let packages = self.workspace_dir.jsr_packages_for_publish();
self.workspace_module_graph = Some(
async move {
module_graph_creator
.create_and_validate_publish_graph(&packages, true)
.await
.map(Rc::new)
.map_err(Rc::new)
}
.boxed_local()
.shared_local(),
);
}
let workspace_module_graph_future =
self.workspace_module_graph.as_ref().unwrap().clone();
let maybe_publish_config = member_dir.maybe_package_config();
let publish_config = maybe_publish_config?;
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let linter = linter.clone();
let path_urls = paths
.iter()
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
.collect::<HashSet<_>>();
let fut = async move {
let graph = workspace_module_graph_future
.await
.map_err(|err| anyhow!("{:#}", err))?;
let export_urls =
publish_config.config_file.resolve_export_value_urls()?;
if !export_urls.iter().any(|url| path_urls.contains(url)) {
return Ok(()); // entrypoint is not specified, so skip
}
let diagnostics = linter.lint_package(&graph, &export_urls);
if !diagnostics.is_empty() {
has_error.raise();
let mut reporter = reporter_lock.lock();
for diagnostic in &diagnostics {
reporter.visit_diagnostic(diagnostic);
}
}
Ok(())
}
.boxed_local();
Some(fut)
}
pub fn finish(self) -> bool {
debug!("Found {} files", self.file_count);
self.reporter_lock.lock().close(self.file_count);
@ -494,10 +505,27 @@ pub fn print_rules_list(json: bool, maybe_rules_tags: Option<Vec<String>>) {
/// Treats input as TypeScript.
/// Compatible with `--json` flag.
fn lint_stdin(
file_path: &Path,
configured_rules: ConfiguredRules,
deno_lint_config: LintConfig,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
cli_options: &Arc<CliOptions>,
lint_rule_provider: LintRuleProvider,
workspace_lint_options: WorkspaceLintOptions,
lint_flags: LintFlags,
deno_lint_config: DenoLintConfig,
) -> Result<bool, AnyError> {
let start_dir = &cli_options.start_dir;
let reporter_lock = Arc::new(Mutex::new(create_reporter(
workspace_lint_options.reporter_kind,
)));
let lint_config = start_dir
.to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
let lint_options = LintOptions::resolve(lint_config, &lint_flags);
let configured_rules = lint_rule_provider.resolve_lint_rules_err_empty(
lint_options.rules,
start_dir.maybe_deno_json().map(|c| c.as_ref()),
)?;
let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
if let Some(ext) = cli_options.ext_flag() {
file_path.set_extension(ext);
}
let mut source_code = String::new();
if stdin().read_to_string(&mut source_code).is_err() {
return Err(generic_error("Failed to read from stdin"));
@ -509,9 +537,14 @@ fn lint_stdin(
deno_lint_config,
});
linter
.lint_file(file_path, deno_ast::strip_bom(source_code), None)
.map_err(AnyError::from)
let r = linter
.lint_file(&file_path, deno_ast::strip_bom(source_code), None)
.map_err(AnyError::from);
let success =
handle_lint_result(&file_path.to_string_lossy(), r, reporter_lock.clone());
reporter_lock.lock().close(1);
Ok(success)
}
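
Side note (illustration, not part of this diff): lint_stdin now owns the whole stdin flow — resolving the lint config and rules for the start directory, applying the --ext extension to the synthetic file path, reading and BOM-stripping stdin, linting, and reporting through handle_lint_result — and returns a bool success instead of the raw diagnostics. A standalone sketch of just the read-and-strip-BOM step, with deno_ast::strip_bom approximated locally:

use std::io::Read;

// Sketch: read all of stdin and drop a leading UTF-8 byte-order mark before parsing.
fn read_stdin_source() -> std::io::Result<String> {
  let mut source = String::new();
  std::io::stdin().read_to_string(&mut source)?;
  if let Some(stripped) = source.strip_prefix('\u{feff}') {
    return Ok(stripped.to_string());
  }
  Ok(source)
}

fn main() -> std::io::Result<()> {
  let source = read_stdin_source()?;
  println!("read {} bytes from stdin", source.len());
  Ok(())
}
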
fn handle_lint_result(

View file

@ -4,6 +4,7 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@ -23,12 +24,11 @@ use jsonc_parser::cst::CstRootNode;
use jsonc_parser::json;
use crate::args::AddFlags;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::RemoveFlags;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
@ -411,18 +411,18 @@ pub async fn add(
let http_client = cli_factory.http_client_provider();
let deps_http_cache = cli_factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new(
let deps_file_fetcher = CliFileFetcher::new(
deps_http_cache.clone(),
CacheSetting::ReloadAll,
true,
http_client.clone(),
Default::default(),
None,
true,
CacheSetting::ReloadAll,
log::Level::Trace,
);
let npmrc = cli_factory.cli_options().unwrap().npmrc();
deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver =

View file

@ -3,6 +3,7 @@
use std::collections::HashSet;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_semver::package::PackageNv;
@ -10,12 +11,11 @@ use deno_semver::package::PackageReq;
use deno_semver::VersionReq;
use deno_terminal::colors;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::OutdatedFlags;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
use crate::tools::registry::pm::deps::DepKind;
@ -181,15 +181,15 @@ pub async fn outdated(
let workspace = cli_options.workspace();
let http_client = factory.http_client_provider();
let deps_http_cache = factory.global_http_cache()?;
let mut file_fetcher = FileFetcher::new(
let file_fetcher = CliFileFetcher::new(
deps_http_cache.clone(),
CacheSetting::RespectHeaders,
true,
http_client.clone(),
Default::default(),
None,
true,
CacheSetting::RespectHeaders,
log::Level::Trace,
);
file_fetcher.set_download_log_level(log::Level::Trace);
let file_fetcher = Arc::new(file_fetcher);
let npm_fetch_resolver = Arc::new(NpmFetchResolver::new(
file_fetcher.clone(),

View file

@ -11,7 +11,8 @@ use crate::args::ReplFlags;
use crate::cdp;
use crate::colors;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
@ -143,7 +144,7 @@ async fn read_line_and_poll(
async fn read_eval_file(
cli_options: &CliOptions,
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
eval_file: &str,
) -> Result<Arc<str>, AnyError> {
let specifier =
@ -151,7 +152,7 @@ async fn read_eval_file(
let file = file_fetcher.fetch_bypass_permissions(&specifier).await?;
Ok(file.into_text_decoded()?.source)
Ok(TextDecodedFile::decode(file)?.source)
}
#[allow(clippy::print_stdout)]

View file

@ -3,6 +3,7 @@
use std::io::Read;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::File;
use deno_config::deno_json::NodeModulesDirMode;
use deno_core::error::AnyError;
use deno_runtime::WorkerExecutionMode;
@ -11,7 +12,6 @@ use crate::args::EvalFlags;
use crate::args::Flags;
use crate::args::WatchFlagsWithPaths;
use crate::factory::CliFactory;
use crate::file_fetcher::File;
use crate::util;
use crate::util::file_watcher::WatcherRestartMode;
@ -97,7 +97,7 @@ pub async fn run_from_stdin(flags: Arc<Flags>) -> Result<i32, AnyError> {
// Save a fake file into file fetcher cache
// to allow module access by TS compiler
file_fetcher.insert_memory_files(File {
specifier: main_module.clone(),
url: main_module.clone(),
maybe_headers: None,
source: source.into(),
});
@ -184,7 +184,7 @@ pub async fn eval_command(
// Save a fake file into file fetcher cache
// to allow module access by TS compiler.
file_fetcher.insert_memory_files(File {
specifier: main_module.clone(),
url: main_module.clone(),
maybe_headers: None,
source: source_code.into_bytes().into(),
});

View file

@ -8,7 +8,7 @@ use crate::colors;
use crate::display;
use crate::factory::SpecifierInfo;
use crate::factory::WorkspaceFilesFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::ops;
use crate::util::extract::extract_doc_tests;
use crate::util::file_watcher;
@ -1448,7 +1448,7 @@ fn is_supported_test_ext(path: &Path) -> bool {
pub async fn collect_specifiers_for_tests(
patterns: FilePatterns,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
doc: bool,
) -> Result<Vec<(ModuleSpecifier, SpecifierInfo)>, AnyError> {
let vendor_folder = cli_options.vendor_dir_path();

View file

@ -13,6 +13,7 @@ use deno_ast::swc::visit::VisitMut;
use deno_ast::swc::visit::VisitWith as _;
use deno_ast::MediaType;
use deno_ast::SourceRangedForSpanned as _;
use deno_cache_dir::file_fetcher::File;
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier;
use regex::Regex;
@ -20,7 +21,7 @@ use std::collections::BTreeSet;
use std::fmt::Write as _;
use std::sync::Arc;
use crate::file_fetcher::File;
use crate::file_fetcher::TextDecodedFile;
use crate::util::path::mapped_specifier_for_tsc;
/// Extracts doc tests from a given file, transforms them into pseudo test
@ -52,7 +53,7 @@ fn extract_inner(
file: File,
wrap_kind: WrapKind,
) -> Result<Vec<File>, AnyError> {
let file = file.into_text_decoded()?;
let file = TextDecodedFile::decode(file)?;
let exports = match deno_ast::parse_program(deno_ast::ParseParams {
specifier: file.specifier.clone(),
@ -230,7 +231,7 @@ fn extract_files_from_regex_blocks(
.unwrap_or(file_specifier);
Some(File {
specifier: file_specifier,
url: file_specifier,
maybe_headers: None,
source: file_source.into_bytes().into(),
})
@ -558,7 +559,7 @@ fn generate_pseudo_file(
exports: &ExportCollector,
wrap_kind: WrapKind,
) -> Result<File, AnyError> {
let file = file.into_text_decoded()?;
let file = TextDecodedFile::decode(file)?;
let parsed = deno_ast::parse_program(deno_ast::ParseParams {
specifier: file.specifier.clone(),
@ -594,7 +595,7 @@ fn generate_pseudo_file(
log::debug!("{}:\n{}", file.specifier, source);
Ok(File {
specifier: file.specifier,
url: file.specifier,
maybe_headers: None,
source: source.into_bytes().into(),
})
@ -1199,14 +1200,14 @@ Deno.test("file:///main.ts$3-7.ts", async ()=>{
for test in tests {
let file = File {
specifier: ModuleSpecifier::parse(test.input.specifier).unwrap(),
url: ModuleSpecifier::parse(test.input.specifier).unwrap(),
maybe_headers: None,
source: test.input.source.as_bytes().into(),
};
let got_decoded = extract_doc_tests(file)
.unwrap()
.into_iter()
.map(|f| f.into_text_decoded().unwrap())
.map(|f| TextDecodedFile::decode(f).unwrap())
.collect::<Vec<_>>();
let expected = test
.expected
@ -1435,14 +1436,14 @@ add('1', '2');
for test in tests {
let file = File {
specifier: ModuleSpecifier::parse(test.input.specifier).unwrap(),
url: ModuleSpecifier::parse(test.input.specifier).unwrap(),
maybe_headers: None,
source: test.input.source.as_bytes().into(),
};
let got_decoded = extract_snippet_files(file)
.unwrap()
.into_iter()
.map(|f| f.into_text_decoded().unwrap())
.map(|f| TextDecodedFile::decode(f).unwrap())
.collect::<Vec<_>>();
let expected = test
.expected

View file

@ -2,7 +2,7 @@
[package]
name = "deno_broadcast_channel"
version = "0.176.0"
version = "0.177.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_cache"
version = "0.114.0"
version = "0.115.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_canvas"
version = "0.51.0"
version = "0.52.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_console"
version = "0.182.0"
version = "0.183.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_cron"
version = "0.62.0"
version = "0.63.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_crypto"
version = "0.196.0"
version = "0.197.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_fetch"
version = "0.206.0"
version = "0.207.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_ffi"
version = "0.169.0"
version = "0.170.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_fs"
version = "0.92.0"
version = "0.93.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_http"
version = "0.180.0"
version = "0.181.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_io"
version = "0.92.0"
version = "0.93.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_kv"
version = "0.90.0"
version = "0.91.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_napi"
version = "0.113.0"
version = "0.114.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "napi_sym"
version = "0.112.0"
version = "0.113.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_net"
version = "0.174.0"
version = "0.175.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_node"
version = "0.119.0"
version = "0.120.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_telemetry"
version = "0.4.0"
version = "0.5.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_tls"
version = "0.169.0"
version = "0.170.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_url"
version = "0.182.0"
version = "0.183.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_web"
version = "0.213.0"
version = "0.214.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_webgpu"
version = "0.149.0"
version = "0.150.0"
authors = ["the Deno authors"]
edition.workspace = true
license = "MIT"

View file

@ -2,7 +2,7 @@
[package]
name = "deno_webidl"
version = "0.182.0"
version = "0.183.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_websocket"
version = "0.187.0"
version = "0.188.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_webstorage"
version = "0.177.0"
version = "0.178.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_resolver"
version = "0.14.0"
version = "0.15.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "node_resolver"
version = "0.21.0"
version = "0.22.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_npm_cache"
version = "0.2.0"
version = "0.3.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -9,6 +9,7 @@ use std::sync::Arc;
use anyhow::bail;
use anyhow::Context;
use anyhow::Error as AnyError;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::npm::NpmCacheDir;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
@ -90,6 +91,27 @@ pub enum NpmCacheSetting {
}
impl NpmCacheSetting {
pub fn from_cache_setting(cache_setting: &CacheSetting) -> NpmCacheSetting {
match cache_setting {
CacheSetting::Only => NpmCacheSetting::Only,
CacheSetting::ReloadAll => NpmCacheSetting::ReloadAll,
CacheSetting::ReloadSome(values) => {
if values.iter().any(|v| v == "npm:") {
NpmCacheSetting::ReloadAll
} else {
NpmCacheSetting::ReloadSome {
npm_package_names: values
.iter()
.filter_map(|v| v.strip_prefix("npm:"))
.map(|n| n.to_string())
.collect(),
}
}
}
CacheSetting::RespectHeaders => panic!("not supported"),
CacheSetting::Use => NpmCacheSetting::Use,
}
}
pub fn should_use_for_npm_package(&self, package_name: &str) -> bool {
match self {
NpmCacheSetting::ReloadAll => false,
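
Side note (illustration, not part of this diff): from_cache_setting above maps the generic deno_cache_dir CacheSetting onto npm-specific behavior — a bare "npm:" entry means reload every npm package, "npm:<name>" entries reload only the named packages, and RespectHeaders has no npm equivalent, so it panics. A standalone sketch of the prefix handling; npm_reload_targets is a hypothetical helper, not part of the commit.

// Sketch: None means "reload all npm packages"; Some(names) lists the packages
// to reload; entries without an "npm:" prefix are ignored.
fn npm_reload_targets(values: &[String]) -> Option<Vec<String>> {
  if values.iter().any(|v| v == "npm:") {
    return None;
  }
  Some(
    values
      .iter()
      .filter_map(|v| v.strip_prefix("npm:"))
      .map(|n| n.to_string())
      .collect(),
  )
}

fn main() {
  let some = npm_reload_targets(&["npm:chalk".to_string(), "jsr:@std/path".to_string()]);
  assert_eq!(some, Some(vec!["chalk".to_string()]));
  let all = npm_reload_targets(&["npm:".to_string()]);
  assert_eq!(all, None);
}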

View file

@ -2,7 +2,7 @@
[package]
name = "deno_runtime"
version = "0.191.0"
version = "0.192.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno_permissions"
version = "0.42.0"
version = "0.43.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -39,10 +39,11 @@ pub use prompter::PromptCallback;
pub use prompter::PromptResponse;
#[derive(Debug, thiserror::Error)]
#[error("Requires {access}, {}", format_permission_error(.name))]
pub struct PermissionDeniedError {
pub access: String,
pub name: &'static str,
pub enum PermissionDeniedError {
#[error("Requires {access}, {}", format_permission_error(.name))]
Retryable { access: String, name: &'static str },
#[error("Requires {access}, which cannot be granted in this environment")]
Fatal { access: String },
}
fn format_permission_error(name: &'static str) -> String {
@ -144,11 +145,11 @@ impl PermissionState {
)
}
fn error(
fn retryable_error(
name: &'static str,
info: impl FnOnce() -> Option<String>,
) -> PermissionDeniedError {
PermissionDeniedError {
PermissionDeniedError::Retryable {
access: Self::fmt_access(name, info),
name,
}
@ -201,10 +202,12 @@ impl PermissionState {
Self::log_perm_access(name, info);
(Ok(()), true, true)
}
PromptResponse::Deny => (Err(Self::error(name, info)), true, false),
PromptResponse::Deny => {
(Err(Self::retryable_error(name, info)), true, false)
}
}
}
_ => (Err(Self::error(name, info)), false, false),
_ => (Err(Self::retryable_error(name, info)), false, false),
}
}
}
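
Side note (illustration, not part of this diff): PermissionDeniedError becomes an enum so callers can distinguish a Retryable denial (one the user could still address, e.g. by granting at the prompt or re-running with the appropriate --allow-* flag) from a Fatal one that cannot be granted in the current environment. A standalone sketch of the same shape with thiserror, using a simplified message where the real code calls format_permission_error:

use thiserror::Error;

// Sketch: variant-level #[error] attributes give each denial kind its own message.
#[derive(Debug, Error)]
pub enum PermissionDeniedError {
  #[error("Requires {access}, run again with the --allow-{name} flag")]
  Retryable { access: String, name: &'static str },
  #[error("Requires {access}, which cannot be granted in this environment")]
  Fatal { access: String },
}

fn main() {
  let retryable = PermissionDeniedError::Retryable {
    access: "read access to ./secret.txt".to_string(),
    name: "read",
  };
  let fatal = PermissionDeniedError::Fatal {
    access: "env access".to_string(),
  };
  println!("{retryable}");
  println!("{fatal}");
}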

View file

@ -43,7 +43,7 @@ fn conditionally_loads_type_graph() {
.new_command()
.args("bench --reload -L debug run/type_directives_js_main.js")
.run();
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
let output = context
.new_command()
.args("bench --reload -L debug --no-check run/type_directives_js_main.js")

View file

@ -107,5 +107,5 @@ fn loads_type_graph() {
.new_command()
.args("cache --reload -L debug run/type_directives_js_main.js")
.run();
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
}

View file

@ -922,7 +922,7 @@ fn type_directives_js_main() {
.new_command()
.args("run --reload -L debug --check run/type_directives_js_main.js")
.run();
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
let output = context
.new_command()
.args("run --reload -L debug run/type_directives_js_main.js")

View file

@ -111,7 +111,7 @@ fn conditionally_loads_type_graph() {
.new_command()
.args("test --reload -L debug run/type_directives_js_main.js")
.run();
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow_with_options - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch_no_follow - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
let output = context
.new_command()
.args("test --reload -L debug --no-check run/type_directives_js_main.js")

View file

@ -1,3 +1,6 @@
DANGER: TLS certificate validation is disabled for: deno.land
error: Import 'https://localhost:5545/subdir/mod2.ts' failed: error sending request for url (https://localhost:5545/subdir/mod2.ts): client error[WILDCARD]
at file:///[WILDCARD]/cafile_url_imports.ts:[WILDCARD]
error: Import 'https://localhost:5545/subdir/mod2.ts' failed.
0: error sending request for url (https://localhost:5545/subdir/mod2.ts): client error (Connect): invalid peer certificate: UnknownIssuer
1: client error (Connect)
2: invalid peer certificate: UnknownIssuer
at file:///[WILDLINE]/cafile_url_imports.ts:[WILDLINE]

View file

@ -19,6 +19,7 @@
"output": "jsx_import_source_dev.out"
},
"jsx_import_source_pragma_with_config_vendor_dir": {
"tempDir": true,
"args": "run --allow-import --reload --config jsx/deno-jsx.jsonc --no-lock --vendor jsx_import_source_pragma.tsx",
"output": "jsx_import_source.out"
},

View file

@ -577,11 +577,6 @@ async fn main_server(
);
Ok(res)
}
(_, "/bad_redirect") => {
let mut res = Response::new(empty_body());
*res.status_mut() = StatusCode::FOUND;
Ok(res)
}
(_, "/server_error") => {
let mut res = Response::new(empty_body());
*res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;