
Merge branch 'main' into DENO_ROOT_INSTALL_Fix

Gowtham K 2024-12-26 08:00:34 +05:30 committed by GitHub
commit c8a7aa0300
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2452 changed files with 58837 additions and 17317 deletions

View file

@ -35,7 +35,7 @@ jobs:
- name: Install deno
uses: denoland/setup-deno@v2
with:
deno-version: v1.x
deno-version: v2.x
- name: Publish
env:

View file

@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 27;
const cacheVersion = 32;
const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-24.04-xl";
@ -59,6 +59,15 @@ const Runners = {
const prCacheKeyPrefix =
`${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
const prCacheKey = `${prCacheKeyPrefix}\${{ github.sha }}`;
const prCachePath = [
// this must match for save and restore (https://github.com/actions/cache/issues/1444)
"./target",
"!./target/*/gn_out",
"!./target/*/gn_root",
"!./target/*/*.zip",
"!./target/*/*.tar.gz",
].join("\n");
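For reference, with cacheVersion 32 the generated keys expand per matrix entry; for a hypothetical Linux x86_64 release test job they would read:

save key:     32-cargo-target-ubuntu-24.04-x86_64-release-test-${{ github.sha }}
restore key:  32-cargo-target-ubuntu-24.04-x86_64-release-test-

and, per the actions/cache issue linked above, the path list must be identical between the save and restore steps for a restored cache to be usable.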
// Note that you may need to add more versions to the `apt-get remove` line below if you change this
const llvmVersion = 19;
@ -196,7 +205,7 @@ const installNodeStep = {
const installDenoStep = {
name: "Install Deno",
uses: "denoland/setup-deno@v2",
with: { "deno-version": "v1.x" },
with: { "deno-version": "v2.x" },
};
const authenticateWithGoogleCloud = {
@ -475,6 +484,27 @@ const ci = {
" -czvf target/release/deno_src.tar.gz -C .. deno",
].join("\n"),
},
{
name: "Cache Cargo home",
uses: "actions/cache@v4",
with: {
// See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
// Note that with the new sparse registry format, we no longer have to cache a `.git` dir
path: [
"~/.cargo/.crates.toml",
"~/.cargo/.crates2.json",
"~/.cargo/bin",
"~/.cargo/registry/index",
"~/.cargo/registry/cache",
"~/.cargo/git/db",
].join("\n"),
key:
`${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`,
// We will try to restore from the closest cargo-home we can find
"restore-keys":
`${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-`,
},
},
installRustStep,
{
if:
@ -598,23 +628,6 @@ const ci = {
installBenchTools,
].join("\n"),
},
{
name: "Cache Cargo home",
uses: "actions/cache@v4",
with: {
// See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci
// Note that with the new sparse registry format, we no longer have to cache a `.git` dir
path: [
"~/.cargo/registry/index",
"~/.cargo/registry/cache",
].join("\n"),
key:
`${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ hashFiles('Cargo.lock') }}`,
// We will try to restore from the closest cargo-home we can find
"restore-keys":
`${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ matrix.arch }}`,
},
},
{
// Restore cache from the latest 'main' branch build.
name: "Restore cache build output (PR)",
@ -622,13 +635,7 @@ const ci = {
if:
"github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/')",
with: {
path: [
"./target",
"!./target/*/gn_out",
"!./target/*/gn_root",
"!./target/*/*.zip",
"!./target/*/*.tar.gz",
].join("\n"),
path: prCachePath,
key: "never_saved",
"restore-keys": prCacheKeyPrefix,
},
@ -1080,14 +1087,8 @@ const ci = {
if:
"(matrix.job == 'test' || matrix.job == 'lint') && github.ref == 'refs/heads/main'",
with: {
path: [
"./target",
"!./target/*/gn_out",
"!./target/*/*.zip",
"!./target/*/*.sha256sum",
"!./target/*/*.tar.gz",
].join("\n"),
key: prCacheKeyPrefix + "${{ github.sha }}",
path: prCachePath,
key: prCacheKey,
},
},
]),

View file

@ -174,13 +174,26 @@ jobs:
mkdir -p target/release
tar --exclude=".git*" --exclude=target --exclude=third_party/prebuilt \
-czvf target/release/deno_src.tar.gz -C .. deno
- name: Cache Cargo home
uses: actions/cache@v4
with:
path: |-
~/.cargo/.crates.toml
~/.cargo/.crates2.json
~/.cargo/bin
~/.cargo/registry/index
~/.cargo/registry/cache
~/.cargo/git/db
key: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '32-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
if: '!(matrix.skip)'
- uses: dsherret/rust-toolchain-file@v1
if: '!(matrix.skip)'
- if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
name: Install Deno
uses: denoland/setup-deno@v2
with:
deno-version: v1.x
deno-version: v2.x
- name: Install Python
uses: actions/setup-python@v5
with:
@ -355,15 +368,6 @@ jobs:
- name: Install benchmark tools
if: '!(matrix.skip) && (matrix.job == ''bench'')'
run: ./tools/install_prebuilt.js wrk hyperfine
- name: Cache Cargo home
uses: actions/cache@v4
with:
path: |-
~/.cargo/registry/index
~/.cargo/registry/cache
key: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '27-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)'
- name: Restore cache build output (PR)
uses: actions/cache/restore@v4
if: '!(matrix.skip) && (github.ref != ''refs/heads/main'' && !startsWith(github.ref, ''refs/tags/''))'
@ -375,7 +379,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache
@ -682,10 +686,10 @@ jobs:
path: |-
./target
!./target/*/gn_out
!./target/*/gn_root
!./target/*/*.zip
!./target/*/*.sha256sum
!./target/*/*.tar.gz
key: '27-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '32-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-24.04

.github/workflows/npm_publish.yml (new file, 45 lines)
View file

@ -0,0 +1,45 @@
name: npm_publish
on:
workflow_dispatch:
inputs:
version:
description: 'Version'
type: string
release:
types: [published]
permissions:
id-token: write
jobs:
build:
name: npm publish
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- name: Configure git
run: |
git config --global core.symlinks true
git config --global fetch.parallel 32
- name: Clone repository
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install Deno
uses: denoland/setup-deno@v2
with:
deno-version: v2.x
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: '22.x'
registry-url: 'https://registry.npmjs.org'
- name: Publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: ./tools/release/npm/build.ts ${{ github.event.inputs.version }} --publish

View file

@ -42,7 +42,7 @@ jobs:
- name: Install deno
uses: denoland/setup-deno@v2
with:
deno-version: v1.x
deno-version: v2.x
- name: Install rust-codesign
run: |-

View file

@ -36,7 +36,7 @@ jobs:
- name: Install deno
uses: denoland/setup-deno@v2
with:
deno-version: v1.x
deno-version: v2.x
- name: Create Gist URL
env:

View file

@ -41,7 +41,7 @@ jobs:
- name: Install deno
uses: denoland/setup-deno@v2
with:
deno-version: v1.x
deno-version: v2.x
- name: Run version bump
run: |

Cargo.lock (generated, 965 changes)

File diff suppressed because it is too large.

View file

@ -21,6 +21,7 @@ members = [
"ext/napi/sym",
"ext/net",
"ext/node",
"ext/telemetry",
"ext/url",
"ext/web",
"ext/webgpu",
@ -29,6 +30,7 @@ members = [
"ext/webstorage",
"resolvers/deno",
"resolvers/node",
"resolvers/npm_cache",
"runtime",
"runtime/permissions",
"tests",
@ -45,20 +47,20 @@ license = "MIT"
repository = "https://github.com/denoland/deno"
[workspace.dependencies]
deno_ast = { version = "=0.43.3", features = ["transpiling"] }
deno_core = { version = "0.322.0" }
deno_ast = { version = "=0.44.0", features = ["transpiling"] }
deno_core = { version = "0.327.0" }
deno_bench_util = { version = "0.173.0", path = "./bench_util" }
deno_config = { version = "=0.39.2", features = ["workspace", "sync"] }
deno_lockfile = "=0.23.1"
deno_bench_util = { version = "0.178.0", path = "./bench_util" }
deno_config = { version = "=0.41.0", features = ["workspace", "sync"] }
deno_lockfile = "=0.24.0"
deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
deno_npm = "=0.25.4"
deno_path_util = "=0.2.1"
deno_permissions = { version = "0.39.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.188.0", path = "./runtime" }
deno_semver = "=0.5.16"
deno_npm = "=0.27.0"
deno_path_util = "=0.2.2"
deno_permissions = { version = "0.43.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.192.0", path = "./runtime" }
deno_semver = "=0.7.1"
deno_terminal = "0.2.0"
napi_sym = { version = "0.109.0", path = "./ext/napi/sym" }
napi_sym = { version = "0.114.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.4"
@ -67,32 +69,34 @@ denokv_remote = "0.8.4"
denokv_sqlite = { default-features = false, version = "0.8.4" }
# exts
deno_broadcast_channel = { version = "0.173.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.111.0", path = "./ext/cache" }
deno_canvas = { version = "0.48.0", path = "./ext/canvas" }
deno_console = { version = "0.179.0", path = "./ext/console" }
deno_cron = { version = "0.59.0", path = "./ext/cron" }
deno_crypto = { version = "0.193.0", path = "./ext/crypto" }
deno_fetch = { version = "0.203.0", path = "./ext/fetch" }
deno_ffi = { version = "0.166.0", path = "./ext/ffi" }
deno_fs = { version = "0.89.0", path = "./ext/fs" }
deno_http = { version = "0.177.0", path = "./ext/http" }
deno_io = { version = "0.89.0", path = "./ext/io" }
deno_kv = { version = "0.87.0", path = "./ext/kv" }
deno_napi = { version = "0.110.0", path = "./ext/napi" }
deno_net = { version = "0.171.0", path = "./ext/net" }
deno_node = { version = "0.116.0", path = "./ext/node" }
deno_tls = { version = "0.166.0", path = "./ext/tls" }
deno_url = { version = "0.179.0", path = "./ext/url" }
deno_web = { version = "0.210.0", path = "./ext/web" }
deno_webgpu = { version = "0.146.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.179.0", path = "./ext/webidl" }
deno_websocket = { version = "0.184.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.174.0", path = "./ext/webstorage" }
deno_broadcast_channel = { version = "0.178.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.116.0", path = "./ext/cache" }
deno_canvas = { version = "0.53.0", path = "./ext/canvas" }
deno_console = { version = "0.184.0", path = "./ext/console" }
deno_cron = { version = "0.64.0", path = "./ext/cron" }
deno_crypto = { version = "0.198.0", path = "./ext/crypto" }
deno_fetch = { version = "0.208.0", path = "./ext/fetch" }
deno_ffi = { version = "0.171.0", path = "./ext/ffi" }
deno_fs = { version = "0.94.0", path = "./ext/fs" }
deno_http = { version = "0.182.0", path = "./ext/http" }
deno_io = { version = "0.94.0", path = "./ext/io" }
deno_kv = { version = "0.92.0", path = "./ext/kv" }
deno_napi = { version = "0.115.0", path = "./ext/napi" }
deno_net = { version = "0.176.0", path = "./ext/net" }
deno_node = { version = "0.122.0", path = "./ext/node" }
deno_telemetry = { version = "0.6.0", path = "./ext/telemetry" }
deno_tls = { version = "0.171.0", path = "./ext/tls" }
deno_url = { version = "0.184.0", path = "./ext/url" }
deno_web = { version = "0.215.0", path = "./ext/web" }
deno_webgpu = { version = "0.151.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.184.0", path = "./ext/webidl" }
deno_websocket = { version = "0.189.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.179.0", path = "./ext/webstorage" }
# resolvers
deno_resolver = { version = "0.11.0", path = "./resolvers/deno" }
node_resolver = { version = "0.18.0", path = "./resolvers/node" }
deno_npm_cache = { version = "0.3.0", path = "./resolvers/npm_cache" }
deno_resolver = { version = "0.15.0", path = "./resolvers/deno" }
node_resolver = { version = "0.22.0", path = "./resolvers/node" }
aes = "=0.8.3"
anyhow = "1.0.57"
@ -100,10 +104,11 @@ async-trait = "0.1.73"
base32 = "=0.5.1"
base64 = "0.21.7"
bencher = "0.1"
boxed_error = "0.2.2"
boxed_error = "0.2.3"
brotli = "6.0.0"
bytes = "1.4.0"
cache_control = "=0.2.0"
capacity_builder = "0.5.0"
cbc = { version = "=0.1.2", features = ["alloc"] }
# Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
# Instead use util::time::utc_now()
@ -112,9 +117,11 @@ color-print = "0.3.5"
console_static_text = "=0.8.1"
dashmap = "5.5.3"
data-encoding = "2.3.3"
data-url = "=0.3.0"
deno_cache_dir = "=0.13.2"
deno_package_json = { version = "0.1.2", default-features = false }
data-url = "=0.3.1"
deno_cache_dir = "=0.15.0"
deno_error = "=0.5.2"
deno_package_json = { version = "0.3.0", default-features = false }
deno_unsync = "0.4.2"
dlopen2 = "0.6.1"
ecb = "=0.1.2"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem", "jwk"] }
@ -128,7 +135,7 @@ fs3 = "0.5.0"
futures = "0.3.21"
glob = "0.3.1"
h2 = "0.4.4"
hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
hickory-resolver = { version = "0.25.0-alpha.4", features = ["tokio-runtime", "serde"] }
http = "1.0"
http-body = "1.0"
http-body-util = "0.1.2"
@ -136,13 +143,13 @@ http_v02 = { package = "http", version = "0.2.9" }
httparse = "1.8.0"
hyper = { version = "1.4.1", features = ["full"] }
hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] }
hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3"
jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3"
libc = "0.2.126"
libc = "0.2.168"
libz-sys = { version = "1.1.20", default-features = false }
log = { version = "0.4.20", features = ["kv"] }
lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases
@ -189,13 +196,13 @@ spki = "0.7.2"
tar = "=0.4.40"
tempfile = "3.4.0"
termcolor = "1.1.3"
thiserror = "1.0.61"
thiserror = "2.0.3"
tokio = { version = "1.36.0", features = ["full"] }
tokio-metrics = { version = "0.3.0", features = ["rt"] }
tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
tokio-socks = "0.5.1"
tokio-util = "0.7.4"
tower = { version = "0.4.13", default-features = false, features = ["util"] }
tower = { version = "0.5.2", default-features = false, features = ["retry", "util"] }
tower-http = { version = "0.6.1", features = ["decompression-br", "decompression-gzip"] }
tower-lsp = { package = "deno_tower_lsp", version = "0.1.0", features = ["proposed"] }
tower-service = "0.3.2"
@ -234,7 +241,7 @@ nix = "=0.27.1"
# windows deps
junction = "=0.2.0"
winapi = "=0.3.9"
windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel"] }
windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Media", "Win32_Storage_FileSystem", "Win32_System_IO", "Win32_System_WindowsProgramming", "Wdk", "Wdk_System", "Wdk_System_SystemInformation", "Win32_Security", "Win32_System_Pipes", "Wdk_Storage_FileSystem", "Win32_System_Registry", "Win32_System_Kernel", "Win32_System_Threading", "Win32_UI", "Win32_UI_Shell"] }
winres = "=0.1.12"
[profile.release]

View file

@ -6,6 +6,90 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at:
https://github.com/denoland/deno_install
### 2.1.4 / 2024.12.11
- feat(unstable): support caching npm dependencies only as they're needed
(#27300)
- fix(compile): correct read length for transpiled typescript files (#27301)
- fix(ext/node): accept file descriptor in fs.readFile(Sync) (#27252)
- fix(ext/node): handle Float16Array in node:v8 module (#27285)
- fix(lint): do not error providing --allow-import (#27321)
- fix(node): update list of builtin node modules, add missing export to
_http_common (#27294)
- fix(outdated): error when there are no config files (#27306)
- fix(outdated): respect --quiet flag for hints (#27317)
- fix(outdated): show a suggestion for updating (#27304)
- fix(task): do not always kill child on ctrl+c on windows (#27269)
- fix(unstable): don't unwrap optional state in otel (#27292)
- fix: do not error when subpath has an @ symbol (#27290)
- fix: do not panic when fetching invalid file url on Windows (#27259)
- fix: replace the @deno-types with @ts-types (#27310)
- perf(compile): improve FileBackedVfsFile (#27299)
### 2.1.3 / 2024.12.05
- feat(unstable): add metrics to otel (#27143)
- fix(fmt): stable formatting of HTML files with JS (#27164)
- fix(install): use locked version of jsr package when fetching exports (#27237)
- fix(node/fs): support `recursive` option in readdir (#27179)
- fix(node/worker_threads): data url not encoded properly with eval (#27184)
- fix(outdated): allow `--latest` without `--update` (#27227)
- fix(task): `--recursive` option not working (#27183)
- fix(task): don't panic with filter on missing task argument (#27180)
- fix(task): forward signals to spawned sub-processes on unix (#27141)
- fix(task): kill descendants when killing task process on Windows (#27163)
- fix(task): only pass args to root task (#27213)
- fix(unstable): otel context with multiple keys (#27230)
- fix(unstable/temporal): respect locale in `Duration.prototype.toLocaleString`
(#27000)
- fix: clear dep analysis when module loading is done (#27204)
- fix: improve auto-imports for npm packages (#27224)
- fix: support `workspace:^` and `workspace:~` version constraints (#27096)
### 2.1.2 / 2024.11.28
- feat(unstable): Instrument Deno.serve (#26964)
- feat(unstable): Instrument fetch (#27057)
- feat(unstable): repurpose `--unstable-detect-cjs` to attempt loading more
modules as cjs (#27094)
- fix(check): support jsdoc `@import` tag (#26991)
- fix(compile): correct buffered reading of assets and files (#27008)
- fix(compile): do not error embedding same symlink via multiple methods
(#27015)
- fix(compile): handle TypeScript file included as asset (#27032)
- fix(ext/fetch): don't throw when `bodyUsed` inspect after upgrade (#27088)
- fix(ext/node): `tls.connect` socket upgrades (#27125)
- fix(ext/node): add `fs.promises.fstat` and `FileHandle#stat` (#26719)
- fix(ext/webgpu): normalize limits to number (#27072)
- fix(ext/webgpu): use correct variable name (#27108)
- fix(ext/websocket): don't throw exception when sending to closed socket
(#26932)
- fix(fmt): return `None` if sql fmt result is the same (#27014)
- fix(info): resolve bare specifier pointing to workspace member (#27020)
- fix(init): always force managed node modules (#27047)
- fix(init): support scoped npm packages (#27128)
- fix(install): don't re-set up node_modules if running lifecycle script
(#26984)
- fix(lsp): remove stray debug output (#27010)
- fix(lsp): support task object notation for tasks request (#27076)
- fix(lsp): wasm file import completions (#27018)
- fix(node): correct resolution of dynamic import of esm from cjs (#27071)
- fix(node/fs): add missing stat path argument validation (#27086)
- fix(node/fs): missing uv error context for readFile (#27011)
- fix(node/http): casing ignored in ServerResponse.hasHeader() (#27105)
- fix(node/timers): error when passing id to clearTimeout/clearInterval (#27130)
- fix(runtime/ops): Fix watchfs remove event (#27041)
- fix(streams): reject `string` in `ReadableStream.from` type (#25116)
- fix(task): handle carriage return in task description (#27099)
- fix(task): handle multiline descriptions properly (#27069)
- fix(task): strip ansi codes and control chars when printing tasks (#27100)
- fix(tools/doc): HTML resolve main entrypoint from config file (#27103)
- fix: support bun specifiers in JSR publish (#24588)
- fix: support non-function exports in Wasm modules (#26992)
- perf(compile): read embedded files as static references when UTF-8 and reading
as strings (#27033)
- perf(ext/webstorage): use object wrap for `Storage` (#26931)
### 2.1.1 / 2024.11.21
- docs(add): clarification to add command (#26968)

View file

@ -2,7 +2,7 @@
[package]
name = "deno_bench_util"
version = "0.173.0"
version = "0.178.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

View file

@ -2,7 +2,7 @@
[package]
name = "deno"
version = "2.1.1"
version = "2.1.4"
authors.workspace = true
default-run = "deno"
edition.workspace = true
@ -72,17 +72,20 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
deno_cache_dir.workspace = true
deno_config.workspace = true
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "0.160.0", features = ["rust", "comrak"] }
deno_graph = { version = "=0.85.0" }
deno_lint = { version = "=0.68.0", features = ["docs"] }
deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] }
deno_error.workspace = true
deno_graph = { version = "=0.86.5" }
deno_lint = { version = "=0.68.2", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm.workspace = true
deno_npm_cache.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_semver.workspace = true
deno_task_shell = "=0.18.1"
deno_task_shell = "=0.20.2"
deno_telemetry.workspace = true
deno_terminal.workspace = true
libsui = "0.5.0"
node_resolver.workspace = true
@ -91,8 +94,10 @@ anstream = "0.6.14"
async-trait.workspace = true
base64.workspace = true
bincode = "=1.3.3"
boxed_error.workspace = true
bytes.workspace = true
cache_control.workspace = true
capacity_builder.workspace = true
chrono = { workspace = true, features = ["now"] }
clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
clap_complete = "=4.5.24"
@ -107,7 +112,7 @@ dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.2"
dprint-plugin-typescript = "=0.93.3"
env_logger = "=0.10.0"
fancy-regex = "=0.10.0"
faster-hex.workspace = true
@ -129,7 +134,7 @@ libz-sys.workspace = true
log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true
malva = "=0.11.0"
markup_fmt = "=0.16.0"
markup_fmt = "=0.18.0"
memmem.workspace = true
monch.workspace = true
notify.workspace = true
@ -151,8 +156,7 @@ serde_repr.workspace = true
sha2.workspace = true
shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] }
# NOTE(bartlomieju): using temporary fork for now, revert back to `sqlformat-rs` later
sqlformat = { package = "deno_sqlformat", version = "0.3.2" }
sqlformat = "=0.3.2"
strsim = "0.11.1"
tar.workspace = true
tempfile.workspace = true

View file

@ -18,7 +18,7 @@ impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
fn read_to_string_lossy(
&self,
path: &std::path::Path,
) -> Result<String, std::io::Error> {
) -> Result<std::borrow::Cow<'static, str>, std::io::Error> {
self
.0
.read_text_file_lossy_sync(path, None)
@ -64,6 +64,15 @@ impl<'a> deno_config::fs::DenoConfigFs for DenoConfigFsAdapter<'a> {
}
}
pub fn import_map_deps(
import_map: &serde_json::Value,
) -> HashSet<JsrDepPackageReq> {
let values = imports_values(import_map.get("imports"))
.into_iter()
.chain(scope_values(import_map.get("scopes")));
values_to_set(values)
}
pub fn deno_json_deps(
config: &deno_config::deno_json::ConfigFile,
) -> HashSet<JsrDepPackageReq> {
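A rough standalone sketch of what import_map_deps scans: the top-level "imports" values plus every scope's values (the real code maps these strings into JsrDepPackageReq values via imports_values/scope_values; the serde_json walk below is an illustrative simplification):

use serde_json::{json, Value};

fn specifier_strings(import_map: &Value) -> Vec<String> {
    let mut out = Vec::new();
    // top-level "imports" values
    if let Some(imports) = import_map.get("imports").and_then(Value::as_object) {
        out.extend(imports.values().filter_map(Value::as_str).map(String::from));
    }
    // every scope's values
    if let Some(scopes) = import_map.get("scopes").and_then(Value::as_object) {
        for scope in scopes.values().filter_map(Value::as_object) {
            out.extend(scope.values().filter_map(Value::as_str).map(String::from));
        }
    }
    out
}

fn main() {
    let map = json!({
        "imports": { "@std/path": "jsr:@std/path@^1.0.0" },
        "scopes": { "/vendor/": { "chalk": "npm:chalk@5" } }
    });
    println!("{:?}", specifier_strings(&map));
}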

View file

@ -36,7 +36,8 @@ use deno_path_util::normalize_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::deno_permissions::SysDescriptor;
use deno_runtime::ops::otel::OtelConfig;
use deno_telemetry::OtelConfig;
use deno_telemetry::OtelConsoleConfig;
use log::debug;
use log::Level;
use serde::Deserialize;
@ -245,7 +246,7 @@ pub struct InstallFlagsGlobal {
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum InstallKind {
pub enum InstallFlags {
Local(InstallFlagsLocal),
Global(InstallFlagsGlobal),
}
@ -257,11 +258,6 @@ pub enum InstallFlagsLocal {
Entrypoints(Vec<String>),
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct InstallFlags {
pub kind: InstallKind,
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct JSONReferenceFlags {
pub json: deno_core::serde_json::Value,
@ -598,7 +594,9 @@ pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub npm_lazy_caching: bool,
pub features: Vec<String>, // --unstable-kv --unstable-cron
}
@ -989,21 +987,41 @@ impl Flags {
args
}
pub fn otel_config(&self) -> Option<OtelConfig> {
if self
pub fn otel_config(&self) -> OtelConfig {
let has_unstable_flag = self
.unstable_config
.features
.contains(&String::from("otel"))
{
Some(OtelConfig {
runtime_name: Cow::Borrowed("deno"),
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
.is_ok(),
..Default::default()
})
.contains(&String::from("otel"));
let otel_var = |name| match std::env::var(name) {
Ok(s) if s.to_lowercase() == "true" => Some(true),
Ok(s) if s.to_lowercase() == "false" => Some(false),
_ => None,
};
let disabled =
!has_unstable_flag || otel_var("OTEL_SDK_DISABLED").unwrap_or(false);
let default = !disabled && otel_var("OTEL_DENO").unwrap_or(false);
OtelConfig {
tracing_enabled: !disabled
&& otel_var("OTEL_DENO_TRACING").unwrap_or(default),
console: match std::env::var("OTEL_DENO_CONSOLE").as_deref() {
Ok(_) if disabled => OtelConsoleConfig::Ignore,
Ok("ignore") => OtelConsoleConfig::Ignore,
Ok("capture") => OtelConsoleConfig::Capture,
Ok("replace") => OtelConsoleConfig::Replace,
_ => {
if default {
OtelConsoleConfig::Capture
} else {
None
OtelConsoleConfig::Ignore
}
}
},
deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC")
.as_deref()
== Ok("1"),
}
}
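The precedence in the rewritten otel_config: the unstable flag gates everything, OTEL_SDK_DISABLED=true force-disables, OTEL_DENO=true turns the default on, and OTEL_DENO_TRACING / OTEL_DENO_CONSOLE refine individual signals. A minimal standalone sketch of that boolean dance (same env-var names as above; has_unstable_flag stands in for the parsed feature list check):

fn otel_var(name: &str) -> Option<bool> {
    match std::env::var(name) {
        Ok(s) if s.to_lowercase() == "true" => Some(true),
        Ok(s) if s.to_lowercase() == "false" => Some(false),
        _ => None,
    }
}

fn main() {
    let has_unstable_flag = true; // stand-in for `--unstable-otel`
    let disabled =
        !has_unstable_flag || otel_var("OTEL_SDK_DISABLED").unwrap_or(false);
    let default = !disabled && otel_var("OTEL_DENO").unwrap_or(false);
    let tracing_enabled =
        !disabled && otel_var("OTEL_DENO_TRACING").unwrap_or(default);
    println!("tracing_enabled = {tracing_enabled}");
}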
@ -2663,10 +2681,10 @@ Display outdated dependencies:
<p(245)>deno outdated</>
<p(245)>deno outdated --compatible</>
Update dependencies:
Update dependencies to latest semver compatible versions:
<p(245)>deno outdated --update</>
Update dependencies to latest versions, ignoring semver requirements:
<p(245)>deno outdated --update --latest</>
<p(245)>deno outdated --update</>
Filters can be used to select which packages to act on. Filters can include wildcards (*) to match multiple packages.
<p(245)>deno outdated --update --latest \"@std/*\"</>
@ -2702,7 +2720,6 @@ Specific version requirements to update to can be specified:
.help(
"Update to the latest version, regardless of semver constraints",
)
.requires("update")
.conflicts_with("compatible"),
)
.arg(
@ -2904,6 +2921,7 @@ To ignore linting on an entire file, you can add an ignore comment at the top of
.arg(watch_arg(false))
.arg(watch_exclude_arg())
.arg(no_clear_screen_arg())
.arg(allow_import_arg())
})
}
@ -4373,7 +4391,7 @@ impl CommandExt for Command {
).arg(
Arg::new("unstable-detect-cjs")
.long("unstable-detect-cjs")
.help("Reads the package.json type field in a project to treat .js files as .cjs")
.help("Treats ambiguous .js, .jsx, .ts, .tsx files as CommonJS modules in more cases")
.value_parser(FalseyValueParser::new())
.action(ArgAction::SetTrue)
.hide(true)
@ -4406,6 +4424,16 @@ impl CommandExt for Command {
})
.help_heading(UNSTABLE_HEADING)
.display_order(next_display_order())
).arg(
Arg::new("unstable-npm-lazy-caching")
.long("unstable-npm-lazy-caching")
.help("Enable unstable lazy caching of npm dependencies, downloading them only as needed (disabled: all npm packages in package.json are installed on startup; enabled: only npm packages that are actually referenced in an import are installed")
.env("DENO_UNSTABLE_NPM_LAZY_CACHING")
.value_parser(FalseyValueParser::new())
.action(ArgAction::SetTrue)
.hide(true)
.help_heading(UNSTABLE_HEADING)
.display_order(next_display_order()),
);
for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS.iter() {
@ -4919,15 +4947,14 @@ fn install_parse(
let module_url = cmd_values.next().unwrap();
let args = cmd_values.collect();
flags.subcommand = DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(InstallFlagsGlobal {
flags.subcommand =
DenoSubcommand::Install(InstallFlags::Global(InstallFlagsGlobal {
name,
module_url,
args,
root,
force,
}),
});
}));
return Ok(());
}
@ -4936,22 +4963,19 @@ fn install_parse(
allow_scripts_arg_parse(flags, matches)?;
if matches.get_flag("entrypoint") {
let entrypoints = matches.remove_many::<String>("cmd").unwrap_or_default();
flags.subcommand = DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(InstallFlagsLocal::Entrypoints(
entrypoints.collect(),
)),
});
flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
InstallFlagsLocal::Entrypoints(entrypoints.collect()),
));
} else if let Some(add_files) = matches
.remove_many("cmd")
.map(|packages| add_parse_inner(matches, Some(packages)))
{
flags.subcommand = DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(InstallFlagsLocal::Add(add_files)),
})
flags.subcommand = DenoSubcommand::Install(InstallFlags::Local(
InstallFlagsLocal::Add(add_files),
))
} else {
flags.subcommand = DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(InstallFlagsLocal::TopLevel),
});
flags.subcommand =
DenoSubcommand::Install(InstallFlags::Local(InstallFlagsLocal::TopLevel));
}
Ok(())
}
@ -5083,6 +5107,7 @@ fn lint_parse(
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionOnly);
ext_arg_parse(flags, matches);
config_args_parse(flags, matches);
allow_import_parse(flags, matches);
let files = match matches.remove_many::<String>("files") {
Some(f) => f.collect(),
@ -5277,8 +5302,15 @@ fn task_parse(
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
node_modules_arg_parse(flags, matches);
let filter = matches.remove_one::<String>("filter");
let recursive = matches.get_flag("recursive") || filter.is_some();
let mut recursive = matches.get_flag("recursive");
let filter = if let Some(filter) = matches.remove_one::<String>("filter") {
recursive = false;
Some(filter)
} else if recursive {
Some("*".to_string())
} else {
None
};
let mut task_flags = TaskFlags {
cwd: matches.remove_one::<String>("cwd"),
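Net effect of the task_parse change above: an explicit --filter wins and forces recursive to false, while a bare --recursive now behaves like --filter '*' (matching the updated tests further down). A hedged standalone sketch of just that decision:

fn resolve_task_scope(
    recursive_flag: bool,
    filter_arg: Option<String>,
) -> (bool, Option<String>) {
    let mut recursive = recursive_flag;
    let filter = if let Some(f) = filter_arg {
        recursive = false; // an explicit filter supersedes --recursive
        Some(f)
    } else if recursive {
        Some("*".to_string()) // bare --recursive matches every member
    } else {
        None
    };
    (recursive, filter)
}

fn main() {
    assert_eq!(resolve_task_scope(true, None), (true, Some("*".to_string())));
    assert_eq!(
        resolve_task_scope(true, Some("pkg".into())),
        (false, Some("pkg".to_string()))
    );
}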
@ -5986,8 +6018,11 @@ fn unstable_args_parse(
flags.unstable_config.bare_node_builtins =
matches.get_flag("unstable-bare-node-builtins");
flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs");
flags.unstable_config.sloppy_imports =
matches.get_flag("unstable-sloppy-imports");
flags.unstable_config.npm_lazy_caching =
matches.get_flag("unstable-npm-lazy-caching");
if matches!(cfg, UnstableArgsConfig::ResolutionAndRuntime) {
for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS {
@ -7133,6 +7168,7 @@ mod tests {
let r = flags_from_vec(svec![
"deno",
"lint",
"--allow-import",
"--watch",
"script_1.ts",
"script_2.ts"
@ -7154,6 +7190,10 @@ mod tests {
compact: false,
watch: Some(Default::default()),
}),
permissions: PermissionFlags {
allow_import: Some(vec![]),
..Default::default()
},
..Flags::default()
}
);
@ -8591,15 +8631,15 @@ mod tests {
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(InstallFlagsGlobal {
subcommand: DenoSubcommand::Install(InstallFlags::Global(
InstallFlagsGlobal {
name: None,
module_url: "jsr:@std/http/file-server".to_string(),
args: vec![],
root: None,
force: false,
}),
}),
}
),),
..Flags::default()
}
);
@ -8613,15 +8653,15 @@ mod tests {
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(InstallFlagsGlobal {
subcommand: DenoSubcommand::Install(InstallFlags::Global(
InstallFlagsGlobal {
name: None,
module_url: "jsr:@std/http/file-server".to_string(),
args: vec![],
root: None,
force: false,
}),
}),
}
),),
..Flags::default()
}
);
@ -8634,15 +8674,15 @@ mod tests {
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(InstallFlagsGlobal {
subcommand: DenoSubcommand::Install(InstallFlags::Global(
InstallFlagsGlobal {
name: Some("file_server".to_string()),
module_url: "jsr:@std/http/file-server".to_string(),
args: svec!["foo", "bar"],
root: Some("/foo".to_string()),
force: true,
}),
}),
}
),),
import_map_path: Some("import_map.json".to_string()),
no_remote: true,
config_flag: ConfigFlag::Path("tsconfig.json".to_owned()),
@ -10537,7 +10577,7 @@ mod tests {
cwd: None,
task: Some("build".to_string()),
is_run: false,
recursive: true,
recursive: false,
filter: Some("*".to_string()),
eval: false,
}),
@ -10554,7 +10594,7 @@ mod tests {
task: Some("build".to_string()),
is_run: false,
recursive: true,
filter: None,
filter: Some("*".to_string()),
eval: false,
}),
..Flags::default()
@ -10570,7 +10610,7 @@ mod tests {
task: Some("build".to_string()),
is_run: false,
recursive: true,
filter: None,
filter: Some("*".to_string()),
eval: false,
}),
..Flags::default()
@ -11196,9 +11236,9 @@ mod tests {
..Flags::default()
},
"install" => Flags {
subcommand: DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(InstallFlagsLocal::Add(flags)),
}),
subcommand: DenoSubcommand::Install(InstallFlags::Local(
InstallFlagsLocal::Add(flags),
)),
..Flags::default()
},
_ => unreachable!(),
@ -11685,6 +11725,14 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
recursive: false,
},
),
(
svec!["--latest"],
OutdatedFlags {
filters: svec![],
kind: OutdatedKind::PrintOutdated { compatible: false },
recursive: false,
},
),
];
for (input, expected) in cases {
let mut args = svec!["deno", "outdated"];

View file

@ -4,21 +4,21 @@ use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
pub async fn resolve_import_map_value_from_specifier(
specifier: &Url,
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
) -> Result<serde_json::Value, AnyError> {
if specifier.scheme() == "data" {
let data_url_text =
deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
Ok(serde_json::from_str(&data_url_text)?)
} else {
let file = file_fetcher
.fetch_bypass_permissions(specifier)
.await?
.into_text_decoded()?;
let file = TextDecodedFile::decode(
file_fetcher.fetch_bypass_permissions(specifier).await?,
)?;
Ok(serde_json::from_str(&file.source)?)
}
}
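Illustration: because the data: branch decodes inline, an import map can be supplied without any network fetch. A minimal sketch of that decode using the data-url crate from the workspace dependencies (an assumption for illustration; the real code goes through deno_graph::source::RawDataUrl):

use data_url::DataUrl;

fn main() {
    let url = r#"data:application/json,{"imports":{"x":"jsr:@std/path@1"}}"#;
    let parsed = DataUrl::process(url).expect("valid data URL");
    let (body, _fragment) = parsed.decode_to_vec().expect("decodable body");
    println!("{}", String::from_utf8(body).unwrap());
}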

View file

@ -9,18 +9,19 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::serde_json;
use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_node::PackageJson;
use deno_semver::jsr::JsrDepPackageReq;
use crate::args::deno_json::import_map_deps;
use crate::cache;
use crate::util::fs::atomic_write_file_with_retries;
use crate::Flags;
use crate::args::DenoSubcommand;
use crate::args::InstallFlags;
use crate::args::InstallKind;
use deno_lockfile::Lockfile;
@ -102,6 +103,7 @@ impl CliLockfile {
pub fn discover(
flags: &Flags,
workspace: &Workspace,
maybe_external_import_map: Option<&serde_json::Value>,
) -> Result<Option<CliLockfile>, AnyError> {
fn pkg_json_deps(
maybe_pkg_json: Option<&PackageJson>,
@ -109,9 +111,12 @@ impl CliLockfile {
let Some(pkg_json) = maybe_pkg_json else {
return Default::default();
};
pkg_json
.resolve_local_package_json_deps()
let deps = pkg_json.resolve_local_package_json_deps();
deps
.dependencies
.values()
.chain(deps.dev_dependencies.values())
.filter_map(|dep| dep.as_ref().ok())
.filter_map(|dep| match dep {
PackageJsonDepValue::Req(req) => {
@ -133,10 +138,8 @@ impl CliLockfile {
if flags.no_lock
|| matches!(
flags.subcommand,
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(..),
..
}) | DenoSubcommand::Uninstall(_)
DenoSubcommand::Install(InstallFlags::Global(..))
| DenoSubcommand::Uninstall(_)
)
{
return Ok(None);
@ -171,7 +174,11 @@ impl CliLockfile {
let config = deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
dependencies: if let Some(map) = maybe_external_import_map {
import_map_deps(map)
} else {
deno_json_deps(root_folder.deno_json.as_deref())
},
},
members: workspace
.config_folders()

View file

@ -9,6 +9,7 @@ mod package_json;
use deno_ast::MediaType;
use deno_ast::SourceMapOption;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::CreateResolverOptions;
use deno_config::workspace::FolderConfigs;
@ -23,13 +24,16 @@ use deno_config::workspace::WorkspaceLintConfig;
use deno_config::workspace::WorkspaceResolver;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::NpmRc;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
use deno_path_util::normalize_path;
use deno_runtime::ops::otel::OtelConfig;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::StackString;
use deno_telemetry::OtelConfig;
use deno_telemetry::OtelRuntimeConfig;
use import_map::resolve_import_map_value_from_specifier;
pub use deno_config::deno_json::BenchConfig;
@ -83,7 +87,7 @@ use thiserror::Error;
use crate::cache;
use crate::cache::DenoDirProvider;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::version;
@ -215,47 +219,6 @@ pub fn ts_config_to_transpile_and_emit_options(
))
}
/// Indicates how cached source files should be handled.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum CacheSetting {
/// Only the cached files should be used. Any files not in the cache will
/// error. This is the equivalent of `--cached-only` in the CLI.
Only,
/// No cached source files should be used, and all files should be reloaded.
/// This is the equivalent of `--reload` in the CLI.
ReloadAll,
/// Only some cached resources should be used. This is the equivalent of
/// `--reload=jsr:@std/http/file-server` or
/// `--reload=jsr:@std/http/file-server,jsr:@std/assert/assert-equals`.
ReloadSome(Vec<String>),
/// The usability of a cached value is determined by analyzing the cached
/// headers and other metadata associated with a cached response, reloading
/// any cached "non-fresh" cached responses.
RespectHeaders,
/// The cached source files should be used for local modules. This is the
/// default behavior of the CLI.
Use,
}
impl CacheSetting {
pub fn should_use_for_npm_package(&self, package_name: &str) -> bool {
match self {
CacheSetting::ReloadAll => false,
CacheSetting::ReloadSome(list) => {
if list.iter().any(|i| i == "npm:") {
return false;
}
let specifier = format!("npm:{package_name}");
if list.contains(&specifier) {
return false;
}
true
}
_ => true,
}
}
}
pub struct WorkspaceBenchOptions {
pub filter: Option<String>,
pub json: bool,
@ -801,6 +764,7 @@ pub struct CliOptions {
maybe_node_modules_folder: Option<PathBuf>,
npmrc: Arc<ResolvedNpmRc>,
maybe_lockfile: Option<Arc<CliLockfile>>,
maybe_external_import_map: Option<(PathBuf, serde_json::Value)>,
overrides: CliOptionOverrides,
pub start_dir: Arc<WorkspaceDirectory>,
pub deno_dir_provider: Arc<DenoDirProvider>,
@ -814,6 +778,7 @@ impl CliOptions {
npmrc: Arc<ResolvedNpmRc>,
start_dir: Arc<WorkspaceDirectory>,
force_global_cache: bool,
maybe_external_import_map: Option<(PathBuf, serde_json::Value)>,
) -> Result<Self, AnyError> {
if let Some(insecure_allowlist) =
flags.unsafely_ignore_certificate_errors.as_ref()
@ -851,6 +816,7 @@ impl CliOptions {
maybe_node_modules_folder,
overrides: Default::default(),
main_module_cell: std::sync::OnceLock::new(),
maybe_external_import_map,
start_dir,
deno_dir_provider,
})
@ -926,7 +892,33 @@ impl CliOptions {
let (npmrc, _) = discover_npmrc_from_workspace(&start_dir.workspace)?;
let maybe_lock_file = CliLockfile::discover(&flags, &start_dir.workspace)?;
fn load_external_import_map(
deno_json: &ConfigFile,
) -> Result<Option<(PathBuf, serde_json::Value)>, AnyError> {
if !deno_json.is_an_import_map() {
if let Some(path) = deno_json.to_import_map_path()? {
let contents = std::fs::read_to_string(&path).with_context(|| {
format!("Unable to read import map at '{}'", path.display())
})?;
let map = serde_json::from_str(&contents)?;
return Ok(Some((path, map)));
}
}
Ok(None)
}
let external_import_map =
if let Some(deno_json) = start_dir.workspace.root_deno_json() {
load_external_import_map(deno_json)?
} else {
None
};
let maybe_lock_file = CliLockfile::discover(
&flags,
&start_dir.workspace,
external_import_map.as_ref().map(|(_, v)| v),
)?;
log::debug!("Finished config loading.");
@ -937,6 +929,7 @@ impl CliOptions {
npmrc,
Arc::new(start_dir),
false,
external_import_map,
)
}
@ -964,9 +957,7 @@ impl CliOptions {
match self.sub_command() {
DenoSubcommand::Cache(_) => GraphKind::All,
DenoSubcommand::Check(_) => GraphKind::TypesOnly,
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(_),
}) => GraphKind::All,
DenoSubcommand::Install(InstallFlags::Local(_)) => GraphKind::All,
_ => self.type_check_mode().as_graph_kind(),
}
}
@ -1002,24 +993,24 @@ impl CliOptions {
// https://nodejs.org/api/process.html
match target.as_str() {
"aarch64-apple-darwin" => NpmSystemInfo {
os: "darwin".to_string(),
cpu: "arm64".to_string(),
os: "darwin".into(),
cpu: "arm64".into(),
},
"aarch64-unknown-linux-gnu" => NpmSystemInfo {
os: "linux".to_string(),
cpu: "arm64".to_string(),
os: "linux".into(),
cpu: "arm64".into(),
},
"x86_64-apple-darwin" => NpmSystemInfo {
os: "darwin".to_string(),
cpu: "x64".to_string(),
os: "darwin".into(),
cpu: "x64".into(),
},
"x86_64-unknown-linux-gnu" => NpmSystemInfo {
os: "linux".to_string(),
cpu: "x64".to_string(),
os: "linux".into(),
cpu: "x64".into(),
},
"x86_64-pc-windows-msvc" => NpmSystemInfo {
os: "win32".to_string(),
cpu: "x64".to_string(),
os: "win32".into(),
cpu: "x64".into(),
},
value => {
log::warn!(
@ -1056,10 +1047,10 @@ impl CliOptions {
pub async fn create_workspace_resolver(
&self,
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
pkg_json_dep_resolution: PackageJsonDepResolution,
) -> Result<WorkspaceResolver, AnyError> {
let overrode_no_import_map = self
let overrode_no_import_map: bool = self
.overrides
.import_map_specifier
.as_ref()
@ -1087,7 +1078,19 @@ impl CliOptions {
value,
})
}
None => None,
None => {
if let Some((path, import_map)) =
self.maybe_external_import_map.as_ref()
{
let path_url = deno_path_util::url_from_file_path(path)?;
Some(deno_config::workspace::SpecifiedImportMap {
base_url: path_url,
value: import_map.clone(),
})
} else {
None
}
}
}
};
Ok(self.workspace().create_resolver(
@ -1126,7 +1129,7 @@ impl CliOptions {
}
}
pub fn otel_config(&self) -> Option<OtelConfig> {
pub fn otel_config(&self) -> OtelConfig {
self.flags.otel_config()
}
@ -1350,9 +1353,7 @@ impl CliOptions {
Ok(result)
}
pub fn resolve_deno_lint_config(
&self,
) -> Result<deno_lint::linter::LintConfig, AnyError> {
pub fn resolve_deno_lint_config(&self) -> Result<DenoLintConfig, AnyError> {
let ts_config_result =
self.resolve_ts_config_for_emit(TsConfigType::Emit)?;
@ -1361,11 +1362,11 @@ impl CliOptions {
ts_config_result.ts_config,
)?;
Ok(deno_lint::linter::LintConfig {
Ok(DenoLintConfig {
default_jsx_factory: (!transpile_options.jsx_automatic)
.then(|| transpile_options.jsx_factory.clone()),
.then_some(transpile_options.jsx_factory),
default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
.then(|| transpile_options.jsx_fragment_factory.clone()),
.then_some(transpile_options.jsx_fragment_factory),
})
}
@ -1543,11 +1544,11 @@ impl CliOptions {
DenoSubcommand::Check(check_flags) => {
Some(files_to_urls(&check_flags.files))
}
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Global(flags),
}) => Url::parse(&flags.module_url)
DenoSubcommand::Install(InstallFlags::Global(flags)) => {
Url::parse(&flags.module_url)
.ok()
.map(|url| vec![Cow::Owned(url)]),
.map(|url| vec![Cow::Owned(url)])
}
DenoSubcommand::Doc(DocFlags {
source_files: DocSourceFileFlag::Paths(paths),
..
@ -1606,6 +1607,11 @@ impl CliOptions {
|| self.workspace().has_unstable("bare-node-builtins")
}
pub fn unstable_detect_cjs(&self) -> bool {
self.flags.unstable_config.detect_cjs
|| self.workspace().has_unstable("detect-cjs")
}
pub fn detect_cjs(&self) -> bool {
// only enabled when there's a package.json in order to not have a
// perf penalty for non-npm Deno projects of searching for the closest
@ -1675,8 +1681,10 @@ impl CliOptions {
"sloppy-imports",
"byonm",
"bare-node-builtins",
"detect-cjs",
"fmt-component",
"fmt-sql",
"lazy-npm-caching",
])
.collect();
@ -1755,6 +1763,19 @@ impl CliOptions {
),
}
}
pub fn unstable_npm_lazy_caching(&self) -> bool {
self.flags.unstable_config.npm_lazy_caching
|| self.workspace().has_unstable("npm-lazy-caching")
}
pub fn default_npm_caching_strategy(&self) -> NpmCachingStrategy {
if self.flags.unstable_config.npm_lazy_caching {
NpmCachingStrategy::Lazy
} else {
NpmCachingStrategy::Eager
}
}
}
/// Resolves the path to use for a local node_modules folder.
@ -1926,15 +1947,17 @@ pub fn has_flag_env_var(name: &str) -> bool {
pub fn npm_pkg_req_ref_to_binary_command(
req_ref: &NpmPackageReqReference,
) -> String {
let binary_name = req_ref.sub_path().unwrap_or(req_ref.req().name.as_str());
binary_name.to_string()
req_ref
.sub_path()
.map(|s| s.to_string())
.unwrap_or_else(|| req_ref.req().name.to_string())
}
pub fn config_to_deno_graph_workspace_member(
config: &ConfigFile,
) -> Result<deno_graph::WorkspaceMember, AnyError> {
let name = match &config.json.name {
Some(name) => name.clone(),
let name: StackString = match &config.json.name {
Some(name) => name.as_str().into(),
None => bail!("Missing 'name' field in config file."),
};
let version = match &config.json.version {
@ -1969,6 +1992,20 @@ fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) {
}
}
#[derive(Debug, Clone, Copy)]
pub enum NpmCachingStrategy {
Eager,
Lazy,
Manual,
}
pub(crate) fn otel_runtime_config() -> OtelRuntimeConfig {
OtelRuntimeConfig {
runtime_name: Cow::Borrowed("deno"),
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
}
}
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;

View file

@ -8,20 +8,23 @@ use deno_core::serde_json;
use deno_core::url::Url;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonDepWorkspaceReq;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use deno_semver::VersionReq;
use thiserror::Error;
#[derive(Debug)]
pub struct InstallNpmRemotePkg {
pub alias: Option<String>,
pub alias: Option<StackString>,
pub base_dir: PathBuf,
pub req: PackageReq,
}
#[derive(Debug)]
pub struct InstallNpmWorkspacePkg {
pub alias: Option<String>,
pub alias: Option<StackString>,
pub target_dir: PathBuf,
}
@ -29,7 +32,7 @@ pub struct InstallNpmWorkspacePkg {
#[error("Failed to install '{}'\n at {}", alias, location)]
pub struct PackageJsonDepValueParseWithLocationError {
pub location: Url,
pub alias: String,
pub alias: StackString,
#[source]
pub source: PackageJsonDepValueParseError,
}
@ -95,16 +98,20 @@ impl NpmInstallDepsProvider {
if let Some(pkg_json) = &folder.pkg_json {
let deps = pkg_json.resolve_local_package_json_deps();
let mut pkg_pkgs = Vec::with_capacity(deps.len());
for (alias, dep) in deps {
let mut pkg_pkgs = Vec::with_capacity(
deps.dependencies.len() + deps.dev_dependencies.len(),
);
for (alias, dep) in
deps.dependencies.iter().chain(deps.dev_dependencies.iter())
{
let dep = match dep {
Ok(dep) => dep,
Err(err) => {
pkg_json_dep_errors.push(
PackageJsonDepValueParseWithLocationError {
location: pkg_json.specifier(),
alias,
source: err,
alias: alias.clone(),
source: err.clone(),
},
);
continue;
@ -113,30 +120,39 @@ impl NpmInstallDepsProvider {
match dep {
PackageJsonDepValue::Req(pkg_req) => {
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_req(&pkg_req)
pkg.matches_req(pkg_req)
// do not resolve to the current package
&& pkg.pkg_json.path != pkg_json.path
});
if let Some(pkg) = workspace_pkg {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias: Some(alias),
alias: Some(alias.clone()),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
} else {
pkg_pkgs.push(InstallNpmRemotePkg {
alias: Some(alias),
alias: Some(alias.clone()),
base_dir: pkg_json.dir_path().to_path_buf(),
req: pkg_req,
req: pkg_req.clone(),
});
}
}
PackageJsonDepValue::Workspace(version_req) => {
PackageJsonDepValue::Workspace(workspace_version_req) => {
let version_req = match workspace_version_req {
PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
version_req.clone()
}
PackageJsonDepWorkspaceReq::Tilde
| PackageJsonDepWorkspaceReq::Caret => {
VersionReq::parse_from_npm("*").unwrap()
}
};
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_name_and_version_req(&alias, &version_req)
pkg.matches_name_and_version_req(alias, &version_req)
}) {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias: Some(alias),
alias: Some(alias.clone()),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
}

View file

@ -1,369 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_core::ModuleSpecifier;
use log::debug;
use log::error;
use std::borrow::Cow;
use std::fmt;
use std::net::IpAddr;
use std::net::Ipv4Addr;
use std::net::Ipv6Addr;
use std::net::SocketAddr;
use std::str::FromStr;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AuthTokenData {
Bearer(String),
Basic { username: String, password: String },
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AuthToken {
host: AuthDomain,
token: AuthTokenData,
}
impl fmt::Display for AuthToken {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self.token {
AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"),
AuthTokenData::Basic { username, password } => {
let credentials = format!("{username}:{password}");
write!(f, "Basic {}", BASE64_STANDARD.encode(credentials))
}
}
}
}
/// A structure which contains bearer tokens that can be used when sending
/// requests to websites, intended to authorize access to private resources
/// such as remote modules.
#[derive(Debug, Clone)]
pub struct AuthTokens(Vec<AuthToken>);
/// An authorization domain, either an exact or suffix match.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AuthDomain {
Ip(IpAddr),
IpPort(SocketAddr),
/// Suffix match, no dot. May include a port.
Suffix(Cow<'static, str>),
}
impl<T: ToString> From<T> for AuthDomain {
fn from(value: T) -> Self {
let s = value.to_string().to_lowercase();
if let Ok(ip) = SocketAddr::from_str(&s) {
return AuthDomain::IpPort(ip);
};
if s.starts_with('[') && s.ends_with(']') {
if let Ok(ip) = Ipv6Addr::from_str(&s[1..s.len() - 1]) {
return AuthDomain::Ip(ip.into());
}
} else if let Ok(ip) = Ipv4Addr::from_str(&s) {
return AuthDomain::Ip(ip.into());
}
if let Some(s) = s.strip_prefix('.') {
AuthDomain::Suffix(Cow::Owned(s.to_owned()))
} else {
AuthDomain::Suffix(Cow::Owned(s))
}
}
}
impl AuthDomain {
pub fn matches(&self, specifier: &ModuleSpecifier) -> bool {
let Some(host) = specifier.host_str() else {
return false;
};
match *self {
Self::Ip(ip) => {
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
return false;
};
ip == parsed && specifier.port().is_none()
}
Self::IpPort(ip) => {
let AuthDomain::Ip(parsed) = AuthDomain::from(host) else {
return false;
};
ip.ip() == parsed && specifier.port() == Some(ip.port())
}
Self::Suffix(ref suffix) => {
let hostname = if let Some(port) = specifier.port() {
Cow::Owned(format!("{}:{}", host, port))
} else {
Cow::Borrowed(host)
};
if suffix.len() == hostname.len() {
return suffix == &hostname;
}
// If it's a suffix match, ensure a dot
if hostname.ends_with(suffix.as_ref())
&& hostname.ends_with(&format!(".{suffix}"))
{
return true;
}
false
}
}
}
}
impl AuthTokens {
/// Create a new set of tokens based on the provided string. It is intended
/// that the string be the value of an environment variable and the string is
/// parsed for token values. The string is expected to be a semi-colon
/// separated string, where each value is `{token}@{hostname}`.
pub fn new(maybe_tokens_str: Option<String>) -> Self {
let mut tokens = Vec::new();
if let Some(tokens_str) = maybe_tokens_str {
for token_str in tokens_str.trim().split(';') {
if token_str.contains('@') {
let mut iter = token_str.rsplitn(2, '@');
let host = AuthDomain::from(iter.next().unwrap());
let token = iter.next().unwrap();
if token.contains(':') {
let mut iter = token.rsplitn(2, ':');
let password = iter.next().unwrap().to_owned();
let username = iter.next().unwrap().to_owned();
tokens.push(AuthToken {
host,
token: AuthTokenData::Basic { username, password },
});
} else {
tokens.push(AuthToken {
host,
token: AuthTokenData::Bearer(token.to_string()),
});
}
} else {
error!("Badly formed auth token discarded.");
}
}
debug!("Parsed {} auth token(s).", tokens.len());
}
Self(tokens)
}
/// Attempt to match the provided specifier to the tokens in the set. The
/// matching occurs from the right of the hostname plus port, irrespective of
/// scheme. For example `https://www.deno.land:8080/` would match a token
/// with a host value of `deno.land:8080` but not match `www.deno.land`. The
/// matching is case insensitive.
pub fn get(&self, specifier: &ModuleSpecifier) -> Option<AuthToken> {
self.0.iter().find_map(|t| {
if t.host.matches(specifier) {
Some(t.clone())
} else {
None
}
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use deno_core::resolve_url;
#[test]
fn test_auth_token() {
let auth_tokens = AuthTokens::new(Some("abc123@deno.land".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123"
);
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture =
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
}
#[test]
fn test_auth_tokens_multiple() {
let auth_tokens =
AuthTokens::new(Some("abc123@deno.land;def456@example.com".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_space() {
let auth_tokens = AuthTokens::new(Some(
" abc123@deno.land;def456@example.com\t".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_newline() {
let auth_tokens = AuthTokens::new(Some(
"\nabc123@deno.land;def456@example.com\n".to_string(),
));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
let fixture = resolve_url("http://example.com/a/file.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer def456".to_string()
);
}
#[test]
fn test_auth_tokens_port() {
let auth_tokens =
AuthTokens::new(Some("abc123@deno.land:8080".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture = resolve_url("http://deno.land:8080/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc123".to_string()
);
}
#[test]
fn test_auth_tokens_contain_at() {
let auth_tokens = AuthTokens::new(Some("abc@123@deno.land".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Bearer abc@123".to_string()
);
}
#[test]
fn test_auth_token_basic() {
let auth_tokens = AuthTokens::new(Some("abc:123@deno.land".to_string()));
let fixture = resolve_url("https://deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Basic YWJjOjEyMw=="
);
let fixture = resolve_url("https://www.deno.land/x/mod.ts").unwrap();
assert_eq!(
auth_tokens.get(&fixture).unwrap().to_string(),
"Basic YWJjOjEyMw==".to_string()
);
let fixture = resolve_url("http://127.0.0.1:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture =
resolve_url("https://deno.land.example.com/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
let fixture = resolve_url("https://deno.land:8080/x/mod.ts").unwrap();
assert_eq!(auth_tokens.get(&fixture), None);
}
#[test]
fn test_parse_ip() {
let ip = AuthDomain::from("[2001:db8:a::123]");
assert_eq!("Ip(2001:db8:a::123)", format!("{ip:?}"));
let ip = AuthDomain::from("[2001:db8:a::123]:8080");
assert_eq!("IpPort([2001:db8:a::123]:8080)", format!("{ip:?}"));
let ip = AuthDomain::from("1.1.1.1");
assert_eq!("Ip(1.1.1.1)", format!("{ip:?}"));
}
#[test]
fn test_case_insensitive() {
let domain = AuthDomain::from("EXAMPLE.com");
assert!(
domain.matches(&ModuleSpecifier::parse("http://example.com").unwrap())
);
assert!(
domain.matches(&ModuleSpecifier::parse("http://example.COM").unwrap())
);
}
#[test]
fn test_matches() {
let candidates = [
"example.com",
"www.example.com",
"1.1.1.1",
"[2001:db8:a::123]",
// These will never match
"example.com.evil.com",
"1.1.1.1.evil.com",
"notexample.com",
"www.notexample.com",
];
let domains = [
("example.com", vec!["example.com", "www.example.com"]),
(".example.com", vec!["example.com", "www.example.com"]),
("www.example.com", vec!["www.example.com"]),
("1.1.1.1", vec!["1.1.1.1"]),
("[2001:db8:a::123]", vec!["[2001:db8:a::123]"]),
];
let url = |c: &str| ModuleSpecifier::parse(&format!("http://{c}")).unwrap();
let url_port =
|c: &str| ModuleSpecifier::parse(&format!("http://{c}:8080")).unwrap();
// Generate each candidate with and without a port
let candidates = candidates
.into_iter()
.flat_map(|c| [url(c), url_port(c)])
.collect::<Vec<_>>();
for (domain, expected_domain) in domains {
// Test without a port -- all candidates return without a port
let auth_domain = AuthDomain::from(domain);
let actual = candidates
.iter()
.filter(|c| auth_domain.matches(c))
.cloned()
.collect::<Vec<_>>();
let expected = expected_domain.iter().map(|u| url(u)).collect::<Vec<_>>();
assert_eq!(actual, expected);
// Test with a port, all candidates return with a port
let auth_domain = AuthDomain::from(&format!("{domain}:8080"));
let actual = candidates
.iter()
.filter(|c| auth_domain.matches(c))
.cloned()
.collect::<Vec<_>>();
let expected = expected_domain
.iter()
.map(|u| url_port(u))
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
}
}

cli/cache/mod.rs vendored

@ -1,18 +1,19 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::jsr_url;
use crate::args::CacheSetting;
use crate::errors::get_error_class_name;
use crate::file_fetcher::CliFetchNoFollowErrorKind;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::FetchNoFollowOptions;
use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind;
use deno_cache_dir::file_fetcher::FileOrRedirect;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::FutureExt;
use deno_core::ModuleSpecifier;
@ -23,6 +24,7 @@ use deno_graph::source::Loader;
use deno_runtime::deno_fs;
use deno_runtime::deno_permissions::PermissionsContainer;
use node_resolver::InNpmPackageChecker;
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
@ -67,8 +69,11 @@ pub const CACHE_PERM: u32 = 0o644;
pub struct RealDenoCacheEnv;
impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
std::fs::read(path)
fn read_file_bytes(
&self,
path: &Path,
) -> std::io::Result<Cow<'static, [u8]>> {
std::fs::read(path).map(Cow::Owned)
}
fn atomic_write_file(
@ -112,7 +117,10 @@ pub struct DenoCacheEnvFsAdapter<'a>(
);
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
fn read_file_bytes(
&self,
path: &Path,
) -> std::io::Result<Cow<'static, [u8]>> {
self
.0
.read_file_sync(path, None)
@ -183,7 +191,7 @@ pub struct FetchCacherOptions {
/// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@ -195,7 +203,7 @@ pub struct FetchCacher {
impl FetchCacher {
pub fn new(
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@ -313,18 +321,18 @@ impl Loader for FetchCacher {
LoaderCacheSetting::Only => Some(CacheSetting::Only),
};
file_fetcher
.fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions {
specifier: &specifier,
permissions: if is_statically_analyzable {
FetchPermissionsOptionRef::StaticContainer(&permissions)
.fetch_no_follow(
&specifier,
FetchPermissionsOptionRef::Restricted(&permissions,
if is_statically_analyzable {
deno_runtime::deno_permissions::CheckSpecifierKind::Static
} else {
FetchPermissionsOptionRef::DynamicContainer(&permissions)
},
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic
}),
FetchNoFollowOptions {
maybe_auth: None,
maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(),
},
maybe_checksum: options.maybe_checksum.as_ref(),
})
.await
@ -341,7 +349,7 @@ impl Loader for FetchCacher {
(None, None) => None,
};
Ok(Some(LoadResponse::Module {
specifier: file.specifier,
specifier: file.url,
maybe_headers,
content: file.source,
}))
@ -354,18 +362,46 @@ impl Loader for FetchCacher {
}
})
.unwrap_or_else(|err| {
if let Some(io_err) = err.downcast_ref::<std::io::Error>() {
if io_err.kind() == std::io::ErrorKind::NotFound {
return Ok(None);
let err = err.into_kind();
match err {
CliFetchNoFollowErrorKind::FetchNoFollow(err) => {
let err = err.into_kind();
match err {
FetchNoFollowErrorKind::NotFound(_) => Ok(None),
FetchNoFollowErrorKind::UrlToFilePath { .. } |
FetchNoFollowErrorKind::ReadingBlobUrl { .. } |
FetchNoFollowErrorKind::ReadingFile { .. } |
FetchNoFollowErrorKind::FetchingRemote { .. } |
FetchNoFollowErrorKind::ClientError { .. } |
FetchNoFollowErrorKind::NoRemote { .. } |
FetchNoFollowErrorKind::DataUrlDecode { .. } |
FetchNoFollowErrorKind::RedirectResolution { .. } |
FetchNoFollowErrorKind::CacheRead { .. } |
FetchNoFollowErrorKind::CacheSave { .. } |
FetchNoFollowErrorKind::UnsupportedScheme { .. } |
FetchNoFollowErrorKind::RedirectHeaderParse { .. } |
FetchNoFollowErrorKind::InvalidHeader { .. } => Err(AnyError::from(err)),
FetchNoFollowErrorKind::NotCached { .. } => {
if options.cache_setting == LoaderCacheSetting::Only {
Ok(None)
} else {
return Err(err);
Err(AnyError::from(err))
}
},
FetchNoFollowErrorKind::ChecksumIntegrity(err) => {
// convert to the equivalent deno_graph error so that deno_graph
// can enhance it if this error is passed back to it
Err(
deno_graph::source::ChecksumIntegrityError {
actual: err.actual,
expected: err.expected,
}
.into(),
)
}
}
let error_class_name = get_error_class_name(&err);
match error_class_name {
"NotFound" => Ok(None),
"NotCached" if options.cache_setting == LoaderCacheSetting::Only => Ok(None),
_ => Err(err),
},
CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(AnyError::from(permission_check_error)),
}
})
}


@ -284,6 +284,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 {
#[cfg(test)]
mod test {
use deno_graph::JsDocImportInfo;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
@ -308,7 +309,8 @@ mod test {
);
let mut module_info = ModuleInfo::default();
module_info.jsdoc_imports.push(SpecifierWithRange {
module_info.jsdoc_imports.push(JsDocImportInfo {
specifier: SpecifierWithRange {
range: PositionRange {
start: deno_graph::Position {
line: 0,
@ -320,6 +322,8 @@ mod test {
},
},
text: "test".to_string(),
},
resolution_mode: None,
});
cache
.set_module_info(


@ -95,11 +95,21 @@ impl ParsedSourceCache {
self.sources.lock().remove(specifier);
}
/// Frees all parsed sources from memory.
pub fn free_all(&self) {
self.sources.lock().clear();
}
/// Creates a parser that will reuse a ParsedSource from the store
/// if it exists, or else parse.
pub fn as_capturing_parser(&self) -> CapturingEsParser {
CapturingEsParser::new(None, self)
}
#[cfg(test)]
pub fn len(&self) -> usize {
self.sources.lock().len()
}
}
/// It's ok that this is racy since in non-LSP situations


@ -5,6 +5,7 @@ use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache;
use crate::resolver::CjsTracker;
use deno_ast::EmittedSourceText;
use deno_ast::ModuleKind;
use deno_ast::SourceMapOption;
use deno_ast::SourceRange;
@ -132,6 +133,7 @@ impl Emitter {
&transpile_and_emit_options.0,
&transpile_and_emit_options.1,
)
.map(|r| r.text)
}
})
.await
@ -166,7 +168,8 @@ impl Emitter {
source.clone(),
&self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1,
)?;
)?
.text;
helper.post_emit_parsed_source(
specifier,
&transpiled_source,
@ -177,6 +180,31 @@ impl Emitter {
}
}
pub fn emit_parsed_source_for_deno_compile(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>,
) -> Result<(String, String), AnyError> {
let mut emit_options = self.transpile_and_emit_options.1.clone();
emit_options.inline_sources = false;
emit_options.source_map = SourceMapOption::Separate;
// strip off the path for more deterministic builds; we don't care about
// the source name because we manually provide the source map to v8
emit_options.source_map_base = Some(deno_path_util::url_parent(specifier));
let source = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache,
specifier,
media_type,
module_kind,
source.clone(),
&self.transpile_and_emit_options.0,
&emit_options,
)?;
Ok((source.text, source.source_map.unwrap()))
}
/// Expects a file URL, panics otherwise.
pub async fn load_and_emit_for_hmr(
&self,
@ -282,7 +310,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
source: Arc<str>,
transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions,
) -> Result<String, AnyError> {
) -> Result<EmittedSourceText, AnyError> {
// nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned
let parsed_source = parsed_source_cache
@ -302,8 +330,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
source
}
};
debug_assert!(transpiled_source.source_map.is_none());
Ok(transpiled_source.text)
Ok(transpiled_source)
}
pub fn post_emit_parsed_source(


@ -22,7 +22,7 @@ use crate::cache::ModuleInfoCache;
use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_container::MainModuleGraphContainer;
use crate::graph_util::FileWatcherReporter;
use crate::graph_util::ModuleGraphBuilder;
@ -48,7 +48,6 @@ use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader;
use crate::resolver::SloppyImportsCachedFs;
use crate::standalone::DenoCompileBinaryWriter;
@ -72,6 +71,7 @@ use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::FeatureChecker;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
@ -185,7 +185,7 @@ struct CliFactoryServices {
emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
file_fetcher: Deferred<Arc<FileFetcher>>,
file_fetcher: Deferred<Arc<CliFileFetcher>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>,
@ -350,16 +350,17 @@ impl CliFactory {
})
}
pub fn file_fetcher(&self) -> Result<&Arc<FileFetcher>, AnyError> {
pub fn file_fetcher(&self) -> Result<&Arc<CliFileFetcher>, AnyError> {
self.services.file_fetcher.get_or_try_init(|| {
let cli_options = self.cli_options()?;
Ok(Arc::new(FileFetcher::new(
Ok(Arc::new(CliFileFetcher::new(
self.http_cache()?.clone(),
cli_options.cache_setting(),
!cli_options.no_remote(),
self.http_client_provider().clone(),
self.blob_store().clone(),
Some(self.text_only_progress_bar().clone()),
!cli_options.no_remote(),
cli_options.cache_setting(),
log::Level::Info,
)))
})
}
@ -504,7 +505,12 @@ impl CliFactory {
let resolver = cli_options
.create_workspace_resolver(
self.file_fetcher()?,
if cli_options.use_byonm() {
if cli_options.use_byonm()
&& !matches!(
cli_options.sub_command(),
DenoSubcommand::Publish(_)
)
{
PackageJsonDepResolution::Disabled
} else {
// todo(dsherret): this should be false for nodeModulesDir: true
@ -845,9 +851,12 @@ impl CliFactory {
Ok(Arc::new(CjsTracker::new(
self.in_npm_pkg_checker()?.clone(),
self.pkg_json_resolver().clone(),
IsCjsResolverOptions {
detect_cjs: options.detect_cjs(),
is_node_main: options.is_node_main(),
if options.is_node_main() || options.unstable_detect_cjs() {
IsCjsResolutionMode::ImplicitTypeCommonJs
} else if options.detect_cjs() {
IsCjsResolutionMode::ExplicitTypeCommonJs
} else {
IsCjsResolutionMode::Disabled
},
)))
})
@ -976,6 +985,7 @@ impl CliFactory {
cli_options.sub_command().clone(),
self.create_cli_main_worker_options()?,
self.cli_options()?.otel_config(),
self.cli_options()?.default_npm_caching_strategy(),
))
}

File diff suppressed because it is too large.

@ -4,14 +4,16 @@ use crate::args::config_to_deno_graph_workspace_member;
use crate::args::jsr_url;
use crate::args::CliLockfile;
use crate::args::CliOptions;
pub use crate::args::NpmCachingStrategy;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::cache;
use crate::cache::FetchCacher;
use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CliResolver;
@ -25,7 +27,7 @@ use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolutionKind;
use deno_graph::FillFromLockfileOptions;
use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError;
@ -44,12 +46,13 @@ use deno_graph::ModuleGraphError;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_path_util::url_to_file_path;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::SmallStackString;
use import_map::ImportMapError;
use node_resolver::InNpmPackageChecker;
use std::collections::HashSet;
@ -108,6 +111,25 @@ pub fn graph_valid(
}
}
pub fn fill_graph_from_lockfile(
graph: &mut ModuleGraph,
lockfile: &deno_lockfile::Lockfile,
) {
graph.fill_from_lockfile(FillFromLockfileOptions {
redirects: lockfile
.content
.redirects
.iter()
.map(|(from, to)| (from.as_str(), to.as_str())),
package_specifiers: lockfile
.content
.packages
.specifiers
.iter()
.map(|(dep, id)| (dep, id.as_str())),
});
}
#[derive(Clone)]
pub struct GraphWalkErrorsOptions {
pub check_js: bool,
@ -198,6 +220,7 @@ pub struct CreateGraphOptions<'a> {
pub is_dynamic: bool,
/// Specify `None` to use the default CLI loader.
pub loader: Option<&'a mut dyn Loader>,
pub npm_caching: NpmCachingStrategy,
}
pub struct ModuleGraphCreator {
@ -226,10 +249,11 @@ impl ModuleGraphCreator {
&self,
graph_kind: GraphKind,
roots: Vec<ModuleSpecifier>,
npm_caching: NpmCachingStrategy,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let mut cache = self.module_graph_builder.create_graph_loader();
self
.create_graph_with_loader(graph_kind, roots, &mut cache)
.create_graph_with_loader(graph_kind, roots, &mut cache, npm_caching)
.await
}
@ -238,6 +262,7 @@ impl ModuleGraphCreator {
graph_kind: GraphKind,
roots: Vec<ModuleSpecifier>,
loader: &mut dyn Loader,
npm_caching: NpmCachingStrategy,
) -> Result<ModuleGraph, AnyError> {
self
.create_graph_with_options(CreateGraphOptions {
@ -245,6 +270,7 @@ impl ModuleGraphCreator {
graph_kind,
roots,
loader: Some(loader),
npm_caching,
})
.await
}
@ -254,6 +280,23 @@ impl ModuleGraphCreator {
package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> {
struct PublishLoader(FetchCacher);
impl Loader for PublishLoader {
fn load(
&self,
specifier: &deno_ast::ModuleSpecifier,
options: deno_graph::source::LoadOptions,
) -> deno_graph::source::LoadFuture {
if specifier.scheme() == "bun" {
return Box::pin(std::future::ready(Ok(Some(
deno_graph::source::LoadResponse::External {
specifier: specifier.clone(),
},
))));
}
self.0.load(specifier, options)
}
}
fn graph_has_external_remote(graph: &ModuleGraph) -> bool {
// Earlier on, we marked external non-JSR modules as external.
// If the graph contains any of those, it would cause type checking
@ -271,12 +314,16 @@ impl ModuleGraphCreator {
for package_config in package_configs {
roots.extend(package_config.config_file.resolve_export_value_urls()?);
}
let loader = self.module_graph_builder.create_graph_loader();
let mut publish_loader = PublishLoader(loader);
let mut graph = self
.create_graph_with_options(CreateGraphOptions {
is_dynamic: false,
graph_kind: deno_graph::GraphKind::All,
roots,
loader: None,
loader: Some(&mut publish_loader),
npm_caching: self.options.default_npm_caching_strategy(),
})
.await?;
self.graph_valid(&graph)?;
@ -336,6 +383,7 @@ impl ModuleGraphCreator {
graph_kind,
roots,
loader: None,
npm_caching: self.options.default_npm_caching_strategy(),
})
.await?;
@ -384,7 +432,7 @@ pub struct ModuleGraphBuilder {
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@ -403,7 +451,7 @@ impl ModuleGraphBuilder {
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
fs: Arc<dyn FileSystem>,
global_http_cache: Arc<GlobalHttpCache>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
@ -525,7 +573,8 @@ impl ModuleGraphBuilder {
};
let cli_resolver = &self.resolver;
let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
let graph_npm_resolver =
cli_resolver.create_graph_npm_resolver(options.npm_caching);
let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter
.as_ref()
@ -552,6 +601,7 @@ impl ModuleGraphBuilder {
resolver: Some(&graph_resolver),
locker: locker.as_mut().map(|l| l as _),
},
options.npm_caching,
)
.await
}
@ -562,6 +612,7 @@ impl ModuleGraphBuilder {
roots: Vec<ModuleSpecifier>,
loader: &'a mut dyn deno_graph::source::Loader,
options: deno_graph::BuildOptions<'a>,
npm_caching: NpmCachingStrategy,
) -> Result<(), AnyError> {
// ensure an "npm install" is done if the user has explicitly
// opted into using a node_modules directory
@ -572,7 +623,13 @@ impl ModuleGraphBuilder {
.unwrap_or(false)
{
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
let already_done =
npm_resolver.ensure_top_level_package_json_install().await?;
if !already_done && matches!(npm_caching, NpmCachingStrategy::Eager) {
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
}
}
}
@ -582,19 +639,7 @@ impl ModuleGraphBuilder {
// populate the information from the lockfile
if let Some(lockfile) = &self.lockfile {
let lockfile = lockfile.lock();
graph.fill_from_lockfile(FillFromLockfileOptions {
redirects: lockfile
.content
.redirects
.iter()
.map(|(from, to)| (from.as_str(), to.as_str())),
package_specifiers: lockfile
.content
.packages
.specifiers
.iter()
.map(|(dep, id)| (dep, id.as_str())),
});
fill_graph_from_lockfile(graph, &lockfile);
}
}
@ -636,7 +681,7 @@ impl ModuleGraphBuilder {
for (from, to) in graph.packages.mappings() {
lockfile.insert_package_specifier(
JsrDepPackageReq::jsr(from.clone()),
to.version.to_string(),
to.version.to_custom_string::<SmallStackString>(),
);
}
}
@ -673,7 +718,9 @@ impl ModuleGraphBuilder {
let parser = self.parsed_source_cache.as_capturing_parser();
let cli_resolver = &self.resolver;
let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(
self.cli_options.default_npm_caching_strategy(),
);
graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions {
@ -795,7 +842,7 @@ fn enhanced_sloppy_imports_error_message(
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
| ModuleError::Missing(specifier, _) => {
let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
.resolve(specifier, SloppyImportsResolutionMode::Execution)?
.resolve(specifier, SloppyImportsResolutionKind::Execution)?
.as_suggestion_message();
Some(format!(
"{} {} or run with --unstable-sloppy-imports",
@ -1100,12 +1147,12 @@ impl<'a> deno_graph::source::FileSystem for DenoGraphFsAdapter<'a> {
}
}
pub fn format_range_with_colors(range: &deno_graph::Range) -> String {
pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String {
format!(
"{}:{}:{}",
colors::cyan(range.specifier.as_str()),
colors::yellow(&(range.start.line + 1).to_string()),
colors::yellow(&(range.start.character + 1).to_string())
colors::cyan(referrer.specifier.as_str()),
colors::yellow(&(referrer.range.start.line + 1).to_string()),
colors::yellow(&(referrer.range.start.character + 1).to_string())
)
}
@ -1195,26 +1242,54 @@ impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
&self,
raw_specifier: &str,
referrer_range: &deno_graph::Range,
mode: ResolutionMode,
resolution_kind: ResolutionKind,
) -> Result<ModuleSpecifier, ResolveError> {
self.resolver.resolve(
raw_specifier,
referrer_range,
&referrer_range.specifier,
referrer_range.range.start,
referrer_range
.resolution_mode
.map(to_node_resolution_mode)
.unwrap_or_else(|| {
self
.cjs_tracker
.get_referrer_kind(&referrer_range.specifier),
mode,
.get_referrer_kind(&referrer_range.specifier)
}),
to_node_resolution_kind(resolution_kind),
)
}
}
pub fn to_node_resolution_kind(
kind: ResolutionKind,
) -> node_resolver::NodeResolutionKind {
match kind {
ResolutionKind::Execution => node_resolver::NodeResolutionKind::Execution,
ResolutionKind::Types => node_resolver::NodeResolutionKind::Types,
}
}
pub fn to_node_resolution_mode(
mode: deno_graph::source::ResolutionMode,
) -> node_resolver::ResolutionMode {
match mode {
deno_graph::source::ResolutionMode::Import => {
node_resolver::ResolutionMode::Import
}
deno_graph::source::ResolutionMode::Require => {
node_resolver::ResolutionMode::Require
}
}
}
#[cfg(test)]
mod test {
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_graph::source::ResolveError;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::Range;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
@ -1235,8 +1310,8 @@ mod test {
specifier: input.to_string(),
range: Range {
specifier,
start: Position::zeroed(),
end: Position::zeroed(),
resolution_mode: None,
range: PositionRange::zeroed(),
},
};
assert_eq!(get_resolution_error_bare_node_specifier(&err), output);
@ -1251,8 +1326,8 @@ mod test {
let err = ResolutionError::InvalidSpecifier {
range: Range {
specifier,
start: Position::zeroed(),
end: Position::zeroed(),
resolution_mode: None,
range: PositionRange::zeroed(),
},
error: SpecifierError::ImportPrefixMissing {
specifier: input.to_string(),

File diff suppressed because it is too large.

cli/js/40_lint.js (new file)

File diff suppressed because it is too large.

cli/js/40_lint_selector.js (new file)

File diff suppressed because it is too large.

cli/js/40_lint_types.d.ts vendored (new file)

@ -0,0 +1,132 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
export interface NodeFacade {
type: string;
range: [number, number];
[key: string]: unknown;
}
export interface AstContext {
buf: Uint8Array;
strTable: Map<number, string>;
strTableOffset: number;
rootOffset: number;
nodes: Map<number, NodeFacade>;
strByType: number[];
strByProp: number[];
typeByStr: Map<string, number>;
propByStr: Map<string, number>;
matcher: MatchContext;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface RuleContext {
id: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintRule {
create(ctx: RuleContext): Record<string, (node: unknown) => void>;
destroy?(ctx: RuleContext): void;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintPlugin {
name: string;
rules: Record<string, LintRule>;
}
export interface LintState {
plugins: LintPlugin[];
installedPlugins: Set<string>;
}
export type VisitorFn = (node: unknown) => void;
export interface CompiledVisitor {
matcher: (ctx: MatchContext, offset: number) => boolean;
info: { enter: VisitorFn; exit: VisitorFn };
}
export interface AttrExists {
type: 3;
prop: number[];
}
export interface AttrBin {
type: 4;
prop: number[];
op: number;
// deno-lint-ignore no-explicit-any
value: any;
}
export type AttrSelector = AttrExists | AttrBin;
export interface ElemSelector {
type: 1;
wildcard: boolean;
elem: number;
}
export interface PseudoNthChild {
type: 5;
op: string | null;
step: number;
stepOffset: number;
of: Selector | null;
repeat: boolean;
}
export interface PseudoHas {
type: 6;
selectors: Selector[];
}
export interface PseudoNot {
type: 7;
selectors: Selector[];
}
export interface PseudoFirstChild {
type: 8;
}
export interface PseudoLastChild {
type: 9;
}
export interface Relation {
type: 2;
op: number;
}
export type Selector = Array<
| ElemSelector
| Relation
| AttrExists
| AttrBin
| PseudoNthChild
| PseudoNot
| PseudoHas
| PseudoFirstChild
| PseudoLastChild
>;
export interface SelectorParseCtx {
root: Selector;
current: Selector;
}
export interface MatchContext {
getFirstChild(id: number): number;
getLastChild(id: number): number;
getSiblings(id: number): number[];
getParent(id: number): number;
getType(id: number): number;
hasAttrPath(id: number, propIds: number[], idx: number): boolean;
getAttrPathValue(id: number, propIds: number[], idx: number): unknown;
}
export type NextFn = (ctx: MatchContext, id: number) => boolean;
export type MatcherFn = (ctx: MatchContext, id: number) => boolean;
export type TransformFn = (value: string) => number;
export {};


@ -1,7 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::jsr_url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use dashmap::DashMap;
use deno_core::serde_json;
use deno_graph::packages::JsrPackageInfo;
@ -19,11 +19,11 @@ pub struct JsrFetchResolver {
/// It can be large and we don't want to store it.
info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
}
impl JsrFetchResolver {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
Self {
nv_by_req: Default::default(),
info_by_nv: Default::default(),


@ -15,7 +15,6 @@ use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter;
use crate::util::path::relative_specifier;
use deno_config::workspace::MappedResolution;
use deno_graph::source::ResolutionMode;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_ast::SourceRange;
@ -37,9 +36,12 @@ use deno_semver::package::PackageNv;
use deno_semver::package::PackageNvReference;
use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::Version;
use import_map::ImportMap;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Regex;
use std::borrow::Cow;
@ -270,13 +272,24 @@ impl<'a> TsResponseImportMapper<'a> {
}
}
if specifier.scheme() == "node" {
return Some(specifier.to_string());
}
if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str())
{
let mut segments = jsr_path.split('/');
let name = if jsr_path.starts_with('@') {
format!("{}/{}", segments.next()?, segments.next()?)
let scope = segments.next()?;
let name = segments.next()?;
capacity_builder::StringBuilder::<StackString>::build(|builder| {
builder.append(scope);
builder.append("/");
builder.append(name);
})
.unwrap()
} else {
segments.next()?.to_string()
StackString::from(segments.next()?)
};
let version = Version::parse_standard(segments.next()?).ok()?;
let nv = PackageNv { name, version };
@ -286,7 +299,9 @@ impl<'a> TsResponseImportMapper<'a> {
&path,
Some(&self.file_referrer),
)?;
let sub_path = (export != ".").then_some(export);
let sub_path = (export != ".")
.then_some(export)
.map(SmallStackString::from_string);
let mut req = None;
req = req.or_else(|| {
let import_map = self.maybe_import_map?;
@ -353,7 +368,12 @@ impl<'a> TsResponseImportMapper<'a> {
let pkg_reqs = npm_resolver.resolve_pkg_reqs_from_pkg_id(&pkg_id);
// check if any pkg reqs match what is found in an import map
if !pkg_reqs.is_empty() {
let sub_path = self.resolve_package_path(specifier);
let sub_path = npm_resolver
.resolve_pkg_folder_from_pkg_id(&pkg_id)
.ok()
.and_then(|pkg_folder| {
self.resolve_package_path(specifier, &pkg_folder)
});
if let Some(import_map) = self.maybe_import_map {
let pkg_reqs = pkg_reqs.iter().collect::<HashSet<_>>();
let mut matches = Vec::new();
@ -367,6 +387,10 @@ impl<'a> TsResponseImportMapper<'a> {
let value_sub_path = package_ref.sub_path().unwrap_or("");
if let Some(key_sub_path) =
sub_path.strip_prefix(value_sub_path)
{
// keys that don't end in a slash can't be mapped to a subpath
if entry.raw_key.ends_with('/')
|| key_sub_path.is_empty()
{
matches
.push(format!("{}{}", entry.raw_key, key_sub_path));
@ -375,6 +399,7 @@ impl<'a> TsResponseImportMapper<'a> {
}
}
}
}
// select the shortest match
matches.sort_by_key(|a| a.len());
if let Some(matched) = matches.first() {
@ -413,10 +438,16 @@ impl<'a> TsResponseImportMapper<'a> {
fn resolve_package_path(
&self,
specifier: &ModuleSpecifier,
package_root_folder: &Path,
) -> Option<String> {
let package_json = self
.resolver
.get_closest_package_json(specifier)
.pkg_json_resolver(specifier)
// the specifier might have a closer package.json, but we
// want the root of the package's package.json
.get_closest_package_json_from_file_path(
&package_root_folder.join("package.json"),
)
.ok()
.flatten()?;
let root_folder = package_json.path.parent()?;
@ -467,7 +498,7 @@ impl<'a> TsResponseImportMapper<'a> {
&self,
specifier: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
) -> Option<String> {
let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
@ -481,13 +512,10 @@ impl<'a> TsResponseImportMapper<'a> {
.as_cli_resolver(Some(&self.file_referrer))
.resolve(
&specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
ResolutionMode::Types,
referrer,
deno_graph::Position::zeroed(),
resolution_mode,
NodeResolutionKind::Types,
)
.ok()
.and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
@ -509,20 +537,17 @@ impl<'a> TsResponseImportMapper<'a> {
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
) -> bool {
self
.resolver
.as_cli_resolver(Some(&self.file_referrer))
.resolve(
specifier_text,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
deno_graph::source::ResolutionMode::Types,
referrer,
deno_graph::Position::zeroed(),
resolution_mode,
NodeResolutionKind::Types,
)
.is_ok()
}
@ -589,18 +614,24 @@ fn try_reverse_map_package_json_exports(
/// For a set of tsc changes, scan them for any that contain something that looks
/// like an import and rewrite the import specifier to include the extension
pub fn fix_ts_import_changes(
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
changes: &[tsc::FileTextChanges],
language_server: &language_server::Inner,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
let mut r = Vec::new();
for change in changes {
let Ok(referrer) = ModuleSpecifier::parse(&change.file_name) else {
continue;
};
let referrer_doc = language_server.get_asset_or_document(&referrer).ok();
let resolution_mode = referrer_doc
.as_ref()
.map(|d| d.resolution_mode())
.unwrap_or(ResolutionMode::Import);
let import_mapper =
language_server.get_ts_response_import_mapper(&referrer);
let mut text_changes = Vec::new();
for text_change in &change.text_changes {
let lines = text_change.new_text.split('\n');
let new_lines: Vec<String> = lines
.map(|line| {
// This assumes that there's only one import per line.
@ -608,7 +639,7 @@ pub fn fix_ts_import_changes(
let specifier =
captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
if let Some(new_specifier) = import_mapper
.check_unresolved_specifier(specifier, referrer, referrer_kind)
.check_unresolved_specifier(specifier, &referrer, resolution_mode)
{
line.replace(specifier, &new_specifier)
} else {
@ -638,7 +669,7 @@ pub fn fix_ts_import_changes(
/// resolution by Deno (includes the extension).
fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
action: &'a tsc::CodeFixAction,
language_server: &language_server::Inner,
) -> Option<Cow<'a, tsc::CodeFixAction>> {
@ -657,9 +688,11 @@ fn fix_ts_import_action<'a>(
return Some(Cow::Borrowed(action));
};
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer, referrer_kind)
{
if let Some(new_specifier) = import_mapper.check_unresolved_specifier(
specifier,
referrer,
resolution_mode,
) {
let description = action.description.replace(specifier, &new_specifier);
let changes = action
.changes
@ -689,7 +722,8 @@ fn fix_ts_import_action<'a>(
fix_id: None,
fix_all_description: None,
}))
} else if !import_mapper.is_valid_import(specifier, referrer, referrer_kind) {
} else if !import_mapper.is_valid_import(specifier, referrer, resolution_mode)
{
None
} else {
Some(Cow::Borrowed(action))
@ -1023,7 +1057,7 @@ impl CodeActionCollection {
pub fn add_ts_fix_action(
&mut self,
specifier: &ModuleSpecifier,
specifier_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
action: &tsc::CodeFixAction,
diagnostic: &lsp::Diagnostic,
language_server: &language_server::Inner,
@ -1042,7 +1076,7 @@ impl CodeActionCollection {
));
}
let Some(action) =
fix_ts_import_action(specifier, specifier_kind, action, language_server)
fix_ts_import_action(specifier, resolution_mode, action, language_server)
else {
return Ok(());
};
@ -1237,12 +1271,12 @@ impl CodeActionCollection {
let text_info = parsed_source.text_info_lazy();
let specifier_range = SourceRange::new(
text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.start.line,
column_index: import.specifier_range.start.character,
line_index: import.specifier_range.range.start.line,
column_index: import.specifier_range.range.start.character,
}),
text_info.loc_to_source_pos(LineAndColumnIndex {
line_index: import.specifier_range.end.line,
column_index: import.specifier_range.end.character,
line_index: import.specifier_range.range.end.line,
column_index: import.specifier_range.range.end.character,
}),
);
@ -1277,16 +1311,14 @@ impl CodeActionCollection {
if json!(i.kind) != json!("es") && json!(i.kind) != json!("tsType") {
return None;
}
if !i.specifier_range.includes(&position) {
if !i.specifier_range.includes(position) {
return None;
}
import_start_from_specifier(document, i)
})?;
let referrer = document.specifier();
let referrer_kind = language_server
.is_cjs_resolver
.get_doc_module_kind(document);
let resolution_mode = document.resolution_mode();
let file_referrer = document.file_referrer();
let config_data = language_server
.config
@ -1312,7 +1344,7 @@ impl CodeActionCollection {
if !language_server.resolver.is_bare_package_json_dep(
&dep_key,
referrer,
referrer_kind,
resolution_mode,
) {
return None;
}
@ -1332,7 +1364,7 @@ impl CodeActionCollection {
}
if language_server
.resolver
.npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer)
.npm_to_file_url(&npm_ref, referrer, resolution_mode, file_referrer)
.is_some()
{
// The package import has types.
@ -1376,7 +1408,7 @@ impl CodeActionCollection {
character: import_start.column_index as u32,
};
let new_text = format!(
"{}// @deno-types=\"{}\"\n",
"{}// @ts-types=\"{}\"\n",
if position.character == 0 { "" } else { "\n" },
&types_specifier_text
);
@ -1389,7 +1421,7 @@ impl CodeActionCollection {
};
Some(lsp::CodeAction {
title: format!(
"Add @deno-types directive for \"{}\"",
"Add @ts-types directive for \"{}\"",
&types_specifier_text
),
kind: Some(lsp::CodeActionKind::QUICKFIX),


@ -9,16 +9,14 @@ use super::jsr::CliJsrSearchApi;
use super::lsp_custom;
use super::npm::CliNpmSearchApi;
use super::registries::ModuleRegistry;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver;
use super::search::PackageSearchApi;
use super::tsc;
use crate::graph_util::to_node_resolution_mode;
use crate::jsr::JsrFetchResolver;
use crate::util::path::is_importable_ext;
use crate::util::path::relative_specifier;
use deno_graph::source::ResolutionMode;
use deno_graph::Range;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_ast::LineAndColumnIndex;
@ -36,7 +34,8 @@ use deno_semver::package::PackageNv;
use import_map::ImportMap;
use indexmap::IndexSet;
use lsp_types::CompletionList;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Regex;
use tower_lsp::lsp_types as lsp;
@ -113,7 +112,7 @@ async fn check_auto_config_registry(
/// which we want to ignore when replacing text.
fn to_narrow_lsp_range(
text_info: &SourceTextInfo,
range: &deno_graph::Range,
range: deno_graph::PositionRange,
) -> lsp::Range {
let end_byte_index = text_info
.loc_to_source_pos(LineAndColumnIndex {
@ -161,26 +160,25 @@ pub async fn get_import_completions(
jsr_search_api: &CliJsrSearchApi,
npm_search_api: &CliNpmSearchApi,
documents: &Documents,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver,
maybe_import_map: Option<&ImportMap>,
) -> Option<lsp::CompletionResponse> {
let document = documents.get(specifier)?;
let specifier_kind = is_cjs_resolver.get_doc_module_kind(&document);
let file_referrer = document.file_referrer();
let (text, _, range) = document.get_maybe_dependency(position)?;
let range = to_narrow_lsp_range(document.text_info(), &range);
let (text, _, graph_range) = document.get_maybe_dependency(position)?;
let resolution_mode = graph_range
.resolution_mode
.map(to_node_resolution_mode)
.unwrap_or_else(|| document.resolution_mode());
let range = to_narrow_lsp_range(document.text_info(), graph_range.range);
let resolved = resolver
.as_cli_resolver(file_referrer)
.resolve(
&text,
&Range {
specifier: specifier.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
specifier_kind,
ResolutionMode::Execution,
specifier,
deno_graph::Position::zeroed(),
resolution_mode,
NodeResolutionKind::Execution,
)
.ok();
if let Some(completion_list) = get_jsr_completions(
@ -206,7 +204,7 @@ pub async fn get_import_completions(
// completions for import map specifiers
Some(lsp::CompletionResponse::List(completion_list))
} else if let Some(completion_list) =
get_local_completions(specifier, specifier_kind, &text, &range, resolver)
get_local_completions(specifier, resolution_mode, &text, &range, resolver)
{
// completions for local relative modules
Some(lsp::CompletionResponse::List(completion_list))
@ -361,7 +359,7 @@ fn get_import_map_completions(
/// Return local completions that are relative to the base specifier.
fn get_local_completions(
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
text: &str,
range: &lsp::Range,
resolver: &LspResolver,
@ -374,13 +372,10 @@ fn get_local_completions(
.as_cli_resolver(Some(referrer))
.resolve(
parent,
&Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
ResolutionMode::Execution,
referrer,
deno_graph::Position::zeroed(),
resolution_mode,
NodeResolutionKind::Execution,
)
.ok()?;
let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
@ -748,13 +743,16 @@ fn get_node_completions(
}
let items = SUPPORTED_BUILTIN_NODE_MODULES
.iter()
.map(|name| {
.filter_map(|name| {
if name.starts_with('_') {
return None;
}
let specifier = format!("node:{}", name);
let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: *range,
new_text: specifier.clone(),
}));
lsp::CompletionItem {
Some(lsp::CompletionItem {
label: specifier,
kind: Some(lsp::CompletionItemKind::FILE),
detail: Some("(node)".to_string()),
@ -763,7 +761,7 @@ fn get_node_completions(
IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
),
..Default::default()
}
})
})
.collect();
Some(CompletionList {
@ -831,7 +829,6 @@ mod tests {
use crate::lsp::documents::LanguageId;
use crate::lsp::search::tests::TestPackageSearchApi;
use deno_core::resolve_url;
use deno_graph::Range;
use pretty_assertions::assert_eq;
use std::collections::HashMap;
use test_util::TempDir;
@ -912,7 +909,7 @@ mod tests {
ModuleSpecifier::from_file_path(file_c).expect("could not create");
let actual = get_local_completions(
&specifier,
NodeModuleKind::Esm,
ResolutionMode::Import,
"./",
&lsp::Range {
start: lsp::Position {
@ -1608,8 +1605,7 @@ mod tests {
let text_info = SourceTextInfo::from_string(r#""te""#.to_string());
let range = to_narrow_lsp_range(
&text_info,
&Range {
specifier: ModuleSpecifier::parse("https://deno.land").unwrap(),
deno_graph::PositionRange {
start: deno_graph::Position {
line: 0,
character: 0,
@ -1632,8 +1628,7 @@ mod tests {
let text_info = SourceTextInfo::from_string(r#""te"#.to_string());
let range = to_narrow_lsp_range(
&text_info,
&Range {
specifier: ModuleSpecifier::parse("https://deno.land").unwrap(),
deno_graph::PositionRange {
start: deno_graph::Position {
line: 0,
character: 0,


@ -41,6 +41,7 @@ use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::PackageJson;
use indexmap::IndexSet;
use lsp_types::ClientCapabilities;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
@ -62,7 +63,7 @@ use crate::args::ConfigFile;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::cache::FastInsecureHasher;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::lsp::logging::lsp_warn;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
@ -458,6 +459,19 @@ impl Default for LanguagePreferences {
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct SuggestionActionsSettings {
#[serde(default = "is_true")]
pub enabled: bool,
}
impl Default for SuggestionActionsSettings {
fn default() -> Self {
SuggestionActionsSettings { enabled: true }
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct UpdateImportsOnFileMoveOptions {
@ -489,6 +503,8 @@ pub struct LanguageWorkspaceSettings {
#[serde(default)]
pub suggest: CompletionSettings,
#[serde(default)]
pub suggestion_actions: SuggestionActionsSettings,
#[serde(default)]
pub update_imports_on_file_move: UpdateImportsOnFileMoveOptions,
}
@ -1202,7 +1218,7 @@ impl ConfigData {
specified_config: Option<&Path>,
scope: &ModuleSpecifier,
settings: &Settings,
file_fetcher: &Arc<FileFetcher>,
file_fetcher: &Arc<CliFileFetcher>,
// sync requirement is because the lsp requires sync
cached_deno_config_fs: &(dyn DenoConfigFs + Sync),
deno_json_cache: &(dyn DenoJsonCache + Sync),
@ -1297,7 +1313,7 @@ impl ConfigData {
member_dir: Arc<WorkspaceDirectory>,
scope: Arc<ModuleSpecifier>,
settings: &Settings,
file_fetcher: Option<&Arc<FileFetcher>>,
file_fetcher: Option<&Arc<CliFileFetcher>>,
) -> Self {
let (settings, workspace_folder) = settings.get_for_specifier(&scope);
let mut watched_files = HashMap::with_capacity(10);
@ -1818,7 +1834,7 @@ impl ConfigTree {
&mut self,
settings: &Settings,
workspace_files: &IndexSet<ModuleSpecifier>,
file_fetcher: &Arc<FileFetcher>,
file_fetcher: &Arc<CliFileFetcher>,
) {
lsp_log!("Refreshing configuration tree...");
// since we're resolving a workspace multiple times in different
@ -2092,7 +2108,7 @@ impl<T: Clone> CachedFsItems<T> {
#[derive(Default)]
struct InnerData {
stat_calls: CachedFsItems<deno_config::fs::FsMetadata>,
read_to_string_calls: CachedFsItems<String>,
read_to_string_calls: CachedFsItems<Cow<'static, str>>,
}
#[derive(Default)]
@ -2113,7 +2129,7 @@ impl DenoConfigFs for CachedDenoConfigFs {
fn read_to_string_lossy(
&self,
path: &Path,
) -> Result<String, std::io::Error> {
) -> Result<Cow<'static, str>, std::io::Error> {
self
.0
.lock()
@ -2291,6 +2307,7 @@ mod tests {
enabled: true,
},
},
suggestion_actions: SuggestionActionsSettings { enabled: true },
update_imports_on_file_move: UpdateImportsOnFileMoveOptions {
enabled: UpdateImportsOnFileMoveEnabled::Prompt
}
@ -2337,6 +2354,7 @@ mod tests {
enabled: true,
},
},
suggestion_actions: SuggestionActionsSettings { enabled: true },
update_imports_on_file_move: UpdateImportsOnFileMoveOptions {
enabled: UpdateImportsOnFileMoveEnabled::Prompt
}


@ -24,6 +24,7 @@ use crate::resolver::SloppyImportsCachedFs;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider;
use crate::tsc::DiagnosticCategory;
use crate::util::path::to_percent_decoded_str;
use deno_ast::MediaType;
@ -44,8 +45,9 @@ use deno_graph::source::ResolveError;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_fs;
use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime;
@ -833,7 +835,7 @@ fn generate_lint_diagnostics(
lint_rule_provider.resolve_lint_rules(Default::default(), None)
},
fix: false,
deno_lint_config: deno_lint::linter::LintConfig {
deno_lint_config: DenoLintConfig {
default_jsx_factory: None,
default_jsx_fragment_factory: None,
},
@ -906,8 +908,22 @@ async fn generate_ts_diagnostics(
} else {
Default::default()
};
for (specifier_str, ts_json_diagnostics) in ts_diagnostics_map {
for (specifier_str, mut ts_json_diagnostics) in ts_diagnostics_map {
let specifier = resolve_url(&specifier_str)?;
let suggestion_actions_settings = snapshot
.config
.language_settings_for_specifier(&specifier)
.map(|s| s.suggestion_actions.clone())
.unwrap_or_default();
if !suggestion_actions_settings.enabled {
ts_json_diagnostics.retain(|d| {
d.category != DiagnosticCategory::Suggestion
// Still show deprecated and unused diagnostics.
// https://github.com/microsoft/vscode/blob/ce50bd4876af457f64d83cfd956bc916535285f4/extensions/typescript-language-features/src/languageFeatures/diagnostics.ts#L113-L114
|| d.reports_deprecated == Some(true)
|| d.reports_unnecessary == Some(true)
});
}
let version = snapshot
.documents
.get(&specifier)
@ -1262,11 +1278,11 @@ impl DenoDiagnostic {
Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None),
Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))),
Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("npm package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => {
let maybe_sloppy_resolution = CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs))
).resolve(specifier, SloppyImportsResolutionMode::Execution);
).resolve(specifier, SloppyImportsResolutionKind::Execution);
let data = maybe_sloppy_resolution.as_ref().map(|res| {
json!({
"specifier": specifier,
@ -1355,7 +1371,7 @@ fn diagnose_resolution(
}
// don't bother warning about sloppy import redirects from .js to .d.ts
// because explaining how to fix this via a diagnostic involves using
// @deno-types and that's a bit complicated to explain
// @ts-types and that's a bit complicated to explain
let is_sloppy_import_dts_redirect = doc_specifier.scheme() == "file"
&& doc.media_type().is_declaration()
&& !MediaType::from_specifier(specifier).is_declaration();
@ -1523,7 +1539,7 @@ fn diagnose_dependency(
.iter()
.map(|i| documents::to_lsp_range(&i.specifier_range))
.collect();
// TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has
// TODO(nayeemrmn): This is a crude way of detecting `@ts-types` which has
// a different specifier and therefore needs a separate call to
// `diagnose_resolution()`. It would be much cleaner if that were modelled as
// a separate dependency: https://github.com/denoland/deno_graph/issues/247.
@ -1531,7 +1547,7 @@ fn diagnose_dependency(
&& !dependency.imports.iter().any(|i| {
dependency
.maybe_type
.includes(&i.specifier_range.start)
.includes(i.specifier_range.range.start)
.is_some()
});
@ -1540,7 +1556,7 @@ fn diagnose_dependency(
snapshot,
dependency_key,
if dependency.maybe_code.is_none()
// If not @deno-types, diagnose the types if the code errored because
// If not @ts-types, diagnose the types if the code errored because
// it's likely resolving into the node_modules folder, which might be
// erroring correctly due to resolution only being for bundlers. Let this
// fail at runtime if necessary, but don't bother erroring in the editor
@ -1707,7 +1723,6 @@ mod tests {
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver,
},
)
@ -1952,7 +1967,7 @@ let c: number = "a";
&[(
"a.ts",
r#"
// @deno-types="bad.d.ts"
// @ts-types="bad.d.ts"
import "bad.js";
import "bad.js";
"#,
@ -2006,11 +2021,11 @@ let c: number = "a";
"range": {
"start": {
"line": 1,
"character": 23
"character": 21
},
"end": {
"line": 1,
"character": 33
"character": 31
}
},
"severity": 1,


@ -3,7 +3,6 @@
use super::cache::calculate_fs_version;
use super::cache::LspCache;
use super::config::Config;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver;
use super::resolver::ScopeDepInfo;
use super::resolver::SingleReferrerGraphResolver;
@ -27,7 +26,6 @@ use deno_core::futures::future::Shared;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::Resolution;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node;
@ -36,7 +34,8 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use indexmap::IndexSet;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
@ -66,6 +65,12 @@ pub enum LanguageId {
Html,
Css,
Yaml,
Sql,
Svelte,
Vue,
Astro,
Vento,
Nunjucks,
Unknown,
}
@ -82,6 +87,12 @@ impl LanguageId {
LanguageId::Html => Some("html"),
LanguageId::Css => Some("css"),
LanguageId::Yaml => Some("yaml"),
LanguageId::Sql => Some("sql"),
LanguageId::Svelte => Some("svelte"),
LanguageId::Vue => Some("vue"),
LanguageId::Astro => Some("astro"),
LanguageId::Vento => Some("vto"),
LanguageId::Nunjucks => Some("njk"),
LanguageId::Unknown => None,
}
}
@ -97,6 +108,12 @@ impl LanguageId {
LanguageId::Html => Some("text/html"),
LanguageId::Css => Some("text/css"),
LanguageId::Yaml => Some("application/yaml"),
LanguageId::Sql => None,
LanguageId::Svelte => None,
LanguageId::Vue => None,
LanguageId::Astro => None,
LanguageId::Vento => None,
LanguageId::Nunjucks => None,
LanguageId::Unknown => None,
}
}
@ -124,6 +141,12 @@ impl FromStr for LanguageId {
"html" => Ok(Self::Html),
"css" => Ok(Self::Css),
"yaml" => Ok(Self::Yaml),
"sql" => Ok(Self::Sql),
"svelte" => Ok(Self::Svelte),
"vue" => Ok(Self::Vue),
"astro" => Ok(Self::Astro),
"vento" => Ok(Self::Vento),
"nunjucks" => Ok(Self::Nunjucks),
_ => Ok(Self::Unknown),
}
}
@ -228,6 +251,13 @@ impl AssetOrDocument {
pub fn document_lsp_version(&self) -> Option<i32> {
self.document().and_then(|d| d.maybe_lsp_version())
}
pub fn resolution_mode(&self) -> ResolutionMode {
match self {
AssetOrDocument::Asset(_) => ResolutionMode::Import,
AssetOrDocument::Document(d) => d.resolution_mode(),
}
}
}
type ModuleResult = Result<deno_graph::JsModule, deno_graph::ModuleGraphError>;
@ -313,6 +343,7 @@ pub struct Document {
media_type: MediaType,
/// Present if and only if this is an open document.
open_data: Option<DocumentOpenData>,
resolution_mode: ResolutionMode,
resolver: Arc<LspResolver>,
specifier: ModuleSpecifier,
text: Arc<str>,
@ -328,7 +359,6 @@ impl Document {
maybe_lsp_version: Option<i32>,
maybe_language_id: Option<LanguageId>,
maybe_headers: Option<HashMap<String, String>>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>,
config: Arc<Config>,
cache: &Arc<LspCache>,
@ -340,7 +370,7 @@ impl Document {
.or(file_referrer);
let media_type =
resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id);
let (maybe_parsed_source, maybe_module) =
let (maybe_parsed_source, maybe_module, resolution_mode) =
if media_type_is_diagnosable(media_type) {
parse_and_analyze_module(
specifier.clone(),
@ -348,11 +378,10 @@ impl Document {
maybe_headers.as_ref(),
media_type,
file_referrer.as_ref(),
is_cjs_resolver,
&resolver,
)
} else {
(None, None)
(None, None, ResolutionMode::Import)
};
let maybe_module = maybe_module.and_then(Result::ok);
let dependencies = maybe_module
@ -387,6 +416,7 @@ impl Document {
maybe_parsed_source,
maybe_semantic_tokens: Default::default(),
}),
resolution_mode,
resolver,
specifier,
text,
@ -396,7 +426,6 @@ impl Document {
fn with_new_config(
&self,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>,
config: Arc<Config>,
) -> Arc<Self> {
@ -408,20 +437,20 @@ impl Document {
let dependencies;
let maybe_types_dependency;
let maybe_parsed_source;
let found_resolution_mode;
let is_script;
let maybe_test_module_fut;
if media_type != self.media_type {
let parsed_source_result =
parse_source(self.specifier.clone(), self.text.clone(), media_type);
let maybe_module = analyze_module(
let (maybe_module_result, resolution_mode) = analyze_module(
self.specifier.clone(),
&parsed_source_result,
self.maybe_headers.as_ref(),
self.file_referrer.as_ref(),
is_cjs_resolver,
&resolver,
)
.ok();
);
let maybe_module = maybe_module_result.ok();
dependencies = maybe_module
.as_ref()
.map(|m| Arc::new(m.dependencies.clone()))
@ -433,17 +462,21 @@ impl Document {
maybe_parsed_source = Some(parsed_source_result);
maybe_test_module_fut =
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config);
found_resolution_mode = resolution_mode;
} else {
let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref());
let is_cjs_resolver =
resolver.as_is_cjs_resolver(self.file_referrer.as_ref());
let npm_resolver =
resolver.create_graph_npm_resolver(self.file_referrer.as_ref());
let config_data = resolver.as_config_data(self.file_referrer.as_ref());
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
found_resolution_mode = is_cjs_resolver
.get_lsp_resolution_mode(&self.specifier, self.is_script);
let resolver = SingleReferrerGraphResolver {
valid_referrer: &self.specifier,
referrer_kind: is_cjs_resolver
.get_lsp_referrer_kind(&self.specifier, self.is_script),
module_resolution_mode: found_resolution_mode,
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
@ -493,6 +526,7 @@ impl Document {
maybe_language_id: self.maybe_language_id,
maybe_test_module_fut,
media_type,
resolution_mode: found_resolution_mode,
open_data: self.open_data.as_ref().map(|d| DocumentOpenData {
lsp_version: d.lsp_version,
maybe_parsed_source,
@ -508,7 +542,6 @@ impl Document {
fn with_change(
&self,
is_cjs_resolver: &LspIsCjsResolver,
version: i32,
changes: Vec<lsp::TextDocumentContentChangeEvent>,
) -> Result<Arc<Self>, AnyError> {
@ -530,7 +563,7 @@ impl Document {
}
let text: Arc<str> = content.into();
let media_type = self.media_type;
let (maybe_parsed_source, maybe_module) = if self
let (maybe_parsed_source, maybe_module, resolution_mode) = if self
.maybe_language_id
.as_ref()
.map(|li| li.is_diagnosable())
@ -542,11 +575,10 @@ impl Document {
self.maybe_headers.as_ref(),
media_type,
self.file_referrer.as_ref(),
is_cjs_resolver,
self.resolver.as_ref(),
)
} else {
(None, None)
(None, None, ResolutionMode::Import)
};
let maybe_module = maybe_module.and_then(Result::ok);
let dependencies = maybe_module
@ -580,6 +612,7 @@ impl Document {
maybe_navigation_tree: Mutex::new(None),
maybe_test_module_fut,
media_type,
resolution_mode,
open_data: self.open_data.is_some().then_some(DocumentOpenData {
lsp_version: version,
maybe_parsed_source,
@ -613,6 +646,7 @@ impl Document {
maybe_test_module_fut: self.maybe_test_module_fut.clone(),
media_type: self.media_type,
open_data: None,
resolution_mode: self.resolution_mode,
resolver: self.resolver.clone(),
})
}
@ -641,6 +675,7 @@ impl Document {
maybe_test_module_fut: self.maybe_test_module_fut.clone(),
media_type: self.media_type,
open_data: self.open_data.clone(),
resolution_mode: self.resolution_mode,
resolver: self.resolver.clone(),
})
}
@ -664,6 +699,10 @@ impl Document {
&self.text
}
pub fn resolution_mode(&self) -> ResolutionMode {
self.resolution_mode
}
pub fn text_info(&self) -> &SourceTextInfo {
// try to get the text info from the parsed source and if
// not then create one in the cell
@ -677,14 +716,6 @@ impl Document {
.get_or_init(|| SourceTextInfo::new(self.text.clone()))
})
}
/// If this is maybe a CJS script and maybe not an ES module.
///
/// Use `LspIsCjsResolver` to determine for sure.
pub fn is_script(&self) -> Option<bool> {
self.is_script
}
pub fn line_index(&self) -> Arc<LineIndex> {
self.line_index.clone()
}
@ -768,7 +799,7 @@ impl Document {
};
self.dependencies().iter().find_map(|(s, dep)| {
dep
.includes(&position)
.includes(position)
.map(|r| (s.clone(), dep.clone(), r.clone()))
})
}
@ -809,15 +840,15 @@ fn resolve_media_type(
MediaType::from_specifier(specifier)
}
pub fn to_lsp_range(range: &deno_graph::Range) -> lsp::Range {
pub fn to_lsp_range(referrer: &deno_graph::Range) -> lsp::Range {
lsp::Range {
start: lsp::Position {
line: range.start.line as u32,
character: range.start.character as u32,
line: referrer.range.start.line as u32,
character: referrer.range.start.character as u32,
},
end: lsp::Position {
line: range.end.line as u32,
character: range.end.character as u32,
line: referrer.range.end.line as u32,
character: referrer.range.end.character as u32,
},
}
}
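
The renamed parameter reflects a shape change in deno_graph::Range in this upgrade: positions now live under a nested range field, and the referrer carries its own resolution mode. A sketch of the assumed layout, inferred from the field accesses here and from referrer_range.resolution_mode further down; this is not the authoritative deno_graph definition:

// struct Range {
//   specifier: ModuleSpecifier,
//   resolution_mode: Option<ResolutionMode>, // per-import mode, when known
//   range: PositionRange,                    // start/end positions moved in here
// }
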
@ -832,7 +863,6 @@ impl FileSystemDocuments {
pub fn get(
&self,
specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>,
config: &Arc<Config>,
cache: &Arc<LspCache>,
@ -856,14 +886,7 @@ impl FileSystemDocuments {
};
if dirty {
// attempt to update the file on the file system
self.refresh_document(
specifier,
is_cjs_resolver,
resolver,
config,
cache,
file_referrer,
)
self.refresh_document(specifier, resolver, config, cache, file_referrer)
} else {
old_doc
}
@ -874,7 +897,6 @@ impl FileSystemDocuments {
fn refresh_document(
&self,
specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>,
config: &Arc<Config>,
cache: &Arc<LspCache>,
@ -896,7 +918,6 @@ impl FileSystemDocuments {
None,
None,
None,
is_cjs_resolver,
resolver.clone(),
config.clone(),
cache,
@ -913,7 +934,6 @@ impl FileSystemDocuments {
None,
None,
None,
is_cjs_resolver,
resolver.clone(),
config.clone(),
cache,
@ -936,7 +956,7 @@ impl FileSystemDocuments {
let content = bytes_to_content(
specifier,
media_type,
cached_file.content,
cached_file.content.into_owned(),
maybe_charset,
)
.ok()?;
@ -946,7 +966,6 @@ impl FileSystemDocuments {
None,
None,
Some(cached_file.metadata.headers),
is_cjs_resolver,
resolver.clone(),
config.clone(),
cache,
@ -987,8 +1006,6 @@ pub struct Documents {
/// The DENO_DIR that the documents looks for non-file based modules.
cache: Arc<LspCache>,
config: Arc<Config>,
/// Resolver for detecting if a document is CJS or ESM.
is_cjs_resolver: Arc<LspIsCjsResolver>,
/// A resolver that takes into account currently loaded import map and JSX
/// settings.
resolver: Arc<LspResolver>,
@ -1024,7 +1041,6 @@ impl Documents {
// the cache for remote modules here in order to get the
// x-typescript-types?
None,
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
&self.cache,
@ -1059,7 +1075,7 @@ impl Documents {
))
})?;
self.dirty = true;
let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?;
let doc = doc.with_change(version, changes)?;
self.open_docs.insert(doc.specifier().clone(), doc.clone());
Ok(doc)
}
@ -1191,7 +1207,6 @@ impl Documents {
if let Some(old_doc) = old_doc {
self.file_system_docs.get(
specifier,
&self.is_cjs_resolver,
&self.resolver,
&self.config,
&self.cache,
@ -1216,7 +1231,6 @@ impl Documents {
} else {
self.file_system_docs.get(
&specifier,
&self.is_cjs_resolver,
&self.resolver,
&self.config,
&self.cache,
@ -1271,7 +1285,8 @@ impl Documents {
/// tsc when type checking.
pub fn resolve(
&self,
raw_specifiers: &[String],
// (is_cjs: bool, raw_specifier: String)
raw_specifiers: &[(bool, String)],
referrer: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
@ -1281,11 +1296,12 @@ impl Documents {
.and_then(|d| d.file_referrer())
.or(file_referrer);
let dependencies = referrer_doc.as_ref().map(|d| d.dependencies());
let referrer_kind = self
.is_cjs_resolver
.get_maybe_doc_module_kind(referrer, referrer_doc.as_deref());
let mut results = Vec::new();
for raw_specifier in raw_specifiers {
for (is_cjs, raw_specifier) in raw_specifiers {
let resolution_mode = match is_cjs {
true => ResolutionMode::Require,
false => ResolutionMode::Import,
};
if raw_specifier.starts_with("asset:") {
if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) {
let media_type = MediaType::from_specifier(&specifier);
@ -1300,14 +1316,14 @@ impl Documents {
results.push(self.resolve_dependency(
specifier,
referrer,
referrer_kind,
resolution_mode,
file_referrer,
));
} else if let Some(specifier) = dep.maybe_code.maybe_specifier() {
results.push(self.resolve_dependency(
specifier,
referrer,
referrer_kind,
resolution_mode,
file_referrer,
));
} else {
@ -1316,19 +1332,16 @@ impl Documents {
} else if let Ok(specifier) =
self.resolver.as_cli_resolver(file_referrer).resolve(
raw_specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
ResolutionMode::Types,
referrer,
deno_graph::Position::zeroed(),
resolution_mode,
NodeResolutionKind::Types,
)
{
results.push(self.resolve_dependency(
&specifier,
referrer,
referrer_kind,
resolution_mode,
file_referrer,
));
} else {
@ -1347,7 +1360,6 @@ impl Documents {
) {
self.config = Arc::new(config.clone());
self.cache = Arc::new(cache.clone());
self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(cache));
self.resolver = resolver.clone();
node_resolver::PackageJsonThreadLocalCache::clear();
@ -1371,21 +1383,14 @@ impl Documents {
if !config.specifier_enabled(doc.specifier()) {
continue;
}
*doc = doc.with_new_config(
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
*doc = doc.with_new_config(self.resolver.clone(), self.config.clone());
}
for mut doc in self.file_system_docs.docs.iter_mut() {
if !config.specifier_enabled(doc.specifier()) {
continue;
}
*doc.value_mut() = doc.with_new_config(
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
*doc.value_mut() =
doc.with_new_config(self.resolver.clone(), self.config.clone());
}
self.open_docs = open_docs;
let mut preload_count = 0;
@ -1402,7 +1407,6 @@ impl Documents {
{
fs_docs.refresh_document(
specifier,
&self.is_cjs_resolver,
&self.resolver,
&self.config,
&self.cache,
@ -1477,27 +1481,24 @@ impl Documents {
let type_specifier = jsx_config.default_types_specifier.as_ref()?;
let code_specifier = jsx_config.default_specifier.as_ref()?;
let cli_resolver = self.resolver.as_cli_resolver(Some(scope));
let range = deno_graph::Range {
specifier: jsx_config.base_url.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
};
let type_specifier = cli_resolver
.resolve(
type_specifier,
&range,
&jsx_config.base_url,
deno_graph::Position::zeroed(),
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
deno_package_json::NodeModuleKind::Esm,
ResolutionMode::Types,
ResolutionMode::Import,
NodeResolutionKind::Types,
)
.ok()?;
let code_specifier = cli_resolver
.resolve(
code_specifier,
&range,
&jsx_config.base_url,
deno_graph::Position::zeroed(),
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
deno_package_json::NodeModuleKind::Esm,
ResolutionMode::Execution,
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.ok()?;
dep_info
@ -1542,7 +1543,7 @@ impl Documents {
&self,
specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> {
if let Some(module_name) = specifier.as_str().strip_prefix("node:") {
@ -1559,7 +1560,7 @@ impl Documents {
let (s, mt) = self.resolver.npm_to_file_url(
&npm_ref,
referrer,
referrer_kind,
resolution_mode,
file_referrer,
)?;
specifier = s;
@ -1571,8 +1572,12 @@ impl Documents {
return Some((specifier, media_type));
};
if let Some(types) = doc.maybe_types_dependency().maybe_specifier() {
let specifier_kind = self.is_cjs_resolver.get_doc_module_kind(&doc);
self.resolve_dependency(types, &specifier, specifier_kind, file_referrer)
self.resolve_dependency(
types,
&specifier,
doc.resolution_mode(),
file_referrer,
)
} else {
Some((doc.specifier().clone(), doc.media_type()))
}
@ -1640,19 +1645,25 @@ fn parse_and_analyze_module(
maybe_headers: Option<&HashMap<String, String>>,
media_type: MediaType,
file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver,
) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
) -> (
Option<ParsedSourceResult>,
Option<ModuleResult>,
ResolutionMode,
) {
let parsed_source_result = parse_source(specifier.clone(), text, media_type);
let module_result = analyze_module(
let (module_result, resolution_mode) = analyze_module(
specifier,
&parsed_source_result,
maybe_headers,
file_referrer,
is_cjs_resolver,
resolver,
);
(Some(parsed_source_result), Some(module_result))
(
Some(parsed_source_result),
Some(module_result),
resolution_mode,
)
}
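
Every caller now destructures the widened return value. A minimal sketch of the new call shape, reusing the names from this file (the text argument is assumed from parse_source above):

let (maybe_parsed_source, maybe_module, resolution_mode) = parse_and_analyze_module(
  specifier.clone(),
  text.clone(),
  maybe_headers.as_ref(),
  media_type,
  file_referrer.as_ref(),
  &resolver,
);
// non-diagnosable media types skip analysis entirely and
// default to ResolutionMode::Import
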
fn parse_source(
@ -1675,26 +1686,28 @@ fn analyze_module(
parsed_source_result: &ParsedSourceResult,
maybe_headers: Option<&HashMap<String, String>>,
file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver,
) -> ModuleResult {
) -> (ModuleResult, ResolutionMode) {
match parsed_source_result {
Ok(parsed_source) => {
let npm_resolver = resolver.create_graph_npm_resolver(file_referrer);
let cli_resolver = resolver.as_cli_resolver(file_referrer);
let is_cjs_resolver = resolver.as_is_cjs_resolver(file_referrer);
let config_data = resolver.as_config_data(file_referrer);
let valid_referrer = specifier.clone();
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let resolver = SingleReferrerGraphResolver {
valid_referrer: &valid_referrer,
referrer_kind: is_cjs_resolver.get_lsp_referrer_kind(
let module_resolution_mode = is_cjs_resolver.get_lsp_resolution_mode(
&specifier,
Some(parsed_source.compute_is_script()),
),
);
let resolver = SingleReferrerGraphResolver {
valid_referrer: &valid_referrer,
module_resolution_mode,
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
(
Ok(deno_graph::parse_module_from_ast(
deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::TypesOnly,
@ -1708,11 +1721,16 @@ fn analyze_module(
maybe_resolver: Some(&resolver),
maybe_npm_resolver: Some(&npm_resolver),
},
))
)),
module_resolution_mode,
)
}
Err(err) => Err(deno_graph::ModuleGraphError::ModuleError(
Err(err) => (
Err(deno_graph::ModuleGraphError::ModuleError(
deno_graph::ModuleError::ParseErr(specifier, err.clone()),
)),
ResolutionMode::Import,
),
}
}
View file
@ -2,7 +2,8 @@
use crate::args::jsr_api_url;
use crate::args::jsr_url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::jsr::partial_jsr_package_version_info_from_slice;
use crate::jsr::JsrFetchResolver;
use dashmap::DashMap;
@ -17,6 +18,7 @@ use deno_graph::ModuleSpecifier;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use deno_semver::Version;
use serde::Deserialize;
use std::collections::HashMap;
@ -32,8 +34,8 @@ pub struct JsrCacheResolver {
/// The `module_graph` fields of the version infos should be forcibly absent.
/// It can be large and we don't want to store it.
info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
workspace_scope_by_name: HashMap<String, ModuleSpecifier>,
info_by_name: DashMap<StackString, Option<Arc<JsrPackageInfo>>>,
workspace_scope_by_name: HashMap<StackString, ModuleSpecifier>,
cache: Arc<dyn HttpCache>,
}
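
StackString is deno_semver's small-string type for package names, so typical short names avoid a heap allocation while still exposing a &str view. A sketch of the two conversions this diff relies on (the From<&str> impl is assumed from the .as_str().into() call below):

let name: StackString = "preact".into();
assert_eq!(name.as_str(), "preact"); // &str view, used for map lookups
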
@ -58,7 +60,7 @@ impl JsrCacheResolver {
continue;
};
let nv = PackageNv {
name: jsr_pkg_config.name.clone(),
name: jsr_pkg_config.name.as_str().into(),
version: version.clone(),
};
info_by_name.insert(
@ -124,8 +126,8 @@ impl JsrCacheResolver {
return nv.value().clone();
}
let maybe_get_nv = || {
let name = req.name.clone();
let package_info = self.package_info(&name)?;
let name = &req.name;
let package_info = self.package_info(name)?;
// Find the first matching version of the package which is cached.
let mut versions = package_info.versions.keys().collect::<Vec<_>>();
versions.sort();
@ -143,7 +145,10 @@ impl JsrCacheResolver {
self.package_version_info(&nv).is_some()
})
.cloned()?;
Some(PackageNv { name, version })
Some(PackageNv {
name: name.clone(),
version,
})
};
let nv = maybe_get_nv();
self.nv_by_req.insert(req.clone(), nv.clone());
@ -215,7 +220,10 @@ impl JsrCacheResolver {
None
}
pub fn package_info(&self, name: &str) -> Option<Arc<JsrPackageInfo>> {
pub fn package_info(
&self,
name: &StackString,
) -> Option<Arc<JsrPackageInfo>> {
if let Some(info) = self.info_by_name.get(name) {
return info.value().clone();
}
@ -225,7 +233,7 @@ impl JsrCacheResolver {
serde_json::from_slice::<JsrPackageInfo>(&meta_bytes).ok()
};
let info = read_cached_package_info().map(Arc::new);
self.info_by_name.insert(name.to_string(), info.clone());
self.info_by_name.insert(name.clone(), info.clone());
info
}
@ -262,12 +270,12 @@ fn read_cached_url(
cache
.get(&cache.cache_item_key(url).ok()?, None)
.ok()?
.map(|f| f.content)
.map(|f| f.content.into_owned())
}
#[derive(Debug)]
pub struct CliJsrSearchApi {
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
resolver: JsrFetchResolver,
search_cache: DashMap<String, Arc<Vec<String>>>,
versions_cache: DashMap<String, Arc<Vec<Version>>>,
@ -275,7 +283,7 @@ pub struct CliJsrSearchApi {
}
impl CliJsrSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
let resolver = JsrFetchResolver::new(file_fetcher.clone());
Self {
file_fetcher,
@ -309,10 +317,8 @@ impl PackageSearchApi for CliJsrSearchApi {
let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move {
file_fetcher
.fetch_bypass_permissions(&search_url)
.await?
.into_text_decoded()
let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
TextDecodedFile::decode(file)
})
.await??;
let names = Arc::new(parse_jsr_search_response(&file.source)?);
View file
@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_core::anyhow::anyhow;
@ -22,7 +23,8 @@ use deno_semver::jsr::JsrPackageReqReference;
use indexmap::Equivalent;
use indexmap::IndexSet;
use log::error;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use serde::Deserialize;
use serde_json::from_value;
use std::collections::BTreeMap;
@ -78,7 +80,6 @@ use super::parent_process_checker;
use super::performance::Performance;
use super::refactor;
use super::registries::ModuleRegistry;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver;
use super::testing;
use super::text;
@ -95,13 +96,12 @@ use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::has_flag_env_var;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::InternalFlags;
use crate::args::UnstableFmtOptions;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_util;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::ConfigWatchedFileType;
@ -146,7 +146,6 @@ pub struct StateSnapshot {
pub project_version: usize,
pub assets: AssetsSnapshot,
pub config: Arc<Config>,
pub is_cjs_resolver: Arc<LspIsCjsResolver>,
pub documents: Arc<Documents>,
pub resolver: Arc<LspResolver>,
}
@ -206,7 +205,6 @@ pub struct Inner {
pub documents: Documents,
http_client_provider: Arc<HttpClientProvider>,
initial_cwd: PathBuf,
pub is_cjs_resolver: Arc<LspIsCjsResolver>,
jsr_search_api: CliJsrSearchApi,
/// Handles module registries, which allow discovery of modules
module_registry: ModuleRegistry,
@ -272,7 +270,12 @@ impl LanguageServer {
open_docs: &open_docs,
};
let graph = module_graph_creator
.create_graph_with_loader(GraphKind::All, roots.clone(), &mut loader)
.create_graph_with_loader(
GraphKind::All,
roots.clone(),
&mut loader,
graph_util::NpmCachingStrategy::Eager,
)
.await?;
graph_util::graph_valid(
&graph,
@ -484,7 +487,6 @@ impl Inner {
let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
panic!("Could not resolve current working directory")
});
let is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&cache));
Self {
assets,
@ -496,7 +498,6 @@ impl Inner {
documents,
http_client_provider,
initial_cwd: initial_cwd.clone(),
is_cjs_resolver,
jsr_search_api,
project_version: 0,
task_queue: Default::default(),
@ -607,7 +608,6 @@ impl Inner {
project_version: self.project_version,
assets: self.assets.snapshot(),
config: Arc::new(self.config.clone()),
is_cjs_resolver: self.is_cjs_resolver.clone(),
documents: Arc::new(self.documents.clone()),
resolver: self.resolver.snapshot(),
})
@ -629,7 +629,6 @@ impl Inner {
}
});
self.cache = LspCache::new(global_cache_url);
self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&self.cache));
let deno_dir = self.cache.deno_dir();
let workspace_settings = self.config.workspace_settings();
let maybe_root_path = self
@ -959,15 +958,15 @@ impl Inner {
}
async fn refresh_config_tree(&mut self) {
let mut file_fetcher = FileFetcher::new(
let file_fetcher = CliFileFetcher::new(
self.cache.global().clone(),
CacheSetting::RespectHeaders,
true,
self.http_client_provider.clone(),
Default::default(),
None,
true,
CacheSetting::RespectHeaders,
super::logging::lsp_log_level(),
);
file_fetcher.set_download_log_level(super::logging::lsp_log_level());
let file_fetcher = Arc::new(file_fetcher);
self
.config
@ -993,13 +992,10 @@ impl Inner {
let resolver = inner.resolver.as_cli_resolver(Some(&referrer));
let Ok(specifier) = resolver.resolve(
&specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
NodeModuleKind::Esm,
deno_graph::source::ResolutionMode::Types,
&referrer,
deno_graph::Position::zeroed(),
ResolutionMode::Import,
NodeResolutionKind::Types,
) else {
return;
};
@ -1640,8 +1636,8 @@ impl Inner {
.get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version());
let specifier_kind = asset_or_doc
.document()
.map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm);
.map(|d| d.resolution_mode())
.unwrap_or(ResolutionMode::Import);
let mut includes_no_cache = false;
for diagnostic in &fixable_diagnostics {
match diagnostic.source.as_deref() {
@ -1859,20 +1855,12 @@ impl Inner {
}
let changes = if code_action_data.fix_id == "fixMissingImport" {
fix_ts_import_changes(
&code_action_data.specifier,
maybe_asset_or_doc
.as_ref()
.and_then(|d| d.document())
.map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm),
&combined_code_actions.changes,
self,
)
.map_err(|err| {
fix_ts_import_changes(&combined_code_actions.changes, self).map_err(
|err| {
error!("Unable to remap changes: {:#}", err);
LspError::internal_error()
})?
},
)?
} else {
combined_code_actions.changes
};
@ -1916,20 +1904,16 @@ impl Inner {
asset_or_doc.scope().cloned(),
)
.await?;
if kind_suffix == ".rewrite.function.returnType" {
refactor_edit_info.edits = fix_ts_import_changes(
&action_data.specifier,
asset_or_doc
.document()
.map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm),
&refactor_edit_info.edits,
self,
)
.map_err(|err| {
if kind_suffix == ".rewrite.function.returnType"
|| kind_suffix == ".move.newFile"
{
refactor_edit_info.edits =
fix_ts_import_changes(&refactor_edit_info.edits, self).map_err(
|err| {
error!("Unable to remap changes: {:#}", err);
LspError::internal_error()
})?
},
)?
}
code_action.edit = refactor_edit_info.to_workspace_edit(self)?;
code_action
@ -2272,7 +2256,6 @@ impl Inner {
&self.jsr_search_api,
&self.npm_search_api,
&self.documents,
&self.is_cjs_resolver,
self.resolver.as_ref(),
self
.config
@ -3681,6 +3664,7 @@ impl Inner {
.unwrap_or_else(create_default_npmrc),
workspace,
force_global_cache,
None,
)?;
let open_docs = self.documents.documents(DocumentsFilter::OpenDiagnosable);
@ -3781,14 +3765,11 @@ impl Inner {
fn task_definitions(&self) -> LspResult<Vec<TaskDefinition>> {
let mut result = vec![];
for config_file in self.config.tree.config_files() {
if let Some(tasks) = json!(&config_file.json.tasks).as_object() {
for (name, value) in tasks {
let Some(command) = value.as_str() else {
continue;
};
if let Some(tasks) = config_file.to_tasks_config().ok().flatten() {
for (name, def) in tasks {
result.push(TaskDefinition {
name: name.clone(),
command: command.to_string(),
command: def.command.clone(),
source_uri: url_to_uri(&config_file.specifier)
.map_err(|_| LspError::internal_error())?,
});
@ -3800,7 +3781,7 @@ impl Inner {
for (name, command) in scripts {
result.push(TaskDefinition {
name: name.clone(),
command: command.clone(),
command: Some(command.clone()),
source_uri: url_to_uri(&package_json.specifier())
.map_err(|_| LspError::internal_error())?,
});
View file
@ -14,9 +14,7 @@ pub const LATEST_DIAGNOSTIC_BATCH_INDEX: &str =
#[serde(rename_all = "camelCase")]
pub struct TaskDefinition {
pub name: String,
// TODO(nayeemrmn): Rename this to `command` in vscode_deno.
#[serde(rename = "detail")]
pub command: String,
pub command: Option<String>,
pub source_uri: lsp::Uri,
}
View file
@ -56,9 +56,6 @@ pub async fn start() -> Result<(), AnyError> {
LanguageServer::performance_request,
)
.custom_method(lsp_custom::TASK_REQUEST, LanguageServer::task_definitions)
// TODO(nayeemrmn): Rename this to `deno/taskDefinitions` in vscode_deno and
// remove this alias.
.custom_method("deno/task", LanguageServer::task_definitions)
.custom_method(testing::TEST_RUN_REQUEST, LanguageServer::test_run_request)
.custom_method(
testing::TEST_RUN_CANCEL_REQUEST,
View file
@ -11,21 +11,22 @@ use serde::Deserialize;
use std::sync::Arc;
use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::npm::NpmFetchResolver;
use super::search::PackageSearchApi;
#[derive(Debug)]
pub struct CliNpmSearchApi {
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
resolver: NpmFetchResolver,
search_cache: DashMap<String, Arc<Vec<String>>>,
versions_cache: DashMap<String, Arc<Vec<Version>>>,
}
impl CliNpmSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
let resolver = NpmFetchResolver::new(
file_fetcher.clone(),
Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
@ -57,10 +58,8 @@ impl PackageSearchApi for CliNpmSearchApi {
.append_pair("text", &format!("{} boost-exact:false", query));
let file_fetcher = self.file_fetcher.clone();
let file = deno_core::unsync::spawn(async move {
file_fetcher
.fetch_bypass_permissions(&search_url)
.await?
.into_text_decoded()
let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
TextDecodedFile::decode(file)
})
.await??;
let names = Arc::new(parse_npm_search_response(&file.source)?);
View file
@ -12,14 +12,15 @@ use super::path_to_regex::StringOrNumber;
use super::path_to_regex::StringOrVec;
use super::path_to_regex::Token;
use crate::args::CacheSetting;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::http_util::HttpClientProvider;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
@ -418,7 +419,7 @@ enum VariableItems {
pub struct ModuleRegistry {
origins: HashMap<String, Vec<RegistryConfiguration>>,
pub location: PathBuf,
pub file_fetcher: Arc<FileFetcher>,
pub file_fetcher: Arc<CliFileFetcher>,
http_cache: Arc<GlobalHttpCache>,
}
@ -432,15 +433,15 @@ impl ModuleRegistry {
location.clone(),
crate::cache::RealDenoCacheEnv,
));
let mut file_fetcher = FileFetcher::new(
let file_fetcher = CliFileFetcher::new(
http_cache.clone(),
CacheSetting::RespectHeaders,
true,
http_client_provider,
Default::default(),
None,
true,
CacheSetting::RespectHeaders,
super::logging::lsp_log_level(),
);
file_fetcher.set_download_log_level(super::logging::lsp_log_level());
Self {
origins: HashMap::new(),
@ -479,13 +480,15 @@ impl ModuleRegistry {
let specifier = specifier.clone();
async move {
file_fetcher
.fetch_with_options(FetchOptions {
specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll,
.fetch_with_options(
&specifier,
FetchPermissionsOptionRef::AllowAll,
FetchOptions {
maybe_auth: None,
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None,
})
}
)
.await
}
}).await?;
@ -500,7 +503,7 @@ impl ModuleRegistry {
);
self.http_cache.set(specifier, headers_map, &[])?;
}
let file = fetch_result?.into_text_decoded()?;
let file = TextDecodedFile::decode(fetch_result?)?;
let config: RegistryConfigurationJson = serde_json::from_str(&file.source)?;
validate_config(&config)?;
Ok(config.registries)
@ -584,12 +587,11 @@ impl ModuleRegistry {
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn({
async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&endpoint)
.await
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
}
})
.await
@ -983,12 +985,11 @@ impl ModuleRegistry {
let file_fetcher = self.file_fetcher.clone();
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
})
.await
.ok()??;
@ -1049,7 +1050,7 @@ impl ModuleRegistry {
let file_fetcher = self.file_fetcher.clone();
let specifier = specifier.clone();
async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await
.map_err(|err| {
@ -1058,9 +1059,8 @@ impl ModuleRegistry {
specifier, err
);
})
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
}
})
.await
@ -1095,7 +1095,7 @@ impl ModuleRegistry {
let file_fetcher = self.file_fetcher.clone();
let specifier = specifier.clone();
async move {
file_fetcher
let file = file_fetcher
.fetch_bypass_permissions(&specifier)
.await
.map_err(|err| {
@ -1104,9 +1104,8 @@ impl ModuleRegistry {
specifier, err
);
})
.ok()?
.into_text_decoded()
.ok()
.ok()?;
TextDecodedFile::decode(file).ok()
}
})
.await
View file
@ -2,6 +2,7 @@
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::npm::NpmCacheDir;
use deno_cache_dir::HttpCache;
use deno_config::deno_json::JsxImportSourceConfig;
@ -9,29 +10,26 @@ use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_graph::source::ResolutionMode;
use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_to_file_path;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
@ -40,14 +38,14 @@ use std::collections::HashSet;
use std::sync::Arc;
use super::cache::LspCache;
use super::documents::Document;
use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::factory::Deferred;
use crate::graph_util::to_node_resolution_kind;
use crate::graph_util::to_node_resolution_mode;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
@ -70,7 +68,6 @@ use crate::resolver::CliResolverOptions;
use crate::resolver::IsCjsResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::tsc::into_specifier_and_media_type;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@ -78,6 +75,7 @@ use crate::util::progress_bar::ProgressBarStyle;
struct LspScopeResolver {
resolver: Arc<CliResolver>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
is_cjs_resolver: Arc<IsCjsResolver>,
jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<NodeResolver>>,
@ -96,6 +94,7 @@ impl Default for LspScopeResolver {
Self {
resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
is_cjs_resolver: factory.is_cjs_resolver().clone(),
jsr_resolver: None,
npm_resolver: None,
node_resolver: None,
@ -134,7 +133,8 @@ impl LspScopeResolver {
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()),
)));
let npm_graph_resolver = cli_resolver.create_graph_npm_resolver();
let npm_graph_resolver = cli_resolver
.create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager);
let maybe_jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let graph_imports = config_data
@ -146,7 +146,7 @@ impl LspScopeResolver {
.map(|(referrer, imports)| {
let resolver = SingleReferrerGraphResolver {
valid_referrer: &referrer,
referrer_kind: NodeModuleKind::Esm,
module_resolution_mode: ResolutionMode::Import,
cli_resolver: &cli_resolver,
jsx_import_source_config: maybe_jsx_import_source_config
.as_ref(),
@ -180,16 +180,16 @@ impl LspScopeResolver {
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
NodeModuleKind::Esm,
NodeResolutionMode::Types,
ResolutionMode::Import,
NodeResolutionKind::Types,
)
.or_else(|_| {
npm_pkg_req_resolver.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
NodeModuleKind::Esm,
NodeResolutionMode::Execution,
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
})
.ok()?,
@ -205,6 +205,7 @@ impl LspScopeResolver {
Self {
resolver: cli_resolver,
in_npm_pkg_checker,
is_cjs_resolver: factory.is_cjs_resolver().clone(),
jsr_resolver,
npm_pkg_req_resolver,
npm_resolver,
@ -228,6 +229,7 @@ impl LspScopeResolver {
Arc::new(Self {
resolver: factory.cli_resolver().clone(),
in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
is_cjs_resolver: factory.is_cjs_resolver().clone(),
jsr_resolver: self.jsr_resolver.clone(),
npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
npm_resolver: factory.npm_resolver().cloned(),
@ -342,7 +344,17 @@ impl LspResolver {
file_referrer: Option<&ModuleSpecifier>,
) -> WorkerCliNpmGraphResolver {
let resolver = self.get_scope_resolver(file_referrer);
resolver.resolver.create_graph_npm_resolver()
resolver
.resolver
.create_graph_npm_resolver(crate::graph_util::NpmCachingStrategy::Eager)
}
pub fn as_is_cjs_resolver(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> &IsCjsResolver {
let resolver = self.get_scope_resolver(file_referrer);
resolver.is_cjs_resolver.as_ref()
}
pub fn as_config_data(
@ -369,6 +381,14 @@ impl LspResolver {
resolver.npm_resolver.as_ref().and_then(|r| r.as_managed())
}
pub fn pkg_json_resolver(
&self,
referrer: &ModuleSpecifier,
) -> &Arc<PackageJsonResolver> {
let resolver = self.get_scope_resolver(Some(referrer));
&resolver.pkg_json_resolver
}
pub fn graph_imports_by_referrer(
&self,
file_referrer: &ModuleSpecifier,
@ -424,7 +444,7 @@ impl LspResolver {
&self,
req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer);
@ -434,8 +454,8 @@ impl LspResolver {
.resolve_req_reference(
req_ref,
referrer,
referrer_kind,
NodeResolutionMode::Types,
resolution_mode,
NodeResolutionKind::Types,
)
.ok()?,
)))
@ -492,7 +512,7 @@ impl LspResolver {
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
resolution_mode: ResolutionMode,
) -> bool {
let resolver = self.get_scope_resolver(Some(referrer));
let Some(npm_pkg_req_resolver) = resolver.npm_pkg_req_resolver.as_ref()
@ -503,24 +523,14 @@ impl LspResolver {
.resolve_if_for_npm_pkg(
specifier_text,
referrer,
referrer_kind,
NodeResolutionMode::Types,
resolution_mode,
NodeResolutionKind::Types,
)
.ok()
.flatten()
.is_some()
}
pub fn get_closest_package_json(
&self,
referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
let resolver = self.get_scope_resolver(Some(referrer));
resolver
.pkg_json_resolver
.get_closest_package_json(referrer)
}
pub fn resolve_redirects(
&self,
specifier: &ModuleSpecifier,
@ -581,6 +591,7 @@ pub struct ScopeDepInfo {
struct ResolverFactoryServices {
cli_resolver: Deferred<Arc<CliResolver>>,
in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
is_cjs_resolver: Deferred<Arc<IsCjsResolver>>,
node_resolver: Deferred<Option<Arc<NodeResolver>>>,
npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
@ -744,6 +755,23 @@ impl<'a> ResolverFactory<'a> {
})
}
pub fn is_cjs_resolver(&self) -> &Arc<IsCjsResolver> {
self.services.is_cjs_resolver.get_or_init(|| {
Arc::new(IsCjsResolver::new(
self.in_npm_pkg_checker().clone(),
self.pkg_json_resolver().clone(),
if self
.config_data
.is_some_and(|d| d.unstable.contains("detect-cjs"))
{
IsCjsResolutionMode::ImplicitTypeCommonJs
} else {
IsCjsResolutionMode::ExplicitTypeCommonJs
},
))
})
}
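
A hedged note on the two modes chosen here, with semantics inferred from the variant names and the flag check: under IsCjsResolutionMode::ExplicitTypeCommonJs, a .js file is treated as CJS only when its closest package.json declares "type": "commonjs"; under ImplicitTypeCommonJs, selected when the "detect-cjs" unstable option is set, a package.json lacking a "type" field also implies CJS, matching Node's default behavior.
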
pub fn node_resolver(&self) -> Option<&Arc<NodeResolver>> {
self
.services
@ -803,99 +831,10 @@ impl std::fmt::Debug for RedirectResolver {
}
}
#[derive(Debug)]
pub struct LspIsCjsResolver {
inner: IsCjsResolver,
}
impl Default for LspIsCjsResolver {
fn default() -> Self {
LspIsCjsResolver::new(&Default::default())
}
}
impl LspIsCjsResolver {
pub fn new(cache: &LspCache) -> Self {
#[derive(Debug)]
struct LspInNpmPackageChecker {
global_cache_dir: ModuleSpecifier,
}
impl LspInNpmPackageChecker {
pub fn new(cache: &LspCache) -> Self {
let npm_folder_path = cache.deno_dir().npm_folder_path();
Self {
global_cache_dir: url_from_directory_path(
&canonicalize_path_maybe_not_exists(&npm_folder_path)
.unwrap_or(npm_folder_path),
)
.unwrap_or_else(|_| {
ModuleSpecifier::parse("file:///invalid/").unwrap()
}),
}
}
}
impl InNpmPackageChecker for LspInNpmPackageChecker {
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.scheme() != "file" {
return false;
}
if specifier
.as_str()
.starts_with(self.global_cache_dir.as_str())
{
return true;
}
specifier.as_str().contains("/node_modules/")
}
}
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
LspIsCjsResolver {
inner: IsCjsResolver::new(
Arc::new(LspInNpmPackageChecker::new(cache)),
pkg_json_resolver,
crate::resolver::IsCjsResolverOptions {
detect_cjs: true,
is_node_main: false,
},
),
}
}
pub fn get_maybe_doc_module_kind(
&self,
specifier: &ModuleSpecifier,
maybe_document: Option<&Document>,
) -> NodeModuleKind {
self.get_lsp_referrer_kind(
specifier,
maybe_document.and_then(|d| d.is_script()),
)
}
pub fn get_doc_module_kind(&self, document: &Document) -> NodeModuleKind {
self.get_lsp_referrer_kind(document.specifier(), document.is_script())
}
pub fn get_lsp_referrer_kind(
&self,
specifier: &ModuleSpecifier,
is_script: Option<bool>,
) -> NodeModuleKind {
self.inner.get_lsp_referrer_kind(specifier, is_script)
}
}
#[derive(Debug)]
pub struct SingleReferrerGraphResolver<'a> {
pub valid_referrer: &'a ModuleSpecifier,
pub referrer_kind: NodeModuleKind,
pub module_resolution_mode: ResolutionMode,
pub cli_resolver: &'a CliResolver,
pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>,
}
@ -924,16 +863,20 @@ impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> {
&self,
specifier_text: &str,
referrer_range: &Range,
mode: ResolutionMode,
resolution_kind: deno_graph::source::ResolutionKind,
) -> Result<ModuleSpecifier, deno_graph::source::ResolveError> {
// this resolver assumes it will only be used with a single referrer
// with the provided referrer kind
debug_assert_eq!(referrer_range.specifier, *self.valid_referrer);
self.cli_resolver.resolve(
specifier_text,
referrer_range,
self.referrer_kind,
mode,
&referrer_range.specifier,
referrer_range.range.start,
referrer_range
.resolution_mode
.map(to_node_resolution_mode)
.unwrap_or(self.module_resolution_mode),
to_node_resolution_kind(resolution_kind),
)
}
}
@ -1001,9 +944,7 @@ impl RedirectResolver {
if chain.len() > 10 {
break None;
}
let Ok(target) =
deno_core::resolve_import(location, specifier.as_str())
else {
let Ok(target) = specifier.join(location) else {
break None;
};
chain.push((
View file
@ -67,7 +67,9 @@ pub mod tests {
&self,
nv: &PackageNv,
) -> Result<Arc<Vec<String>>, AnyError> {
let Some(exports_by_version) = self.package_versions.get(&nv.name) else {
let Some(exports_by_version) =
self.package_versions.get(nv.name.as_str())
else {
return Err(anyhow!("Package not found."));
};
let Some(exports) = exports_by_version.get(&nv.version) else {
View file
@ -64,13 +64,14 @@ use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::tokio_util::create_basic_runtime;
use indexmap::IndexMap;
use indexmap::IndexSet;
use lazy_regex::lazy_regex;
use log::error;
use node_resolver::NodeModuleKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Captures;
use regex::Regex;
@ -1297,16 +1298,10 @@ impl TsServer {
{
// When an LSP request is cancelled by the client, the future this is being
// executed under and any local variables here will be dropped at the next
// await point. To pass on that cancellation to the TS thread, we make this
// wrapper which cancels the request's token on drop.
struct DroppableToken(CancellationToken);
impl Drop for DroppableToken {
fn drop(&mut self) {
self.0.cancel();
}
}
// await point. To pass on that cancellation to the TS thread, we use drop_guard
// which cancels the request's token on drop.
let token = token.child_token();
let droppable_token = DroppableToken(token.clone());
let droppable_token = token.clone().drop_guard();
let (tx, mut rx) = oneshot::channel::<Result<String, AnyError>>();
let change = self.pending_change.lock().take();
@ -1320,7 +1315,7 @@ impl TsServer {
tokio::select! {
value = &mut rx => {
let value = value??;
drop(droppable_token);
droppable_token.disarm();
Ok(serde_json::from_str(&value)?)
}
_ = token.cancelled() => {
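
drop_guard() is the tokio-util CancellationToken helper that replaces the hand-rolled DroppableToken: the returned DropGuard cancels its token when dropped, and disarm() releases it without cancelling. A minimal sketch of the pattern:

use tokio_util::sync::CancellationToken;

let token = CancellationToken::new();
let guard = token.clone().drop_guard();
// ...do the request work; if this future is dropped mid-flight,
// `guard` is dropped and cancels `token` for the TS thread
guard.disarm(); // success path: the request completed, don't cancel
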
@ -3417,15 +3412,23 @@ fn parse_code_actions(
additional_text_edits.extend(change.text_changes.iter().map(|tc| {
let mut text_edit = tc.as_text_edit(asset_or_doc.line_index());
if let Some(specifier_rewrite) = &data.specifier_rewrite {
text_edit.new_text = text_edit.new_text.replace(
let specifier_index = text_edit
.new_text
.char_indices()
.find_map(|(b, c)| (c == '\'' || c == '"').then_some(b));
if let Some(i) = specifier_index {
let mut specifier_part = text_edit.new_text.split_off(i);
specifier_part = specifier_part.replace(
&specifier_rewrite.old_specifier,
&specifier_rewrite.new_specifier,
);
text_edit.new_text.push_str(&specifier_part);
}
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
text_edit.new_text = format!(
"// @deno-types=\"{}\"\n{}",
"// @ts-types=\"{}\"\n{}",
deno_types_specifier, &text_edit.new_text
);
}
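
Scanning for the first quote narrows the rewrite to the quoted specifier: the old blanket replace could also rewrite matching text earlier in the inserted edit, for example inside an imported identifier. A self-contained sketch of the technique:

let mut new_text = String::from("import { old } from \"old\"");
let specifier_index = new_text
  .char_indices()
  .find_map(|(b, c)| (c == '\'' || c == '"').then_some(b));
if let Some(i) = specifier_index {
  let mut specifier_part = new_text.split_off(i);
  specifier_part = specifier_part.replace("old", "new");
  new_text.push_str(&specifier_part);
}
assert_eq!(new_text, "import { old } from \"new\""); // identifier left intact
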
@ -3593,17 +3596,22 @@ impl CompletionEntryDetails {
&mut insert_replace_edit.new_text
}
};
*new_text = new_text.replace(
let specifier_index = new_text
.char_indices()
.find_map(|(b, c)| (c == '\'' || c == '"').then_some(b));
if let Some(i) = specifier_index {
let mut specifier_part = new_text.split_off(i);
specifier_part = specifier_part.replace(
&specifier_rewrite.old_specifier,
&specifier_rewrite.new_specifier,
);
new_text.push_str(&specifier_part);
}
if let Some(deno_types_specifier) =
&specifier_rewrite.new_deno_types_specifier
{
*new_text = format!(
"// @deno-types=\"{}\"\n{}",
deno_types_specifier, new_text
);
*new_text =
format!("// @ts-types=\"{}\"\n{}", deno_types_specifier, new_text);
}
}
}
@ -3737,7 +3745,7 @@ pub struct CompletionItemData {
#[serde(rename_all = "camelCase")]
struct CompletionEntryDataAutoImport {
module_specifier: String,
file_name: String,
file_name: Option<String>,
}
#[derive(Debug)]
@ -3794,10 +3802,21 @@ impl CompletionEntry {
else {
return;
};
if let Ok(normalized) = specifier_map.normalize(&raw.file_name) {
if let Some(file_name) = &raw.file_name {
if let Ok(normalized) = specifier_map.normalize(file_name) {
self.auto_import_data =
Some(CompletionNormalizedAutoImportData { raw, normalized });
}
} else if SUPPORTED_BUILTIN_NODE_MODULES
.contains(&raw.module_specifier.as_str())
{
if let Ok(normalized) =
resolve_url(&format!("node:{}", &raw.module_specifier))
{
self.auto_import_data =
Some(CompletionNormalizedAutoImportData { raw, normalized });
}
}
}
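
Auto-import completions for Node built-ins carry no backing file, which is why file_name became optional: the module specifier itself is normalized into a node: URL instead. A sketch, assuming "fs" appears in SUPPORTED_BUILTIN_NODE_MODULES:

let raw_module_specifier = "fs";
if let Ok(normalized) = resolve_url(&format!("node:{}", raw_module_specifier)) {
  assert_eq!(normalized.as_str(), "node:fs");
}
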
fn get_commit_characters(
@ -4449,9 +4468,9 @@ fn op_load<'s>(
version: state.script_version(&specifier),
is_cjs: doc
.document()
.map(|d| state.state_snapshot.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm)
== NodeModuleKind::Cjs,
.map(|d| d.resolution_mode())
.unwrap_or(ResolutionMode::Import)
== ResolutionMode::Require,
})
};
let serialized = serde_v8::to_v8(scope, maybe_load_response)?;
@ -4479,17 +4498,9 @@ fn op_release(
fn op_resolve(
state: &mut OpState,
#[string] base: String,
is_base_cjs: bool,
#[serde] specifiers: Vec<String>,
#[serde] specifiers: Vec<(bool, String)>,
) -> Result<Vec<Option<(String, String)>>, AnyError> {
op_resolve_inner(
state,
ResolveArgs {
base,
is_base_cjs,
specifiers,
},
)
op_resolve_inner(state, ResolveArgs { base, specifiers })
}
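
With the flattened arguments, each specifier carries its own is-CJS flag, so a single batch can mix import and require sites. A hypothetical call shape, mirroring the test near the bottom of this file:

let results = op_resolve_inner(
  &mut state,
  ResolveArgs {
    base: "file:///a.ts".to_string(),
    specifiers: vec![
      (false, "./esm_import.ts".to_string()), // import site -> ResolutionMode::Import
      (true, "./cjs_require.js".to_string()), // require site -> ResolutionMode::Require
    ],
  },
)?;
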
struct TscRequestArray {
@ -4512,6 +4523,7 @@ impl<'a> ToV8<'a> for TscRequestArray {
let method_name = deno_core::FastString::from_static(method_name)
.v8_string(scope)
.unwrap()
.into();
let args = args.unwrap_or_else(|| v8::Array::new(scope, 0).into());
let scope_url = serde_v8::to_v8(scope, self.scope)
@ -4692,10 +4704,7 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
let (types, _) = documents.resolve_dependency(
types,
specifier,
state
.state_snapshot
.is_cjs_resolver
.get_doc_module_kind(doc),
doc.resolution_mode(),
doc.file_referrer(),
)?;
let types_doc = documents.get_or_load(&types, doc.file_referrer())?;
@ -5534,7 +5543,6 @@ impl TscRequest {
mod tests {
use super::*;
use crate::cache::HttpCache;
use crate::http_util::HeadersMap;
use crate::lsp::cache::LspCache;
use crate::lsp::config::Config;
use crate::lsp::config::WorkspaceSettings;
@ -5579,7 +5587,6 @@ mod tests {
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver,
});
let performance = Arc::new(Performance::default());
@ -5765,6 +5772,7 @@ mod tests {
"sourceLine": " import { A } from \".\";",
"category": 2,
"code": 6133,
"reportsUnnecessary": true,
}]
})
);
@ -5847,6 +5855,7 @@ mod tests {
"sourceLine": " import {",
"category": 2,
"code": 6192,
"reportsUnnecessary": true,
}, {
"start": {
"line": 8,
@ -5970,7 +5979,7 @@ mod tests {
.global()
.set(
&specifier_dep,
HeadersMap::default(),
Default::default(),
b"export const b = \"b\";\n",
)
.unwrap();
@ -6009,7 +6018,7 @@ mod tests {
.global()
.set(
&specifier_dep,
HeadersMap::default(),
Default::default(),
b"export const b = \"b\";\n\nexport const a = \"b\";\n",
)
.unwrap();
@ -6430,8 +6439,7 @@ mod tests {
&mut state,
ResolveArgs {
base: temp_dir.url().join("a.ts").unwrap().to_string(),
is_base_cjs: false,
specifiers: vec!["./b.ts".to_string()],
specifiers: vec![(false, "./b.ts".to_string())],
},
)
.unwrap();
View file
@ -1,7 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
mod args;
mod auth_tokens;
mod cache;
mod cdp;
mod emit;
@ -437,20 +436,18 @@ fn resolve_flags_and_init(
if err.kind() == clap::error::ErrorKind::DisplayVersion =>
{
// Ignore results to avoid BrokenPipe errors.
util::logger::init(None);
util::logger::init(None, None);
let _ = err.print();
deno_runtime::exit(0);
}
Err(err) => {
util::logger::init(None);
util::logger::init(None, None);
exit_for_error(AnyError::from(err))
}
};
if let Some(otel_config) = flags.otel_config() {
deno_runtime::ops::otel::init(otel_config)?;
}
util::logger::init(flags.log_level);
deno_telemetry::init(crate::args::otel_runtime_config())?;
util::logger::init(flags.log_level, Some(flags.otel_config()));
// TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled {
View file
@ -8,7 +8,6 @@
mod standalone;
mod args;
mod auth_tokens;
mod cache;
mod emit;
mod errors;
@ -87,17 +86,18 @@ fn main() {
let future = async move {
match standalone {
Ok(Some(data)) => {
if let Some(otel_config) = data.metadata.otel_config.clone() {
deno_runtime::ops::otel::init(otel_config)?;
}
util::logger::init(data.metadata.log_level);
deno_telemetry::init(crate::args::otel_runtime_config())?;
util::logger::init(
data.metadata.log_level,
Some(data.metadata.otel_config.clone()),
);
load_env_vars(&data.metadata.env_vars_from_env_file);
let exit_code = standalone::run(data).await?;
deno_runtime::exit(exit_code);
}
Ok(None) => Ok(()),
Err(err) => {
util::logger::init(None);
util::logger::init(None, None);
Err(err)
}
}
View file
@ -7,6 +7,8 @@ use std::path::PathBuf;
use std::pin::Pin;
use std::rc::Rc;
use std::str;
use std::sync::atomic::AtomicU16;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use crate::args::jsr_url;
@ -49,6 +51,7 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::future::FutureExt;
use deno_core::futures::Future;
use deno_core::parking_lot::Mutex;
use deno_core::resolve_url;
use deno_core::ModuleCodeString;
use deno_core::ModuleLoader;
@ -57,9 +60,7 @@ use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo;
use deno_graph::source::ResolutionMode;
use deno_graph::GraphKind;
use deno_graph::JsModule;
use deno_graph::JsonModule;
@ -76,7 +77,8 @@ use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionMode;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
pub struct ModuleLoadPreparer {
options: Arc<CliOptions>,
@ -154,6 +156,7 @@ impl ModuleLoadPreparer {
graph_kind: graph.graph_kind(),
roots: roots.to_vec(),
loader: Some(&mut cache),
npm_caching: self.options.default_npm_caching_strategy(),
},
)
.await?;
@ -223,6 +226,42 @@ struct SharedCliModuleLoaderState {
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
in_flight_loads_tracker: InFlightModuleLoadsTracker,
}
struct InFlightModuleLoadsTracker {
loads_number: Arc<AtomicU16>,
cleanup_task_timeout: u64,
cleanup_task_handle: Arc<Mutex<Option<tokio::task::JoinHandle<()>>>>,
}
impl InFlightModuleLoadsTracker {
pub fn increase(&self) {
self.loads_number.fetch_add(1, Ordering::Relaxed);
if let Some(task) = self.cleanup_task_handle.lock().take() {
task.abort();
}
}
pub fn decrease(&self, parsed_source_cache: &Arc<ParsedSourceCache>) {
let prev = self.loads_number.fetch_sub(1, Ordering::Relaxed);
if prev == 1 {
let parsed_source_cache = parsed_source_cache.clone();
let timeout = self.cleanup_task_timeout;
let task_handle = tokio::spawn(async move {
// We use a timeout here (10s by default) so that when dynamic imports
// are loaded shortly after startup, we don't need to re-parse and
// re-analyze multiple modules.
tokio::time::sleep(std::time::Duration::from_millis(timeout)).await;
parsed_source_cache.free_all();
});
let maybe_prev_task =
self.cleanup_task_handle.lock().replace(task_handle);
if let Some(prev_task) = maybe_prev_task {
prev_task.abort();
}
}
}
}
pub struct CliModuleLoaderFactory {
@ -273,6 +312,11 @@ impl CliModuleLoaderFactory {
npm_module_loader,
parsed_source_cache,
resolver,
in_flight_loads_tracker: InFlightModuleLoadsTracker {
loads_number: Arc::new(AtomicU16::new(0)),
cleanup_task_timeout: 10_000,
cleanup_task_handle: Arc::new(Mutex::new(None)),
},
}),
}
}
@ -498,13 +542,11 @@ impl<TGraphContainer: ModuleGraphContainer>
}
Resolution::None => Cow::Owned(self.shared.resolver.resolve(
raw_specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
self.shared.cjs_tracker.get_referrer_kind(referrer),
ResolutionMode::Execution,
referrer,
deno_graph::Position::zeroed(),
// if we're here, that means it's resolving a dynamic import
ResolutionMode::Import,
NodeResolutionKind::Execution,
)?),
};
@ -517,8 +559,8 @@ impl<TGraphContainer: ModuleGraphContainer>
.resolve_req_reference(
&reference,
referrer,
self.shared.cjs_tracker.get_referrer_kind(referrer),
NodeResolutionMode::Execution,
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.map_err(AnyError::from);
}
@ -539,8 +581,8 @@ impl<TGraphContainer: ModuleGraphContainer>
&package_folder,
module.nv_reference.sub_path(),
Some(referrer),
self.shared.cjs_tracker.get_referrer_kind(referrer),
NodeResolutionMode::Execution,
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.with_context(|| {
format!("Could not resolve '{}'.", module.nv_reference)
@ -806,7 +848,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
&self,
specifier: &str,
referrer: &str,
_kind: ResolutionKind,
_kind: deno_core::ResolutionKind,
) -> Result<ModuleSpecifier, AnyError> {
fn ensure_not_jsr_non_jsr_remote_import(
specifier: &ModuleSpecifier,
@ -870,6 +912,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
_maybe_referrer: Option<String>,
is_dynamic: bool,
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
self.0.shared.in_flight_loads_tracker.increase();
if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) {
return Box::pin(deno_core::futures::future::ready(Ok(())));
}
@ -924,6 +967,14 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
.boxed_local()
}
fn finish_load(&self) {
self
.0
.shared
.in_flight_loads_tracker
.decrease(&self.0.shared.parsed_source_cache);
}
fn code_cache_ready(
&self,
specifier: ModuleSpecifier,
@ -945,7 +996,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
std::future::ready(()).boxed_local()
}
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> {
let specifier = resolve_url(file_name).ok()?;
match specifier.scheme() {
// we should only be looking for emits for schemes that denote external
@ -957,7 +1008,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
.0
.load_prepared_module_for_source_map_sync(&specifier)
.ok()??;
source_map_from_code(source.code.as_bytes())
source_map_from_code(source.code.as_bytes()).map(Cow::Owned)
}
fn get_source_mapped_source_line(
@ -1063,7 +1114,10 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
self.npm_resolver.ensure_read_permission(permissions, path)
}
fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError> {
fn load_text_file_lossy(
&self,
path: &Path,
) -> Result<Cow<'static, str>, AnyError> {
// todo(dsherret): use the preloaded module from the graph if available?
let media_type = MediaType::from_path(path);
let text = self.fs.read_text_file_lossy_sync(path, None)?;
@ -1078,7 +1132,9 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
.into(),
);
}
self.emitter.emit_parsed_source_sync(
self
.emitter
.emit_parsed_source_sync(
&specifier,
media_type,
// this is probably not entirely accurate due to require(esm), but probably ok.
@ -1087,6 +1143,7 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
ModuleKind::Cjs,
&text.into(),
)
.map(Cow::Owned)
} else {
Ok(text)
}
@ -1100,3 +1157,44 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
self.cjs_tracker.is_maybe_cjs(specifier, media_type)
}
}
#[cfg(test)]
mod tests {
use super::*;
use deno_graph::ParsedSourceStore;
#[tokio::test]
async fn test_inflight_module_loads_tracker() {
let tracker = InFlightModuleLoadsTracker {
loads_number: Default::default(),
cleanup_task_timeout: 10,
cleanup_task_handle: Default::default(),
};
let specifier = ModuleSpecifier::parse("file:///a.js").unwrap();
let source = "const a = 'hello';";
let parsed_source_cache = Arc::new(ParsedSourceCache::default());
let parsed_source = parsed_source_cache
.remove_or_parse_module(&specifier, source.into(), MediaType::JavaScript)
.unwrap();
parsed_source_cache.set_parsed_source(specifier, parsed_source);
assert_eq!(parsed_source_cache.len(), 1);
assert!(tracker.cleanup_task_handle.lock().is_none());
tracker.increase();
tracker.increase();
assert!(tracker.cleanup_task_handle.lock().is_none());
tracker.decrease(&parsed_source_cache);
assert!(tracker.cleanup_task_handle.lock().is_none());
tracker.decrease(&parsed_source_cache);
assert!(tracker.cleanup_task_handle.lock().is_some());
assert_eq!(parsed_source_cache.len(), 1);
tracker.increase();
assert!(tracker.cleanup_task_handle.lock().is_none());
assert_eq!(parsed_source_cache.len(), 1);
tracker.decrease(&parsed_source_cache);
// Rather long timeout, but it helps make sure CI doesn't flake on this test.
tokio::time::sleep(std::time::Duration::from_millis(500)).await;
assert_eq!(parsed_source_cache.len(), 0);
}
}

View file

@ -160,7 +160,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
if let Ok(source_from_file) =
self.fs.read_text_file_lossy_async(path, None).await
{
Cow::Owned(source_from_file)
source_from_file
} else {
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![],

View file

@ -5,8 +5,6 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use cache::RegistryInfoDownloader;
use cache::TarballCache;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::Context;
@ -22,6 +20,7 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::NpmCacheSetting;
use deno_resolver::npm::CliNpmReqResolver;
use deno_runtime::colors;
use deno_runtime::deno_fs::FileSystem;
@ -42,23 +41,22 @@ use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::args::PackageJsonDepValueParseWithLocationError;
use crate::cache::FastInsecureHasher;
use crate::http_util::HttpClientProvider;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use crate::util::progress_bar::ProgressBar;
use crate::util::sync::AtomicFlag;
use self::cache::NpmCache;
use self::registry::CliNpmRegistryApi;
use self::resolution::NpmResolution;
use self::resolvers::create_npm_fs_resolver;
use self::resolvers::NpmPackageFsResolver;
use super::CliNpmCache;
use super::CliNpmCacheEnv;
use super::CliNpmRegistryInfoProvider;
use super::CliNpmResolver;
use super::CliNpmTarballCache;
use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError;
pub mod cache;
mod registry;
mod resolution;
mod resolvers;
@ -73,7 +71,7 @@ pub struct CliManagedNpmResolverCreateOptions {
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
pub npm_cache_dir: Arc<NpmCacheDir>,
pub cache_setting: crate::args::CacheSetting,
pub cache_setting: deno_cache_dir::file_fetcher::CacheSetting,
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo,
@ -85,8 +83,9 @@ pub struct CliManagedNpmResolverCreateOptions {
pub async fn create_managed_npm_resolver_for_lsp(
options: CliManagedNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> {
let npm_cache = create_cache(&options);
let npm_api = create_api(&options, npm_cache.clone());
let cache_env = create_cache_env(&options);
let npm_cache = create_cache(cache_env.clone(), &options);
let npm_api = create_api(npm_cache.clone(), cache_env.clone(), &options);
// spawn due to the lsp's `Send` requirement
deno_core::unsync::spawn(async move {
let snapshot = match resolve_snapshot(&npm_api, options.snapshot).await {
@ -97,8 +96,8 @@ pub async fn create_managed_npm_resolver_for_lsp(
}
};
create_inner(
cache_env,
options.fs,
options.http_client_provider,
options.maybe_lockfile,
npm_api,
npm_cache,
@ -118,14 +117,15 @@ pub async fn create_managed_npm_resolver_for_lsp(
pub async fn create_managed_npm_resolver(
options: CliManagedNpmResolverCreateOptions,
) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
let npm_cache = create_cache(&options);
let npm_api = create_api(&options, npm_cache.clone());
let snapshot = resolve_snapshot(&npm_api, options.snapshot).await?;
let npm_cache_env = create_cache_env(&options);
let npm_cache = create_cache(npm_cache_env.clone(), &options);
let api = create_api(npm_cache.clone(), npm_cache_env.clone(), &options);
let snapshot = resolve_snapshot(&api, options.snapshot).await?;
Ok(create_inner(
npm_cache_env,
options.fs,
options.http_client_provider,
options.maybe_lockfile,
npm_api,
api,
npm_cache,
options.npmrc,
options.npm_install_deps_provider,
@ -139,11 +139,11 @@ pub async fn create_managed_npm_resolver(
#[allow(clippy::too_many_arguments)]
fn create_inner(
env: Arc<CliNpmCacheEnv>,
fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
http_client_provider: Arc<HttpClientProvider>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
npm_cache: Arc<CliNpmCache>,
npm_rc: Arc<ResolvedNpmRc>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
text_only_progress_bar: crate::util::progress_bar::ProgressBar,
@ -153,16 +153,14 @@ fn create_inner(
lifecycle_scripts: LifecycleScriptsConfig,
) -> Arc<dyn CliNpmResolver> {
let resolution = Arc::new(NpmResolution::from_serialized(
npm_api.clone(),
registry_info_provider.clone(),
snapshot,
maybe_lockfile.clone(),
));
let tarball_cache = Arc::new(TarballCache::new(
let tarball_cache = Arc::new(CliNpmTarballCache::new(
npm_cache.clone(),
fs.clone(),
http_client_provider.clone(),
env,
npm_rc.clone(),
text_only_progress_bar.clone(),
));
let fs_resolver = create_npm_fs_resolver(
fs.clone(),
@ -179,7 +177,7 @@ fn create_inner(
fs,
fs_resolver,
maybe_lockfile,
npm_api,
registry_info_provider,
npm_cache,
npm_install_deps_provider,
resolution,
@ -190,37 +188,51 @@ fn create_inner(
))
}
fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc<NpmCache> {
Arc::new(NpmCache::new(
fn create_cache_env(
options: &CliManagedNpmResolverCreateOptions,
) -> Arc<CliNpmCacheEnv> {
Arc::new(CliNpmCacheEnv::new(
options.fs.clone(),
options.http_client_provider.clone(),
options.text_only_progress_bar.clone(),
))
}
fn create_cache(
env: Arc<CliNpmCacheEnv>,
options: &CliManagedNpmResolverCreateOptions,
) -> Arc<CliNpmCache> {
Arc::new(CliNpmCache::new(
options.npm_cache_dir.clone(),
options.cache_setting.clone(),
NpmCacheSetting::from_cache_setting(&options.cache_setting),
env,
options.npmrc.clone(),
))
}
fn create_api(
cache: Arc<CliNpmCache>,
env: Arc<CliNpmCacheEnv>,
options: &CliManagedNpmResolverCreateOptions,
npm_cache: Arc<NpmCache>,
) -> Arc<CliNpmRegistryApi> {
Arc::new(CliNpmRegistryApi::new(
npm_cache.clone(),
Arc::new(RegistryInfoDownloader::new(
npm_cache,
options.http_client_provider.clone(),
) -> Arc<CliNpmRegistryInfoProvider> {
Arc::new(CliNpmRegistryInfoProvider::new(
cache,
env,
options.npmrc.clone(),
options.text_only_progress_bar.clone(),
)),
))
}
async fn resolve_snapshot(
api: &CliNpmRegistryApi,
registry_info_provider: &Arc<CliNpmRegistryInfoProvider>,
snapshot: CliNpmResolverManagedSnapshotOption,
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, AnyError> {
match snapshot {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
if !lockfile.overwrite() {
let snapshot = snapshot_from_lockfile(lockfile.clone(), api)
let snapshot = snapshot_from_lockfile(
lockfile.clone(),
&registry_info_provider.as_npm_registry_api(),
)
.await
.with_context(|| {
format!("failed reading lockfile '{}'", lockfile.filename.display())
@ -285,17 +297,23 @@ pub fn create_managed_in_npm_pkg_checker(
Arc::new(ManagedInNpmPackageChecker { root_dir })
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PackageCaching<'a> {
Only(Cow<'a, [PackageReq]>),
All,
}
/// An npm resolver where the resolution is managed by Deno rather than
/// the user bringing their own node_modules (BYONM) on the file system.
pub struct ManagedCliNpmResolver {
fs: Arc<dyn FileSystem>,
fs_resolver: Arc<dyn NpmPackageFsResolver>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
tarball_cache: Arc<CliNpmTarballCache>,
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
top_level_install_flag: AtomicFlag,
@ -316,11 +334,11 @@ impl ManagedCliNpmResolver {
fs: Arc<dyn FileSystem>,
fs_resolver: Arc<dyn NpmPackageFsResolver>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
tarball_cache: Arc<CliNpmTarballCache>,
text_only_progress_bar: ProgressBar,
npm_system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
@ -329,7 +347,7 @@ impl ManagedCliNpmResolver {
fs,
fs_resolver,
maybe_lockfile,
npm_api,
registry_info_provider,
npm_cache,
npm_install_deps_provider,
text_only_progress_bar,
@ -409,19 +427,44 @@ impl ManagedCliNpmResolver {
/// Adds package requirements to the resolver and ensures everything is setup.
/// This includes setting up the `node_modules` directory, if applicable.
pub async fn add_package_reqs(
pub async fn add_and_cache_package_reqs(
&self,
packages: &[PackageReq],
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(packages)
.add_package_reqs_raw(
packages,
Some(PackageCaching::Only(packages.into())),
)
.await
.dependencies_result
}
pub async fn add_package_reqs_raw(
pub async fn add_package_reqs_no_cache(
&self,
packages: &[PackageReq],
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(packages, None)
.await
.dependencies_result
}
pub async fn add_package_reqs(
&self,
packages: &[PackageReq],
caching: PackageCaching<'_>,
) -> Result<(), AnyError> {
self
.add_package_reqs_raw(packages, Some(caching))
.await
.dependencies_result
}
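Taken together, these variants split resolution from caching: callers can cache the whole snapshot, cache only the closure of specific requirements, or defer caching entirely. A hedged usage sketch, assuming a `resolver: ManagedCliNpmResolver` handle and an arbitrary `chalk@5` requirement:
let reqs = [PackageReq::from_str("chalk@5").unwrap()];
// resolve and cache only these reqs (and their transitive deps)
resolver
  .add_package_reqs(&reqs, PackageCaching::Only(reqs.as_slice().into()))
  .await?;
// or defer: resolve now, cache everything in the snapshot later
resolver.add_package_reqs_no_cache(&reqs).await?;
resolver.cache_packages(PackageCaching::All).await?;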
pub async fn add_package_reqs_raw<'a>(
&self,
packages: &[PackageReq],
caching: Option<PackageCaching<'a>>,
) -> AddPkgReqsResult {
if packages.is_empty() {
return AddPkgReqsResult {
@ -438,7 +481,9 @@ impl ManagedCliNpmResolver {
}
}
if result.dependencies_result.is_ok() {
result.dependencies_result = self.cache_packages().await;
if let Some(caching) = caching {
result.dependencies_result = self.cache_packages(caching).await;
}
}
result
@ -480,16 +525,20 @@ impl ManagedCliNpmResolver {
pub async fn inject_synthetic_types_node_package(
&self,
) -> Result<(), AnyError> {
let reqs = &[PackageReq::from_str("@types/node").unwrap()];
// add and ensure this isn't added to the lockfile
self
.add_package_reqs(&[PackageReq::from_str("@types/node").unwrap()])
.add_package_reqs(reqs, PackageCaching::Only(reqs.into()))
.await?;
Ok(())
}
pub async fn cache_packages(&self) -> Result<(), AnyError> {
self.fs_resolver.cache_packages().await
pub async fn cache_packages(
&self,
caching: PackageCaching<'_>,
) -> Result<(), AnyError> {
self.fs_resolver.cache_packages(caching).await
}
pub fn resolve_pkg_folder_from_deno_module(
@ -511,11 +560,11 @@ impl ManagedCliNpmResolver {
&self,
) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> {
for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
match &err.source {
deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => {
match err.source.as_kind() {
deno_package_json::PackageJsonDepValueParseErrorKind::VersionReq(_) => {
return Err(Box::new(err.clone()));
}
deno_package_json::PackageJsonDepValueParseError::Unsupported {
deno_package_json::PackageJsonDepValueParseErrorKind::Unsupported {
..
} => {
// only warn for this one
@ -534,18 +583,18 @@ impl ManagedCliNpmResolver {
/// Ensures that the top level `package.json` dependencies are installed.
/// This may set up the `node_modules` directory.
///
/// Returns `true` if any changes (such as caching packages) were made.
/// If this returns `false`, `node_modules` has _not_ been set up.
/// Returns `true` if the top level packages are already installed. A
/// return value of `false` means that new packages were added to the NPM resolution.
pub async fn ensure_top_level_package_json_install(
&self,
) -> Result<bool, AnyError> {
if !self.top_level_install_flag.raise() {
return Ok(false); // already did this
return Ok(true); // already did this
}
let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
if pkg_json_remote_pkgs.is_empty() {
return Ok(false);
return Ok(true);
}
// check if something needs resolving before bothering to load all
@ -559,14 +608,16 @@ impl ManagedCliNpmResolver {
log::debug!(
"All package.json deps resolvable. Skipping top level install."
);
return Ok(false); // everything is already resolvable
return Ok(true); // everything is already resolvable
}
let pkg_reqs = pkg_json_remote_pkgs
.iter()
.map(|pkg| pkg.req.clone())
.collect::<Vec<_>>();
self.add_package_reqs(&pkg_reqs).await.map(|_| true)
self.add_package_reqs_no_cache(&pkg_reqs).await?;
Ok(false)
}
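Because this path now uses `add_package_reqs_no_cache`, a caller that needs the packages on disk caches them afterwards. A sketch of how a caller might interpret the inverted return value (assuming the same `resolver` handle as above):
// `true` means everything was already installed/resolvable
if !resolver.ensure_top_level_package_json_install().await? {
  // new reqs were added to the resolution but not yet cached
  resolver.cache_packages(PackageCaching::All).await?;
}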
pub async fn cache_package_info(
@ -575,7 +626,7 @@ impl ManagedCliNpmResolver {
) -> Result<Arc<NpmPackageInfo>, AnyError> {
// this will internally cache the package information
self
.npm_api
.registry_info_provider
.package_info(package_name)
.await
.map_err(|err| err.into())
@ -671,7 +722,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
// create a new snapshotted npm resolution and resolver
let npm_resolution = Arc::new(NpmResolution::new(
self.npm_api.clone(),
self.registry_info_provider.clone(),
self.resolution.snapshot(),
self.maybe_lockfile.clone(),
));
@ -690,7 +741,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self.lifecycle_scripts.clone(),
),
self.maybe_lockfile.clone(),
self.npm_api.clone(),
self.registry_info_provider.clone(),
self.npm_cache.clone(),
self.npm_install_deps_provider.clone(),
npm_resolution,

View file

@ -1,200 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::futures::future::BoxFuture;
use deno_core::futures::future::Shared;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use crate::args::CacheSetting;
use crate::util::sync::AtomicFlag;
use super::cache::NpmCache;
use super::cache::RegistryInfoDownloader;
#[derive(Debug)]
pub struct CliNpmRegistryApi(Option<Arc<CliNpmRegistryApiInner>>);
impl CliNpmRegistryApi {
pub fn new(
cache: Arc<NpmCache>,
registry_info_downloader: Arc<RegistryInfoDownloader>,
) -> Self {
Self(Some(Arc::new(CliNpmRegistryApiInner {
cache,
force_reload_flag: Default::default(),
mem_cache: Default::default(),
previously_reloaded_packages: Default::default(),
registry_info_downloader,
})))
}
/// Clears the internal memory cache.
pub fn clear_memory_cache(&self) {
self.inner().clear_memory_cache();
}
fn inner(&self) -> &Arc<CliNpmRegistryApiInner> {
// this panicking indicates a bug in the code where this
// wasn't initialized
self.0.as_ref().unwrap()
}
}
#[async_trait(?Send)]
impl NpmRegistryApi for CliNpmRegistryApi {
async fn package_info(
&self,
name: &str,
) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
match self.inner().maybe_package_info(name).await {
Ok(Some(info)) => Ok(info),
Ok(None) => Err(NpmRegistryPackageInfoLoadError::PackageNotExists {
package_name: name.to_string(),
}),
Err(err) => {
Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new(err)))
}
}
}
fn mark_force_reload(&self) -> bool {
self.inner().mark_force_reload()
}
}
type CacheItemPendingResult =
Result<Option<Arc<NpmPackageInfo>>, Arc<AnyError>>;
#[derive(Debug)]
enum CacheItem {
Pending(Shared<BoxFuture<'static, CacheItemPendingResult>>),
Resolved(Option<Arc<NpmPackageInfo>>),
}
#[derive(Debug)]
struct CliNpmRegistryApiInner {
cache: Arc<NpmCache>,
force_reload_flag: AtomicFlag,
mem_cache: Mutex<HashMap<String, CacheItem>>,
previously_reloaded_packages: Mutex<HashSet<String>>,
registry_info_downloader: Arc<RegistryInfoDownloader>,
}
impl CliNpmRegistryApiInner {
pub async fn maybe_package_info(
self: &Arc<Self>,
name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
let (created, future) = {
let mut mem_cache = self.mem_cache.lock();
match mem_cache.get(name) {
Some(CacheItem::Resolved(maybe_info)) => {
return Ok(maybe_info.clone());
}
Some(CacheItem::Pending(future)) => (false, future.clone()),
None => {
let future = {
let api = self.clone();
let name = name.to_string();
async move {
if (api.cache.cache_setting().should_use_for_npm_package(&name) && !api.force_reload_flag.is_raised())
// if this has been previously reloaded, then try loading from the
// file system cache
|| !api.previously_reloaded_packages.lock().insert(name.to_string())
{
// attempt to load from the file cache
if let Some(info) = api.load_file_cached_package_info(&name).await {
let result = Some(Arc::new(info));
return Ok(result);
}
}
api.registry_info_downloader
.load_package_info(&name)
.await
.map_err(Arc::new)
}
.boxed()
.shared()
};
mem_cache
.insert(name.to_string(), CacheItem::Pending(future.clone()));
(true, future)
}
}
};
if created {
match future.await {
Ok(maybe_info) => {
// replace the cache item to say it's resolved now
self
.mem_cache
.lock()
.insert(name.to_string(), CacheItem::Resolved(maybe_info.clone()));
Ok(maybe_info)
}
Err(err) => {
// purge the item from the cache so it loads next time
self.mem_cache.lock().remove(name);
Err(anyhow!("{:#}", err))
}
}
} else {
Ok(future.await.map_err(|err| anyhow!("{:#}", err))?)
}
}
fn mark_force_reload(&self) -> bool {
// never force reload the registry information if reloading
// is disabled or if we're already reloading
if matches!(
self.cache.cache_setting(),
CacheSetting::Only | CacheSetting::ReloadAll
) {
return false;
}
if self.force_reload_flag.raise() {
self.clear_memory_cache();
true
} else {
false
}
}
async fn load_file_cached_package_info(
&self,
name: &str,
) -> Option<NpmPackageInfo> {
let result = deno_core::unsync::spawn_blocking({
let cache = self.cache.clone();
let name = name.to_string();
move || cache.load_package_info(&name)
})
.await
.unwrap();
match result {
Ok(value) => value,
Err(err) => {
if cfg!(debug_assertions) {
panic!("error loading cached npm package info for {name}: {err:#}");
} else {
None
}
}
}
}
fn clear_memory_cache(&self) {
self.mem_cache.lock().clear();
}
}

View file

@ -4,15 +4,15 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use capacity_builder::StringBuilder;
use deno_core::error::AnyError;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::AddPkgReqsOptions;
use deno_npm::resolution::NpmPackagesPartitioned;
use deno_npm::resolution::NpmResolutionError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::NpmResolutionSnapshotPendingResolver;
use deno_npm::resolution::NpmResolutionSnapshotPendingResolverOptions;
use deno_npm::resolution::PackageCacheFolderIdNotFoundError;
use deno_npm::resolution::PackageNotFoundFromReferrerError;
use deno_npm::resolution::PackageNvNotFoundError;
@ -25,13 +25,13 @@ use deno_npm::NpmSystemInfo;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::SmallStackString;
use deno_semver::VersionReq;
use crate::args::CliLockfile;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::util::sync::SyncReadAsyncWriteLock;
use super::CliNpmRegistryApi;
pub struct AddPkgReqsResult {
/// Results from adding the individual packages.
///
@ -48,7 +48,7 @@ pub struct AddPkgReqsResult {
///
/// This does not interact with the file system.
pub struct NpmResolution {
api: Arc<CliNpmRegistryApi>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
snapshot: SyncReadAsyncWriteLock<NpmResolutionSnapshot>,
maybe_lockfile: Option<Arc<CliLockfile>>,
}
@ -64,22 +64,22 @@ impl std::fmt::Debug for NpmResolution {
impl NpmResolution {
pub fn from_serialized(
api: Arc<CliNpmRegistryApi>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
initial_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
maybe_lockfile: Option<Arc<CliLockfile>>,
) -> Self {
let snapshot =
NpmResolutionSnapshot::new(initial_snapshot.unwrap_or_default());
Self::new(api, snapshot, maybe_lockfile)
Self::new(registry_info_provider, snapshot, maybe_lockfile)
}
pub fn new(
api: Arc<CliNpmRegistryApi>,
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
initial_snapshot: NpmResolutionSnapshot,
maybe_lockfile: Option<Arc<CliLockfile>>,
) -> Self {
Self {
api,
registry_info_provider,
snapshot: SyncReadAsyncWriteLock::new(initial_snapshot),
maybe_lockfile,
}
@ -92,7 +92,7 @@ impl NpmResolution {
// only allow one thread in here at a time
let snapshot_lock = self.snapshot.acquire().await;
let result = add_package_reqs_to_snapshot(
&self.api,
&self.registry_info_provider,
package_reqs,
self.maybe_lockfile.clone(),
|| snapshot_lock.read().clone(),
@ -120,7 +120,7 @@ impl NpmResolution {
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
let snapshot = add_package_reqs_to_snapshot(
&self.api,
&self.registry_info_provider,
package_reqs,
self.maybe_lockfile.clone(),
|| {
@ -257,10 +257,14 @@ impl NpmResolution {
.read()
.as_valid_serialized_for_system(system_info)
}
pub fn subset(&self, package_reqs: &[PackageReq]) -> NpmResolutionSnapshot {
self.snapshot.read().subset(package_reqs)
}
}
async fn add_package_reqs_to_snapshot(
api: &CliNpmRegistryApi,
registry_info_provider: &Arc<CliNpmRegistryInfoProvider>,
package_reqs: &[PackageReq],
maybe_lockfile: Option<Arc<CliLockfile>>,
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
@ -283,23 +287,28 @@ async fn add_package_reqs_to_snapshot(
/* this string is used in tests */
"Running npm resolution."
);
let pending_resolver = get_npm_pending_resolver(api);
let result = pending_resolver.add_pkg_reqs(snapshot, package_reqs).await;
api.clear_memory_cache();
let npm_registry_api = registry_info_provider.as_npm_registry_api();
let result = snapshot
.add_pkg_reqs(&npm_registry_api, get_add_pkg_reqs_options(package_reqs))
.await;
let result = match &result.dep_graph_result {
Err(NpmResolutionError::Resolution(err)) if api.mark_force_reload() => {
Err(NpmResolutionError::Resolution(err))
if npm_registry_api.mark_force_reload() =>
{
log::debug!("{err:#}");
log::debug!("npm resolution failed. Trying again...");
// try again
// try again with forced reloading
let snapshot = get_new_snapshot();
let result = pending_resolver.add_pkg_reqs(snapshot, package_reqs).await;
api.clear_memory_cache();
result
snapshot
.add_pkg_reqs(&npm_registry_api, get_add_pkg_reqs_options(package_reqs))
.await
}
_ => result,
};
registry_info_provider.clear_memory_cache();
if let Ok(snapshot) = &result.dep_graph_result {
if let Some(lockfile) = maybe_lockfile {
populate_lockfile_from_snapshot(&lockfile, snapshot);
@ -309,19 +318,15 @@ async fn add_package_reqs_to_snapshot(
result
}
fn get_npm_pending_resolver(
api: &CliNpmRegistryApi,
) -> NpmResolutionSnapshotPendingResolver<CliNpmRegistryApi> {
NpmResolutionSnapshotPendingResolver::new(
NpmResolutionSnapshotPendingResolverOptions {
api,
fn get_add_pkg_reqs_options(package_reqs: &[PackageReq]) -> AddPkgReqsOptions {
AddPkgReqsOptions {
package_reqs,
// WARNING: When bumping this version, check if anything needs to be
// updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
types_node_version_req: Some(
VersionReq::parse_from_npm("22.0.0 - 22.5.4").unwrap(),
),
},
)
}
}
fn populate_lockfile_from_snapshot(
@ -333,7 +338,13 @@ fn populate_lockfile_from_snapshot(
let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
lockfile.insert_package_specifier(
JsrDepPackageReq::npm(package_req.clone()),
format!("{}{}", id.nv.version, id.peer_deps_serialized()),
{
StringBuilder::<SmallStackString>::build(|builder| {
builder.append(&id.nv.version);
builder.append(&id.peer_dependencies);
})
.unwrap()
},
);
}
for package in snapshot.all_packages_for_every_system() {

View file

@ -11,6 +11,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use super::super::PackageCaching;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context;
@ -24,7 +25,7 @@ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use node_resolver::errors::PackageFolderResolveError;
use crate::npm::managed::cache::TarballCache;
use crate::npm::CliNpmTarballCache;
/// Part of the resolution that interacts with the file system.
#[async_trait(?Send)]
@ -57,7 +58,10 @@ pub trait NpmPackageFsResolver: Send + Sync {
specifier: &ModuleSpecifier,
) -> Result<Option<NpmPackageCacheFolderId>, AnyError>;
async fn cache_packages(&self) -> Result<(), AnyError>;
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError>;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a>(
@ -140,7 +144,7 @@ impl RegistryReadPermissionChecker {
/// Caches all the packages in parallel.
pub async fn cache_packages(
packages: &[NpmResolutionPackage],
tarball_cache: &Arc<TarballCache>,
tarball_cache: &Arc<CliNpmTarballCache>,
) -> Result<(), AnyError> {
let mut futures_unordered = futures::stream::FuturesUnordered::new();
for package in packages {

View file

@ -28,8 +28,10 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
.id
.nv
.name
.as_str()
.rsplit_once('/')
.map_or(package.id.nv.name.as_str(), |(_, name)| name)
.map(|(_, name)| name)
.unwrap_or(package.id.nv.name.as_str())
}
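That is, the default bin name is the segment after the last `/` for scoped packages, and the full name otherwise. The same `rsplit_once` pattern on plain strings (illustrative values):
let scoped = "@scope/cli";
assert_eq!(
  scoped.rsplit_once('/').map(|(_, name)| name).unwrap_or(scoped),
  "cli"
);
let bare = "esbuild";
assert_eq!(
  bare.rsplit_once('/').map(|(_, name)| name).unwrap_or(bare),
  "esbuild"
);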
pub fn warn_missing_entrypoint(

View file

@ -9,6 +9,7 @@ use deno_npm::resolution::NpmResolutionSnapshot;
use deno_runtime::deno_io::FromRawIoHandle;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use deno_task_shell::KillSignal;
use std::borrow::Cow;
use std::collections::HashSet;
use std::rc::Rc;
@ -155,6 +156,29 @@ impl<'a> LifecycleScripts<'a> {
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: &Path,
progress_bar: &ProgressBar,
) -> Result<(), AnyError> {
let kill_signal = KillSignal::default();
let _drop_signal = kill_signal.clone().drop_guard();
// we don't run with signals forwarded because once signal handlers
// are set up, they apply process-wide.
self
.finish_with_cancellation(
snapshot,
packages,
root_node_modules_dir_path,
progress_bar,
kill_signal,
)
.await
}
async fn finish_with_cancellation(
self,
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: &Path,
progress_bar: &ProgressBar,
kill_signal: KillSignal,
) -> Result<(), AnyError> {
self.warn_not_run_scripts()?;
let get_package_path =
@ -182,6 +206,12 @@ impl<'a> LifecycleScripts<'a> {
);
let mut env_vars = crate::task_runner::real_env_vars();
// so the subprocess can detect that it is running as part of a lifecycle script,
// and avoid trying to set up node_modules again
env_vars.insert(
LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR.to_string(),
"1".to_string(),
);
// we want to pass the current state of npm resolution down to the deno subprocess
// (that may be running as part of the script). we do this with an inherited temp file
//
@ -240,6 +270,7 @@ impl<'a> LifecycleScripts<'a> {
stderr: TaskStdio::piped(),
stdout: TaskStdio::piped(),
}),
kill_signal: kill_signal.clone(),
},
)
.await?;
@ -303,6 +334,13 @@ impl<'a> LifecycleScripts<'a> {
}
}
const LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR: &str =
"DENO_INTERNAL_IS_LIFECYCLE_SCRIPT";
pub fn is_running_lifecycle_script() -> bool {
std::env::var(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR).is_ok()
}
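Since the flag travels through the environment, the check is observable in-process as well; a trivial sketch (env vars are process-global, so this is only reasonable in a single-threaded context such as a test):
std::env::set_var(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR, "1");
assert!(is_running_lifecycle_script());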
// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner

View file

@ -8,6 +8,9 @@ use std::path::PathBuf;
use std::sync::Arc;
use crate::colors;
use crate::npm::managed::PackageCaching;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
@ -24,8 +27,6 @@ use node_resolver::errors::ReferrerNotFoundError;
use crate::args::LifecycleScriptsConfig;
use crate::cache::FastInsecureHasher;
use super::super::cache::NpmCache;
use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution;
use super::common::cache_packages;
use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
@ -35,8 +36,8 @@ use super::common::RegistryReadPermissionChecker;
/// Resolves packages from the global npm cache.
#[derive(Debug)]
pub struct GlobalNpmPackageResolver {
cache: Arc<NpmCache>,
tarball_cache: Arc<TarballCache>,
cache: Arc<CliNpmCache>,
tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
registry_read_permission_checker: RegistryReadPermissionChecker,
@ -45,9 +46,9 @@ pub struct GlobalNpmPackageResolver {
impl GlobalNpmPackageResolver {
pub fn new(
cache: Arc<NpmCache>,
cache: Arc<CliNpmCache>,
fs: Arc<dyn FileSystem>,
tarball_cache: Arc<TarballCache>,
tarball_cache: Arc<CliNpmTarballCache>,
resolution: Arc<NpmResolution>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
@ -150,10 +151,19 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
)
}
async fn cache_packages(&self) -> Result<(), AnyError> {
let package_partitions = self
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError> {
let package_partitions = match caching {
PackageCaching::All => self
.resolution
.all_system_packages_partitioned(&self.system_info);
.all_system_packages_partitioned(&self.system_info),
PackageCaching::Only(reqs) => self
.resolution
.subset(&reqs)
.all_system_packages_partitioned(&self.system_info),
};
cache_packages(&package_partitions.packages, &self.tarball_cache).await?;
// create the copy package folders

View file

@ -17,6 +17,9 @@ use std::sync::Arc;
use crate::args::LifecycleScriptsConfig;
use crate::colors;
use crate::npm::managed::PackageCaching;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_cache_dir::npm::mixed_case_package_name_decode;
@ -35,6 +38,7 @@ use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodePermissions;
use deno_semver::package::PackageNv;
use deno_semver::StackString;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
@ -52,8 +56,6 @@ use crate::util::fs::LaxSingleProcessFsFlag;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressMessagePrompt;
use super::super::cache::NpmCache;
use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution;
use super::common::bin_entries;
use super::common::NpmPackageFsResolver;
@ -63,12 +65,12 @@ use super::common::RegistryReadPermissionChecker;
/// and resolves packages from it.
#[derive(Debug)]
pub struct LocalNpmPackageResolver {
cache: Arc<NpmCache>,
cache: Arc<CliNpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
tarball_cache: Arc<CliNpmTarballCache>,
root_node_modules_path: PathBuf,
root_node_modules_url: Url,
system_info: NpmSystemInfo,
@ -79,12 +81,12 @@ pub struct LocalNpmPackageResolver {
impl LocalNpmPackageResolver {
#[allow(clippy::too_many_arguments)]
pub fn new(
cache: Arc<NpmCache>,
cache: Arc<CliNpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
tarball_cache: Arc<CliNpmTarballCache>,
node_modules_folder: PathBuf,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,
@ -236,13 +238,33 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
else {
return Ok(None);
};
let folder_name = folder_path.parent().unwrap().to_string_lossy();
Ok(get_package_folder_id_from_folder_name(&folder_name))
// ex. project/node_modules/.deno/preact@10.24.3/node_modules/preact/
let Some(node_modules_ancestor) = folder_path
.ancestors()
.find(|ancestor| ancestor.ends_with("node_modules"))
else {
return Ok(None);
};
let Some(folder_name) =
node_modules_ancestor.parent().and_then(|p| p.file_name())
else {
return Ok(None);
};
Ok(get_package_folder_id_from_folder_name(
&folder_name.to_string_lossy(),
))
}
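In other words, the resolver now walks up to the nearest `node_modules` ancestor and reads its parent's folder name, which encodes the package name and version. A standalone sketch of that traversal (illustrative path):
use std::path::Path;
let path =
  Path::new("project/node_modules/.deno/preact@10.24.3/node_modules/preact");
let node_modules = path
  .ancestors()
  .find(|ancestor| ancestor.ends_with("node_modules"))
  .unwrap();
let folder_name = node_modules.parent().and_then(|p| p.file_name()).unwrap();
assert_eq!(folder_name.to_string_lossy(), "preact@10.24.3");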
async fn cache_packages(&self) -> Result<(), AnyError> {
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), AnyError> {
let snapshot = match caching {
PackageCaching::All => self.resolution.snapshot(),
PackageCaching::Only(reqs) => self.resolution.subset(&reqs),
};
sync_resolution_with_fs(
&self.resolution.snapshot(),
&snapshot,
&self.cache,
&self.npm_install_deps_provider,
&self.progress_bar,
@ -284,10 +306,10 @@ fn local_node_modules_package_contents_path(
#[allow(clippy::too_many_arguments)]
async fn sync_resolution_with_fs(
snapshot: &NpmResolutionSnapshot,
cache: &Arc<NpmCache>,
cache: &Arc<CliNpmCache>,
npm_install_deps_provider: &NpmInstallDepsProvider,
progress_bar: &ProgressBar,
tarball_cache: &Arc<TarballCache>,
tarball_cache: &Arc<CliNpmTarballCache>,
root_node_modules_dir_path: &Path,
system_info: &NpmSystemInfo,
lifecycle_scripts: &LifecycleScriptsConfig,
@ -298,6 +320,12 @@ async fn sync_resolution_with_fs(
return Ok(()); // don't create the directory
}
// don't set up node_modules (and more importantly try to acquire the file lock)
// if we're running as part of a lifecycle script
if super::common::lifecycle_scripts::is_running_lifecycle_script() {
return Ok(());
}
let deno_local_registry_dir = root_node_modules_dir_path.join(".deno");
let deno_node_modules_dir = deno_local_registry_dir.join("node_modules");
fs::create_dir_all(&deno_node_modules_dir).with_context(|| {
@ -328,8 +356,10 @@ async fn sync_resolution_with_fs(
let package_partitions =
snapshot.all_system_packages_partitioned(system_info);
let mut cache_futures = FuturesUnordered::new();
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
HashMap::with_capacity(package_partitions.packages.len());
let mut newest_packages_by_name: HashMap<
&StackString,
&NpmResolutionPackage,
> = HashMap::with_capacity(package_partitions.packages.len());
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new(
@ -509,7 +539,7 @@ async fn sync_resolution_with_fs(
}
}
let mut found_names: HashMap<&String, &PackageNv> = HashMap::new();
let mut found_names: HashMap<&StackString, &PackageNv> = HashMap::new();
// set of node_modules in workspace packages that we've already ensured exist
let mut existing_child_node_modules_dirs: HashSet<PathBuf> = HashSet::new();
@ -985,10 +1015,10 @@ fn get_package_folder_id_from_folder_name(
) -> Option<NpmPackageCacheFolderId> {
let folder_name = folder_name.replace('+', "/");
let (name, ending) = folder_name.rsplit_once('@')?;
let name = if let Some(encoded_name) = name.strip_prefix('_') {
mixed_case_package_name_decode(encoded_name)?
let name: StackString = if let Some(encoded_name) = name.strip_prefix('_') {
StackString::from_string(mixed_case_package_name_decode(encoded_name)?)
} else {
name.to_string()
name.into()
};
let (raw_version, copy_index) = match ending.split_once('_') {
Some((raw_version, copy_index)) => {

View file

@ -12,6 +12,8 @@ use deno_runtime::deno_fs::FileSystem;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use crate::util::progress_bar::ProgressBar;
pub use self::common::NpmPackageFsResolver;
@ -19,18 +21,16 @@ pub use self::common::NpmPackageFsResolver;
use self::global::GlobalNpmPackageResolver;
use self::local::LocalNpmPackageResolver;
use super::cache::NpmCache;
use super::cache::TarballCache;
use super::resolution::NpmResolution;
#[allow(clippy::too_many_arguments)]
pub fn create_npm_fs_resolver(
fs: Arc<dyn FileSystem>,
npm_cache: Arc<NpmCache>,
npm_cache: Arc<CliNpmCache>,
npm_install_deps_provider: &Arc<NpmInstallDepsProvider>,
progress_bar: &ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
tarball_cache: Arc<CliNpmTarballCache>,
maybe_node_modules_path: Option<PathBuf>,
system_info: NpmSystemInfo,
lifecycle_scripts: LifecycleScriptsConfig,

View file

@ -1,33 +1,39 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
mod byonm;
mod common;
mod managed;
use std::borrow::Cow;
use std::path::Path;
use std::sync::Arc;
use common::maybe_auth_header_for_npm_registry;
use dashmap::DashMap;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmInNpmPackageChecker;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::CliNpmReqResolver;
use deno_resolver::npm::ResolvePkgFolderFromDenoReqError;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use managed::cache::registry_info::get_package_url;
use http::HeaderName;
use http::HeaderValue;
use managed::create_managed_in_npm_pkg_checker;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::hard_link_dir_recursive;
use crate::util::fs::AtomicWriteFileFsAdapter;
use crate::util::progress_bar::ProgressBar;
pub use self::byonm::CliByonmNpmResolver;
pub use self::byonm::CliByonmNpmResolverCreateOptions;
@ -35,6 +41,100 @@ pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver;
pub use self::managed::PackageCaching;
pub type CliNpmTarballCache = deno_npm_cache::TarballCache<CliNpmCacheEnv>;
pub type CliNpmCache = deno_npm_cache::NpmCache<CliNpmCacheEnv>;
pub type CliNpmRegistryInfoProvider =
deno_npm_cache::RegistryInfoProvider<CliNpmCacheEnv>;
#[derive(Debug)]
pub struct CliNpmCacheEnv {
fs: Arc<dyn FileSystem>,
http_client_provider: Arc<HttpClientProvider>,
progress_bar: ProgressBar,
}
impl CliNpmCacheEnv {
pub fn new(
fs: Arc<dyn FileSystem>,
http_client_provider: Arc<HttpClientProvider>,
progress_bar: ProgressBar,
) -> Self {
Self {
fs,
http_client_provider,
progress_bar,
}
}
}
#[async_trait::async_trait(?Send)]
impl deno_npm_cache::NpmCacheEnv for CliNpmCacheEnv {
fn exists(&self, path: &Path) -> bool {
self.fs.exists_sync(path)
}
fn hard_link_dir_recursive(
&self,
from: &Path,
to: &Path,
) -> Result<(), AnyError> {
// todo(dsherret): use self.fs here instead
hard_link_dir_recursive(from, to)
}
fn atomic_write_file_with_retries(
&self,
file_path: &Path,
data: &[u8],
) -> std::io::Result<()> {
atomic_write_file_with_retries_and_fs(
&AtomicWriteFileFsAdapter {
fs: self.fs.as_ref(),
write_mode: crate::cache::CACHE_PERM,
},
file_path,
data,
)
}
async fn download_with_retries_on_any_tokio_runtime(
&self,
url: Url,
maybe_auth_header: Option<(HeaderName, HeaderValue)>,
) -> Result<Option<Vec<u8>>, deno_npm_cache::DownloadError> {
let guard = self.progress_bar.update(url.as_str());
let client = self.http_client_provider.get_or_create().map_err(|err| {
deno_npm_cache::DownloadError {
status_code: None,
error: err,
}
})?;
client
.download_with_progress_and_retries(url, maybe_auth_header, &guard)
.await
.map_err(|err| {
use crate::http_util::DownloadErrorKind::*;
let status_code = match err.as_kind() {
Fetch { .. }
| UrlParse { .. }
| HttpParse { .. }
| Json { .. }
| ToStr { .. }
| RedirectHeaderParse { .. }
| TooManyRedirects => None,
BadResponse(bad_response_error) => {
Some(bad_response_error.status_code)
}
};
deno_npm_cache::DownloadError {
status_code,
error: err.into(),
}
})
}
}
pub enum CliNpmResolverCreateOptions {
Managed(CliManagedNpmResolverCreateOptions),
@ -132,13 +232,13 @@ pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver {
pub struct NpmFetchResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
}
impl NpmFetchResolver {
pub fn new(
file_fetcher: Arc<FileFetcher>,
file_fetcher: Arc<CliFileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
) -> Self {
Self {
@ -179,13 +279,15 @@ impl NpmFetchResolver {
if let Some(info) = self.info_by_name.get(name) {
return info.value().clone();
}
// todo(#27198): use RegistryInfoProvider instead
let fetch_package_info = || async {
let info_url = get_package_url(&self.npmrc, name);
let info_url = deno_npm_cache::get_package_url(&self.npmrc, name);
let file_fetcher = self.file_fetcher.clone();
let registry_config = self.npmrc.get_registry_config(name);
// TODO(bartlomieju): this should error out, not use `.ok()`.
let maybe_auth_header =
maybe_auth_header_for_npm_registry(registry_config).ok()?;
deno_npm_cache::maybe_auth_header_for_npm_registry(registry_config)
.ok()?;
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move {
file_fetcher

34
cli/ops/lint.rs Normal file
View file

@ -0,0 +1,34 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::op2;
use crate::tools::lint;
deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],);
#[op2]
#[buffer]
fn op_lint_create_serialized_ast(
#[string] file_name: &str,
#[string] source: String,
) -> Result<Vec<u8>, AnyError> {
let file_text = deno_ast::strip_bom(source);
let path = std::env::current_dir()?.join(file_name);
let specifier = ModuleSpecifier::from_file_path(&path).map_err(|_| {
generic_error(format!("Failed to parse path as URL: {}", path.display()))
})?;
let media_type = MediaType::from_specifier(&specifier);
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
specifier,
text: file_text.into(),
media_type,
capture_tokens: false,
scope_analysis: false,
maybe_syntax: None,
})?;
Ok(lint::serialize_ast_to_buffer(&parsed_source))
}

View file

@ -2,4 +2,5 @@
pub mod bench;
pub mod jupyter;
pub mod lint;
pub mod testing;

View file

@ -12,7 +12,6 @@ use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::NpmLoadError;
@ -25,25 +24,25 @@ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_semver::package::PackageReq;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
use crate::args::NpmCachingStrategy;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_owned;
use crate::util::text_encoding::from_utf8_lossy_cow;
pub type CjsTracker = deno_resolver::cjs::CjsTracker<DenoFsNodeResolverEnv>;
pub type IsCjsResolver =
deno_resolver::cjs::IsCjsResolver<DenoFsNodeResolverEnv>;
pub type IsCjsResolverOptions = deno_resolver::cjs::IsCjsResolverOptions;
pub type CliSloppyImportsResolver =
SloppyImportsResolver<SloppyImportsCachedFs>;
pub type CliDenoResolver = deno_resolver::DenoResolver<
@ -64,7 +63,10 @@ pub struct ModuleCodeStringSource {
pub struct CliDenoResolverFs(pub Arc<dyn FileSystem>);
impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String> {
fn read_to_string_lossy(
&self,
path: &Path,
) -> std::io::Result<Cow<'static, str>> {
self
.0
.read_text_file_lossy_sync(path, None)
@ -184,18 +186,21 @@ impl NpmModuleLoader {
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals
let code = from_utf8_lossy_owned(code);
let code = from_utf8_lossy_cow(code);
ModuleSourceCode::String(
self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(Cow::Owned(code)))
.translate_cjs_to_esm(specifier, Some(code))
.await?
.into_owned()
.into(),
)
} else {
// esm and json code is untouched
ModuleSourceCode::Bytes(code.into_boxed_slice().into())
ModuleSourceCode::Bytes(match code {
Cow::Owned(bytes) => bytes.into_boxed_slice().into(),
Cow::Borrowed(bytes) => bytes.into(),
})
};
Ok(ModuleCodeStringSource {
@ -236,36 +241,29 @@ impl CliResolver {
// todo(dsherret): move this off CliResolver as CliResolver is acting
// like a factory by doing this (it's beyond its responsibility)
pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver {
pub fn create_graph_npm_resolver(
&self,
npm_caching: NpmCachingStrategy,
) -> WorkerCliNpmGraphResolver {
WorkerCliNpmGraphResolver {
npm_resolver: self.npm_resolver.as_ref(),
found_package_json_dep_flag: &self.found_package_json_dep_flag,
bare_node_builtins_enabled: self.bare_node_builtins_enabled,
npm_caching,
}
}
pub fn resolve(
&self,
raw_specifier: &str,
referrer_range: &deno_graph::Range,
referrer_kind: NodeModuleKind,
mode: ResolutionMode,
referrer: &ModuleSpecifier,
referrer_range_start: deno_graph::Position,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<ModuleSpecifier, ResolveError> {
fn to_node_mode(mode: ResolutionMode) -> NodeResolutionMode {
match mode {
ResolutionMode::Execution => NodeResolutionMode::Execution,
ResolutionMode::Types => NodeResolutionMode::Types,
}
}
let resolution = self
.deno_resolver
.resolve(
raw_specifier,
&referrer_range.specifier,
referrer_kind,
to_node_mode(mode),
)
.resolve(raw_specifier, referrer, resolution_mode, resolution_kind)
.map_err(|err| match err.into_kind() {
deno_resolver::DenoResolveErrorKind::MappedResolution(
mapped_resolution_error,
@ -291,10 +289,11 @@ impl CliResolver {
} => {
if self.warned_pkgs.insert(reference.req().clone()) {
log::warn!(
"{} {}\n at {}",
"{} {}\n at {}:{}",
colors::yellow("Warning"),
diagnostic,
referrer_range
referrer,
referrer_range_start,
);
}
}
@ -310,6 +309,7 @@ pub struct WorkerCliNpmGraphResolver<'a> {
npm_resolver: Option<&'a Arc<dyn CliNpmResolver>>,
found_package_json_dep_flag: &'a AtomicFlag,
bare_node_builtins_enabled: bool,
npm_caching: NpmCachingStrategy,
}
#[async_trait(?Send)]
@ -335,13 +335,10 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
module_name: &str,
range: &deno_graph::Range,
) {
let deno_graph::Range {
start, specifier, ..
} = range;
let line = start.line + 1;
let column = start.character + 1;
let start = range.range.start;
let specifier = &range.specifier;
if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
log::warn!("{} Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{line}:{column}. If you want to use a built-in Node module, add a \"node:\" prefix.", colors::yellow("Warning"))
log::warn!("{} Resolving \"{module_name}\" as \"node:{module_name}\" at {specifier}:{start}. If you want to use a built-in Node module, add a \"node:\" prefix.", colors::yellow("Warning"))
}
}
@ -382,7 +379,20 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
Ok(())
};
let result = npm_resolver.add_package_reqs_raw(package_reqs).await;
let result = npm_resolver
.add_package_reqs_raw(
package_reqs,
match self.npm_caching {
NpmCachingStrategy::Eager => {
Some(crate::npm::PackageCaching::All)
}
NpmCachingStrategy::Lazy => {
Some(crate::npm::PackageCaching::Only(package_reqs.into()))
}
NpmCachingStrategy::Manual => None,
},
)
.await;
NpmResolvePkgReqsResult {
results: result

View file

@ -291,7 +291,7 @@
"type": "array",
"description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.",
"items": {
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json"
"$ref": "lint-tags.v1.json"
},
"minItems": 0,
"uniqueItems": true
@ -300,7 +300,7 @@
"type": "array",
"description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.",
"items": {
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
"$ref": "lint-rules.v1.json"
},
"minItems": 0,
"uniqueItems": true
@ -309,7 +309,7 @@
"type": "array",
"description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.",
"items": {
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
"$ref": "lint-rules.v1.json"
},
"minItems": 0,
"uniqueItems": true
@ -446,7 +446,6 @@
},
"command": {
"type": "string",
"required": true,
"description": "The task to execute"
},
"dependencies": {
@ -554,6 +553,7 @@
"bare-node-builtins",
"byonm",
"cron",
"detect-cjs",
"ffi",
"fs",
"fmt-component",

View file

@ -0,0 +1,112 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"enum": [
"adjacent-overload-signatures",
"ban-ts-comment",
"ban-types",
"ban-unknown-rule-code",
"ban-untagged-ignore",
"ban-untagged-todo",
"ban-unused-ignore",
"camelcase",
"constructor-super",
"default-param-last",
"eqeqeq",
"explicit-function-return-type",
"explicit-module-boundary-types",
"for-direction",
"fresh-handler-export",
"fresh-server-event-handlers",
"getter-return",
"guard-for-in",
"no-array-constructor",
"no-async-promise-executor",
"no-await-in-loop",
"no-await-in-sync-fn",
"no-boolean-literal-for-arguments",
"no-case-declarations",
"no-class-assign",
"no-compare-neg-zero",
"no-cond-assign",
"no-console",
"no-const-assign",
"no-constant-condition",
"no-control-regex",
"no-debugger",
"no-delete-var",
"no-deprecated-deno-api",
"no-dupe-args",
"no-dupe-class-members",
"no-dupe-else-if",
"no-dupe-keys",
"no-duplicate-case",
"no-empty",
"no-empty-character-class",
"no-empty-enum",
"no-empty-interface",
"no-empty-pattern",
"no-eval",
"no-ex-assign",
"no-explicit-any",
"no-external-import",
"no-extra-boolean-cast",
"no-extra-non-null-assertion",
"no-fallthrough",
"no-func-assign",
"no-global-assign",
"no-implicit-declare-namespace-export",
"no-import-assertions",
"no-import-assign",
"no-inferrable-types",
"no-inner-declarations",
"no-invalid-regexp",
"no-invalid-triple-slash-reference",
"no-irregular-whitespace",
"no-misused-new",
"no-namespace",
"no-new-symbol",
"no-node-globals",
"no-non-null-asserted-optional-chain",
"no-non-null-assertion",
"no-obj-calls",
"no-octal",
"no-process-globals",
"no-prototype-builtins",
"no-redeclare",
"no-regex-spaces",
"no-self-assign",
"no-self-compare",
"no-setter-return",
"no-shadow-restricted-names",
"no-sloppy-imports",
"no-slow-types",
"no-sparse-arrays",
"no-sync-fn-in-async-fn",
"no-this-alias",
"no-this-before-super",
"no-throw-literal",
"no-top-level-await",
"no-undef",
"no-unreachable",
"no-unsafe-finally",
"no-unsafe-negation",
"no-unused-labels",
"no-unused-vars",
"no-var",
"no-window",
"no-window-prefix",
"no-with",
"prefer-as-const",
"prefer-ascii",
"prefer-const",
"prefer-namespace-keyword",
"prefer-primordials",
"require-await",
"require-yield",
"single-var-declarator",
"triple-slash-reference",
"use-isnan",
"valid-typeof",
"verbatim-module-syntax"
]
}

View file

@ -0,0 +1,4 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"enum": ["fresh", "jsr", "jsx", "react", "recommended"]
}

View file

@ -4,6 +4,7 @@ use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::env;
use std::env::current_exe;
use std::ffi::OsString;
use std::fs;
@ -15,6 +16,7 @@ use std::io::Seek;
use std::io::SeekFrom;
use std::io::Write;
use std::ops::Range;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
@ -42,16 +44,19 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::ops::otel::OtelConfig;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError;
use deno_telemetry::OtelConfig;
use indexmap::IndexMap;
use log::Level;
use serde::Deserialize;
@ -66,7 +71,7 @@ use crate::args::UnstableConfig;
use crate::cache::DenoDir;
use crate::cache::FastInsecureHasher;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
@ -74,6 +79,7 @@ use crate::resolver::CjsTracker;
use crate::shared::ReleaseChannel;
use crate::standalone::virtual_fs::VfsEntry;
use crate::util::archive;
use crate::util::fs::canonicalize_path;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@ -85,30 +91,31 @@ use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder;
use super::serialization::SourceMapStore;
use super::virtual_fs::output_vfs;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsFileSubDataKind;
use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory;
use super::virtual_fs::VirtualDirectoryEntries;
use super::virtual_fs::WindowsSystemRootablePath;
pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str =
".deno_compile_node_modules";
/// A URL that can be designated as the base for relative URLs.
///
/// After creation, this URL may be used to get the key for a
/// module in the binary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url);
impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> {
fn from(url: &'a Url) -> Self {
Self(url)
}
pub enum StandaloneRelativeFileBaseUrl<'a> {
WindowsSystemRoot,
Path(&'a Url),
}
impl<'a> StandaloneRelativeFileBaseUrl<'a> {
pub fn new(url: &'a Url) -> Self {
debug_assert_eq!(url.scheme(), "file");
Self(url)
}
/// Gets the module map key of the provided specifier.
///
/// * Descendant file specifiers will be made relative to the base.
@ -118,22 +125,29 @@ impl<'a> StandaloneRelativeFileBaseUrl<'a> {
if target.scheme() != "file" {
return Cow::Borrowed(target.as_str());
}
let base = match self {
Self::Path(base) => base,
Self::WindowsSystemRoot => return Cow::Borrowed(target.path()),
};
match self.0.make_relative(target) {
match base.make_relative(target) {
Some(relative) => {
if relative.starts_with("../") {
Cow::Borrowed(target.as_str())
} else {
// This is not a great scenario to have because it means that the
// specifier is outside the vfs and could cause the binary to act
// strangely. If you encounter this, the fix is to add more paths
// to the vfs builder by calling `add_possible_min_root_dir`.
debug_assert!(
!relative.starts_with("../"),
"{} -> {} ({})",
base.as_str(),
target.as_str(),
relative,
);
Cow::Owned(relative)
}
}
None => Cow::Borrowed(target.as_str()),
}
}
pub fn inner(&self) -> &Url {
self.0
}
}
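
A minimal sketch of the relativization performed by `specifier_key` above, assuming only the `url` crate; note the real code debug-asserts when the target escapes the base, whereas this sketch simply falls back to the absolute URL:

use url::Url;

fn specifier_key(base: Option<&Url>, target: &Url) -> String {
  if target.scheme() != "file" {
    return target.as_str().to_string();
  }
  match base {
    // WindowsSystemRoot case: keys are plain absolute paths.
    None => target.path().to_string(),
    Some(base) => match base.make_relative(target) {
      // A "../" prefix means the specifier sits outside the vfs root;
      // the real code debug-asserts here, this sketch just falls back.
      Some(rel) if !rel.starts_with("../") => rel,
      _ => target.as_str().to_string(),
    },
  }
}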
#[derive(Deserialize, Serialize)]
@ -188,20 +202,27 @@ pub struct Metadata {
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig,
pub otel_config: Option<OtelConfig>, // None means disabled.
pub otel_config: OtelConfig,
}
#[allow(clippy::too_many_arguments)]
fn write_binary_bytes(
mut file_writer: File,
original_bin: Vec<u8>,
metadata: &Metadata,
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
remote_modules: &RemoteModulesStoreBuilder,
vfs: VfsBuilder,
source_map_store: &SourceMapStore,
vfs: &BuiltVfs,
compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
let data_section_bytes =
serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)
let data_section_bytes = serialize_binary_data_section(
metadata,
npm_snapshot,
remote_modules,
source_map_store,
vfs,
)
.context("Serializing binary data section.")?;
let target = compile_flags.resolve_target();
@ -244,6 +265,7 @@ pub struct StandaloneData {
pub modules: StandaloneModules,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub source_maps: SourceMapStore,
pub vfs: Arc<FileBackedVfs>,
}
@ -271,20 +293,20 @@ impl StandaloneModules {
pub fn read<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
kind: VfsFileSubDataKind,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?;
let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry)?,
Ok(entry) => self.vfs.read_file_all(entry, kind)?,
Err(err) if err.kind() == ErrorKind::NotFound => {
let bytes = match RealFs.read_file_sync(&path, None) {
match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes,
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
return Ok(None)
}
Err(err) => return Err(err.into()),
};
Cow::Owned(bytes)
}
}
Err(err) => return Err(err.into()),
};
@ -294,7 +316,18 @@ impl StandaloneModules {
data: bytes,
}))
} else {
self.remote_modules.read(specifier)
self.remote_modules.read(specifier).map(|maybe_entry| {
maybe_entry.map(|entry| DenoCompileModuleData {
media_type: entry.media_type,
specifier: entry.specifier,
data: match kind {
VfsFileSubDataKind::Raw => entry.data,
VfsFileSubDataKind::ModuleGraph => {
entry.transpiled_data.unwrap_or(entry.data)
}
},
})
})
}
}
}
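
The `VfsFileSubDataKind` threaded through these reads distinguishes two byte views of the same file. A self-contained sketch of the semantics implied by the `transpiled_data.unwrap_or(entry.data)` fallback above (enum and struct shapes assumed from usage):

enum VfsFileSubDataKind {
  Raw,
  ModuleGraph,
}

struct StoredFile {
  raw: Vec<u8>,
  transpiled: Option<Vec<u8>>,
}

impl StoredFile {
  fn read(&self, kind: VfsFileSubDataKind) -> &[u8] {
    match kind {
      // Raw: the bytes exactly as written on disk.
      VfsFileSubDataKind::Raw => &self.raw,
      // ModuleGraph: prefer the stored transpiled output when present.
      VfsFileSubDataKind::ModuleGraph => {
        self.transpiled.as_deref().unwrap_or(&self.raw)
      }
    }
  }
}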
@ -315,7 +348,8 @@ pub fn extract_standalone(
mut metadata,
npm_snapshot,
remote_modules,
mut vfs_dir,
source_maps,
vfs_root_entries,
vfs_files_data,
} = match deserialize_binary_data_section(data)? {
Some(data_section) => data_section,
@ -338,11 +372,12 @@ pub fn extract_standalone(
metadata.argv.push(arg.into_string().unwrap());
}
let vfs = {
// align the name of the directory with the root dir
vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string();
let fs_root = VfsRoot {
dir: vfs_dir,
dir: VirtualDirectory {
// align the name of the directory with the root dir
name: root_path.file_name().unwrap().to_string_lossy().to_string(),
entries: vfs_root_entries,
},
root_path: root_path.clone(),
start_file_offset: 0,
};
@ -359,16 +394,26 @@ pub fn extract_standalone(
},
npm_snapshot,
root_path,
source_maps,
vfs,
}))
}
pub struct WriteBinOptions<'a> {
pub writer: File,
pub display_output_filename: &'a str,
pub graph: &'a ModuleGraph,
pub entrypoint: &'a ModuleSpecifier,
pub include_files: &'a [ModuleSpecifier],
pub compile_flags: &'a CompileFlags,
}
pub struct DenoCompileBinaryWriter<'a> {
cjs_tracker: &'a CjsTracker,
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
@ -382,7 +427,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
cli_options: &'a CliOptions,
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher,
file_fetcher: &'a CliFileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
workspace_resolver: &'a WorkspaceResolver,
@ -403,18 +448,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
pub async fn write_bin(
&self,
writer: File,
graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
include_files: &[ModuleSpecifier],
compile_flags: &CompileFlags,
options: WriteBinOptions<'_>,
) -> Result<(), AnyError> {
// Select base binary based on target
let mut original_binary = self.get_base_binary(compile_flags).await?;
let mut original_binary =
self.get_base_binary(options.compile_flags).await?;
if compile_flags.no_terminal {
let target = compile_flags.resolve_target();
if options.compile_flags.no_terminal {
let target = options.compile_flags.resolve_target();
if !target.contains("windows") {
bail!(
"The `--no-terminal` flag is only available when targeting Windows (current: {})",
@ -424,8 +465,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
set_windows_binary_to_gui(&mut original_binary)
.context("Setting windows binary to GUI.")?;
}
if compile_flags.icon.is_some() {
let target = compile_flags.resolve_target();
if options.compile_flags.icon.is_some() {
let target = options.compile_flags.resolve_target();
if !target.contains("windows") {
bail!(
"The `--icon` flag is only available when targeting Windows (current: {})",
@ -433,17 +474,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
)
}
}
self
.write_standalone_binary(
writer,
original_binary,
graph,
root_dir_url,
entrypoint,
include_files,
compile_flags,
)
.await
self.write_standalone_binary(options, original_binary)
}
async fn get_base_binary(
@ -454,7 +485,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
//
// Phase 2 of the 'min sized' deno compile RFC talks
// about adding this as a flag.
if let Some(path) = std::env::var_os("DENORT_BIN") {
if let Some(path) = get_dev_binary_path() {
return std::fs::read(&path).with_context(|| {
format!("Could not find denort at '{}'", path.to_string_lossy())
});
@ -546,16 +577,19 @@ impl<'a> DenoCompileBinaryWriter<'a> {
/// This functions creates a standalone deno binary by appending a bundle
/// and magic trailer to the currently executing binary.
#[allow(clippy::too_many_arguments)]
async fn write_standalone_binary(
fn write_standalone_binary(
&self,
writer: File,
options: WriteBinOptions<'_>,
original_bin: Vec<u8>,
graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
include_files: &[ModuleSpecifier],
compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
let WriteBinOptions {
writer,
display_output_filename,
graph,
entrypoint,
include_files,
compile_flags,
} = options;
let ca_data = match self.cli_options.ca_data() {
Some(CaData::File(ca_file)) => Some(
std::fs::read(ca_file).with_context(|| format!("Reading {ca_file}"))?,
@ -563,41 +597,174 @@ impl<'a> DenoCompileBinaryWriter<'a> {
Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
None => None,
};
let root_path = root_dir_url.inner().to_file_path().unwrap();
let (maybe_npm_vfs, node_modules, npm_snapshot) =
match self.npm_resolver.as_inner() {
let mut vfs = VfsBuilder::new();
let npm_snapshot = match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(managed) => {
let snapshot =
managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
if !snapshot.as_serialized().packages.is_empty() {
let npm_vfs_builder = self
.build_npm_vfs(&root_path)
.context("Building npm vfs.")?;
(
Some(npm_vfs_builder),
Some(NodeModules::Managed {
node_modules_dir: self
.npm_resolver
.root_node_modules_path()
.map(|path| {
self.fill_npm_vfs(&mut vfs).context("Building npm vfs.")?;
Some(snapshot)
} else {
None
}
}
InnerCliNpmResolverRef::Byonm(_) => {
self.fill_npm_vfs(&mut vfs)?;
None
}
};
for include_file in include_files {
let path = deno_path_util::url_to_file_path(include_file)?;
vfs
.add_file_at_path(&path)
.with_context(|| format!("Including {}", path.display()))?;
}
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
let mut source_maps = Vec::with_capacity(graph.specifiers_count());
// todo(dsherret): transpile in parallel
for module in graph.modules() {
if module.specifier().scheme() == "data" {
continue; // don't store data urls as an entry as they're in the code
}
let (maybe_original_source, maybe_transpiled, media_type) = match module {
deno_graph::Module::Js(m) => {
let original_bytes = m.source.as_bytes().to_vec();
let maybe_transpiled = if m.media_type.is_emittable() {
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
&m.specifier,
m.media_type,
m.is_script,
)?;
let module_kind = ModuleKind::from_is_cjs(is_cjs);
let (source, source_map) =
self.emitter.emit_parsed_source_for_deno_compile(
&m.specifier,
m.media_type,
module_kind,
&m.source,
)?;
if source != m.source.as_ref() {
source_maps.push((&m.specifier, source_map));
Some(source.into_bytes())
} else {
None
}
} else {
None
};
(Some(original_bytes), maybe_transpiled, m.media_type)
}
deno_graph::Module::Json(m) => {
(Some(m.source.as_bytes().to_vec()), None, m.media_type)
}
deno_graph::Module::Wasm(m) => {
(Some(m.source.to_vec()), None, MediaType::Wasm)
}
deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => (None, None, MediaType::Unknown),
};
if let Some(original_source) = maybe_original_source {
if module.specifier().scheme() == "file" {
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
vfs
.add_file_with_data(
&file_path,
original_source,
VfsFileSubDataKind::Raw,
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
if let Some(transpiled_source) = maybe_transpiled {
vfs
.add_file_with_data(
&file_path,
transpiled_source,
VfsFileSubDataKind::ModuleGraph,
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
}
} else {
remote_modules_store.add(
module.specifier(),
media_type,
original_source,
maybe_transpiled,
);
}
}
}
remote_modules_store.add_redirects(&graph.redirects);
if let Some(import_map) = self.workspace_resolver.maybe_import_map() {
if let Ok(file_path) = url_to_file_path(import_map.base_url()) {
if let Some(import_map_parent_dir) = file_path.parent() {
// tell the vfs about the import map's parent directory in case it
// falls outside what the root of where the VFS will be based
vfs.add_possible_min_root_dir(import_map_parent_dir);
}
}
}
if let Some(node_modules_dir) = self.npm_resolver.root_node_modules_path() {
// ensure the vfs doesn't go below the node_modules directory's parent
if let Some(parent) = node_modules_dir.parent() {
vfs.add_possible_min_root_dir(parent);
}
}
let vfs = self.build_vfs_consolidating_global_npm_cache(vfs);
let root_dir_url = match &vfs.root_path {
WindowsSystemRootablePath::Path(dir) => {
Some(url_from_directory_path(dir)?)
}
WindowsSystemRootablePath::WindowSystemRoot => None,
};
let root_dir_url = match &root_dir_url {
Some(url) => StandaloneRelativeFileBaseUrl::Path(url),
None => StandaloneRelativeFileBaseUrl::WindowsSystemRoot,
};
let code_cache_key = if self.cli_options.code_cache_enabled() {
let mut hasher = FastInsecureHasher::new_deno_versioned();
for module in graph.modules() {
if let Some(source) = module.source() {
hasher
.write(root_dir_url.specifier_key(module.specifier()).as_bytes());
hasher.write(source.as_bytes());
}
}
Some(hasher.finish())
} else {
None
};
let mut source_map_store = SourceMapStore::with_capacity(source_maps.len());
for (specifier, source_map) in source_maps {
source_map_store.add(
Cow::Owned(root_dir_url.specifier_key(specifier).into_owned()),
Cow::Owned(source_map.into_bytes()),
);
}
let node_modules = match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(_) => {
npm_snapshot.as_ref().map(|_| NodeModules::Managed {
node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|path| {
root_dir_url
.specifier_key(
&ModuleSpecifier::from_directory_path(path).unwrap(),
)
.into_owned()
}),
}),
Some(snapshot),
)
} else {
(None, None, None)
},
),
})
}
}
InnerCliNpmResolverRef::Byonm(resolver) => {
let npm_vfs_builder = self.build_npm_vfs(&root_path)?;
(
Some(npm_vfs_builder),
Some(NodeModules::Byonm {
InnerCliNpmResolverRef::Byonm(resolver) => Some(NodeModules::Byonm {
root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| {
root_dir_url
@ -609,97 +776,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
},
),
}),
None,
)
}
};
let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs {
npm_vfs
} else {
VfsBuilder::new(root_path.clone())?
};
for include_file in include_files {
let path = deno_path_util::url_to_file_path(include_file)?;
if path.is_dir() {
// TODO(#26941): we should analyze if any of these are
// modules in order to include their dependencies
vfs
.add_dir_recursive(&path)
.with_context(|| format!("Including {}", path.display()))?;
} else {
vfs
.add_file_at_path(&path)
.with_context(|| format!("Including {}", path.display()))?;
}
}
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
let mut code_cache_key_hasher = if self.cli_options.code_cache_enabled() {
Some(FastInsecureHasher::new_deno_versioned())
} else {
None
};
for module in graph.modules() {
if module.specifier().scheme() == "data" {
continue; // don't store data urls as an entry as they're in the code
}
if let Some(hasher) = &mut code_cache_key_hasher {
if let Some(source) = module.source() {
hasher.write(module.specifier().as_str().as_bytes());
hasher.write(source.as_bytes());
}
}
let (maybe_source, media_type) = match module {
deno_graph::Module::Js(m) => {
let source = if m.media_type.is_emittable() {
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
&m.specifier,
m.media_type,
m.is_script,
)?;
let module_kind = ModuleKind::from_is_cjs(is_cjs);
let source = self
.emitter
.emit_parsed_source(
&m.specifier,
m.media_type,
module_kind,
&m.source,
)
.await?;
source.into_bytes()
} else {
m.source.as_bytes().to_vec()
};
(Some(source), m.media_type)
}
deno_graph::Module::Json(m) => {
(Some(m.source.as_bytes().to_vec()), m.media_type)
}
deno_graph::Module::Wasm(m) => {
(Some(m.source.to_vec()), MediaType::Wasm)
}
deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => (None, MediaType::Unknown),
};
if module.specifier().scheme() == "file" {
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
vfs
.add_file_with_data(
&file_path,
match maybe_source {
Some(source) => source,
None => RealFs.read_file_sync(&file_path, None)?,
},
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
} else if let Some(source) = maybe_source {
remote_modules_store.add(module.specifier(), media_type, source);
}
}
remote_modules_store.add_redirects(&graph.redirects);
let env_vars_from_env_file = match self.cli_options.env_file_name() {
Some(env_filenames) => {
@ -715,10 +792,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None => Default::default(),
};
output_vfs(&vfs, display_output_filename);
let metadata = Metadata {
argv: compile_flags.args.clone(),
seed: self.cli_options.seed(),
code_cache_key: code_cache_key_hasher.map(|h| h.finish()),
code_cache_key,
location: self.cli_options.location_flag().clone(),
permissions: self.cli_options.permission_flags().clone(),
v8_flags: self.cli_options.v8_flags().clone(),
@ -771,8 +850,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
unstable_config: UnstableConfig {
legacy_flag_enabled: false,
bare_node_builtins: self.cli_options.unstable_bare_node_builtins(),
detect_cjs: self.cli_options.unstable_detect_cjs(),
sloppy_imports: self.cli_options.unstable_sloppy_imports(),
features: self.cli_options.unstable_features(),
npm_lazy_caching: self.cli_options.unstable_npm_lazy_caching(),
},
otel_config: self.cli_options.otel_config(),
};
@ -783,13 +864,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
&metadata,
npm_snapshot.map(|s| s.into_serialized()),
&remote_modules_store,
vfs,
&source_map_store,
&vfs,
compile_flags,
)
.context("Writing binary bytes")
}
fn build_npm_vfs(&self, root_path: &Path) -> Result<VfsBuilder, AnyError> {
fn fill_npm_vfs(&self, builder: &mut VfsBuilder) -> Result<(), AnyError> {
fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
if system_info != &NpmSystemInfo::default() {
log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
@ -800,15 +882,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
InnerCliNpmResolverRef::Managed(npm_resolver) => {
if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
maybe_warn_different_system(&self.npm_system_info);
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
builder.add_dir_recursive(node_modules_path)?;
Ok(builder)
Ok(())
} else {
// DO NOT include the user's registry url as it may contain credentials,
// but also don't make this dependent on the registry url
let global_cache_root_path = npm_resolver.global_cache_root_path();
let mut builder =
VfsBuilder::new(global_cache_root_path.to_path_buf())?;
// we'll flatten to remove any custom registries later
let mut packages =
npm_resolver.all_system_packages(&self.npm_system_info);
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
@ -817,55 +894,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?;
builder.add_dir_recursive(&folder)?;
}
// Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
// that will be used by denort when loading the npm cache. This avoids us exposing
// the user's private registry information and means we don't have to bother
// serializing all the different registry config into the binary.
builder.with_root_dir(|root_dir| {
root_dir.name = ".deno_compile_node_modules".to_string();
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
let mut localhost_entries = IndexMap::new();
for entry in std::mem::take(&mut root_dir.entries) {
match entry {
VfsEntry::Dir(dir) => {
for entry in dir.entries {
log::debug!(
"Flattening {} into node_modules",
entry.name()
);
if let Some(existing) =
localhost_entries.insert(entry.name().to_string(), entry)
{
panic!(
"Unhandled scenario where a duplicate entry was found: {:?}",
existing
);
}
}
}
VfsEntry::File(_) | VfsEntry::Symlink(_) => {
new_entries.push(entry);
}
}
}
new_entries.push(VfsEntry::Dir(VirtualDirectory {
name: "localhost".to_string(),
entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
}));
// needs to be sorted by name
new_entries.sort_by(|a, b| a.name().cmp(b.name()));
root_dir.entries = new_entries;
});
builder.set_new_root_path(root_path.to_path_buf())?;
Ok(builder)
Ok(())
}
}
InnerCliNpmResolverRef::Byonm(_) => {
maybe_warn_different_system(&self.npm_system_info);
let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
for pkg_json in self.cli_options.workspace().package_jsons() {
builder.add_file_at_path(&pkg_json.path)?;
}
@ -898,10 +931,120 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}
}
}
Ok(builder)
Ok(())
}
}
}
fn build_vfs_consolidating_global_npm_cache(
&self,
mut vfs: VfsBuilder,
) -> BuiltVfs {
match self.npm_resolver.as_inner() {
InnerCliNpmResolverRef::Managed(npm_resolver) => {
if npm_resolver.root_node_modules_path().is_some() {
return vfs.build();
}
let global_cache_root_path = npm_resolver.global_cache_root_path();
// Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
// that will be used by denort when loading the npm cache. This avoids us exposing
// the user's private registry information and means we don't have to bother
// serializing all the different registry config into the binary.
let Some(root_dir) = vfs.get_dir_mut(global_cache_root_path) else {
return vfs.build();
};
root_dir.name = DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME.to_string();
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
let mut localhost_entries = IndexMap::new();
for entry in root_dir.entries.take_inner() {
match entry {
VfsEntry::Dir(mut dir) => {
for entry in dir.entries.take_inner() {
log::debug!("Flattening {} into node_modules", entry.name());
if let Some(existing) =
localhost_entries.insert(entry.name().to_string(), entry)
{
panic!(
"Unhandled scenario where a duplicate entry was found: {:?}",
existing
);
}
}
}
VfsEntry::File(_) | VfsEntry::Symlink(_) => {
new_entries.push(entry);
}
}
}
new_entries.push(VfsEntry::Dir(VirtualDirectory {
name: "localhost".to_string(),
entries: VirtualDirectoryEntries::new(
localhost_entries.into_iter().map(|(_, v)| v).collect(),
),
}));
root_dir.entries = VirtualDirectoryEntries::new(new_entries);
// it's better to not expose the user's cache directory, so take it out
// of there
let parent = global_cache_root_path.parent().unwrap();
let parent_dir = vfs.get_dir_mut(parent).unwrap();
let index = parent_dir
.entries
.binary_search(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME)
.unwrap();
let npm_global_cache_dir_entry = parent_dir.entries.remove(index);
// go up from the ancestors removing empty directories...
// this is not as optimized as it could be
let mut last_name =
Cow::Borrowed(DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME);
for ancestor in parent.ancestors() {
let dir = vfs.get_dir_mut(ancestor).unwrap();
if let Ok(index) = dir.entries.binary_search(&last_name) {
dir.entries.remove(index);
}
last_name = Cow::Owned(dir.name.clone());
if !dir.entries.is_empty() {
break;
}
}
// now build the vfs and add the global cache dir entry there
let mut built_vfs = vfs.build();
built_vfs.entries.insert(npm_global_cache_dir_entry);
built_vfs
}
InnerCliNpmResolverRef::Byonm(_) => vfs.build(),
}
}
}
fn get_denort_path(deno_exe: PathBuf) -> Option<OsString> {
let mut denort = deno_exe;
denort.set_file_name(if cfg!(windows) {
"denort.exe"
} else {
"denort"
});
denort.exists().then(|| denort.into_os_string())
}
fn get_dev_binary_path() -> Option<OsString> {
env::var_os("DENORT_BIN").or_else(|| {
env::current_exe().ok().and_then(|exec_path| {
if exec_path
.components()
.any(|component| component == Component::Normal("target".as_ref()))
{
get_denort_path(exec_path)
} else {
None
}
})
})
}
/// This function returns the environment variables specified

View file

@ -17,6 +17,7 @@ use deno_runtime::deno_io::fs::FsResult;
use deno_runtime::deno_io::fs::FsStat;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsFileSubDataKind;
#[derive(Debug, Clone)]
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
@ -36,7 +37,8 @@ impl DenoCompileFileSystem {
fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
let old_file = self.0.file_entry(oldpath)?;
let old_file_bytes = self.0.read_file_all(old_file)?;
let old_file_bytes =
self.0.read_file_all(old_file, VfsFileSubDataKind::Raw)?;
RealFs.write_file_sync(
newpath,
OpenOptions {

View file

@ -9,6 +9,7 @@ use binary::StandaloneData;
use binary::StandaloneModules;
use code_cache::DenoCompileCodeCache;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionError;
@ -32,6 +33,7 @@ use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::create_host_defined_options;
@ -50,18 +52,20 @@ use deno_semver::npm::NpmPackageReqReference;
use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use serialization::DenoCompileModuleSource;
use serialization::SourceMapStore;
use std::borrow::Cow;
use std::rc::Rc;
use std::sync::Arc;
use virtual_fs::FileBackedVfs;
use virtual_fs::VfsFileSubDataKind;
use crate::args::create_default_npmrc;
use crate::args::get_root_cert_store;
use crate::args::npm_pkg_req_ref_to_binary_command;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::cache::Caches;
@ -85,10 +89,10 @@ use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::util::text_encoding::from_utf8_lossy_cow;
use crate::util::v8::construct_v8_flags;
use crate::worker::CliCodeCache;
use crate::worker::CliMainWorkerFactory;
@ -111,6 +115,7 @@ use self::file_system::DenoCompileFileSystem;
struct SharedModuleLoaderState {
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<dyn CliCodeCache>>,
fs: Arc<dyn deno_fs::FileSystem>,
modules: StandaloneModules,
node_code_translator: Arc<CliNodeCodeTranslator>,
@ -118,8 +123,9 @@ struct SharedModuleLoaderState {
npm_module_loader: Arc<NpmModuleLoader>,
npm_req_resolver: Arc<CliNpmReqResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
source_maps: SourceMapStore,
vfs: Arc<FileBackedVfs>,
workspace_resolver: WorkspaceResolver,
code_cache: Option<Arc<dyn CliCodeCache>>,
}
impl SharedModuleLoaderState {
@ -190,9 +196,9 @@ impl ModuleLoader for EmbeddedModuleLoader {
.cjs_tracker
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))?
{
NodeModuleKind::Cjs
ResolutionMode::Require
} else {
NodeModuleKind::Esm
ResolutionMode::Import
};
if self.shared.node_resolver.in_npm_package(&referrer) {
@ -204,7 +210,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
raw_specifier,
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
NodeResolutionKind::Execution,
)?
.into_url(),
);
@ -232,7 +238,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
sub_path.as_deref(),
Some(&referrer),
referrer_kind,
NodeResolutionMode::Execution,
NodeResolutionKind::Execution,
)?,
),
Ok(MappedResolution::PackageJson {
@ -249,7 +255,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
sub_path.as_deref(),
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
NodeResolutionKind::Execution,
)
.map_err(AnyError::from),
PackageJsonDepValue::Workspace(version_req) => {
@ -269,7 +275,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
sub_path.as_deref(),
Some(&referrer),
referrer_kind,
NodeResolutionMode::Execution,
NodeResolutionKind::Execution,
)?,
)
}
@ -283,7 +289,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
&reference,
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
NodeResolutionKind::Execution,
)?);
}
@ -310,7 +316,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
raw_specifier,
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
NodeResolutionKind::Execution,
)?;
if let Some(res) = maybe_res {
return Ok(res.into_url());
@ -392,7 +398,11 @@ impl ModuleLoader for EmbeddedModuleLoader {
);
}
match self.shared.modules.read(original_specifier) {
match self
.shared
.modules
.read(original_specifier, VfsFileSubDataKind::ModuleGraph)
{
Ok(Some(module)) => {
let media_type = module.media_type;
let (module_specifier, module_type, module_source) =
@ -491,6 +501,45 @@ impl ModuleLoader for EmbeddedModuleLoader {
}
std::future::ready(()).boxed_local()
}
fn get_source_map(&self, file_name: &str) -> Option<Cow<[u8]>> {
if file_name.starts_with("file:///") {
let url =
deno_path_util::url_from_directory_path(self.shared.vfs.root()).ok()?;
let file_url = ModuleSpecifier::parse(file_name).ok()?;
let relative_path = url.make_relative(&file_url)?;
self.shared.source_maps.get(&relative_path)
} else {
self.shared.source_maps.get(file_name)
}
.map(Cow::Borrowed)
}
fn get_source_mapped_source_line(
&self,
file_name: &str,
line_number: usize,
) -> Option<String> {
let specifier = ModuleSpecifier::parse(file_name).ok()?;
let data = self
.shared
.modules
.read(&specifier, VfsFileSubDataKind::Raw)
.ok()??;
let source = String::from_utf8_lossy(&data.data);
// Do NOT use .lines(): it skips the terminating empty line.
// (due to internally using .split_terminator() instead of .split())
let lines: Vec<&str> = source.split('\n').collect();
if line_number >= lines.len() {
Some(format!(
"{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
crate::colors::yellow("Warning"), line_number + 1,
))
} else {
Some(lines[line_number].to_string())
}
}
}
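
A quick std-only demonstration of the comment above about `.lines()` versus `split('\n')`:

fn main() {
  let source = "line0\nline1\n";
  // `.lines()` behaves like split_terminator('\n') and drops the final
  // empty segment after a trailing newline...
  assert_eq!(source.lines().count(), 2);
  // ...while `.split('\n')` keeps it, so indexes derived from stack
  // frames can still address the position just past the last newline.
  let lines: Vec<&str> = source.split('\n').collect();
  assert_eq!(lines.len(), 3);
  assert_eq!(lines[2], "");
}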
impl NodeRequireLoader for EmbeddedModuleLoader {
@ -513,8 +562,13 @@ impl NodeRequireLoader for EmbeddedModuleLoader {
fn load_text_file_lossy(
&self,
path: &std::path::Path,
) -> Result<String, AnyError> {
Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?)
) -> Result<Cow<'static, str>, AnyError> {
let file_entry = self.shared.vfs.file_entry(path)?;
let file_bytes = self
.shared
.vfs
.read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph)?;
Ok(from_utf8_lossy_cow(file_bytes))
}
fn is_maybe_cjs(
@ -581,6 +635,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
modules,
npm_snapshot,
root_path,
source_maps,
vfs,
} = data;
let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
@ -723,9 +778,12 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
let cjs_tracker = Arc::new(CjsTracker::new(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
IsCjsResolverOptions {
detect_cjs: !metadata.workspace_resolver.package_jsons.is_empty(),
is_node_main: false,
if metadata.unstable_config.detect_cjs {
IsCjsResolutionMode::ImplicitTypeCommonJs
} else if metadata.workspace_resolver.package_jsons.is_empty() {
IsCjsResolutionMode::Disabled
} else {
IsCjsResolutionMode::ExplicitTypeCommonJs
},
));
let cache_db = Caches::new(deno_dir_provider.clone());
@ -817,6 +875,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
let module_loader_factory = StandaloneModuleLoaderFactory {
shared: Arc::new(SharedModuleLoaderState {
cjs_tracker: cjs_tracker.clone(),
code_cache: code_cache.clone(),
fs: fs.clone(),
modules,
node_code_translator: node_code_translator.clone(),
@ -826,10 +885,11 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
fs.clone(),
node_code_translator,
)),
code_cache: code_cache.clone(),
npm_resolver: npm_resolver.clone(),
workspace_resolver,
npm_req_resolver,
source_maps,
vfs,
workspace_resolver,
}),
};
@ -911,6 +971,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
serve_host: None,
},
metadata.otel_config,
crate::args::NpmCachingStrategy::Lazy,
);
// Initialize v8 once from the main thread.

View file

@ -1,10 +1,13 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;
use capacity_builder::BytesAppendable;
use deno_ast::swc::common::source_map;
use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
@ -19,11 +22,15 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use indexmap::IndexMap;
use crate::standalone::virtual_fs::VirtualDirectory;
use super::binary::Metadata;
use super::virtual_fs::BuiltVfs;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VirtualDirectoryEntries;
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
@ -31,61 +38,64 @@ const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
/// * d3n0l4nd
/// * <metadata_len><metadata>
/// * <npm_snapshot_len><npm_snapshot>
/// * <remote_modules_len><remote_modules>
/// * <remote_modules>
/// * <vfs_headers_len><vfs_headers>
/// * <vfs_file_data_len><vfs_file_data>
/// * <source_map_data>
/// * d3n0l4nd
pub fn serialize_binary_data_section(
metadata: &Metadata,
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
remote_modules: &RemoteModulesStoreBuilder,
vfs: VfsBuilder,
source_map_store: &SourceMapStore,
vfs: &BuiltVfs,
) -> Result<Vec<u8>, AnyError> {
fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
bytes.extend_from_slice(data);
}
let mut bytes = Vec::new();
bytes.extend_from_slice(MAGIC_BYTES);
let metadata = serde_json::to_string(metadata)?;
let npm_snapshot =
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
let serialized_vfs = serde_json::to_string(&vfs.entries)?;
let bytes = capacity_builder::BytesBuilder::build(|builder| {
builder.append(MAGIC_BYTES);
// 1. Metadata
{
let metadata = serde_json::to_string(metadata)?;
write_bytes_with_len(&mut bytes, metadata.as_bytes());
builder.append_le(metadata.len() as u64);
builder.append(&metadata);
}
// 2. Npm snapshot
{
let npm_snapshot =
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
write_bytes_with_len(&mut bytes, &npm_snapshot);
builder.append_le(npm_snapshot.len() as u64);
builder.append(&npm_snapshot);
}
// 3. Remote modules
{
let update_index = bytes.len();
bytes.extend_from_slice(&(0_u64).to_le_bytes());
let start_index = bytes.len();
remote_modules.write(&mut bytes)?;
let length = bytes.len() - start_index;
let length_bytes = (length as u64).to_le_bytes();
bytes[update_index..update_index + length_bytes.len()]
.copy_from_slice(&length_bytes);
remote_modules.write(builder);
}
// 4. VFS
{
let (vfs, vfs_files) = vfs.into_dir_and_files();
let vfs = serde_json::to_string(&vfs)?;
write_bytes_with_len(&mut bytes, vfs.as_bytes());
let vfs_bytes_len = vfs_files.iter().map(|f| f.len() as u64).sum::<u64>();
bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
for file in &vfs_files {
bytes.extend_from_slice(file);
builder.append_le(serialized_vfs.len() as u64);
builder.append(&serialized_vfs);
let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
builder.append_le(vfs_bytes_len);
for file in &vfs.files {
builder.append(file);
}
}
// 5. Source maps
{
builder.append_le(source_map_store.data.len() as u32);
for (specifier, source_map) in &source_map_store.data {
builder.append_le(specifier.len() as u32);
builder.append(specifier);
builder.append_le(source_map.len() as u32);
builder.append(source_map.as_ref());
}
}
// write the magic bytes at the end so we can use it
// to make sure we've deserialized correctly
bytes.extend_from_slice(MAGIC_BYTES);
builder.append(MAGIC_BYTES);
})?;
Ok(bytes)
}
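
To make the layout comment above concrete, here is a hedged, std-only reader for one u64-length-prefixed section of the data layout (error handling reduced to `Option`; the JSON payload is only illustrative):

const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";

fn read_u64_prefixed(input: &[u8]) -> Option<(&[u8], &[u8])> {
  // length prefix: little-endian u64
  if input.len() < 8 {
    return None;
  }
  let (len_bytes, rest) = input.split_at(8);
  let len = u64::from_le_bytes(len_bytes.try_into().ok()?) as usize;
  if rest.len() < len {
    return None;
  }
  let (data, rest) = rest.split_at(len);
  Some((rest, data))
}

fn main() {
  // Frame a single section the way serialize_binary_data_section does.
  let payload = br#"{"example":true}"#;
  let mut bytes = Vec::new();
  bytes.extend_from_slice(MAGIC_BYTES);
  bytes.extend_from_slice(&(payload.len() as u64).to_le_bytes());
  bytes.extend_from_slice(payload);

  let (rest, data) = read_u64_prefixed(&bytes[MAGIC_BYTES.len()..]).unwrap();
  assert_eq!(data, payload.as_slice());
  assert!(rest.is_empty());
}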
@ -94,19 +104,14 @@ pub struct DeserializedDataSection {
pub metadata: Metadata,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub remote_modules: RemoteModulesStore,
pub vfs_dir: VirtualDirectory,
pub source_maps: SourceMapStore,
pub vfs_root_entries: VirtualDirectoryEntries,
pub vfs_files_data: &'static [u8],
}
pub fn deserialize_binary_data_section(
data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u64(input)?;
let (input, data) = read_bytes(input, len as usize)?;
Ok((input, data))
}
fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
if input.len() < MAGIC_BYTES.len() {
bail!("Unexpected end of data. Could not find magic bytes.");
@ -118,34 +123,51 @@ pub fn deserialize_binary_data_section(
Ok((input, true))
}
#[allow(clippy::type_complexity)]
fn read_source_map_entry(
input: &[u8],
) -> Result<(&[u8], (Cow<str>, &[u8])), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let (input, source_map) = read_bytes_with_u32_len(input)?;
Ok((input, (specifier, source_map)))
}
let (input, found) = read_magic_bytes(data)?;
if !found {
return Ok(None);
}
// 1. Metadata
let (input, data) = read_bytes_with_len(input).context("reading metadata")?;
let (input, data) =
read_bytes_with_u64_len(input).context("reading metadata")?;
let metadata: Metadata =
serde_json::from_slice(data).context("deserializing metadata")?;
// 2. Npm snapshot
let (input, data) =
read_bytes_with_len(input).context("reading npm snapshot")?;
read_bytes_with_u64_len(input).context("reading npm snapshot")?;
let npm_snapshot = if data.is_empty() {
None
} else {
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
};
// 3. Remote modules
let (input, data) =
read_bytes_with_len(input).context("reading remote modules data")?;
let remote_modules =
RemoteModulesStore::build(data).context("deserializing remote modules")?;
let (input, remote_modules) =
RemoteModulesStore::build(input).context("deserializing remote modules")?;
// 4. VFS
let (input, data) = read_bytes_with_len(input).context("vfs")?;
let vfs_dir: VirtualDirectory =
let (input, data) = read_bytes_with_u64_len(input).context("vfs")?;
let vfs_root_entries: VirtualDirectoryEntries =
serde_json::from_slice(data).context("deserializing vfs data")?;
let (input, vfs_files_data) =
read_bytes_with_len(input).context("reading vfs files data")?;
read_bytes_with_u64_len(input).context("reading vfs files data")?;
// 5. Source maps
let (mut input, source_map_data_len) = read_u32_as_usize(input)?;
let mut source_maps = SourceMapStore::with_capacity(source_map_data_len);
for _ in 0..source_map_data_len {
let (current_input, (specifier, source_map)) =
read_source_map_entry(input)?;
input = current_input;
source_maps.add(specifier, Cow::Borrowed(source_map));
}
// finally ensure we read the magic bytes at the end
let (_input, found) = read_magic_bytes(input)?;
@ -157,7 +179,8 @@ pub fn deserialize_binary_data_section(
metadata,
npm_snapshot,
remote_modules,
vfs_dir,
source_maps,
vfs_root_entries,
vfs_files_data,
}))
}
@ -165,19 +188,31 @@ pub fn deserialize_binary_data_section(
#[derive(Default)]
pub struct RemoteModulesStoreBuilder {
specifiers: Vec<(String, u64)>,
data: Vec<(MediaType, Vec<u8>)>,
data: Vec<(MediaType, Vec<u8>, Option<Vec<u8>>)>,
data_byte_len: u64,
redirects: Vec<(String, String)>,
redirects_len: u64,
}
impl RemoteModulesStoreBuilder {
pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
pub fn add(
&mut self,
specifier: &Url,
media_type: MediaType,
data: Vec<u8>,
maybe_transpiled: Option<Vec<u8>>,
) {
log::debug!("Adding '{}' ({})", specifier, media_type);
let specifier = specifier.to_string();
self.specifiers.push((specifier, self.data_byte_len));
self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data
self.data.push((media_type, data));
let maybe_transpiled_len = match &maybe_transpiled {
// data length (4 bytes), data
Some(data) => 4 + data.len() as u64,
None => 0,
};
// media type (1 byte), data length (4 bytes), data, has transpiled (1 byte), transpiled length
self.data_byte_len += 1 + 4 + data.len() as u64 + 1 + maybe_transpiled_len;
self.data.push((media_type, data, maybe_transpiled));
}
pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
@ -191,26 +226,50 @@ impl RemoteModulesStoreBuilder {
}
}
fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
fn write<'a, TBytes: capacity_builder::BytesType>(
&'a self,
builder: &mut capacity_builder::BytesBuilder<'a, TBytes>,
) {
builder.append_le(self.specifiers.len() as u32);
builder.append_le(self.redirects.len() as u32);
for (specifier, offset) in &self.specifiers {
writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
writer.write_all(specifier.as_bytes())?;
writer.write_all(&offset.to_le_bytes())?;
builder.append_le(specifier.len() as u32);
builder.append(specifier);
builder.append_le(*offset);
}
for (from, to) in &self.redirects {
writer.write_all(&(from.len() as u32).to_le_bytes())?;
writer.write_all(from.as_bytes())?;
writer.write_all(&(to.len() as u32).to_le_bytes())?;
writer.write_all(to.as_bytes())?;
builder.append_le(from.len() as u32);
builder.append(from);
builder.append_le(to.len() as u32);
builder.append(to);
}
builder.append_le(
self
.data
.iter()
.map(|(_, data, maybe_transpiled)| {
1 + 4
+ (data.len() as u64)
+ 1
+ match maybe_transpiled {
Some(transpiled) => 4 + (transpiled.len() as u64),
None => 0,
}
})
.sum::<u64>(),
);
for (media_type, data, maybe_transpiled) in &self.data {
builder.append(serialize_media_type(*media_type));
builder.append_le(data.len() as u32);
builder.append(data);
if let Some(transpiled) = maybe_transpiled {
builder.append(1);
builder.append_le(transpiled.len() as u32);
builder.append(transpiled);
} else {
builder.append(0);
}
for (media_type, data) in &self.data {
writer.write_all(&[serialize_media_type(*media_type)])?;
writer.write_all(&(data.len() as u64).to_le_bytes())?;
writer.write_all(data)?;
}
Ok(())
}
}
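
For reference, the per-entry byte layout produced by the writer above, restated as a standalone encoder sketch (media type reduced to a raw byte): one media-type byte, u32 LE data length, the data, one has-transpiled flag byte, then optionally a u32 LE transpiled length and the transpiled bytes. This matches the `1 + 4 + data.len() + 1 + (4 + transpiled.len())` accounting in `add`.

fn encode_entry(media_type: u8, data: &[u8], transpiled: Option<&[u8]>) -> Vec<u8> {
  let mut out = Vec::new();
  out.push(media_type);
  out.extend_from_slice(&(data.len() as u32).to_le_bytes());
  out.extend_from_slice(data);
  match transpiled {
    Some(t) => {
      out.push(1); // has-transpiled flag
      out.extend_from_slice(&(t.len() as u32).to_le_bytes());
      out.extend_from_slice(t);
    }
    None => out.push(0),
  }
  out
}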
@ -238,6 +297,30 @@ impl DenoCompileModuleSource {
}
}
pub struct SourceMapStore {
data: IndexMap<Cow<'static, str>, Cow<'static, [u8]>>,
}
impl SourceMapStore {
pub fn with_capacity(capacity: usize) -> Self {
Self {
data: IndexMap::with_capacity(capacity),
}
}
pub fn add(
&mut self,
specifier: Cow<'static, str>,
source_map: Cow<'static, [u8]>,
) {
self.data.insert(specifier, source_map);
}
pub fn get(&self, specifier: &str) -> Option<&[u8]> {
self.data.get(specifier).map(|v| v.as_ref())
}
}
pub struct DenoCompileModuleData<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
@ -284,6 +367,13 @@ impl<'a> DenoCompileModuleData<'a> {
}
}
pub struct RemoteModuleEntry<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
pub data: Cow<'static, [u8]>,
pub transpiled_data: Option<Cow<'static, [u8]>>,
}
enum RemoteModulesStoreSpecifierValue {
Data(usize),
Redirect(Url),
@ -295,7 +385,7 @@ pub struct RemoteModulesStore {
}
impl RemoteModulesStore {
fn build(data: &'static [u8]) -> Result<Self, AnyError> {
fn build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> {
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let specifier = Url::parse(&specifier)?;
@ -338,12 +428,16 @@ impl RemoteModulesStore {
Ok((input, specifiers))
}
let (files_data, specifiers) = read_headers(data)?;
let (input, specifiers) = read_headers(input)?;
let (input, files_data) = read_bytes_with_u64_len(input)?;
Ok(Self {
Ok((
input,
Self {
specifiers,
files_data,
})
},
))
}
pub fn resolve_specifier<'a>(
@ -374,7 +468,7 @@ impl RemoteModulesStore {
pub fn read<'a>(
&'a self,
original_specifier: &'a Url,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
) -> Result<Option<RemoteModuleEntry<'a>>, AnyError> {
let mut count = 0;
let mut specifier = original_specifier;
loop {
@ -390,12 +484,25 @@ impl RemoteModulesStore {
let input = &self.files_data[*offset..];
let (input, media_type_byte) = read_bytes(input, 1)?;
let media_type = deserialize_media_type(media_type_byte[0])?;
let (input, len) = read_u64(input)?;
let (_input, data) = read_bytes(input, len as usize)?;
return Ok(Some(DenoCompileModuleData {
let (input, data) = read_bytes_with_u32_len(input)?;
check_has_len(input, 1)?;
let (input, has_transpiled) = (&input[1..], input[0]);
let (_, transpiled_data) = match has_transpiled {
0 => (input, None),
1 => {
let (input, data) = read_bytes_with_u32_len(input)?;
(input, Some(data))
}
value => bail!(
"Invalid transpiled data flag: {}. Compiled data is corrupt.",
value
),
};
return Ok(Some(RemoteModuleEntry {
specifier,
media_type,
data: Cow::Borrowed(data),
transpiled_data: transpiled_data.map(Cow::Borrowed),
}));
}
None => {
@ -479,12 +586,13 @@ fn deserialize_npm_snapshot(
#[allow(clippy::needless_lifetimes)] // clippy bug
fn parse_package_dep<'a>(
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a
) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a
{
|input| {
let (input, req) = read_string_lossy(input)?;
let (input, id) = read_u32_as_usize(input)?;
Ok((input, (req.into_owned(), id_to_npm_id(id)?)))
let req = StackString::from_cow(req);
Ok((input, (req, id_to_npm_id(id)?)))
}
}
@ -634,17 +742,34 @@ fn parse_vec_n_times_with_index<TResult>(
Ok((input, results))
}
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
if input.len() < len {
bail!("Unexpected end of data.",);
fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u64(input)?;
let (input, data) = read_bytes(input, len as usize)?;
Ok((input, data))
}
fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u32_as_usize(input)?;
let (input, data) = read_bytes(input, len)?;
Ok((input, data))
}
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
check_has_len(input, len)?;
let (len_bytes, input) = input.split_at(len);
Ok((input, len_bytes))
}
#[inline(always)]
fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> {
if input.len() < len {
bail!("Unexpected end of data.");
}
Ok(())
}
fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
let (input, str_len) = read_u32_as_usize(input)?;
let (input, data_bytes) = read_bytes(input, str_len)?;
let (input, data_bytes) = read_bytes_with_u32_len(input)?;
Ok((input, String::from_utf8_lossy(data_bytes)))
}

File diff suppressed because it is too large

View file

@ -14,6 +14,7 @@ use deno_runtime::deno_node::NodeResolver;
use deno_semver::package::PackageNv;
use deno_task_shell::ExecutableCommand;
use deno_task_shell::ExecuteResult;
use deno_task_shell::KillSignal;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;
use deno_task_shell::ShellPipeReader;
@ -22,6 +23,7 @@ use lazy_regex::Lazy;
use regex::Regex;
use tokio::task::JoinHandle;
use tokio::task::LocalSet;
use tokio_util::sync::CancellationToken;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
@ -45,9 +47,11 @@ impl TaskStdio {
pub fn stdout() -> Self {
Self(None, ShellPipeWriter::stdout())
}
pub fn stderr() -> Self {
Self(None, ShellPipeWriter::stderr())
}
pub fn piped() -> Self {
let (r, w) = deno_task_shell::pipe();
Self(Some(r), w)
@ -62,8 +66,8 @@ pub struct TaskIo {
impl Default for TaskIo {
fn default() -> Self {
Self {
stderr: TaskStdio::stderr(),
stdout: TaskStdio::stdout(),
stderr: TaskStdio::stderr(),
}
}
}
@ -78,6 +82,7 @@ pub struct RunTaskOptions<'a> {
pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
pub root_node_modules_dir: Option<&'a Path>,
pub stdio: Option<TaskIo>,
pub kill_signal: KillSignal,
}
pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>;
@ -96,8 +101,12 @@ pub async fn run_task(
.with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
let env_vars =
prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
let state =
deno_task_shell::ShellState::new(env_vars, opts.cwd, opts.custom_commands);
let state = deno_task_shell::ShellState::new(
env_vars,
opts.cwd,
opts.custom_commands,
opts.kill_signal,
);
let stdio = opts.stdio.unwrap_or_default();
let (
TaskStdio(stdout_read, stdout_write),
@ -537,6 +546,86 @@ fn resolve_managed_npm_commands(
Ok(result)
}
/// Runs a deno task future forwarding any signals received
/// to the process.
///
/// Signal listeners and ctrl+c listening will be setup.
pub async fn run_future_forwarding_signals<TOutput>(
kill_signal: KillSignal,
future: impl std::future::Future<Output = TOutput>,
) -> TOutput {
fn spawn_future_with_cancellation(
future: impl std::future::Future<Output = ()> + 'static,
token: CancellationToken,
) {
deno_core::unsync::spawn(async move {
tokio::select! {
_ = future => {}
_ = token.cancelled() => {}
}
});
}
let token = CancellationToken::new();
let _token_drop_guard = token.clone().drop_guard();
let _drop_guard = kill_signal.clone().drop_guard();
spawn_future_with_cancellation(
listen_ctrl_c(kill_signal.clone()),
token.clone(),
);
#[cfg(unix)]
spawn_future_with_cancellation(
listen_and_forward_all_signals(kill_signal),
token,
);
future.await
}
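
A hedged usage sketch of this helper; the entrypoint future and the `KillSignal::default()` construction are assumptions based on the API surface above, not verbatim caller code:

use deno_task_shell::KillSignal;

async fn run_task_with_signals() -> i32 {
  let kill_signal = KillSignal::default();
  // The future would normally run the shell via RunTaskOptions, passing
  // `kill_signal.clone()` so spawned processes receive forwarded signals.
  let task = async { 0 };
  run_future_forwarding_signals(kill_signal, task).await
}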
async fn listen_ctrl_c(kill_signal: KillSignal) {
while let Ok(()) = tokio::signal::ctrl_c().await {
// On windows, ctrl+c is sent to the process group, so the signal would
// have already been sent to the child process. We still want to listen
// for ctrl+c here to keep the process alive when receiving it, but no
// need to forward the signal because it's already been sent.
if !cfg!(windows) {
kill_signal.send(deno_task_shell::SignalKind::SIGINT)
}
}
}
#[cfg(unix)]
async fn listen_and_forward_all_signals(kill_signal: KillSignal) {
use deno_core::futures::FutureExt;
use deno_runtime::signal::SIGNAL_NUMS;
// listen and forward every signal we support
let mut futures = Vec::with_capacity(SIGNAL_NUMS.len());
for signo in SIGNAL_NUMS.iter().copied() {
if signo == libc::SIGKILL || signo == libc::SIGSTOP {
continue; // skip, can't listen to these
}
let kill_signal = kill_signal.clone();
futures.push(
async move {
let Ok(mut stream) = tokio::signal::unix::signal(
tokio::signal::unix::SignalKind::from_raw(signo),
) else {
return;
};
let signal_kind: deno_task_shell::SignalKind = signo.into();
while let Some(()) = stream.recv().await {
kill_signal.send(signal_kind);
}
}
.boxed_local(),
)
}
futures::future::join_all(futures).await;
}
#[cfg(test)]
mod test {

View file

@ -538,7 +538,11 @@ pub async fn run_benchmarks_with_watch(
)?;
let graph = module_graph_creator
.create_graph(graph_kind, collected_bench_modules.clone())
.create_graph(
graph_kind,
collected_bench_modules.clone(),
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?;
module_graph_creator.graph_valid(&graph)?;
let bench_modules = &graph.roots;

View file

@ -64,7 +64,7 @@ pub async fn check(
let file = file_fetcher.fetch(&s, root_permissions).await?;
let snippet_files = extract::extract_snippet_files(file)?;
for snippet_file in snippet_files {
specifiers_for_typecheck.push(snippet_file.specifier.clone());
specifiers_for_typecheck.push(snippet_file.url.clone());
file_fetcher.insert_memory_files(snippet_file);
}
}

View file

@ -5,7 +5,7 @@ use crate::args::CompileFlags;
use crate::args::Flags;
use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
use crate::standalone::binary::WriteBinOptions;
use crate::standalone::is_standalone_binary;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
@ -15,8 +15,12 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_terminal::colors;
use rand::Rng;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
@ -69,7 +73,11 @@ pub async fn compile(
// create a module graph with types information in it. We don't want to
// store that in the binary so create a code only module graph from scratch.
module_graph_creator
.create_graph(GraphKind::CodeOnly, module_roots)
.create_graph(
GraphKind::CodeOnly,
module_roots,
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?
} else {
graph
@ -78,20 +86,6 @@ pub async fn compile(
let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
check_warn_tsconfig(&ts_config_for_emit);
let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace().root_dir(),
graph
.specifiers()
.map(|(s, _)| s)
.chain(
cli_options
.node_modules_dir_path()
.and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
.iter(),
)
.chain(include_files.iter()),
);
log::debug!("Binary root dir: {}", root_dir_url);
log::info!(
"{} {} to {}",
colors::green("Compile"),
@ -116,14 +110,17 @@ pub async fn compile(
})?;
let write_result = binary_writer
.write_bin(
file,
&graph,
StandaloneRelativeFileBaseUrl::from(&root_dir_url),
.write_bin(WriteBinOptions {
writer: file,
display_output_filename: &output_path
.file_name()
.unwrap()
.to_string_lossy(),
graph: &graph,
entrypoint,
&include_files,
&compile_flags,
)
include_files: &include_files,
compile_flags: &compile_flags,
})
.await
.with_context(|| {
format!(
@ -242,16 +239,59 @@ fn get_module_roots_and_include_files(
}
}
let mut module_roots = Vec::with_capacity(compile_flags.include.len() + 1);
let mut include_files = Vec::with_capacity(compile_flags.include.len());
fn analyze_path(
url: &ModuleSpecifier,
module_roots: &mut Vec<ModuleSpecifier>,
include_files: &mut Vec<ModuleSpecifier>,
searched_paths: &mut HashSet<PathBuf>,
) -> Result<(), AnyError> {
let Ok(path) = url_to_file_path(url) else {
return Ok(());
};
let mut pending = VecDeque::from([path]);
while let Some(path) = pending.pop_front() {
if !searched_paths.insert(path.clone()) {
continue;
}
if !path.is_dir() {
let url = url_from_file_path(&path)?;
include_files.push(url.clone());
if is_module_graph_module(&url) {
module_roots.push(url);
}
continue;
}
for entry in std::fs::read_dir(&path).with_context(|| {
format!("Failed reading directory '{}'", path.display())
})? {
let entry = entry.with_context(|| {
format!("Failed reading entry in directory '{}'", path.display())
})?;
pending.push_back(entry.path());
}
}
Ok(())
}
let mut searched_paths = HashSet::new();
let mut module_roots = Vec::new();
let mut include_files = Vec::new();
module_roots.push(entrypoint.clone());
for side_module in &compile_flags.include {
let url = resolve_url_or_path(side_module, initial_cwd)?;
if is_module_graph_module(&url) {
module_roots.push(url);
} else {
module_roots.push(url.clone());
if url.scheme() == "file" {
include_files.push(url);
}
} else {
analyze_path(
&url,
&mut module_roots,
&mut include_files,
&mut searched_paths,
)?;
}
}
Ok((module_roots, include_files))
}
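
The directory expansion added above, restated as a small std-only breadth-first walk (paths only; the URL conversion and module-root classification are omitted):

use std::collections::{HashSet, VecDeque};
use std::path::PathBuf;

fn collect_files(root: PathBuf) -> std::io::Result<Vec<PathBuf>> {
  let mut searched: HashSet<PathBuf> = HashSet::new();
  let mut pending = VecDeque::from([root]);
  let mut files = Vec::new();
  while let Some(path) = pending.pop_front() {
    // The set guards against revisiting paths (e.g. overlapping includes).
    if !searched.insert(path.clone()) {
      continue;
    }
    if path.is_dir() {
      for entry in std::fs::read_dir(&path)? {
        pending.push_back(entry?.path());
      }
    } else {
      files.push(path);
    }
  }
  Ok(files)
}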
@ -316,68 +356,6 @@ fn get_os_specific_filepath(
}
}
fn resolve_root_dir_from_specifiers<'a>(
starting_dir: &ModuleSpecifier,
specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
) -> ModuleSpecifier {
fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
let min_length = a.len().min(b.len());
let mut last_slash = 0;
for i in 0..min_length {
if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
last_slash = i;
} else if a.as_bytes()[i] != b.as_bytes()[i] {
break;
}
}
// Return the common root path up to the last common slash.
// This returns a slice of the original string 'a', up to and including the last matching '/'.
let common = &a[..=last_slash];
if cfg!(windows) && common == "file:///" {
a
} else {
common
}
}
fn is_file_system_root(url: &str) -> bool {
let Some(path) = url.strip_prefix("file:///") else {
return false;
};
if cfg!(windows) {
let Some((_drive, path)) = path.split_once('/') else {
return true;
};
path.is_empty()
} else {
path.is_empty()
}
}
let mut found_dir = starting_dir.as_str();
if !is_file_system_root(found_dir) {
for specifier in specifiers {
if specifier.scheme() == "file" {
found_dir = select_common_root(found_dir, specifier.as_str());
}
}
}
let found_dir = if is_file_system_root(found_dir) {
found_dir
} else {
// include the parent dir name because it helps create some context
found_dir
.strip_suffix('/')
.unwrap_or(found_dir)
.rfind('/')
.map(|i| &found_dir[..i + 1])
.unwrap_or(found_dir)
};
ModuleSpecifier::parse(found_dir).unwrap()
}
#[cfg(test)]
mod test {
pub use super::*;
@ -454,38 +432,4 @@ mod test {
run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
}
#[test]
fn test_resolve_root_dir_from_specifiers() {
fn resolve(start: &str, specifiers: &[&str]) -> String {
let specifiers = specifiers
.iter()
.map(|s| ModuleSpecifier::parse(s).unwrap())
.collect::<Vec<_>>();
resolve_root_dir_from_specifiers(
&ModuleSpecifier::parse(start).unwrap(),
specifiers.iter(),
)
.to_string()
}
assert_eq!(resolve("file:///a/b/c", &["file:///a/b/c/d"]), "file:///a/");
assert_eq!(
resolve("file:///a/b/c/", &["file:///a/b/c/d"]),
"file:///a/b/"
);
assert_eq!(
resolve("file:///a/b/c/", &["file:///a/b/c/d", "file:///a/b/c/e"]),
"file:///a/b/"
);
assert_eq!(resolve("file:///", &["file:///a/b/c/d"]), "file:///");
if cfg!(windows) {
assert_eq!(resolve("file:///c:/", &["file:///c:/test"]), "file:///c:/");
// this will ignore the other one because it's on a separate drive
assert_eq!(
resolve("file:///c:/a/b/c/", &["file:///v:/a/b/c/d"]),
"file:///c:/a/b/"
);
}
}
}

View file

@ -6,6 +6,7 @@ use crate::args::FileFlags;
use crate::args::Flags;
use crate::cdp;
use crate::factory::CliFactory;
use crate::file_fetcher::TextDecodedFile;
use crate::tools::fmt::format_json;
use crate::tools::test::is_supported_test_path;
use crate::util::text_encoding::source_map_from_code;
@ -197,7 +198,7 @@ pub struct CoverageReport {
fn generate_coverage_report(
script_coverage: &cdp::ScriptCoverage,
script_source: String,
maybe_source_map: &Option<Vec<u8>>,
maybe_source_map: Option<&[u8]>,
output: &Option<PathBuf>,
) -> CoverageReport {
let maybe_source_map = maybe_source_map
@ -559,6 +560,12 @@ pub fn cover_files(
},
None => None,
};
let get_message = |specifier: &ModuleSpecifier| -> String {
format!(
"Failed to fetch \"{}\" from cache. Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
specifier,
)
};
for script_coverage in script_coverages {
let module_specifier = deno_core::resolve_url_or_path(
@ -566,21 +573,14 @@ pub fn cover_files(
cli_options.initial_cwd(),
)?;
let maybe_file = if module_specifier.scheme() == "file" {
file_fetcher.get_source(&module_specifier)
} else {
file_fetcher
.fetch_cached(&module_specifier, 10)
.with_context(|| {
format!("Failed to fetch \"{module_specifier}\" from cache.")
})?
let maybe_file_result = file_fetcher
.get_cached_source_or_local(&module_specifier)
.map_err(AnyError::from);
let file = match maybe_file_result {
Ok(Some(file)) => TextDecodedFile::decode(file)?,
Ok(None) => return Err(anyhow!("{}", get_message(&module_specifier))),
Err(err) => return Err(err).context(get_message(&module_specifier)),
};
let file = maybe_file.ok_or_else(|| {
anyhow!("Failed to fetch \"{}\" from cache.
Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
module_specifier
)
})?.into_text_decoded()?;
let original_source = file.source.clone();
// Check if file was transpiled
@ -625,7 +625,7 @@ pub fn cover_files(
let coverage_report = generate_coverage_report(
&script_coverage,
runtime_code.as_str().to_owned(),
&source_map,
source_map.as_deref(),
&out_mode,
);
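// Sketch of the signature change above (illustrative names): taking
// `Option<&[u8]>` instead of `&Option<Vec<u8>>` lets callers use
// `as_deref()` and avoids borrowing the whole Option.
fn byte_len(maybe_source_map: Option<&[u8]>) -> usize {
  maybe_source_map.map(|m| m.len()).unwrap_or(0)
}

fn byte_len_demo() {
  let source_map: Option<Vec<u8>> = Some(vec![1, 2, 3]);
  // `as_deref` converts Option<Vec<u8>> into Option<&[u8]> without cloning.
  assert_eq!(byte_len(source_map.as_deref()), 3);
}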

View file

@ -131,7 +131,11 @@ pub async fn doc(
|_| true,
)?;
let graph = module_graph_creator
.create_graph(GraphKind::TypesOnly, module_specifiers.clone())
.create_graph(
GraphKind::TypesOnly,
module_specifiers.clone(),
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?;
graph_exit_integrity_errors(&graph);
@ -209,10 +213,14 @@ pub async fn doc(
Default::default()
};
let mut main_entrypoint = None;
let rewrite_map =
if let Some(config_file) = cli_options.start_dir.maybe_deno_json() {
let config = config_file.to_exports_config()?;
main_entrypoint = config.get_resolved(".").ok().flatten();
let rewrite_map = config
.clone()
.into_map()
@ -240,6 +248,7 @@ pub async fn doc(
html_options,
deno_ns,
rewrite_map,
main_entrypoint,
)
} else {
let modules_len = doc_nodes_by_url.len();
@ -334,14 +343,14 @@ impl deno_doc::html::HrefResolver for DocResolver {
let name = &res.req().name;
Some((
format!("https://www.npmjs.com/package/{name}"),
name.to_owned(),
name.to_string(),
))
}
"jsr" => {
let res =
deno_semver::jsr::JsrPackageReqReference::from_str(module).ok()?;
let name = &res.req().name;
Some((format!("https://jsr.io/{name}"), name.to_owned()))
Some((format!("https://jsr.io/{name}"), name.to_string()))
}
_ => None,
}
@ -383,6 +392,7 @@ fn generate_docs_directory(
html_options: &DocHtmlFlag,
deno_ns: std::collections::HashMap<Vec<String>, Option<Rc<ShortPath>>>,
rewrite_map: Option<IndexMap<ModuleSpecifier, String>>,
main_entrypoint: Option<ModuleSpecifier>,
) -> Result<(), AnyError> {
let cwd = std::env::current_dir().context("Failed to get CWD")?;
let output_dir_resolved = cwd.join(&html_options.output);
@ -415,7 +425,7 @@ fn generate_docs_directory(
let options = deno_doc::html::GenerateOptions {
package_name: html_options.name.clone(),
main_entrypoint: None,
main_entrypoint,
rewrite_map,
href_resolver: Rc::new(DocResolver {
deno_ns,
@ -427,33 +437,7 @@ fn generate_docs_directory(
symbol_redirect_map,
default_symbol_map,
markdown_renderer: deno_doc::html::comrak::create_renderer(
None,
Some(Box::new(|ammonia| {
ammonia.add_allowed_classes(
"code",
&[
"language-ts",
"language-tsx",
"language-typescript",
"language-js",
"language-jsx",
"language-javascript",
"language-bash",
"language-shell",
"language-md",
"language-markdown",
"language-rs",
"language-rust",
"language-html",
"language-xml",
"language-css",
"language-json",
"language-regex",
"language-svg",
],
);
})),
None,
None, None, None,
),
markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
head_inject: Some(Rc::new(|root| {

View file

@ -440,8 +440,10 @@ pub fn format_html(
)
}
_ => {
let mut typescript_config =
get_resolved_typescript_config(fmt_options);
let mut typescript_config_builder =
get_typescript_config_builder(fmt_options);
typescript_config_builder.file_indent_level(hints.indent_level);
let mut typescript_config = typescript_config_builder.build();
typescript_config.line_width = hints.print_width as u32;
dprint_plugin_typescript::format_text(
&path,
@ -919,9 +921,9 @@ fn files_str(len: usize) -> &'static str {
}
}
fn get_resolved_typescript_config(
fn get_typescript_config_builder(
options: &FmtOptionsConfig,
) -> dprint_plugin_typescript::configuration::Configuration {
) -> dprint_plugin_typescript::configuration::ConfigurationBuilder {
let mut builder =
dprint_plugin_typescript::configuration::ConfigurationBuilder::new();
builder.deno();
@ -953,7 +955,13 @@ fn get_resolved_typescript_config(
});
}
builder.build()
builder
}
fn get_resolved_typescript_config(
options: &FmtOptionsConfig,
) -> dprint_plugin_typescript::configuration::Configuration {
get_typescript_config_builder(options).build()
}
fn get_resolved_markdown_config(
@ -1075,6 +1083,7 @@ fn get_resolved_markup_fmt_config(
};
let language_options = LanguageOptions {
script_formatter: Some(markup_fmt::config::ScriptFormatter::Dprint),
quotes: Quotes::Double,
format_comments: false,
script_indent: true,

View file

@ -2,7 +2,6 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt;
use std::fmt::Write;
use std::sync::Arc;
@ -35,6 +34,7 @@ use crate::graph_util::graph_exit_integrity_errors;
use crate::npm::CliNpmResolver;
use crate::npm::ManagedCliNpmResolver;
use crate::util::checksum;
use crate::util::display::DisplayTreeNode;
const JSON_SCHEMA_VERSION: u8 = 1;
@ -49,19 +49,67 @@ pub async fn info(
let module_graph_creator = factory.module_graph_creator().await?;
let npm_resolver = factory.npm_resolver().await?;
let maybe_lockfile = cli_options.maybe_lockfile();
let resolver = factory.workspace_resolver().await?.clone();
let npmrc = cli_options.npmrc();
let resolver = factory.workspace_resolver().await?;
let node_resolver = factory.node_resolver().await?;
let cwd_url =
url::Url::from_directory_path(cli_options.initial_cwd()).unwrap();
let maybe_import_specifier = if let Some(import_map) =
resolver.maybe_import_map()
let maybe_import_specifier = if let Ok(resolved) =
resolver.resolve(&specifier, &cwd_url)
{
if let Ok(imports_specifier) = import_map.resolve(&specifier, &cwd_url) {
Some(imports_specifier)
} else {
None
match resolved {
deno_config::workspace::MappedResolution::Normal {
specifier, ..
}
| deno_config::workspace::MappedResolution::ImportMap {
specifier,
..
}
| deno_config::workspace::MappedResolution::WorkspaceJsrPackage {
specifier,
..
} => Some(specifier),
deno_config::workspace::MappedResolution::WorkspaceNpmPackage {
target_pkg_json,
sub_path,
..
} => Some(node_resolver.resolve_package_subpath_from_deno_module(
target_pkg_json.clone().dir_path(),
sub_path.as_deref(),
Some(&cwd_url),
node_resolver::ResolutionMode::Import,
node_resolver::NodeResolutionKind::Execution,
)?),
deno_config::workspace::MappedResolution::PackageJson {
alias,
sub_path,
dep_result,
..
} => match dep_result.as_ref().map_err(|e| e.clone())? {
deno_package_json::PackageJsonDepValue::Workspace(version_req) => {
let pkg_folder = resolver
.resolve_workspace_pkg_json_folder_for_pkg_json_dep(
alias,
version_req,
)?;
Some(node_resolver.resolve_package_subpath_from_deno_module(
pkg_folder,
sub_path.as_deref(),
Some(&cwd_url),
node_resolver::ResolutionMode::Import,
node_resolver::NodeResolutionKind::Execution,
)?)
}
deno_package_json::PackageJsonDepValue::Req(req) => {
Some(ModuleSpecifier::parse(&format!(
"npm:{}{}",
req,
sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
))?)
}
},
}
} else {
None
@ -75,7 +123,12 @@ pub async fn info(
let mut loader = module_graph_builder.create_graph_loader();
loader.enable_loading_cache_info(); // for displaying the cache information
let graph = module_graph_creator
.create_graph_with_loader(GraphKind::All, vec![specifier], &mut loader)
.create_graph_with_loader(
GraphKind::All,
vec![specifier],
&mut loader,
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?;
// write out the lockfile if there is one
@ -225,8 +278,10 @@ fn add_npm_packages_to_json(
});
if let Some(pkg) = maybe_package {
if let Some(module) = module.as_object_mut() {
module
.insert("npmPackage".to_string(), pkg.id.as_serialized().into());
module.insert(
"npmPackage".to_string(),
pkg.id.as_serialized().into_string().into(),
);
}
}
}
@ -235,21 +290,30 @@ fn add_npm_packages_to_json(
.get_mut("dependencies")
.and_then(|d| d.as_array_mut());
if let Some(dependencies) = dependencies {
for dep in dependencies.iter_mut() {
if let serde_json::Value::Object(dep) = dep {
let specifier = dep.get("specifier").and_then(|s| s.as_str());
if let Some(specifier) = specifier {
for dep in dependencies.iter_mut().flat_map(|d| d.as_object_mut()) {
if let Some(specifier) = dep.get("specifier").and_then(|s| s.as_str())
{
if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) {
if let Ok(pkg) =
snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
if let Ok(pkg) = snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
{
dep.insert(
"npmPackage".to_string(),
pkg.id.as_serialized().into(),
pkg.id.as_serialized().into_string().into(),
);
}
}
}
// don't show this in the output unless someone needs it
if let Some(code) =
dep.get_mut("code").and_then(|c| c.as_object_mut())
{
code.remove("resolutionMode");
}
if let Some(types) =
dep.get_mut("types").and_then(|c| c.as_object_mut())
{
types.remove("resolutionMode");
}
}
}
@ -262,94 +326,24 @@ fn add_npm_packages_to_json(
let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
for pkg in sorted_packages {
let mut kv = serde_json::Map::new();
kv.insert("name".to_string(), pkg.id.nv.name.clone().into());
kv.insert("name".to_string(), pkg.id.nv.name.to_string().into());
kv.insert("version".to_string(), pkg.id.nv.version.to_string().into());
let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
deps.sort();
let deps = deps
.into_iter()
.map(|id| serde_json::Value::String(id.as_serialized()))
.map(|id| serde_json::Value::String(id.as_serialized().into_string()))
.collect::<Vec<_>>();
kv.insert("dependencies".to_string(), deps.into());
let registry_url = npmrc.get_registry_url(&pkg.id.nv.name);
kv.insert("registryUrl".to_string(), registry_url.to_string().into());
json_packages.insert(pkg.id.as_serialized(), kv.into());
json_packages.insert(pkg.id.as_serialized().into_string(), kv.into());
}
json.insert("npmPackages".to_string(), json_packages.into());
}
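// Illustrative shape of the resulting JSON (values made up; keys follow the
// inserts above, with package ids now serialized via `into_string()`):
//
//   "npmPackages": {
//     "chalk@5.3.0": {
//       "name": "chalk",
//       "version": "5.3.0",
//       "dependencies": [],
//       "registryUrl": "https://registry.npmjs.org/"
//     }
//   }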
struct TreeNode {
text: String,
children: Vec<TreeNode>,
}
impl TreeNode {
pub fn from_text(text: String) -> Self {
Self {
text,
children: Default::default(),
}
}
}
fn print_tree_node<TWrite: Write>(
tree_node: &TreeNode,
writer: &mut TWrite,
) -> fmt::Result {
fn print_children<TWrite: Write>(
writer: &mut TWrite,
prefix: &str,
children: &[TreeNode],
) -> fmt::Result {
const SIBLING_CONNECTOR: char = '├';
const LAST_SIBLING_CONNECTOR: char = '└';
const CHILD_DEPS_CONNECTOR: char = '┬';
const CHILD_NO_DEPS_CONNECTOR: char = '─';
const VERTICAL_CONNECTOR: char = '│';
const EMPTY_CONNECTOR: char = ' ';
let child_len = children.len();
for (index, child) in children.iter().enumerate() {
let is_last = index + 1 == child_len;
let sibling_connector = if is_last {
LAST_SIBLING_CONNECTOR
} else {
SIBLING_CONNECTOR
};
let child_connector = if child.children.is_empty() {
CHILD_NO_DEPS_CONNECTOR
} else {
CHILD_DEPS_CONNECTOR
};
writeln!(
writer,
"{} {}",
colors::gray(format!("{prefix}{sibling_connector}{child_connector}")),
child.text
)?;
let child_prefix = format!(
"{}{}{}",
prefix,
if is_last {
EMPTY_CONNECTOR
} else {
VERTICAL_CONNECTOR
},
EMPTY_CONNECTOR
);
print_children(writer, &child_prefix, &child.children)?;
}
Ok(())
}
writeln!(writer, "{}", tree_node.text)?;
print_children(writer, "", &tree_node.children)?;
Ok(())
}
/// Precached information about npm packages that are used in deno info.
#[derive(Default)]
struct NpmInfo {
@ -506,7 +500,7 @@ impl<'a> GraphDisplayContext<'a> {
)?;
writeln!(writer)?;
let root_node = self.build_module_info(root, false);
print_tree_node(&root_node, writer)?;
root_node.print(writer)?;
Ok(())
}
Err(err) => {
@ -522,7 +516,7 @@ impl<'a> GraphDisplayContext<'a> {
}
}
fn build_dep_info(&mut self, dep: &Dependency) -> Vec<TreeNode> {
fn build_dep_info(&mut self, dep: &Dependency) -> Vec<DisplayTreeNode> {
let mut children = Vec::with_capacity(2);
if !dep.maybe_code.is_none() {
if let Some(child) = self.build_resolved_info(&dep.maybe_code, false) {
@ -537,7 +531,11 @@ impl<'a> GraphDisplayContext<'a> {
children
}
fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode {
fn build_module_info(
&mut self,
module: &Module,
type_dep: bool,
) -> DisplayTreeNode {
enum PackageOrSpecifier {
Package(Box<NpmResolutionPackage>),
Specifier(ModuleSpecifier),
@ -553,7 +551,7 @@ impl<'a> GraphDisplayContext<'a> {
None => Specifier(module.specifier().clone()),
};
let was_seen = !self.seen.insert(match &package_or_specifier {
Package(package) => package.id.as_serialized(),
Package(package) => package.id.as_serialized().into_string(),
Specifier(specifier) => specifier.to_string(),
});
let header_text = if was_seen {
@ -583,7 +581,7 @@ impl<'a> GraphDisplayContext<'a> {
format!("{} {}", header_text, maybe_size_to_text(maybe_size))
};
let mut tree_node = TreeNode::from_text(header_text);
let mut tree_node = DisplayTreeNode::from_text(header_text);
if !was_seen {
match &package_or_specifier {
@ -621,21 +619,22 @@ impl<'a> GraphDisplayContext<'a> {
fn build_npm_deps(
&mut self,
package: &NpmResolutionPackage,
) -> Vec<TreeNode> {
) -> Vec<DisplayTreeNode> {
let mut deps = package.dependencies.values().collect::<Vec<_>>();
deps.sort();
let mut children = Vec::with_capacity(deps.len());
for dep_id in deps.into_iter() {
let maybe_size = self.npm_info.package_sizes.get(dep_id).cloned();
let size_str = maybe_size_to_text(maybe_size);
let mut child = TreeNode::from_text(format!(
let mut child = DisplayTreeNode::from_text(format!(
"npm:/{} {}",
dep_id.as_serialized(),
size_str
));
if let Some(package) = self.npm_info.packages.get(dep_id) {
if !package.dependencies.is_empty() {
let was_seen = !self.seen.insert(package.id.as_serialized());
let was_seen =
!self.seen.insert(package.id.as_serialized().into_string());
if was_seen {
child.text = format!("{} {}", child.text, colors::gray("*"));
} else {
@ -653,7 +652,7 @@ impl<'a> GraphDisplayContext<'a> {
&mut self,
err: &ModuleError,
specifier: &ModuleSpecifier,
) -> TreeNode {
) -> DisplayTreeNode {
self.seen.insert(specifier.to_string());
match err {
ModuleError::InvalidTypeAssertion { .. } => {
@ -696,8 +695,8 @@ impl<'a> GraphDisplayContext<'a> {
&self,
specifier: &ModuleSpecifier,
error_msg: &str,
) -> TreeNode {
TreeNode::from_text(format!(
) -> DisplayTreeNode {
DisplayTreeNode::from_text(format!(
"{} {}",
colors::red(specifier),
colors::red_bold(error_msg)
@ -708,7 +707,7 @@ impl<'a> GraphDisplayContext<'a> {
&mut self,
resolution: &Resolution,
type_dep: bool,
) -> Option<TreeNode> {
) -> Option<DisplayTreeNode> {
match resolution {
Resolution::Ok(resolved) => {
let specifier = &resolved.specifier;
@ -716,14 +715,14 @@ impl<'a> GraphDisplayContext<'a> {
Some(match self.graph.try_get(resolved_specifier) {
Ok(Some(module)) => self.build_module_info(module, type_dep),
Err(err) => self.build_error_info(err, resolved_specifier),
Ok(None) => TreeNode::from_text(format!(
Ok(None) => DisplayTreeNode::from_text(format!(
"{} {}",
colors::red(specifier),
colors::red_bold("(missing)")
)),
})
}
Resolution::Err(err) => Some(TreeNode::from_text(format!(
Resolution::Err(err) => Some(DisplayTreeNode::from_text(format!(
"{} {}",
colors::italic(err.to_string()),
colors::red_bold("(resolve error)")

View file

@ -9,6 +9,7 @@ use crate::args::RunFlags;
use crate::colors;
use color_print::cformat;
use color_print::cstr;
use deno_config::deno_json::NodeModulesDirMode;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json::json;
@ -251,8 +252,46 @@ Deno.test(function addTest() {
Ok(0)
}
fn npm_name_to_create_package(name: &str) -> String {
let mut s = "npm:".to_string();
let mut scoped = false;
let mut create = false;
for (i, ch) in name.char_indices() {
if i == 0 {
if ch == '@' {
scoped = true;
} else {
create = true;
s.push_str("create-");
}
} else if scoped {
if ch == '/' {
scoped = false;
create = true;
s.push_str("/create-");
continue;
} else if ch == '@' && !create {
scoped = false;
create = true;
s.push_str("/create@");
continue;
}
}
s.push(ch);
}
if !create {
s.push_str("/create");
}
s
}
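// Usage sketch, matching npm's `npm init <initializer>` resolution rules
// (the exact CLI flow feeding this is assumed; see the tests below):
//   "vite"           -> "npm:create-vite"
//   "@scope/bar@1.0" -> "npm:@scope/create-bar@1.0"
//   "@scope"         -> "npm:@scope/create"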
async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
let script_name = format!("npm:create-{}", name);
let script_name = npm_name_to_create_package(name);
fn print_manual_usage(script_name: &str, args: &[String]) -> i32 {
log::info!("{}", cformat!("You can initialize project manually by running <u>deno run {} {}</> and applying desired permissions.", script_name, args.join(" ")));
@ -288,6 +327,7 @@ async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
},
allow_scripts: PackagesAllowedScripts::All,
argv: args,
node_modules_dir: Some(NodeModulesDirMode::Auto),
subcommand: DenoSubcommand::Run(RunFlags {
script: script_name,
..Default::default()
@ -334,3 +374,37 @@ fn create_file(
Ok(())
}
}
#[cfg(test)]
mod test {
use crate::tools::init::npm_name_to_create_package;
#[test]
fn npm_name_to_create_package_test() {
// See https://docs.npmjs.com/cli/v8/commands/npm-init#description
assert_eq!(
npm_name_to_create_package("foo"),
"npm:create-foo".to_string()
);
assert_eq!(
npm_name_to_create_package("foo@1.0.0"),
"npm:create-foo@1.0.0".to_string()
);
assert_eq!(
npm_name_to_create_package("@foo"),
"npm:@foo/create".to_string()
);
assert_eq!(
npm_name_to_create_package("@foo@1.0.0"),
"npm:@foo/create@1.0.0".to_string()
);
assert_eq!(
npm_name_to_create_package("@foo/bar"),
"npm:@foo/create-bar".to_string()
);
assert_eq!(
npm_name_to_create_package("@foo/bar@1.0.0"),
"npm:@foo/create-bar@1.0.0".to_string()
);
}
}

View file

@ -3,24 +3,23 @@
use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
use crate::args::CacheSetting;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;
use crate::args::InstallFlagsGlobal;
use crate::args::InstallFlagsLocal;
use crate::args::InstallKind;
use crate::args::TypeCheckMode;
use crate::args::UninstallFlags;
use crate::args::UninstallKind;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_container::ModuleGraphContainer;
use crate::http_util::HttpClientProvider;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
@ -162,11 +161,11 @@ pub async fn infer_name_from_url(
let npm_ref = npm_ref.into_inner();
if let Some(sub_path) = npm_ref.sub_path {
if !sub_path.contains('/') {
return Some(sub_path);
return Some(sub_path.to_string());
}
}
if !npm_ref.req.name.contains('/') {
return Some(npm_ref.req.name);
return Some(npm_ref.req.name.into_string());
}
return None;
}
@ -344,11 +343,11 @@ pub async fn install_command(
flags: Arc<Flags>,
install_flags: InstallFlags,
) -> Result<(), AnyError> {
match install_flags.kind {
InstallKind::Global(global_flags) => {
match install_flags {
InstallFlags::Global(global_flags) => {
install_global(flags, global_flags).await
}
InstallKind::Local(local_flags) => {
InstallFlags::Local(local_flags) => {
if let InstallFlagsLocal::Add(add_flags) = &local_flags {
check_if_installs_a_single_package_globally(Some(add_flags))?;
}
@ -367,18 +366,18 @@ async fn install_global(
let cli_options = factory.cli_options()?;
let http_client = factory.http_client_provider();
let deps_http_cache = factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new(
let deps_file_fetcher = CliFileFetcher::new(
deps_http_cache.clone(),
CacheSetting::ReloadAll,
true,
http_client.clone(),
Default::default(),
None,
true,
CacheSetting::ReloadAll,
log::Level::Trace,
);
let npmrc = factory.cli_options().unwrap().npmrc();
deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver = Arc::new(NpmFetchResolver::new(

View file

@ -0,0 +1,518 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::fmt::Display;
use deno_ast::swc::common::Span;
use deno_ast::swc::common::DUMMY_SP;
use indexmap::IndexMap;
/// Each property has this flag to mark what kind of value it holds.
/// Plain objects and arrays are not supported yet, but could be easily
/// added if needed.
#[derive(Debug, PartialEq)]
pub enum PropFlags {
Ref,
RefArr,
String,
Bool,
Null,
Undefined,
}
impl From<PropFlags> for u8 {
fn from(m: PropFlags) -> u8 {
m as u8
}
}
impl TryFrom<u8> for PropFlags {
type Error = &'static str;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(PropFlags::Ref),
1 => Ok(PropFlags::RefArr),
2 => Ok(PropFlags::String),
3 => Ok(PropFlags::Bool),
4 => Ok(PropFlags::Null),
5 => Ok(PropFlags::Undefined),
_ => Err("Unknown Prop flag"),
}
}
}
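#[cfg(test)]
mod prop_flags_test {
  use super::PropFlags;

  // Sketch: the flag round-trips through its u8 discriminant, and unknown
  // discriminants are rejected rather than transmuting garbage.
  #[test]
  fn roundtrip() {
    assert_eq!(u8::from(PropFlags::String), 2);
    assert_eq!(PropFlags::try_from(2u8).unwrap(), PropFlags::String);
    assert!(PropFlags::try_from(99u8).is_err());
  }
}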
const MASK_U32_1: u32 = 0b11111111_00000000_00000000_00000000;
const MASK_U32_2: u32 = 0b00000000_11111111_00000000_00000000;
const MASK_U32_3: u32 = 0b00000000_00000000_11111111_00000000;
const MASK_U32_4: u32 = 0b00000000_00000000_00000000_11111111;
// TODO: There is probably a native Rust function to do this.
pub fn append_u32(result: &mut Vec<u8>, value: u32) {
let v1: u8 = ((value & MASK_U32_1) >> 24) as u8;
let v2: u8 = ((value & MASK_U32_2) >> 16) as u8;
let v3: u8 = ((value & MASK_U32_3) >> 8) as u8;
let v4: u8 = (value & MASK_U32_4) as u8;
result.push(v1);
result.push(v2);
result.push(v3);
result.push(v4);
}
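// Answering the TODO above: the masking and shifting produce big-endian byte
// order, so an equivalent one-liner (sketch, same behavior) would be:
//
//   result.extend_from_slice(&value.to_be_bytes());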
pub fn append_usize(result: &mut Vec<u8>, value: usize) {
let raw = u32::try_from(value).unwrap();
append_u32(result, raw);
}
pub fn write_usize(result: &mut [u8], value: usize, idx: usize) {
let raw = u32::try_from(value).unwrap();
let v1: u8 = ((raw & MASK_U32_1) >> 24) as u8;
let v2: u8 = ((raw & MASK_U32_2) >> 16) as u8;
let v3: u8 = ((raw & MASK_U32_3) >> 8) as u8;
let v4: u8 = (raw & MASK_U32_4) as u8;
result[idx] = v1;
result[idx + 1] = v2;
result[idx + 2] = v3;
result[idx + 3] = v4;
}
#[derive(Debug)]
pub struct StringTable {
id: usize,
table: IndexMap<String, usize>,
}
impl StringTable {
pub fn new() -> Self {
Self {
id: 0,
table: IndexMap::new(),
}
}
pub fn insert(&mut self, s: &str) -> usize {
if let Some(id) = self.table.get(s) {
return *id;
}
let id = self.id;
self.id += 1;
self.table.insert(s.to_string(), id);
id
}
pub fn serialize(&mut self) -> Vec<u8> {
let mut result: Vec<u8> = vec![];
append_u32(&mut result, self.table.len() as u32);
// Assume that it's sorted by id
for (s, _id) in &self.table {
let bytes = s.as_bytes();
append_u32(&mut result, bytes.len() as u32);
result.append(&mut bytes.to_vec());
}
result
}
}
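// Usage sketch: ids are dense, start at 0, and repeats are deduplicated.
//
//   let mut table = StringTable::new();
//   assert_eq!(table.insert("type"), 0);   // first unique string gets id 0
//   assert_eq!(table.insert("parent"), 1); // next unique string gets id 1
//   assert_eq!(table.insert("type"), 0);   // repeat reuses the existing id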
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct NodeRef(pub usize);
#[derive(Debug)]
pub struct BoolPos(pub usize);
#[derive(Debug)]
pub struct FieldPos(pub usize);
#[derive(Debug)]
pub struct FieldArrPos(pub usize);
#[derive(Debug)]
pub struct StrPos(pub usize);
#[derive(Debug)]
pub struct UndefPos(pub usize);
#[derive(Debug)]
pub struct NullPos(pub usize);
#[derive(Debug)]
pub enum NodePos {
Bool(BoolPos),
#[allow(dead_code)]
Field(FieldPos),
#[allow(dead_code)]
FieldArr(FieldArrPos),
Str(StrPos),
Undef(UndefPos),
#[allow(dead_code)]
Null(NullPos),
}
pub trait AstBufSerializer<K, P>
where
K: Into<u8> + Display,
P: Into<u8> + Display,
{
fn header(
&mut self,
kind: K,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef;
fn ref_field(&mut self, prop: P) -> FieldPos;
fn ref_vec_field(&mut self, prop: P, len: usize) -> FieldArrPos;
fn str_field(&mut self, prop: P) -> StrPos;
fn bool_field(&mut self, prop: P) -> BoolPos;
fn undefined_field(&mut self, prop: P) -> UndefPos;
#[allow(dead_code)]
fn null_field(&mut self, prop: P) -> NullPos;
fn write_ref(&mut self, pos: FieldPos, value: NodeRef);
fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>);
fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>);
fn write_str(&mut self, pos: StrPos, value: &str);
fn write_bool(&mut self, pos: BoolPos, value: bool);
fn serialize(&mut self) -> Vec<u8>;
}
#[derive(Debug)]
pub struct SerializeCtx {
buf: Vec<u8>,
start_buf: NodeRef,
str_table: StringTable,
kind_map: Vec<usize>,
prop_map: Vec<usize>,
}
/// This is the internal context used to allocate and fill the buffer. The point
/// is to be able to write absolute offsets directly in place.
///
/// The typical workflow is to reserve all necessary space for the current
/// node with placeholders for the offsets of the child nodes. Once child
/// nodes have been traversed, we know their offsets and can replace the
/// placeholder values with the actual ones.
impl SerializeCtx {
pub fn new(kind_len: u8, prop_len: u8) -> Self {
let kind_size = kind_len as usize;
let prop_size = prop_len as usize;
let mut ctx = Self {
start_buf: NodeRef(0),
buf: vec![],
str_table: StringTable::new(),
kind_map: vec![0; kind_size + 1],
prop_map: vec![0; prop_size + 1],
};
ctx.str_table.insert("");
// Placeholder node is always 0
ctx.append_node(0, NodeRef(0), &DUMMY_SP, 0);
ctx.kind_map[0] = 0;
ctx.start_buf = NodeRef(ctx.buf.len());
// Insert default props that are always present
let type_str = ctx.str_table.insert("type");
let parent_str = ctx.str_table.insert("parent");
let range_str = ctx.str_table.insert("range");
let length_str = ctx.str_table.insert("length");
// These values are expected to be in this order on the JS side
ctx.prop_map[0] = type_str;
ctx.prop_map[1] = parent_str;
ctx.prop_map[2] = range_str;
ctx.prop_map[3] = length_str;
ctx
}
/// Allocate a field's header (the property id byte followed by its PropFlags byte)
fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.buf.len();
let n: u8 = prop.clone().into();
self.buf.push(n);
if let Some(v) = self.prop_map.get::<usize>(n.into()) {
if *v == 0 {
let id = self.str_table.insert(&format!("{prop}"));
self.prop_map[n as usize] = id;
}
}
let flags: u8 = prop_flags.into();
self.buf.push(flags);
offset
}
/// Allocate a field and reserve a 4-byte placeholder for its value.
fn field<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.field_header(prop, prop_flags);
append_usize(&mut self.buf, 0);
offset
}
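// Resulting field layout, which the write_* methods below rely on
// (offsets relative to the returned value):
//   +0       property id (u8)
//   +1       PropFlags (u8), checked in debug builds before every write
//   +2..+6   payload (u32 placeholder, patched in place later)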
fn append_node(
&mut self,
kind: u8,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef {
let offset = self.buf.len();
// Node type fits in a u8
self.buf.push(kind);
// Offset to the parent node. Will be 0 if none exists
append_usize(&mut self.buf, parent.0);
// Span, the start and end location of this node
append_u32(&mut self.buf, span.lo.0);
append_u32(&mut self.buf, span.hi.0);
// All nodes have fewer than 10 properties
debug_assert!(prop_count < 10);
self.buf.push(prop_count as u8);
NodeRef(offset)
}
/// Allocate the node header. It's always the same for every node.
/// <type u8>
/// <parent offset u32>
/// <span lo u32>
/// <span high u32>
/// <property count u8> (There is no node with more than 10 properties)
pub fn header<N>(
&mut self,
kind: N,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef
where
N: Into<u8> + Display + Clone,
{
let n: u8 = kind.clone().into();
if let Some(v) = self.kind_map.get::<usize>(n.into()) {
if *v == 0 {
let id = self.str_table.insert(&format!("{kind}"));
self.kind_map[n as usize] = id;
}
}
self.append_node(n, parent, span, prop_count)
}
/// Allocate a reference property that will hold the offset of
/// another node.
pub fn ref_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field(prop, PropFlags::Ref)
}
/// Allocate a property that is a vec of node offsets pointing to other
/// nodes.
pub fn ref_vec_field<P>(&mut self, prop: P, len: usize) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.field(prop, PropFlags::RefArr);
for _ in 0..len {
append_u32(&mut self.buf, 0);
}
offset
}
/// Allocate a property representing a string. Strings are deduplicated
/// in the message and the property will only contain the string id.
pub fn str_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field(prop, PropFlags::String)
}
/// Allocate a bool field
pub fn bool_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.field_header(prop, PropFlags::Bool);
self.buf.push(0);
offset
}
/// Allocate an undefined field
pub fn undefined_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field_header(prop, PropFlags::Undefined)
}
/// Allocate a null field
#[allow(dead_code)]
pub fn null_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field_header(prop, PropFlags::Null)
}
/// Replace the placeholder of a reference field with the actual offset
/// to the node we want to point to.
pub fn write_ref(&mut self, field_offset: usize, value: NodeRef) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
panic!("Trying to write a ref into a non-ref field")
}
}
write_usize(&mut self.buf, value.0, field_offset + 2);
}
/// Helper for writing optional node offsets
pub fn write_maybe_ref(
&mut self,
field_offset: usize,
value: Option<NodeRef>,
) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
panic!("Trying to write a ref into a non-ref field")
}
}
let ref_value = if let Some(v) = value { v } else { NodeRef(0) };
write_usize(&mut self.buf, ref_value.0, field_offset + 2);
}
/// Write a vec of node offsets into the property. The necessary space
/// has been reserved earlier.
pub fn write_refs(&mut self, field_offset: usize, value: Vec<NodeRef>) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::RefArr {
panic!("Trying to write a ref into a non-ref array field")
}
}
let mut offset = field_offset + 2;
write_usize(&mut self.buf, value.len(), offset);
offset += 4;
for item in value {
write_usize(&mut self.buf, item.0, offset);
offset += 4;
}
}
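// After write_refs the RefArr payload reads as (relative to the field start):
//   +2           element count (u32)
//   +6 + i * 4   absolute buffer offset of the i-th element node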
/// Store the string in our string table and save the id of the string
/// in the current field.
pub fn write_str(&mut self, field_offset: usize, value: &str) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::String {
panic!("Trying to write a ref into a non-string field")
}
}
let id = self.str_table.insert(value);
write_usize(&mut self.buf, id, field_offset + 2);
}
/// Write a bool to a field.
pub fn write_bool(&mut self, field_offset: usize, value: bool) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Bool {
panic!("Trying to write a ref into a non-bool field")
}
}
self.buf[field_offset + 2] = if value { 1 } else { 0 };
}
/// Serialize all information we have into a buffer that can be sent to JS.
/// It has the following structure:
///
/// <...ast>
/// <string table>
/// <node kind map> <- node kind id maps to string id
/// <node prop map> <- node property id maps to string id
/// <offset kind map>
/// <offset prop map>
/// <offset str table>
pub fn serialize(&mut self) -> Vec<u8> {
let mut buf: Vec<u8> = vec![];
// The buffer starts with the serialized AST first, because that
// contains absolute offsets. By putting this at the start of the
// message we don't have to waste time updating any offsets.
buf.append(&mut self.buf);
// Next follows the string table. We'll keep track of the offset
// in the message of where the string table begins
let offset_str_table = buf.len();
// Serialize string table
buf.append(&mut self.str_table.serialize());
// Next, serialize the mappings of kind -> string of encountered
// nodes in the AST. We use this additional lookup table to compress
// the message so that we can save space by using a u8. All nodes of
// JS, TS and JSX together are <200
let offset_kind_map = buf.len();
// Write the total number of entries in the kind -> str mapping table
// TODO: make this a u8
append_usize(&mut buf, self.kind_map.len());
for v in &self.kind_map {
append_usize(&mut buf, *v);
}
// Store offset to prop -> string map. It's the same as with node kind
// as the total number of properties is <120 which allows us to store it
// as u8.
let offset_prop_map = buf.len();
// Write the total number of entries in the prop -> str mapping table
append_usize(&mut buf, self.prop_map.len());
for v in &self.prop_map {
append_usize(&mut buf, *v);
}
// Putting offsets of relevant parts of the buffer at the end. This
// allows us to hop to the relevant part by merely looking at the last
// four values in the message. Each value represents an offset into the
// buffer.
append_usize(&mut buf, offset_kind_map);
append_usize(&mut buf, offset_prop_map);
append_usize(&mut buf, offset_str_table);
append_usize(&mut buf, self.start_buf.0);
buf
}
}
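// Reader-side sketch (the JS consumer is not part of this diff). Following
// serialize() above, the message ends with a 16-byte big-endian footer:
//
//   fn read_u32(buf: &[u8], i: usize) -> usize {
//     u32::from_be_bytes([buf[i], buf[i + 1], buf[i + 2], buf[i + 3]]) as usize
//   }
//   let n = buf.len();
//   let offset_kind_map = read_u32(buf, n - 16);
//   let offset_prop_map = read_u32(buf, n - 12);
//   let offset_str_table = read_u32(buf, n - 8);
//   let start_buf = read_u32(buf, n - 4);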

View file

@ -0,0 +1,13 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::ParsedSource;
use swc::serialize_swc_to_buffer;
mod buffer;
mod swc;
mod ts_estree;
pub fn serialize_ast_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
// TODO: We could support multiple languages here
serialize_swc_to_buffer(parsed_source)
}

File diff suppressed because it is too large

View file

@ -0,0 +1,515 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::fmt;
use std::fmt::Debug;
use std::fmt::Display;
use deno_ast::swc::common::Span;
use super::buffer::AstBufSerializer;
use super::buffer::BoolPos;
use super::buffer::FieldArrPos;
use super::buffer::FieldPos;
use super::buffer::NodeRef;
use super::buffer::NullPos;
use super::buffer::SerializeCtx;
use super::buffer::StrPos;
use super::buffer::UndefPos;
#[derive(Debug, Clone, PartialEq)]
pub enum AstNode {
// First node must always be the empty/invalid node
Invalid,
// Typically the root node
Program,
// Module declarations
ExportAllDeclaration,
ExportDefaultDeclaration,
ExportNamedDeclaration,
ImportDeclaration,
TsExportAssignment,
TsImportEquals,
TsNamespaceExport,
// Decls
ClassDeclaration,
FunctionDeclaration,
TSEnumDeclaration,
TSInterface,
TsModule,
TsTypeAlias,
Using,
VariableDeclaration,
// Statements
BlockStatement,
BreakStatement,
ContinueStatement,
DebuggerStatement,
DoWhileStatement,
EmptyStatement,
ExpressionStatement,
ForInStatement,
ForOfStatement,
ForStatement,
IfStatement,
LabeledStatement,
ReturnStatement,
SwitchCase,
SwitchStatement,
ThrowStatement,
TryStatement,
WhileStatement,
WithStatement,
// Expressions
ArrayExpression,
ArrowFunctionExpression,
AssignmentExpression,
AwaitExpression,
BinaryExpression,
CallExpression,
ChainExpression,
ClassExpression,
ConditionalExpression,
FunctionExpression,
Identifier,
ImportExpression,
LogicalExpression,
MemberExpression,
MetaProp,
NewExpression,
ObjectExpression,
PrivateIdentifier,
SequenceExpression,
Super,
TaggedTemplateExpression,
TemplateLiteral,
ThisExpression,
TSAsExpression,
TsConstAssertion,
TsInstantiation,
TSNonNullExpression,
TSSatisfiesExpression,
TSTypeAssertion,
UnaryExpression,
UpdateExpression,
YieldExpression,
// TODO: TSEsTree uses a single literal node
// Literals
StringLiteral,
Bool,
Null,
NumericLiteral,
BigIntLiteral,
RegExpLiteral,
EmptyExpr,
SpreadElement,
Property,
VariableDeclarator,
CatchClause,
RestElement,
ExportSpecifier,
TemplateElement,
MethodDefinition,
ClassBody,
// Patterns
ArrayPattern,
AssignmentPattern,
ObjectPattern,
// JSX
JSXAttribute,
JSXClosingElement,
JSXClosingFragment,
JSXElement,
JSXEmptyExpression,
JSXExpressionContainer,
JSXFragment,
JSXIdentifier,
JSXMemberExpression,
JSXNamespacedName,
JSXOpeningElement,
JSXOpeningFragment,
JSXSpreadAttribute,
JSXSpreadChild,
JSXText,
TSTypeAnnotation,
TSTypeParameterDeclaration,
TSTypeParameter,
TSTypeParameterInstantiation,
TSEnumMember,
TSInterfaceBody,
TSInterfaceHeritage,
TSTypeReference,
TSThisType,
TSLiteralType,
TSInferType,
TSConditionalType,
TSUnionType,
TSIntersectionType,
TSMappedType,
TSTypeQuery,
TSTupleType,
TSNamedTupleMember,
TSFunctionType,
TsCallSignatureDeclaration,
TSPropertySignature,
TSMethodSignature,
TSIndexSignature,
TSIndexedAccessType,
TSTypeOperator,
TSTypePredicate,
TSImportType,
TSRestType,
TSArrayType,
TSClassImplements,
TSAnyKeyword,
TSBigIntKeyword,
TSBooleanKeyword,
TSIntrinsicKeyword,
TSNeverKeyword,
TSNullKeyword,
TSNumberKeyword,
TSObjectKeyword,
TSStringKeyword,
TSSymbolKeyword,
TSUndefinedKeyword,
TSUnknownKeyword,
TSVoidKeyword,
TSEnumBody, // Last value is used for max value
}
impl Display for AstNode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(self, f)
}
}
impl From<AstNode> for u8 {
fn from(m: AstNode) -> u8 {
m as u8
}
}
#[derive(Debug, Clone)]
pub enum AstProp {
// Base, these three must be in sync with JS. The
// order here for these 3 fields is important.
Type,
Parent,
Range,
Length, // Not used in AST, but can be used in attr selectors
// Starting from here the order doesn't matter.
// Following are all possible AST node properties.
Abstract,
Accessibility,
Alternate,
Argument,
Arguments,
Asserts,
Async,
Attributes,
Await,
Block,
Body,
Callee,
Cases,
Children,
CheckType,
ClosingElement,
ClosingFragment,
Computed,
Consequent,
Const,
Constraint,
Cooked,
Declaration,
Declarations,
Declare,
Default,
Definite,
Delegate,
Discriminant,
Elements,
ElementType,
ElementTypes,
ExprName,
Expression,
Expressions,
Exported,
Extends,
ExtendsType,
FalseType,
Finalizer,
Flags,
Generator,
Handler,
Id,
In,
IndexType,
Init,
Initializer,
Implements,
Key,
Kind,
Label,
Left,
Literal,
Local,
Members,
Meta,
Method,
Name,
Namespace,
NameType,
Object,
ObjectType,
OpeningElement,
OpeningFragment,
Operator,
Optional,
Out,
Param,
ParameterName,
Params,
Pattern,
Prefix,
Properties,
Property,
Qualifier,
Quasi,
Quasis,
Raw,
Readonly,
ReturnType,
Right,
SelfClosing,
Shorthand,
Source,
SourceType,
Specifiers,
Static,
SuperClass,
SuperTypeArguments,
Tag,
Tail,
Test,
TrueType,
TypeAnnotation,
TypeArguments,
TypeName,
TypeParameter,
TypeParameters,
Types,
Update,
Value, // Last value is used for max value
}
// TODO: Feels like there should be an easier way to iterate over an
// enum in Rust and lowercase the first letter.
impl Display for AstProp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match self {
AstProp::Parent => "parent",
AstProp::Range => "range",
AstProp::Type => "type",
AstProp::Length => "length",
AstProp::Abstract => "abstract",
AstProp::Accessibility => "accessibility",
AstProp::Alternate => "alternate",
AstProp::Argument => "argument",
AstProp::Arguments => "arguments",
AstProp::Asserts => "asserts",
AstProp::Async => "async",
AstProp::Attributes => "attributes",
AstProp::Await => "await",
AstProp::Block => "block",
AstProp::Body => "body",
AstProp::Callee => "callee",
AstProp::Cases => "cases",
AstProp::Children => "children",
AstProp::CheckType => "checkType",
AstProp::ClosingElement => "closingElement",
AstProp::ClosingFragment => "closingFragment",
AstProp::Computed => "computed",
AstProp::Consequent => "consequent",
AstProp::Const => "const",
AstProp::Constraint => "constraint",
AstProp::Cooked => "cooked",
AstProp::Declaration => "declaration",
AstProp::Declarations => "declarations",
AstProp::Declare => "declare",
AstProp::Default => "default",
AstProp::Definite => "definite",
AstProp::Delegate => "delegate",
AstProp::Discriminant => "discriminant",
AstProp::Elements => "elements",
AstProp::ElementType => "elementType",
AstProp::ElementTypes => "elementTypes",
AstProp::ExprName => "exprName",
AstProp::Expression => "expression",
AstProp::Expressions => "expressions",
AstProp::Exported => "exported",
AstProp::Extends => "extends",
AstProp::ExtendsType => "extendsType",
AstProp::FalseType => "falseType",
AstProp::Finalizer => "finalizer",
AstProp::Flags => "flags",
AstProp::Generator => "generator",
AstProp::Handler => "handler",
AstProp::Id => "id",
AstProp::In => "in",
AstProp::IndexType => "indexType",
AstProp::Init => "init",
AstProp::Initializer => "initializer",
AstProp::Implements => "implements",
AstProp::Key => "key",
AstProp::Kind => "kind",
AstProp::Label => "label",
AstProp::Left => "left",
AstProp::Literal => "literal",
AstProp::Local => "local",
AstProp::Members => "members",
AstProp::Meta => "meta",
AstProp::Method => "method",
AstProp::Name => "name",
AstProp::Namespace => "namespace",
AstProp::NameType => "nameType",
AstProp::Object => "object",
AstProp::ObjectType => "objectType",
AstProp::OpeningElement => "openingElement",
AstProp::OpeningFragment => "openingFragment",
AstProp::Operator => "operator",
AstProp::Optional => "optional",
AstProp::Out => "out",
AstProp::Param => "param",
AstProp::ParameterName => "parameterName",
AstProp::Params => "params",
AstProp::Pattern => "pattern",
AstProp::Prefix => "prefix",
AstProp::Properties => "properties",
AstProp::Property => "property",
AstProp::Qualifier => "qualifier",
AstProp::Quasi => "quasi",
AstProp::Quasis => "quasis",
AstProp::Raw => "raw",
AstProp::Readonly => "readonly",
AstProp::ReturnType => "returnType",
AstProp::Right => "right",
AstProp::SelfClosing => "selfClosing",
AstProp::Shorthand => "shorthand",
AstProp::Source => "source",
AstProp::SourceType => "sourceType",
AstProp::Specifiers => "specifiers",
AstProp::Static => "static",
AstProp::SuperClass => "superClass",
AstProp::SuperTypeArguments => "superTypeArguments",
AstProp::Tag => "tag",
AstProp::Tail => "tail",
AstProp::Test => "test",
AstProp::TrueType => "trueType",
AstProp::TypeAnnotation => "typeAnnotation",
AstProp::TypeArguments => "typeArguments",
AstProp::TypeName => "typeName",
AstProp::TypeParameter => "typeParameter",
AstProp::TypeParameters => "typeParameters",
AstProp::Types => "types",
AstProp::Update => "update",
AstProp::Value => "value",
};
write!(f, "{}", s)
}
}
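// One possible answer to the TODO above, assuming the `strum` crate:
//
//   #[derive(strum::Display)]
//   #[strum(serialize_all = "camelCase")]
//   pub enum AstProp { Type, TypeAnnotation, /* ... */ }
//
// would render `TypeAnnotation` as "typeAnnotation" without the
// hand-written match.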
impl From<AstProp> for u8 {
fn from(m: AstProp) -> u8 {
m as u8
}
}
pub struct TsEsTreeBuilder {
ctx: SerializeCtx,
}
// TODO: Add a builder API to make it easier to convert from different source
// ast formats.
impl TsEsTreeBuilder {
pub fn new() -> Self {
// Max values
// TODO: Maybe there is a Rust macro to grab the last enum value?
let kind_count: u8 = AstNode::TSEnumBody.into();
let prop_count: u8 = AstProp::Value.into();
Self {
ctx: SerializeCtx::new(kind_count, prop_count),
}
}
}
impl AstBufSerializer<AstNode, AstProp> for TsEsTreeBuilder {
fn header(
&mut self,
kind: AstNode,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef {
self.ctx.header(kind, parent, span, prop_count)
}
fn ref_field(&mut self, prop: AstProp) -> FieldPos {
FieldPos(self.ctx.ref_field(prop))
}
fn ref_vec_field(&mut self, prop: AstProp, len: usize) -> FieldArrPos {
FieldArrPos(self.ctx.ref_vec_field(prop, len))
}
fn str_field(&mut self, prop: AstProp) -> StrPos {
StrPos(self.ctx.str_field(prop))
}
fn bool_field(&mut self, prop: AstProp) -> BoolPos {
BoolPos(self.ctx.bool_field(prop))
}
fn undefined_field(&mut self, prop: AstProp) -> UndefPos {
UndefPos(self.ctx.undefined_field(prop))
}
fn null_field(&mut self, prop: AstProp) -> NullPos {
NullPos(self.ctx.null_field(prop))
}
fn write_ref(&mut self, pos: FieldPos, value: NodeRef) {
self.ctx.write_ref(pos.0, value);
}
fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>) {
self.ctx.write_maybe_ref(pos.0, value);
}
fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>) {
self.ctx.write_refs(pos.0, value);
}
fn write_str(&mut self, pos: StrPos, value: &str) {
self.ctx.write_str(pos.0, value);
}
fn write_bool(&mut self, pos: BoolPos, value: bool) {
self.ctx.write_bool(pos.0, value);
}
fn serialize(&mut self) -> Vec<u8> {
self.ctx.serialize()
}
}

View file

@ -20,7 +20,7 @@ use deno_core::unsync::future::LocalFutureExt;
use deno_core::unsync::future::SharedLocal;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::LintConfig;
use deno_lint::linter::LintConfig as DenoLintConfig;
use log::debug;
use reporters::create_reporter;
use reporters::LintReporter;
@ -29,7 +29,6 @@ use std::collections::HashSet;
use std::fs;
use std::io::stdin;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
@ -47,14 +46,18 @@ use crate::graph_util::ModuleGraphCreator;
use crate::tools::fmt::run_parallelized;
use crate::util::display;
use crate::util::file_watcher;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
use crate::util::path::is_script_ext;
use crate::util::sync::AtomicFlag;
mod ast_buffer;
mod linter;
mod reporters;
mod rules;
// TODO(bartlomieju): remove once we wire plugins through the CLI linter
pub use ast_buffer::serialize_ast_to_buffer;
pub use linter::CliLinter;
pub use linter::CliLinterOptions;
pub use rules::collect_no_slow_type_diagnostics;
@ -69,27 +72,74 @@ pub async fn lint(
flags: Arc<Flags>,
lint_flags: LintFlags,
) -> Result<(), AnyError> {
if let Some(watch_flags) = &lint_flags.watch {
if lint_flags.watch.is_some() {
if lint_flags.is_stdin() {
return Err(generic_error(
"Lint watch on standard input is not supported.",
));
}
file_watcher::watch_func(
flags,
file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| {
let lint_flags = lint_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
return lint_with_watch(flags, lint_flags).await;
}
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let lint_rule_provider = factory.lint_rule_provider().await?;
let is_stdin = lint_flags.is_stdin();
let deno_lint_config = cli_options.resolve_deno_lint_config()?;
let workspace_lint_options =
cli_options.resolve_workspace_lint_options(&lint_flags)?;
let success = if is_stdin {
lint_stdin(
cli_options,
lint_rule_provider,
workspace_lint_options,
lint_flags,
deno_lint_config,
)?
} else {
let mut linter = WorkspaceLinter::new(
factory.caches()?.clone(),
lint_rule_provider,
factory.module_graph_creator().await?.clone(),
cli_options.start_dir.clone(),
&workspace_lint_options,
);
let paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
deno_lint_config.clone(),
paths_with_options.dir,
paths_with_options.paths,
)
.await?;
}
linter.finish()
};
if !success {
deno_runtime::exit(1);
}
Ok(())
}
async fn lint_with_watch_inner(
flags: Arc<Flags>,
lint_flags: LintFlags,
watcher_communicator: Arc<WatcherCommunicator>,
changed_paths: Option<Vec<PathBuf>>,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let lint_config = cli_options.resolve_deno_lint_config()?;
let mut paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
for paths_with_options in &mut paths_with_options_batches {
_ = watcher_communicator
.watch_paths(paths_with_options.paths.clone());
_ = watcher_communicator.watch_paths(paths_with_options.paths.clone());
let files = std::mem::take(&mut paths_with_options.paths);
paths_with_options.paths = if let Some(paths) = &changed_paths {
@ -130,73 +180,29 @@ pub async fn lint(
linter.finish();
Ok(())
})
},
)
.await?;
} else {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let is_stdin = lint_flags.is_stdin();
let deno_lint_config = cli_options.resolve_deno_lint_config()?;
let workspace_lint_options =
cli_options.resolve_workspace_lint_options(&lint_flags)?;
let success = if is_stdin {
let start_dir = &cli_options.start_dir;
let reporter_lock = Arc::new(Mutex::new(create_reporter(
workspace_lint_options.reporter_kind,
)));
let lint_config = start_dir
.to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
let lint_options = LintOptions::resolve(lint_config, &lint_flags);
let lint_rules = factory
.lint_rule_provider()
.await?
.resolve_lint_rules_err_empty(
lint_options.rules,
start_dir.maybe_deno_json().map(|c| c.as_ref()),
)?;
let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
if let Some(ext) = cli_options.ext_flag() {
file_path.set_extension(ext);
}
let r = lint_stdin(&file_path, lint_rules, deno_lint_config);
let success = handle_lint_result(
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
);
reporter_lock.lock().close(1);
success
} else {
let mut linter = WorkspaceLinter::new(
factory.caches()?.clone(),
factory.lint_rule_provider().await?,
factory.module_graph_creator().await?.clone(),
cli_options.start_dir.clone(),
&workspace_lint_options,
);
let paths_with_options_batches =
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
for paths_with_options in paths_with_options_batches {
linter
.lint_files(
cli_options,
paths_with_options.options,
deno_lint_config.clone(),
paths_with_options.dir,
paths_with_options.paths,
)
.await?;
}
linter.finish()
};
if !success {
deno_runtime::exit(1);
}
}
Ok(())
async fn lint_with_watch(
flags: Arc<Flags>,
lint_flags: LintFlags,
) -> Result<(), AnyError> {
let watch_flags = lint_flags.watch.as_ref().unwrap();
file_watcher::watch_func(
flags,
file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| {
let lint_flags = lint_flags.clone();
watcher_communicator.show_path_changed(changed_paths.clone());
Ok(lint_with_watch_inner(
flags,
lint_flags,
watcher_communicator,
changed_paths,
))
},
)
.await
}
struct PathsWithOptions {
@ -269,7 +275,7 @@ impl WorkspaceLinter {
&mut self,
cli_options: &Arc<CliOptions>,
lint_options: LintOptions,
lint_config: LintConfig,
lint_config: DenoLintConfig,
member_dir: WorkspaceDirectory,
paths: Vec<PathBuf>,
) -> Result<(), AnyError> {
@ -294,73 +300,25 @@ impl WorkspaceLinter {
deno_lint_config: lint_config,
}));
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let mut futures = Vec::with_capacity(2);
if linter.has_package_rules() {
if self.workspace_module_graph.is_none() {
let module_graph_creator = self.module_graph_creator.clone();
let packages = self.workspace_dir.jsr_packages_for_publish();
self.workspace_module_graph = Some(
async move {
module_graph_creator
.create_and_validate_publish_graph(&packages, true)
.await
.map(Rc::new)
.map_err(Rc::new)
}
.boxed_local()
.shared_local(),
);
}
let workspace_module_graph_future =
self.workspace_module_graph.as_ref().unwrap().clone();
let publish_config = member_dir.maybe_package_config();
if let Some(publish_config) = publish_config {
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let linter = linter.clone();
let path_urls = paths
.iter()
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
.collect::<HashSet<_>>();
futures.push(
async move {
let graph = workspace_module_graph_future
.await
.map_err(|err| anyhow!("{:#}", err))?;
let export_urls =
publish_config.config_file.resolve_export_value_urls()?;
if !export_urls.iter().any(|url| path_urls.contains(url)) {
return Ok(()); // entrypoint is not specified, so skip
}
let diagnostics = linter.lint_package(&graph, &export_urls);
if !diagnostics.is_empty() {
has_error.raise();
let mut reporter = reporter_lock.lock();
for diagnostic in &diagnostics {
reporter.visit_diagnostic(diagnostic);
}
}
Ok(())
}
.boxed_local(),
);
if let Some(fut) = self.run_package_rules(&linter, &member_dir, &paths) {
futures.push(fut);
}
}
futures.push({
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let maybe_incremental_cache = maybe_incremental_cache.clone();
let maybe_incremental_cache_ = maybe_incremental_cache.clone();
let linter = linter.clone();
let cli_options = cli_options.clone();
async move {
run_parallelized(paths, {
move |file_path| {
let file_text =
deno_ast::strip_bom(fs::read_to_string(&file_path)?);
let fut = async move {
let operation = move |file_path: PathBuf| {
let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
// don't bother rechecking this file if it didn't have any diagnostics before
if let Some(incremental_cache) = &maybe_incremental_cache {
if let Some(incremental_cache) = &maybe_incremental_cache_ {
if incremental_cache.is_file_same(&file_path, &file_text) {
return Ok(());
}
@ -372,7 +330,7 @@ impl WorkspaceLinter {
cli_options.ext_flag().as_deref(),
);
if let Ok((file_source, file_diagnostics)) = &r {
if let Some(incremental_cache) = &maybe_incremental_cache {
if let Some(incremental_cache) = &maybe_incremental_cache_ {
if file_diagnostics.is_empty() {
// update the incremental cache if there were no diagnostics
incremental_cache.update_file(
@ -394,12 +352,11 @@ impl WorkspaceLinter {
}
Ok(())
};
run_parallelized(paths, operation).await
}
})
.await
}
.boxed_local()
});
.boxed_local();
futures.push(fut);
if lint_options.fix {
// run sequentially when using `--fix` to lower the chances of weird
@ -419,6 +376,63 @@ impl WorkspaceLinter {
Ok(())
}
fn run_package_rules(
&mut self,
linter: &Arc<CliLinter>,
member_dir: &WorkspaceDirectory,
paths: &[PathBuf],
) -> Option<LocalBoxFuture<Result<(), AnyError>>> {
if self.workspace_module_graph.is_none() {
let module_graph_creator = self.module_graph_creator.clone();
let packages = self.workspace_dir.jsr_packages_for_publish();
self.workspace_module_graph = Some(
async move {
module_graph_creator
.create_and_validate_publish_graph(&packages, true)
.await
.map(Rc::new)
.map_err(Rc::new)
}
.boxed_local()
.shared_local(),
);
}
let workspace_module_graph_future =
self.workspace_module_graph.as_ref().unwrap().clone();
let maybe_publish_config = member_dir.maybe_package_config();
let publish_config = maybe_publish_config?;
let has_error = self.has_error.clone();
let reporter_lock = self.reporter_lock.clone();
let linter = linter.clone();
let path_urls = paths
.iter()
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
.collect::<HashSet<_>>();
let fut = async move {
let graph = workspace_module_graph_future
.await
.map_err(|err| anyhow!("{:#}", err))?;
let export_urls =
publish_config.config_file.resolve_export_value_urls()?;
if !export_urls.iter().any(|url| path_urls.contains(url)) {
return Ok(()); // entrypoint is not specified, so skip
}
let diagnostics = linter.lint_package(&graph, &export_urls);
if !diagnostics.is_empty() {
has_error.raise();
let mut reporter = reporter_lock.lock();
for diagnostic in &diagnostics {
reporter.visit_diagnostic(diagnostic);
}
}
Ok(())
}
.boxed_local();
Some(fut)
}
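
`run_package_rules` runs once per workspace member, but the publish graph is built at most once: the future is stored after `.boxed_local().shared_local()`, so every member awaits the same underlying computation, and both the success and error values are wrapped in `Rc` to make the shared output cheaply cloneable. A hedged sketch of the same idea using the `futures` crate's standard `Shared` combinator (the `memoize` helper and the values are illustrative, not from this diff):

use std::future::Future;
use std::rc::Rc;

use futures::future::{FutureExt, Shared};

// Every clone of the returned future resolves to a clone of one computed
// value; the underlying computation runs only once.
fn memoize<F>(fut: F) -> Shared<F>
where
  F: Future,
  F::Output: Clone,
{
  fut.shared()
}

async fn example() {
  let graph = memoize(async { Rc::new(vec![1, 2, 3]) });
  let a = graph.clone().await;
  let b = graph.clone().await;
  // Both awaiters see the same allocation, which is why wrapping the
  // workspace graph in Rc makes cloning the result effectively free.
  assert!(Rc::ptr_eq(&a, &b));
}
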
pub fn finish(self) -> bool {
debug!("Found {} files", self.file_count);
self.reporter_lock.lock().close(self.file_count);
@ -494,10 +508,27 @@ pub fn print_rules_list(json: bool, maybe_rules_tags: Option<Vec<String>>) {
/// Treats input as TypeScript.
/// Compatible with `--json` flag.
fn lint_stdin(
file_path: &Path,
configured_rules: ConfiguredRules,
deno_lint_config: LintConfig,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
cli_options: &Arc<CliOptions>,
lint_rule_provider: LintRuleProvider,
workspace_lint_options: WorkspaceLintOptions,
lint_flags: LintFlags,
deno_lint_config: DenoLintConfig,
) -> Result<bool, AnyError> {
let start_dir = &cli_options.start_dir;
let reporter_lock = Arc::new(Mutex::new(create_reporter(
workspace_lint_options.reporter_kind,
)));
let lint_config = start_dir
.to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
let lint_options = LintOptions::resolve(lint_config, &lint_flags);
let configured_rules = lint_rule_provider.resolve_lint_rules_err_empty(
lint_options.rules,
start_dir.maybe_deno_json().map(|c| c.as_ref()),
)?;
let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
if let Some(ext) = cli_options.ext_flag() {
file_path.set_extension(ext);
}
let mut source_code = String::new();
if stdin().read_to_string(&mut source_code).is_err() {
return Err(generic_error("Failed to read from stdin"));
@ -509,9 +540,14 @@ fn lint_stdin(
deno_lint_config,
});
linter
.lint_file(file_path, deno_ast::strip_bom(source_code), None)
.map_err(AnyError::from)
let r = linter
.lint_file(&file_path, deno_ast::strip_bom(source_code), None)
.map_err(AnyError::from);
let success =
handle_lint_result(&file_path.to_string_lossy(), r, reporter_lock.clone());
reporter_lock.lock().close(1);
Ok(success)
}
fn handle_lint_result(
@ -556,3 +592,68 @@ struct LintError {
file_path: String,
message: String,
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
use serde::Deserialize;
use test_util as util;
#[derive(Serialize, Deserialize)]
struct RulesSchema {
#[serde(rename = "$schema")]
schema: String,
#[serde(rename = "enum")]
rules: Vec<String>,
}
fn get_all_rules() -> Vec<String> {
let rule_provider = LintRuleProvider::new(None, None);
let configured_rules =
rule_provider.resolve_lint_rules(Default::default(), None);
let mut all_rules = configured_rules
.all_rule_codes
.into_iter()
.map(|s| s.to_string())
.collect::<Vec<String>>();
all_rules.sort();
all_rules
}
// TODO(bartlomieju): do the same for tags, once https://github.com/denoland/deno/pull/27162 lands
#[test]
fn all_lint_rules_are_listed_in_schema_file() {
let all_rules = get_all_rules();
let rules_schema_path =
util::root_path().join("cli/schemas/lint-rules.v1.json");
let rules_schema_file =
std::fs::read_to_string(&rules_schema_path).unwrap();
let schema: RulesSchema = serde_json::from_str(&rules_schema_file).unwrap();
const UPDATE_ENV_VAR_NAME: &str = "UPDATE_EXPECTED";
if std::env::var(UPDATE_ENV_VAR_NAME).ok().is_none() {
assert_eq!(
schema.rules, all_rules,
"Lint rules schema file not up to date. Run again with {}=1 to update the expected output",
UPDATE_ENV_VAR_NAME
);
return;
}
std::fs::write(
&rules_schema_path,
serde_json::to_string_pretty(&RulesSchema {
schema: schema.schema,
rules: all_rules,
})
.unwrap(),
)
.unwrap();
}
}
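
The schema test above uses a common snapshot pattern: by default it asserts that the checked-in `lint-rules.v1.json` matches the generated rule list, and setting `UPDATE_EXPECTED=1` flips the test into write mode so the file is regenerated instead of compared. A minimal generic sketch of the pattern (the `check_snapshot` helper is hypothetical, not part of this diff):

use std::path::Path;

// Assert `actual` against a checked-in snapshot file, or rewrite the
// snapshot when the opt-in env var is set.
fn check_snapshot(path: &Path, actual: &str) {
  if std::env::var("UPDATE_EXPECTED").is_ok() {
    std::fs::write(path, actual).expect("failed to update snapshot");
    return;
  }
  let expected = std::fs::read_to_string(path).expect("missing snapshot");
  assert_eq!(
    expected, actual,
    "snapshot out of date; rerun with UPDATE_EXPECTED=1"
  );
}

For this file that means something like `UPDATE_EXPECTED=1 cargo test all_lint_rules_are_listed_in_schema_file` after adding or renaming a rule.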

View file

@ -8,7 +8,7 @@ use std::sync::Arc;
use deno_ast::SourceRange;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolutionKind;
use deno_graph::source::ResolveError;
use deno_graph::Range;
use deno_lint::diagnostic::LintDiagnosticDetails;
@ -17,7 +17,7 @@ use deno_lint::diagnostic::LintFix;
use deno_lint::diagnostic::LintFixChange;
use deno_lint::rules::LintRule;
use deno_resolver::sloppy_imports::SloppyImportsResolution;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use text_lines::LineAndColumnIndex;
use crate::graph_util::CliJsrUrlProvider;
@ -101,16 +101,16 @@ impl LintRule for NoSloppyImportsRule {
maybe_npm_resolver: None,
});
for (range, sloppy_import) in resolver.captures.borrow_mut().drain() {
for (referrer, sloppy_import) in resolver.captures.borrow_mut().drain() {
let start_range =
context.text_info().loc_to_source_pos(LineAndColumnIndex {
line_index: range.start.line,
column_index: range.start.character,
line_index: referrer.range.start.line,
column_index: referrer.range.start.character,
});
let end_range =
context.text_info().loc_to_source_pos(LineAndColumnIndex {
line_index: range.end.line,
column_index: range.end.character,
line_index: referrer.range.end.line,
column_index: referrer.range.end.character,
});
let source_range = SourceRange::new(start_range, end_range);
context.add_diagnostic_details(
@ -183,7 +183,7 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
&self,
specifier_text: &str,
referrer_range: &Range,
mode: ResolutionMode,
resolution_kind: ResolutionKind,
) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> {
let resolution = self
.workspace_resolver
@ -198,9 +198,9 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
specifier, ..
} => match self.sloppy_imports_resolver.resolve(
&specifier,
match mode {
ResolutionMode::Execution => SloppyImportsResolutionMode::Execution,
ResolutionMode::Types => SloppyImportsResolutionMode::Types,
match resolution_kind {
ResolutionKind::Execution => SloppyImportsResolutionKind::Execution,
ResolutionKind::Types => SloppyImportsResolutionKind::Types,
},
) {
Some(res) => {

View file

@ -169,7 +169,7 @@ impl Diagnostic for PublishDiagnostic {
..
}) => DiagnosticLevel::Warning,
FastCheck(_) => DiagnosticLevel::Error,
SpecifierUnfurl(_) => DiagnosticLevel::Warning,
SpecifierUnfurl(d) => d.level(),
InvalidPath { .. } => DiagnosticLevel::Error,
DuplicatePath { .. } => DiagnosticLevel::Error,
UnsupportedFileType { .. } => DiagnosticLevel::Warning,
@ -187,7 +187,7 @@ impl Diagnostic for PublishDiagnostic {
use PublishDiagnostic::*;
match &self {
FastCheck(diagnostic) => diagnostic.code(),
SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.code()),
SpecifierUnfurl(diagnostic) => diagnostic.code(),
InvalidPath { .. } => Cow::Borrowed("invalid-path"),
DuplicatePath { .. } => Cow::Borrowed("case-insensitive-duplicate-path"),
UnsupportedFileType { .. } => Cow::Borrowed("unsupported-file-type"),
@ -207,7 +207,7 @@ impl Diagnostic for PublishDiagnostic {
use PublishDiagnostic::*;
match &self {
FastCheck(diagnostic) => diagnostic.message(),
SpecifierUnfurl(diagnostic) => Cow::Borrowed(diagnostic.message()),
SpecifierUnfurl(diagnostic) => diagnostic.message(),
InvalidPath { message, .. } => Cow::Borrowed(message.as_str()),
DuplicatePath { .. } => {
Cow::Borrowed("package path is a case insensitive duplicate of another path in the package")
@ -234,8 +234,8 @@ impl Diagnostic for PublishDiagnostic {
specifier: Cow::Borrowed(&referrer.specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::LineAndCol {
line: referrer.start.line,
column: referrer.start.character,
line: referrer.range.start.line,
column: referrer.range.start.character,
},
}
}
@ -243,17 +243,7 @@ impl Diagnostic for PublishDiagnostic {
use PublishDiagnostic::*;
match &self {
FastCheck(diagnostic) => diagnostic.location(),
SpecifierUnfurl(diagnostic) => match diagnostic {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
specifier,
text_info,
range,
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
},
SpecifierUnfurl(diagnostic) => diagnostic.location(),
InvalidPath { path, .. } => {
DiagnosticLocation::Path { path: path.clone() }
}
@ -300,7 +290,7 @@ impl Diagnostic for PublishDiagnostic {
text_info: &'a SourceTextInfo,
referrer: &'a deno_graph::Range,
) -> Option<DiagnosticSnippet<'a>> {
if referrer.start.line == 0 && referrer.start.character == 0 {
if referrer.range.start.line == 0 && referrer.range.start.character == 0 {
return None; // no range, probably a jsxImportSource import
}
@ -310,12 +300,12 @@ impl Diagnostic for PublishDiagnostic {
style: DiagnosticSnippetHighlightStyle::Error,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::LineAndCol {
line: referrer.start.line,
column: referrer.start.character,
line: referrer.range.start.line,
column: referrer.range.start.character,
},
end: DiagnosticSourcePos::LineAndCol {
line: referrer.end.line,
column: referrer.end.character,
line: referrer.range.end.line,
column: referrer.range.end.character,
},
},
description: Some("the specifier".into()),
@ -325,24 +315,8 @@ impl Diagnostic for PublishDiagnostic {
use PublishDiagnostic::*;
match &self {
FastCheck(diagnostic) => diagnostic.snippet(),
SpecifierUnfurl(diagnostic) => match diagnostic {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the unanalyzable dynamic import".into()),
}],
}),
},
FastCheck(d) => d.snippet(),
SpecifierUnfurl(d) => d.snippet(),
InvalidPath { .. } => None,
DuplicatePath { .. } => None,
UnsupportedFileType { .. } => None,
@ -380,7 +354,7 @@ impl Diagnostic for PublishDiagnostic {
use PublishDiagnostic::*;
match &self {
FastCheck(diagnostic) => diagnostic.hint(),
SpecifierUnfurl(_) => None,
SpecifierUnfurl(d) => d.hint(),
InvalidPath { .. } => Some(
Cow::Borrowed("rename or remove the file, or add it to 'publish.exclude' in the config file"),
),
@ -436,9 +410,9 @@ impl Diagnostic for PublishDiagnostic {
None => None,
}
}
SyntaxError(diagnostic) => diagnostic.snippet_fixed(),
SyntaxError(d) => d.snippet_fixed(),
SpecifierUnfurl(d) => d.snippet_fixed(),
FastCheck(_)
| SpecifierUnfurl(_)
| InvalidPath { .. }
| DuplicatePath { .. }
| UnsupportedFileType { .. }
@ -453,16 +427,8 @@ impl Diagnostic for PublishDiagnostic {
fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
use PublishDiagnostic::*;
match &self {
FastCheck(diagnostic) => {
diagnostic.info()
}
SpecifierUnfurl(diagnostic) => match diagnostic {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
Cow::Borrowed("after publishing this package, imports from the local import map / package.json do not work"),
Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map / package.json")
]),
},
FastCheck(d) => d.info(),
SpecifierUnfurl(d) => d.info(),
InvalidPath { .. } => Cow::Borrowed(&[
Cow::Borrowed("to portably support all platforms, including windows, the allowed characters in package paths are limited"),
]),
@ -476,7 +442,7 @@ impl Diagnostic for PublishDiagnostic {
InvalidExternalImport { imported, .. } => Cow::Owned(vec![
Cow::Owned(format!("the import was resolved to '{}'", imported)),
Cow::Borrowed("this specifier is not allowed to be imported on jsr"),
Cow::Borrowed("jsr only supports importing `jsr:`, `npm:`, and `data:` specifiers"),
Cow::Borrowed("jsr only supports importing `jsr:`, `npm:`, `data:`, `bun:`, and `node:` specifiers"),
]),
UnsupportedJsxTsx { .. } => Cow::Owned(vec![
Cow::Borrowed("follow https://github.com/jsr-io/jsr/issues/24 for updates"),
@ -503,10 +469,8 @@ impl Diagnostic for PublishDiagnostic {
fn docs_url(&self) -> Option<Cow<'_, str>> {
use PublishDiagnostic::*;
match &self {
FastCheck(diagnostic) => diagnostic.docs_url(),
SpecifierUnfurl(diagnostic) => match diagnostic {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => None,
},
FastCheck(d) => d.docs_url(),
SpecifierUnfurl(d) => d.docs_url(),
InvalidPath { .. } => {
Some(Cow::Borrowed("https://jsr.io/go/invalid-path"))
}

View file

@ -47,7 +47,7 @@ impl GraphDiagnosticsCollector {
resolution: &ResolutionResolved| {
if visited.insert(resolution.specifier.clone()) {
match resolution.specifier.scheme() {
"file" | "data" | "node" => {}
"file" | "data" | "node" | "bun" => {}
"jsr" => {
skip_specifiers.insert(resolution.specifier.clone());

View file

@ -14,7 +14,6 @@ use base64::Engine;
use deno_ast::ModuleSpecifier;
use deno_config::deno_json::ConfigFile;
use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::Workspace;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
@ -27,6 +26,7 @@ use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_runtime::deno_fetch;
use deno_terminal::colors;
use http_body_util::BodyExt;
use serde::Deserialize;
@ -44,8 +44,6 @@ use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check::CheckOptions;
use crate::tools::lint::collect_no_slow_type_diagnostics;
use crate::tools::registry::diagnostics::PublishDiagnostic;
@ -97,11 +95,10 @@ pub async fn publish(
match cli_options.start_dir.maybe_deno_json() {
Some(deno_json) => {
debug_assert!(!deno_json.is_package());
if deno_json.json.name.is_none() {
bail!("Missing 'name' field in '{}'.", deno_json.specifier);
}
error_missing_exports_field(deno_json)?;
bail!(
"Missing 'name' or 'exports' field in '{}'.",
deno_json.specifier
);
}
None => {
bail!(
@ -124,19 +121,8 @@ pub async fn publish(
}
let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
if cli_options.unstable_sloppy_imports() {
Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
cli_factory.fs().clone(),
)))
} else {
None
},
cli_options
.create_workspace_resolver(
cli_factory.file_fetcher()?,
PackageJsonDepResolution::Enabled,
)
.await?,
cli_factory.sloppy_imports_resolver()?.cloned(),
cli_factory.workspace_resolver().await?.clone(),
cli_options.unstable_bare_node_builtins(),
));
@ -926,9 +912,7 @@ async fn publish_package(
package.config
);
let body = http_body_util::Full::new(package.tarball.bytes.clone())
.map_err(|never| match never {})
.boxed();
let body = deno_fetch::ReqBody::full(package.tarball.bytes.clone());
let response = http_client
.post(url.parse()?, body)?
.header(

View file

@ -4,6 +4,7 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@ -14,6 +15,7 @@ use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use deps::KeyPath;
@ -23,12 +25,11 @@ use jsonc_parser::cst::CstRootNode;
use jsonc_parser::json;
use crate::args::AddFlags;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::RemoveFlags;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
@ -283,7 +284,7 @@ fn package_json_dependency_entry(
(npm_package.into(), selected.version_req)
} else {
(
selected.import_name,
selected.import_name.into_string(),
format!("npm:{}@{}", npm_package, selected.version_req),
)
}
@ -292,7 +293,7 @@ fn package_json_dependency_entry(
let scope_replaced = jsr_package.replace('/', "__");
let version_req =
format!("npm:@jsr/{scope_replaced}@{}", selected.version_req);
(selected.import_name, version_req)
(selected.import_name.into_string(), version_req)
} else {
(selected.package_name, selected.version_req)
}
@ -411,18 +412,18 @@ pub async fn add(
let http_client = cli_factory.http_client_provider();
let deps_http_cache = cli_factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new(
let deps_file_fetcher = CliFileFetcher::new(
deps_http_cache.clone(),
CacheSetting::ReloadAll,
true,
http_client.clone(),
Default::default(),
None,
true,
CacheSetting::ReloadAll,
log::Level::Trace,
);
let npmrc = cli_factory.cli_options().unwrap().npmrc();
deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver =
@ -432,9 +433,8 @@ pub async fn add(
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
for entry_text in add_flags.packages.iter() {
let req = AddRmPackageReq::parse(entry_text).with_context(|| {
format!("Failed to parse package required: {}", entry_text)
})?;
let req = AddRmPackageReq::parse(entry_text)
.with_context(|| format!("Failed to parse package: {}", entry_text))?;
match req {
Ok(add_req) => package_reqs.push(add_req),
@ -550,10 +550,10 @@ pub async fn add(
}
struct SelectedPackage {
import_name: String,
import_name: StackString,
package_name: String,
version_req: String,
selected_version: String,
selected_version: StackString,
}
enum NotFoundHelp {
@ -684,7 +684,7 @@ async fn find_package_and_select_version_for_req(
import_name: add_package_req.alias,
package_name: prefixed_name,
version_req: format!("{}{}", range_symbol, &nv.version),
selected_version: nv.version.to_string(),
selected_version: nv.version.to_custom_string::<StackString>(),
}))
}
@ -706,7 +706,7 @@ enum AddRmPackageReqValue {
#[derive(Debug, PartialEq, Eq)]
pub struct AddRmPackageReq {
alias: String,
alias: StackString,
value: AddRmPackageReqValue,
}
@ -754,7 +754,11 @@ impl AddRmPackageReq {
return Ok(Err(PackageReq::from_str(entry_text)?));
}
(maybe_prefix.unwrap(), Some(alias.to_string()), entry_text)
(
maybe_prefix.unwrap(),
Some(StackString::from(alias)),
entry_text,
)
}
None => return Ok(Err(PackageReq::from_str(entry_text)?)),
},
@ -766,7 +770,7 @@ impl AddRmPackageReq {
JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?;
let package_req = req_ref.into_inner().req;
Ok(Ok(AddRmPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()),
value: AddRmPackageReqValue::Jsr(package_req),
}))
}
@ -786,7 +790,7 @@ impl AddRmPackageReq {
);
}
Ok(Ok(AddRmPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
alias: maybe_alias.unwrap_or_else(|| package_req.name.clone()),
value: AddRmPackageReqValue::Npm(package_req),
}))
}
@ -805,9 +809,8 @@ pub async fn remove(
let mut removed_packages = vec![];
for package in &remove_flags.packages {
let req = AddRmPackageReq::parse(package).with_context(|| {
format!("Failed to parse package required: {}", package)
})?;
let req = AddRmPackageReq::parse(package)
.with_context(|| format!("Failed to parse package: {}", package))?;
let mut parsed_pkg_name = None;
for config in configs.iter_mut().flatten() {
match &req {
@ -880,14 +883,14 @@ mod test {
assert_eq!(
AddRmPackageReq::parse("jsr:foo").unwrap().unwrap(),
AddRmPackageReq {
alias: "foo".to_string(),
alias: "foo".into(),
value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
AddRmPackageReq::parse("alias@jsr:foo").unwrap().unwrap(),
AddRmPackageReq {
alias: "alias".to_string(),
alias: "alias".into(),
value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
@ -896,7 +899,7 @@ mod test {
.unwrap()
.unwrap(),
AddRmPackageReq {
alias: "@alias/pkg".to_string(),
alias: "@alias/pkg".into(),
value: AddRmPackageReqValue::Npm(
PackageReq::from_str("foo@latest").unwrap()
)
@ -907,7 +910,7 @@ mod test {
.unwrap()
.unwrap(),
AddRmPackageReq {
alias: "@alias/pkg".to_string(),
alias: "@alias/pkg".into(),
value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
@ -916,7 +919,7 @@ mod test {
.unwrap()
.unwrap(),
AddRmPackageReq {
alias: "alias".to_string(),
alias: "alias".into(),
value: AddRmPackageReqValue::Jsr(
PackageReq::from_str("foo@^1.5.0").unwrap()
)

View file

@ -1,10 +1,12 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use crate::factory::CliFactory;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
@ -17,18 +19,16 @@ pub async fn cache_top_level_deps(
) -> Result<(), AnyError> {
let npm_resolver = factory.npm_resolver().await?;
let cli_options = factory.cli_options()?;
let root_permissions = factory.root_permissions_container()?;
if let Some(npm_resolver) = npm_resolver.as_managed() {
if !npm_resolver.ensure_top_level_package_json_install().await? {
npm_resolver.ensure_top_level_package_json_install().await?;
if let Some(lockfile) = cli_options.maybe_lockfile() {
lockfile.error_if_changed()?;
}
npm_resolver.cache_packages().await?;
}
}
// cache as many entries in the import map as we can
let resolver = factory.workspace_resolver().await?;
let mut maybe_graph_error = Ok(());
if let Some(import_map) = resolver.maybe_import_map() {
let jsr_resolver = if let Some(resolver) = jsr_resolver {
resolver
@ -37,6 +37,16 @@ pub async fn cache_top_level_deps(
factory.file_fetcher()?.clone(),
))
};
let mut graph_permit = factory
.main_module_graph_container()
.await?
.acquire_update_permit()
.await;
let graph = graph_permit.graph_mut();
if let Some(lockfile) = cli_options.maybe_lockfile() {
let lockfile = lockfile.lock();
crate::graph_util::fill_graph_from_lockfile(graph, &lockfile);
}
let mut roots = Vec::new();
@ -67,14 +77,17 @@ pub async fn cache_top_level_deps(
if !seen_reqs.insert(req.req().clone()) {
continue;
}
let resolved_req = graph.packages.mappings().get(req.req());
let jsr_resolver = jsr_resolver.clone();
info_futures.push(async move {
if let Some(nv) = jsr_resolver.req_to_nv(req.req()).await {
if let Some(info) = jsr_resolver.package_version_info(&nv).await
{
let nv = if let Some(req) = resolved_req {
Cow::Borrowed(req)
} else {
Cow::Owned(jsr_resolver.req_to_nv(req.req()).await?)
};
if let Some(info) = jsr_resolver.package_version_info(&nv).await {
return Some((specifier.clone(), info));
}
}
None
});
}
@ -106,25 +119,31 @@ pub async fn cache_top_level_deps(
}
}
}
let mut graph_permit = factory
.main_module_graph_container()
.await?
.acquire_update_permit()
.await;
let graph = graph_permit.graph_mut();
factory
.module_load_preparer()
.await?
.prepare_module_load(
drop(info_futures);
let graph_builder = factory.module_graph_builder().await?;
graph_builder
.build_graph_with_npm_resolution(
graph,
&roots,
false,
deno_config::deno_json::TsTypeLib::DenoWorker,
root_permissions.clone(),
None,
CreateGraphOptions {
loader: None,
graph_kind: graph.graph_kind(),
is_dynamic: false,
roots: roots.clone(),
npm_caching: crate::graph_util::NpmCachingStrategy::Manual,
},
)
.await?;
maybe_graph_error = graph_builder.graph_roots_valid(graph, &roots);
}
if let Some(npm_resolver) = npm_resolver.as_managed() {
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
}
maybe_graph_error?;
Ok(())
}
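
`fill_graph_from_lockfile`, called on the freshly acquired graph above, is a helper this diff factors out of the dep manager further down: it seeds the module graph with the lockfile's redirects and resolved package mappings so that already-locked dependencies are not re-resolved over the network. A sketch of its body, mirroring the inline code removed later in this diff (`ModuleGraph` and `FillFromLockfileOptions` come from `deno_graph`; `Lockfile` is assumed to be the lockfile type exposing the `content` fields shown):

use deno_graph::{FillFromLockfileOptions, ModuleGraph};

fn fill_graph_from_lockfile(graph: &mut ModuleGraph, lockfile: &Lockfile) {
  graph.fill_from_lockfile(FillFromLockfileOptions {
    // follow previously recorded redirects without refetching
    redirects: lockfile
      .content
      .redirects
      .iter()
      .map(|(from, to)| (from.as_str(), to.as_str())),
    // map package requirements straight to the versions already locked
    package_specifiers: lockfile
      .content
      .packages
      .specifiers
      .iter()
      .map(|(dep, id)| (dep, id.as_str())),
  });
}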

View file

@ -2,7 +2,7 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::atomic::AtomicBool;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
@ -11,6 +11,7 @@ use deno_config::deno_json::ConfigFileRc;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceDirectory;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::future::try_join;
use deno_core::futures::stream::FuturesOrdered;
@ -18,9 +19,7 @@ use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_graph::FillFromLockfileOptions;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonDepsMap;
use deno_package_json::PackageJsonRc;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrPackageReqReference;
@ -28,11 +27,12 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use import_map::ImportMap;
use import_map::ImportMapWithDiagnostics;
use import_map::SpecifierMapEntry;
use indexmap::IndexMap;
use tokio::sync::Semaphore;
use crate::args::CliLockfile;
@ -43,13 +43,14 @@ use crate::jsr::JsrFetchResolver;
use crate::module_loader::ModuleLoadPreparer;
use crate::npm::CliNpmResolver;
use crate::npm::NpmFetchResolver;
use crate::util::sync::AtomicFlag;
use super::ConfigUpdater;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ImportMapKind {
Inline,
Outline,
Outline(PathBuf),
}
#[derive(Clone)]
@ -65,9 +66,12 @@ impl DepLocation {
pub fn file_path(&self) -> Cow<std::path::Path> {
match self {
DepLocation::DenoJson(arc, _, _) => {
DepLocation::DenoJson(arc, _, kind) => match kind {
ImportMapKind::Inline => {
Cow::Owned(arc.specifier.to_file_path().unwrap())
}
ImportMapKind::Outline(path) => Cow::Borrowed(path.as_path()),
},
DepLocation::PackageJson(arc, _) => Cow::Borrowed(arc.path.as_ref()),
}
}
@ -136,13 +140,7 @@ pub enum KeyPart {
Scopes,
Dependencies,
DevDependencies,
String(String),
}
impl From<String> for KeyPart {
fn from(value: String) -> Self {
KeyPart::String(value)
}
String(StackString),
}
impl From<PackageJsonDepKind> for KeyPart {
@ -161,7 +159,7 @@ impl KeyPart {
KeyPart::Scopes => "scopes",
KeyPart::Dependencies => "dependencies",
KeyPart::DevDependencies => "devDependencies",
KeyPart::String(s) => s,
KeyPart::String(s) => s.as_str(),
}
}
}
@ -214,12 +212,12 @@ fn import_map_entries(
.chain(import_map.scopes().flat_map(|scope| {
let path = KeyPath::from_parts([
KeyPart::Scopes,
scope.raw_key.to_string().into(),
KeyPart::String(scope.raw_key.into()),
]);
scope.imports.entries().map(move |entry| {
let mut full_path = path.clone();
full_path.push(KeyPart::String(entry.raw_key.to_string()));
full_path.push(KeyPart::String(entry.raw_key.into()));
(full_path, entry)
})
}))
@ -241,8 +239,9 @@ fn to_import_map_value_from_imports(
fn deno_json_import_map(
deno_json: &ConfigFile,
) -> Result<Option<(ImportMapWithDiagnostics, ImportMapKind)>, AnyError> {
let (value, kind) =
if deno_json.json.imports.is_some() || deno_json.json.scopes.is_some() {
let (value, kind) = if deno_json.json.imports.is_some()
|| deno_json.json.scopes.is_some()
{
(
to_import_map_value_from_imports(deno_json),
ImportMapKind::Inline,
@ -250,9 +249,16 @@ fn deno_json_import_map(
} else {
match deno_json.to_import_map_path()? {
Some(path) => {
let text = std::fs::read_to_string(&path)?;
let value = serde_json::from_str(&text)?;
(value, ImportMapKind::Outline)
let err_context = || {
format!(
"loading import map at '{}' (from \"importMap\" field in '{}')",
path.display(),
deno_json.specifier
)
};
let text = std::fs::read_to_string(&path).with_context(err_context)?;
let value = serde_json::from_str(&text).with_context(err_context)?;
(value, ImportMapKind::Outline(path))
}
None => return Ok(None),
}
@ -269,94 +275,6 @@ enum PackageJsonDepKind {
Dev,
}
type PackageJsonDeps = IndexMap<
String,
Result<
(PackageJsonDepKind, PackageJsonDepValue),
PackageJsonDepValueParseError,
>,
>;
/// Resolve the package.json's dependencies.
// TODO(nathanwhit): Remove once we update deno_package_json with dev deps split out
fn resolve_local_package_json_deps(
package_json: &PackageJsonRc,
) -> PackageJsonDeps {
/// Gets the name and raw version constraint for a registry info or
  /// package.json dependency entry, taking into account npm package aliases.
fn parse_dep_entry_name_and_raw_version<'a>(
key: &'a str,
value: &'a str,
) -> (&'a str, &'a str) {
if let Some(package_and_version) = value.strip_prefix("npm:") {
if let Some((name, version)) = package_and_version.rsplit_once('@') {
// if empty, then the name was scoped and there's no version
if name.is_empty() {
(package_and_version, "*")
} else {
(name, version)
}
} else {
(package_and_version, "*")
}
} else {
(key, value)
}
}
fn parse_entry(
key: &str,
value: &str,
) -> Result<PackageJsonDepValue, PackageJsonDepValueParseError> {
if let Some(workspace_key) = value.strip_prefix("workspace:") {
let version_req = VersionReq::parse_from_npm(workspace_key)?;
return Ok(PackageJsonDepValue::Workspace(version_req));
}
if value.starts_with("file:")
|| value.starts_with("git:")
|| value.starts_with("http:")
|| value.starts_with("https:")
{
return Err(PackageJsonDepValueParseError::Unsupported {
scheme: value.split(':').next().unwrap().to_string(),
});
}
let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value);
let result = VersionReq::parse_from_npm(version_req);
match result {
Ok(version_req) => Ok(PackageJsonDepValue::Req(PackageReq {
name: name.to_string(),
version_req,
})),
Err(err) => Err(PackageJsonDepValueParseError::VersionReq(err)),
}
}
fn insert_deps(
deps: Option<&IndexMap<String, String>>,
result: &mut PackageJsonDeps,
kind: PackageJsonDepKind,
) {
if let Some(deps) = deps {
for (key, value) in deps {
result.entry(key.to_string()).or_insert_with(|| {
parse_entry(key, value).map(|entry| (kind, entry))
});
}
}
}
let deps = package_json.dependencies.as_ref();
let dev_deps = package_json.dev_dependencies.as_ref();
let mut result = IndexMap::new();
// favors the deps over dev_deps
insert_deps(deps, &mut result, PackageJsonDepKind::Normal);
insert_deps(dev_deps, &mut result, PackageJsonDepKind::Dev);
result
}
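
The removed `parse_dep_entry_name_and_raw_version` helper above (this parsing now happens behind `package_json.resolve_local_package_json_deps()` in `deno_package_json`, per the TODO) is what makes npm aliases work: the map key is the alias, and the value may carry the real package name. A hypothetical test, assuming the helper were still exposed at module scope, covering its three branches:

#[test]
fn npm_alias_parsing_examples() {
  // plain alias: "foo" maps to the npm package "bar" at ^1.2.0
  assert_eq!(
    parse_dep_entry_name_and_raw_version("foo", "npm:bar@^1.2.0"),
    ("bar", "^1.2.0")
  );
  // scoped package without a version: rsplit_once('@') yields an empty
  // name, so the whole "@scope/pkg" is the name and "*" the constraint
  assert_eq!(
    parse_dep_entry_name_and_raw_version("foo", "npm:@scope/pkg"),
    ("@scope/pkg", "*")
  );
  // no "npm:" prefix: the key itself is the package name
  assert_eq!(
    parse_dep_entry_name_and_raw_version("foo", "^2.0.0"),
    ("foo", "^2.0.0")
  );
}
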
fn add_deps_from_deno_json(
deno_json: &Arc<ConfigFile>,
mut filter: impl DepFilter,
@ -394,7 +312,7 @@ fn add_deps_from_deno_json(
location: DepLocation::DenoJson(
deno_json.clone(),
key_path,
import_map_kind,
import_map_kind.clone(),
),
kind,
req,
@ -406,13 +324,21 @@ fn add_deps_from_deno_json(
fn add_deps_from_package_json(
package_json: &PackageJsonRc,
mut filter: impl DepFilter,
filter: impl DepFilter,
deps: &mut Vec<Dep>,
) {
let package_json_deps = package_json.resolve_local_package_json_deps();
fn iterate(
package_json: &PackageJsonRc,
mut filter: impl DepFilter,
package_dep_kind: PackageJsonDepKind,
package_json_deps: &PackageJsonDepsMap,
deps: &mut Vec<Dep>,
) {
let package_json_deps = resolve_local_package_json_deps(package_json);
for (k, v) in package_json_deps {
let (package_dep_kind, v) = match v {
Ok((k, v)) => (k, v),
let v = match v {
Ok(v) => v,
Err(e) => {
log::warn!("bad package json dep value: {e}");
continue;
@ -422,7 +348,7 @@ fn add_deps_from_package_json(
deno_package_json::PackageJsonDepValue::Req(req) => {
let alias = k.as_str();
let alias = (alias != req.name).then(|| alias.to_string());
if !filter.should_include(alias.as_deref(), &req, DepKind::Npm) {
if !filter.should_include(alias.as_deref(), req, DepKind::Npm) {
continue;
}
let id = DepId(deps.len());
@ -431,9 +357,12 @@ fn add_deps_from_package_json(
kind: DepKind::Npm,
location: DepLocation::PackageJson(
package_json.clone(),
KeyPath::from_parts([package_dep_kind.into(), k.into()]),
KeyPath::from_parts([
package_dep_kind.into(),
KeyPart::String(k.clone()),
]),
),
req,
req: req.clone(),
alias,
})
}
@ -442,6 +371,22 @@ fn add_deps_from_package_json(
}
}
iterate(
package_json,
filter,
PackageJsonDepKind::Normal,
&package_json_deps.dependencies,
deps,
);
iterate(
package_json,
filter,
PackageJsonDepKind::Dev,
&package_json_deps.dev_dependencies,
deps,
);
}
fn deps_from_workspace(
workspace: &Arc<Workspace>,
dep_filter: impl DepFilter,
@ -501,7 +446,7 @@ pub struct DepManager {
pending_changes: Vec<Change>,
dependencies_resolved: AtomicBool,
dependencies_resolved: AtomicFlag,
module_load_preparer: Arc<ModuleLoadPreparer>,
// TODO(nathanwhit): probably shouldn't be pub
pub(crate) jsr_fetch_resolver: Arc<JsrFetchResolver>,
@ -543,7 +488,7 @@ impl DepManager {
resolved_versions: Vec::new(),
latest_versions: Vec::new(),
jsr_fetch_resolver,
dependencies_resolved: AtomicBool::new(false),
dependencies_resolved: AtomicFlag::lowered(),
module_load_preparer,
npm_fetch_resolver,
npm_resolver,
@ -584,10 +529,7 @@ impl DepManager {
}
async fn run_dependency_resolution(&self) -> Result<(), AnyError> {
if self
.dependencies_resolved
.load(std::sync::atomic::Ordering::Relaxed)
{
if self.dependencies_resolved.is_raised() {
return Ok(());
}
@ -599,19 +541,8 @@ impl DepManager {
// populate the information from the lockfile
if let Some(lockfile) = &self.lockfile {
let lockfile = lockfile.lock();
graph.fill_from_lockfile(FillFromLockfileOptions {
redirects: lockfile
.content
.redirects
.iter()
.map(|(from, to)| (from.as_str(), to.as_str())),
package_specifiers: lockfile
.content
.packages
.specifiers
.iter()
.map(|(dep, id)| (dep, id.as_str())),
});
crate::graph_util::fill_graph_from_lockfile(graph, &lockfile);
}
let npm_resolver = self.npm_resolver.as_managed().unwrap();
@ -621,9 +552,7 @@ impl DepManager {
}
DepKind::Jsr => graph.packages.mappings().contains_key(&dep.req),
}) {
self
.dependencies_resolved
.store(true, std::sync::atomic::Ordering::Relaxed);
self.dependencies_resolved.raise();
graph_permit.commit();
return Ok(());
}
@ -678,6 +607,7 @@ impl DepManager {
)
.await?;
self.dependencies_resolved.raise();
graph_permit.commit();
Ok(())
@ -720,10 +650,6 @@ impl DepManager {
if self.latest_versions.len() == self.deps.len() {
return Ok(self.latest_versions.clone());
}
let latest_tag_req = deno_semver::VersionReq::from_raw_text_and_inner(
"latest".into(),
deno_semver::RangeSetOrTag::Tag("latest".into()),
);
let mut latest_versions = Vec::with_capacity(self.deps.len());
let npm_sema = Semaphore::new(32);
@ -735,14 +661,25 @@ impl DepManager {
DepKind::Npm => futs.push_back(
async {
let semver_req = &dep.req;
let latest_req = PackageReq {
name: dep.req.name.clone(),
version_req: latest_tag_req.clone(),
};
let _permit = npm_sema.acquire().await;
let semver_compatible =
self.npm_fetch_resolver.req_to_nv(semver_req).await;
let latest = self.npm_fetch_resolver.req_to_nv(&latest_req).await;
let info =
self.npm_fetch_resolver.package_info(&semver_req.name).await;
let latest = info
.and_then(|info| {
let latest_tag = info.dist_tags.get("latest")?;
let lower_bound = &semver_compatible.as_ref()?.version;
if latest_tag > lower_bound {
Some(latest_tag.clone())
} else {
latest_version(Some(latest_tag), info.versions.keys())
}
})
.map(|version| PackageNv {
name: semver_req.name.clone(),
version,
});
PackageLatestVersion {
latest,
semver_compatible,
@ -753,14 +690,29 @@ impl DepManager {
DepKind::Jsr => futs.push_back(
async {
let semver_req = &dep.req;
let latest_req = PackageReq {
name: dep.req.name.clone(),
version_req: deno_semver::WILDCARD_VERSION_REQ.clone(),
};
let _permit = jsr_sema.acquire().await;
let semver_compatible =
self.jsr_fetch_resolver.req_to_nv(semver_req).await;
let latest = self.jsr_fetch_resolver.req_to_nv(&latest_req).await;
let info =
self.jsr_fetch_resolver.package_info(&semver_req.name).await;
let latest = info
.and_then(|info| {
let lower_bound = &semver_compatible.as_ref()?.version;
latest_version(
Some(lower_bound),
info.versions.iter().filter_map(|(version, version_info)| {
if !version_info.yanked {
Some(version)
} else {
None
}
}),
)
})
.map(|version| PackageNv {
name: semver_req.name.clone(),
version,
});
PackageLatestVersion {
latest,
semver_compatible,
@ -825,11 +777,7 @@ impl DepManager {
let dep = &mut self.deps[dep_id.0];
dep.req.version_req = version_req.clone();
match &dep.location {
DepLocation::DenoJson(arc, key_path, import_map_kind) => {
if matches!(import_map_kind, ImportMapKind::Outline) {
// not supported
continue;
}
DepLocation::DenoJson(arc, key_path, _) => {
let updater =
get_or_create_updater(&mut config_updaters, &dep.location)?;
@ -962,3 +910,18 @@ fn parse_req_reference(
DepKind::Jsr => JsrPackageReqReference::from_str(input)?.into_inner(),
})
}
fn latest_version<'a>(
start: Option<&Version>,
versions: impl IntoIterator<Item = &'a Version>,
) -> Option<Version> {
let mut best = start;
for version in versions {
match best {
Some(best_version) if version > best_version => best = Some(version),
None => best = Some(version),
_ => {}
}
}
best.cloned()
}
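
`latest_version` is a plain max-scan seeded with an optional lower bound. In the call sites above, the npm path seeds the scan with the `latest` dist-tag when that tag is not already ahead of the semver-compatible resolution, and the jsr path seeds it with the semver-compatible version while scanning only non-yanked versions. A hedged usage sketch, assuming `deno_semver::Version` ordering and the `parse_standard` constructor used elsewhere in this diff:

fn latest_version_example() {
  let versions: Vec<Version> = ["1.0.0", "1.2.0", "2.0.0"]
    .iter()
    .map(|v| Version::parse_standard(v).unwrap())
    .collect();
  let current = Version::parse_standard("1.2.0").unwrap();
  // Seeded with the current version, the seed only loses to something
  // strictly newer, so an up-to-date package reports itself as latest.
  let latest = latest_version(Some(&current), versions.iter());
  assert_eq!(latest.unwrap().to_string(), "2.0.0");
  // Unseeded, the result is simply the maximum of the iterator.
  let max = latest_version(None, versions.iter());
  assert_eq!(max.unwrap().to_string(), "2.0.0");
}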

View file

@ -3,18 +3,20 @@
use std::collections::HashSet;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::StackString;
use deno_semver::VersionReq;
use deno_terminal::colors;
use crate::args::CacheSetting;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::OutdatedFlags;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
use crate::tools::registry::pm::deps::DepKind;
@ -30,7 +32,7 @@ struct OutdatedPackage {
latest: String,
semver_compatible: String,
current: String,
name: String,
name: StackString,
}
#[allow(clippy::print_stdout)]
@ -100,6 +102,23 @@ fn print_outdated_table(packages: &[OutdatedPackage]) {
println!("{package_fill}{current_fill}{update_fill}{latest_fill}",);
}
fn print_suggestion(compatible: bool) {
log::info!("");
let (cmd, txt) = if compatible {
("", "compatible")
} else {
(" --latest", "available")
};
log::info!(
"{}",
color_print::cformat!(
"<p(245)>Run</> <u>deno outdated --update{}</> <p(245)>to update to the latest {} versions,</>\n<p(245)>or</> <u>deno outdated --help</> <p(245)>for more information.</>",
cmd,
txt,
)
);
}
fn print_outdated(
deps: &mut DepManager,
compatible: bool,
@ -148,6 +167,7 @@ fn print_outdated(
if !outdated.is_empty() {
outdated.sort();
print_outdated_table(&outdated);
print_suggestion(compatible);
}
Ok(())
@ -162,15 +182,15 @@ pub async fn outdated(
let workspace = cli_options.workspace();
let http_client = factory.http_client_provider();
let deps_http_cache = factory.global_http_cache()?;
let mut file_fetcher = FileFetcher::new(
let file_fetcher = CliFileFetcher::new(
deps_http_cache.clone(),
CacheSetting::RespectHeaders,
true,
http_client.clone(),
Default::default(),
None,
true,
CacheSetting::RespectHeaders,
log::Level::Trace,
);
file_fetcher.set_download_log_level(log::Level::Trace);
let file_fetcher = Arc::new(file_fetcher);
let npm_fetch_resolver = Arc::new(NpmFetchResolver::new(
file_fetcher.clone(),
@ -179,6 +199,15 @@ pub async fn outdated(
let jsr_fetch_resolver =
Arc::new(JsrFetchResolver::new(file_fetcher.clone()));
if !cli_options.start_dir.has_deno_json()
&& !cli_options.start_dir.has_pkg_json()
{
bail!(
"No deno.json or package.json in \"{}\".",
cli_options.initial_cwd().display(),
);
}
let args = dep_manager_args(
&factory,
cli_options,

View file

@ -1,19 +1,35 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use deno_ast::diagnostics::Diagnostic;
use deno_ast::diagnostics::DiagnosticLevel;
use deno_ast::diagnostics::DiagnosticLocation;
use deno_ast::diagnostics::DiagnosticSnippet;
use deno_ast::diagnostics::DiagnosticSnippetHighlight;
use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
use deno_ast::diagnostics::DiagnosticSourcePos;
use deno_ast::diagnostics::DiagnosticSourceRange;
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_ast::SourceTextProvider;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow;
use deno_core::ModuleSpecifier;
use deno_graph::DependencyDescriptor;
use deno_graph::DynamicTemplatePart;
use deno_graph::ParserModuleAnalyzer;
use deno_graph::TypeScriptReference;
use deno_package_json::PackageJsonDepValue;
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
use deno_package_json::PackageJsonDepWorkspaceReq;
use deno_resolver::sloppy_imports::SloppyImportsResolutionKind;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_semver::Version;
use deno_semver::VersionReq;
use crate::resolver::CliSloppyImportsResolver;
@ -24,34 +40,163 @@ pub enum SpecifierUnfurlerDiagnostic {
text_info: SourceTextInfo,
range: SourceRange,
},
ResolvingNpmWorkspacePackage {
specifier: ModuleSpecifier,
package_name: String,
text_info: SourceTextInfo,
range: SourceRange,
reason: String,
},
}
impl SpecifierUnfurlerDiagnostic {
pub fn code(&self) -> &'static str {
impl Diagnostic for SpecifierUnfurlerDiagnostic {
fn level(&self) -> DiagnosticLevel {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
DiagnosticLevel::Warning
}
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
DiagnosticLevel::Error
}
}
}
fn code(&self) -> Cow<'_, str> {
match self {
Self::UnanalyzableDynamicImport { .. } => "unanalyzable-dynamic-import",
Self::ResolvingNpmWorkspacePackage { .. } => "npm-workspace-package",
}
.into()
}
fn message(&self) -> Cow<'_, str> {
match self {
Self::UnanalyzableDynamicImport { .. } => {
"unable to analyze dynamic import".into()
}
Self::ResolvingNpmWorkspacePackage {
package_name,
reason,
..
} => format!(
"failed resolving npm workspace package '{}': {}",
package_name, reason
)
.into(),
}
}
pub fn message(&self) -> &'static str {
fn location(&self) -> deno_ast::diagnostics::DiagnosticLocation {
match self {
Self::UnanalyzableDynamicImport { .. } => {
"unable to analyze dynamic import"
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
specifier,
text_info,
range,
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
specifier,
text_info,
range,
..
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
}
}
fn snippet(&self) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the unanalyzable dynamic import".into()),
}],
}),
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the unresolved import".into()),
}],
}),
}
}
fn hint(&self) -> Option<Cow<'_, str>> {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
None
}
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => Some(
"make sure the npm workspace package is resolvable and has a version field in its package.json".into()
),
}
}
fn snippet_fixed(
&self,
) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
None
}
fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
Cow::Borrowed("after publishing this package, imports from the local import map / package.json do not work"),
Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map / package.json")
]),
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
Cow::Borrowed(&[])
},
}
}
fn docs_url(&self) -> Option<Cow<'_, str>> {
None
}
}
enum UnfurlSpecifierError {
Workspace {
package_name: String,
reason: String,
},
}
pub struct SpecifierUnfurler {
sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
workspace_resolver: WorkspaceResolver,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Arc<WorkspaceResolver>,
bare_node_builtins: bool,
}
impl SpecifierUnfurler {
pub fn new(
sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
workspace_resolver: WorkspaceResolver,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Arc<WorkspaceResolver>,
bare_node_builtins: bool,
) -> Self {
debug_assert_eq!(
@ -65,11 +210,45 @@ impl SpecifierUnfurler {
}
}
fn unfurl_specifier_reporting_diagnostic(
&self,
referrer: &ModuleSpecifier,
specifier: &str,
text_info: &SourceTextInfo,
range: &deno_graph::PositionRange,
diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
) -> Option<String> {
match self.unfurl_specifier(referrer, specifier) {
Ok(maybe_unfurled) => maybe_unfurled,
Err(diagnostic) => match diagnostic {
UnfurlSpecifierError::Workspace {
package_name,
reason,
} => {
let range = to_range(text_info, range);
diagnostic_reporter(
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
specifier: referrer.clone(),
package_name,
text_info: text_info.clone(),
range: SourceRange::new(
text_info.start_pos() + range.start,
text_info.start_pos() + range.end,
),
reason,
},
);
None
}
},
}
}
fn unfurl_specifier(
&self,
referrer: &ModuleSpecifier,
specifier: &str,
) -> Option<String> {
) -> Result<Option<String>, UnfurlSpecifierError> {
let resolved = if let Ok(resolved) =
self.workspace_resolver.resolve(specifier, referrer)
{
@ -120,8 +299,40 @@ impl SpecifierUnfurler {
))
.ok()
}
PackageJsonDepValue::Workspace(version_req) => {
// todo(#24612): consider warning or error when this is also a jsr package?
PackageJsonDepValue::Workspace(workspace_version_req) => {
let version_req = match workspace_version_req {
PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
Cow::Borrowed(version_req)
}
PackageJsonDepWorkspaceReq::Caret => {
let version = self
.find_workspace_npm_dep_version(alias)
.map_err(|err| UnfurlSpecifierError::Workspace {
package_name: alias.to_string(),
reason: err.to_string(),
})?;
// version was validated, so ok to unwrap
Cow::Owned(
VersionReq::parse_from_npm(&format!("^{}", version))
.unwrap(),
)
}
PackageJsonDepWorkspaceReq::Tilde => {
let version = self
.find_workspace_npm_dep_version(alias)
.map_err(|err| UnfurlSpecifierError::Workspace {
package_name: alias.to_string(),
reason: err.to_string(),
})?;
// version was validated, so ok to unwrap
Cow::Owned(
VersionReq::parse_from_npm(&format!("~{}", version))
.unwrap(),
)
}
};
// todo(#24612): warn when this is also a jsr package telling
// people to map the specifiers in the import map
ModuleSpecifier::parse(&format!(
"npm:{}@{}{}",
alias,
@ -151,10 +362,14 @@ impl SpecifierUnfurler {
None if self.bare_node_builtins && is_builtin_node_module(specifier) => {
format!("node:{specifier}").parse().unwrap()
}
None => ModuleSpecifier::options()
None => match ModuleSpecifier::options()
.base_url(Some(referrer))
.parse(specifier)
.ok()?,
.ok()
{
Some(value) => value,
None => return Ok(None),
},
};
// TODO(lucacasonato): this requires integration in deno_graph first
// let resolved = if let Ok(specifier) =
@ -180,7 +395,7 @@ impl SpecifierUnfurler {
let resolved =
if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
sloppy_imports_resolver
.resolve(&resolved, SloppyImportsResolutionMode::Execution)
.resolve(&resolved, SloppyImportsResolutionKind::Execution)
.map(|res| res.into_specifier())
.unwrap_or(resolved)
} else {
@ -188,7 +403,7 @@ impl SpecifierUnfurler {
};
let relative_resolved = relative_url(&resolved, referrer);
if relative_resolved == specifier {
None // nothing to unfurl
Ok(None) // nothing to unfurl
} else {
log::debug!(
"Unfurled specifier: {} from {} -> {}",
@ -196,7 +411,29 @@ impl SpecifierUnfurler {
referrer,
relative_resolved
);
Some(relative_resolved)
Ok(Some(relative_resolved))
}
}
fn find_workspace_npm_dep_version(
&self,
pkg_name: &str,
) -> Result<Version, anyhow::Error> {
// todo(#24612): warn when this is also a jsr package telling
// people to map the specifiers in the import map
let pkg_json = self
.workspace_resolver
.package_jsons()
.find(|pkg| pkg.name.as_deref() == Some(pkg_name))
.ok_or_else(|| {
anyhow::anyhow!("unable to find npm package in workspace")
})?;
if let Some(version) = &pkg_json.version {
Ok(Version::parse_from_npm(version)?)
} else {
Err(anyhow::anyhow!(
"missing version in package.json of npm package",
))
}
}
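
Together with the `Caret`/`Tilde` arms above, this means a `workspace:` requirement never survives publishing: `workspace:^` and `workspace:~` are pinned against the version found in the workspace member's package.json. With the values from the test later in this file (add at 0.1.0, subtract at 0.2.0), `"add": "workspace:~"` unfurls to `npm:add@~0.1.0` and `"subtract": "workspace:^"` to `npm:subtract@^0.2.0`. A hedged sketch of the range construction (hypothetical helper; `Version` and `VersionReq` from `deno_semver`):

// workspace_req_range('~', add_version)      -> ~0.1.0
// workspace_req_range('^', subtract_version) -> ^0.2.0
fn workspace_req_range(sigil: char, member_version: &Version) -> VersionReq {
  // The member version was already validated when parsed out of
  // package.json, so this parse cannot fail.
  VersionReq::parse_from_npm(&format!("{}{}", sigil, member_version)).unwrap()
}
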
@ -208,6 +445,7 @@ impl SpecifierUnfurler {
text_info: &SourceTextInfo,
dep: &deno_graph::DynamicDependencyDescriptor,
text_changes: &mut Vec<deno_ast::TextChange>,
diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
) -> bool {
match &dep.argument {
deno_graph::DynamicArgument::String(specifier) => {
@ -217,8 +455,14 @@ impl SpecifierUnfurler {
let Some(relative_index) = maybe_relative_index else {
return true; // always say it's analyzable for a string
};
let unfurled = self.unfurl_specifier(module_url, specifier);
if let Some(unfurled) = unfurled {
let maybe_unfurled = self.unfurl_specifier_reporting_diagnostic(
module_url,
specifier,
text_info,
&dep.argument_range,
diagnostic_reporter,
);
if let Some(unfurled) = maybe_unfurled {
let start = range.start + relative_index;
text_changes.push(deno_ast::TextChange {
range: start..start + specifier.len(),
@ -238,7 +482,13 @@ impl SpecifierUnfurler {
if !specifier.ends_with('/') {
return false;
}
let unfurled = self.unfurl_specifier(module_url, specifier);
let unfurled = self.unfurl_specifier_reporting_diagnostic(
module_url,
specifier,
text_info,
&dep.argument_range,
diagnostic_reporter,
);
let Some(unfurled) = unfurled else {
return true; // nothing to unfurl
};
@ -280,8 +530,15 @@ impl SpecifierUnfurler {
let analyze_specifier =
|specifier: &str,
range: &deno_graph::PositionRange,
text_changes: &mut Vec<deno_ast::TextChange>| {
if let Some(unfurled) = self.unfurl_specifier(url, specifier) {
text_changes: &mut Vec<deno_ast::TextChange>,
diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic)| {
if let Some(unfurled) = self.unfurl_specifier_reporting_diagnostic(
url,
specifier,
text_info,
range,
diagnostic_reporter,
) {
text_changes.push(deno_ast::TextChange {
range: to_range(text_info, range),
new_text: unfurled,
@ -295,11 +552,17 @@ impl SpecifierUnfurler {
&dep.specifier,
&dep.specifier_range,
&mut text_changes,
diagnostic_reporter,
);
}
DependencyDescriptor::Dynamic(dep) => {
let success =
self.try_unfurl_dynamic_dep(url, text_info, dep, &mut text_changes);
let success = self.try_unfurl_dynamic_dep(
url,
text_info,
dep,
&mut text_changes,
diagnostic_reporter,
);
if !success {
let start_pos = text_info.line_start(dep.argument_range.start.line)
@ -319,20 +582,22 @@ impl SpecifierUnfurler {
}
for ts_ref in &module_info.ts_references {
let specifier_with_range = match ts_ref {
TypeScriptReference::Path(range) => range,
TypeScriptReference::Types(range) => range,
TypeScriptReference::Path(s) => s,
TypeScriptReference::Types { specifier, .. } => specifier,
};
analyze_specifier(
&specifier_with_range.text,
&specifier_with_range.range,
&mut text_changes,
diagnostic_reporter,
);
}
for specifier_with_range in &module_info.jsdoc_imports {
for jsdoc in &module_info.jsdoc_imports {
analyze_specifier(
&specifier_with_range.text,
&specifier_with_range.range,
&jsdoc.specifier.text,
&jsdoc.specifier.range,
&mut text_changes,
diagnostic_reporter,
);
}
if let Some(specifier_with_range) = &module_info.jsx_import_source {
@ -340,6 +605,7 @@ impl SpecifierUnfurler {
&specifier_with_range.text,
&specifier_with_range.range,
&mut text_changes,
diagnostic_reporter,
);
}
@ -458,10 +724,10 @@ mod tests {
);
let fs = Arc::new(RealFs);
let unfurler = SpecifierUnfurler::new(
Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
fs,
Some(Arc::new(CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new(fs),
))),
workspace_resolver,
Arc::new(workspace_resolver),
true,
);
@ -547,4 +813,114 @@ const warn2 = await import(`${expr}`);
assert_eq!(unfurled_source, expected_source);
}
}
#[test]
fn test_unfurling_npm_dep_workspace_specifier() {
let cwd = testdata_path().join("unfurl").to_path_buf();
let pkg_json_add = PackageJson::load_from_value(
cwd.join("add/package.json"),
json!({ "name": "add", "version": "0.1.0", }),
);
let pkg_json_subtract = PackageJson::load_from_value(
cwd.join("subtract/package.json"),
json!({ "name": "subtract", "version": "0.2.0", }),
);
let pkg_json_publishing = PackageJson::load_from_value(
cwd.join("publish/package.json"),
json!({
"name": "@denotest/main",
"version": "1.0.0",
"dependencies": {
"add": "workspace:~",
"subtract": "workspace:^",
"non-existent": "workspace:~",
}
}),
);
let root_pkg_json = PackageJson::load_from_value(
cwd.join("package.json"),
json!({ "workspaces": ["./publish", "./subtract", "./add"] }),
);
let workspace_resolver = WorkspaceResolver::new_raw(
Arc::new(ModuleSpecifier::from_directory_path(&cwd).unwrap()),
None,
vec![ResolverWorkspaceJsrPackage {
is_patch: false,
base: ModuleSpecifier::from_directory_path(
cwd.join("publish/jsr.json"),
)
.unwrap(),
name: "@denotest/main".to_string(),
version: Some(Version::parse_standard("1.0.0").unwrap()),
exports: IndexMap::from([(".".to_string(), "mod.ts".to_string())]),
}],
vec![
Arc::new(root_pkg_json),
Arc::new(pkg_json_add),
Arc::new(pkg_json_subtract),
Arc::new(pkg_json_publishing),
],
deno_config::workspace::PackageJsonDepResolution::Enabled,
);
let fs = Arc::new(RealFs);
let unfurler = SpecifierUnfurler::new(
Some(Arc::new(CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new(fs),
))),
Arc::new(workspace_resolver),
true,
);
{
let source_code = r#"import add from "add";
import subtract from "subtract";
console.log(add, subtract);
"#;
let specifier =
ModuleSpecifier::from_file_path(cwd.join("publish").join("mod.ts"))
.unwrap();
let source = parse_ast(&specifier, source_code);
let mut d = Vec::new();
let mut reporter = |diagnostic| d.push(diagnostic);
let unfurled_source = unfurler.unfurl(&specifier, &source, &mut reporter);
assert_eq!(d.len(), 0);
// it will inline the version
let expected_source = r#"import add from "npm:add@~0.1.0";
import subtract from "npm:subtract@^0.2.0";
console.log(add, subtract);
"#;
assert_eq!(unfurled_source, expected_source);
}
{
let source_code = r#"import nonExistent from "non-existent";
console.log(nonExistent);
"#;
let specifier =
ModuleSpecifier::from_file_path(cwd.join("publish").join("other.ts"))
.unwrap();
let source = parse_ast(&specifier, source_code);
let mut d = Vec::new();
let mut reporter = |diagnostic| d.push(diagnostic);
let unfurled_source = unfurler.unfurl(&specifier, &source, &mut reporter);
assert_eq!(d.len(), 1);
match &d[0] {
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
package_name,
reason,
..
} => {
assert_eq!(package_name, "non-existent");
assert_eq!(reason, "unable to find npm package in workspace");
}
_ => unreachable!(),
}
// won't make any changes, but the above will be a fatal error
assert!(matches!(d[0].level(), DiagnosticLevel::Error));
assert_eq!(unfurled_source, source_code);
}
}
}

View file

@ -11,7 +11,8 @@ use crate::args::ReplFlags;
use crate::cdp;
use crate::colors;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
@ -143,7 +144,7 @@ async fn read_line_and_poll(
async fn read_eval_file(
cli_options: &CliOptions,
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
eval_file: &str,
) -> Result<Arc<str>, AnyError> {
let specifier =
@ -151,7 +152,7 @@ async fn read_eval_file(
let file = file_fetcher.fetch_bypass_permissions(&specifier).await?;
Ok(file.into_text_decoded()?.source)
Ok(TextDecodedFile::decode(file)?.source)
}
#[allow(clippy::print_stdout)]

View file

@ -43,13 +43,13 @@ use deno_core::unsync::spawn;
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_core::PollEventLoopOptions;
use deno_graph::source::ResolutionMode;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Match;
use regex::Regex;
@ -701,11 +701,6 @@ impl ReplSession {
let mut collector = ImportCollector::new();
program.visit_with(&mut collector);
let referrer_range = deno_graph::Range {
specifier: self.referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
};
let resolved_imports = collector
.imports
.iter()
@ -714,9 +709,10 @@ impl ReplSession {
.resolver
.resolve(
i,
&referrer_range,
NodeModuleKind::Esm,
ResolutionMode::Execution,
&self.referrer,
deno_graph::Position::zeroed(),
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.ok()
.or_else(|| ModuleSpecifier::parse(i).ok())
@ -731,7 +727,9 @@ impl ReplSession {
let has_node_specifier =
resolved_imports.iter().any(|url| url.scheme() == "node");
if !npm_imports.is_empty() || has_node_specifier {
npm_resolver.add_package_reqs(&npm_imports).await?;
npm_resolver
.add_and_cache_package_reqs(&npm_imports)
.await?;
// prevent messages in the repl about @types/node not being cached
if has_node_specifier {

View file

@ -3,6 +3,7 @@
use std::io::Read;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::File;
use deno_config::deno_json::NodeModulesDirMode;
use deno_core::error::AnyError;
use deno_runtime::WorkerExecutionMode;
@ -11,7 +12,6 @@ use crate::args::EvalFlags;
use crate::args::Flags;
use crate::args::WatchFlagsWithPaths;
use crate::factory::CliFactory;
use crate::file_fetcher::File;
use crate::util;
use crate::util::file_watcher::WatcherRestartMode;
@ -97,7 +97,7 @@ pub async fn run_from_stdin(flags: Arc<Flags>) -> Result<i32, AnyError> {
// Save a fake file into file fetcher cache
// to allow module access by TS compiler
file_fetcher.insert_memory_files(File {
specifier: main_module.clone(),
url: main_module.clone(),
maybe_headers: None,
source: source.into(),
});
@ -184,7 +184,7 @@ pub async fn eval_command(
// Save a fake file into file fetcher cache
// to allow module access by TS compiler.
file_fetcher.insert_memory_files(File {
specifier: main_module.clone(),
url: main_module.clone(),
maybe_headers: None,
source: source_code.into_bytes().into(),
});
@ -198,13 +198,23 @@ pub async fn eval_command(
}
pub async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> {
let cli_options = factory.cli_options()?;
// ensure an "npm install" is done if the user has explicitly
// opted into using a managed node_modules directory
if factory.cli_options()?.node_modules_dir()?
== Some(NodeModulesDirMode::Auto)
{
if cli_options.node_modules_dir()? == Some(NodeModulesDirMode::Auto) {
if let Some(npm_resolver) = factory.npm_resolver().await?.as_managed() {
let already_done =
npm_resolver.ensure_top_level_package_json_install().await?;
if !already_done
&& matches!(
cli_options.default_npm_caching_strategy(),
crate::graph_util::NpmCachingStrategy::Eager
)
{
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
}
}
}
Ok(())
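
Editor's note: the hunk above makes `maybe_npm_install` eagerly cache all npm packages after the top-level package.json install, but only when the node_modules dir mode is auto, the install was not already done, and the default caching strategy is eager. A condensed sketch of that decision with stand-in enums (the real types are `NodeModulesDirMode` from deno_config and `NpmCachingStrategy` from `crate::graph_util`):

#[derive(PartialEq)]
#[allow(dead_code)]
enum NodeModulesDirMode { Auto, Manual, None }

#[derive(PartialEq)]
#[allow(dead_code)]
enum NpmCachingStrategy { Eager, Lazy }

fn should_cache_all_packages(
  mode: Option<NodeModulesDirMode>,
  already_done: bool,
  strategy: NpmCachingStrategy,
) -> bool {
  // Mirrors the control flow above: skip the extra cache_packages() call
  // when ensure_top_level_package_json_install() already did the work or
  // the strategy is lazy.
  mode == Some(NodeModulesDirMode::Auto)
    && !already_done
    && strategy == NpmCachingStrategy::Eager
}

fn main() {
  assert!(should_cache_all_packages(
    Some(NodeModulesDirMode::Auto), false, NpmCachingStrategy::Eager));
  assert!(!should_cache_all_packages(
    Some(NodeModulesDirMode::Auto), true, NpmCachingStrategy::Eager));
  assert!(!should_cache_all_packages(
    Some(NodeModulesDirMode::Manual), false, NpmCachingStrategy::Eager));
}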

View file

@ -8,6 +8,7 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use console_static_text::ansi::strip_ansi_codes;
use deno_config::workspace::FolderConfigs;
use deno_config::workspace::TaskDefinition;
use deno_config::workspace::TaskOrScript;
@ -25,6 +26,7 @@ use deno_core::futures::StreamExt;
use deno_core::url::Url;
use deno_path_util::normalize_path;
use deno_runtime::deno_node::NodeResolver;
use deno_task_shell::KillSignal;
use deno_task_shell::ShellCommand;
use indexmap::IndexMap;
use regex::Regex;
@ -36,6 +38,7 @@ use crate::colors;
use crate::factory::CliFactory;
use crate::npm::CliNpmResolver;
use crate::task_runner;
use crate::task_runner::run_future_forwarding_signals;
use crate::util::fs::canonicalize_path;
#[derive(Debug)]
@ -75,43 +78,29 @@ pub async fn execute_script(
let packages_task_configs: Vec<PackageTaskInfo> = if let Some(filter) =
&task_flags.filter
{
let task_name = task_flags.task.as_ref().unwrap();
// Filter based on package name
let package_regex = arg_to_regex(filter)?;
let task_regex = arg_to_regex(task_name)?;
let workspace = cli_options.workspace();
let Some(task_name) = &task_flags.task else {
print_available_tasks_workspace(
cli_options,
&package_regex,
filter,
force_use_pkg_json,
task_flags.recursive,
)?;
return Ok(0);
};
let task_name_filter = arg_to_task_name_filter(task_name)?;
let mut packages_task_info: Vec<PackageTaskInfo> = vec![];
fn matches_package(
config: &FolderConfigs,
force_use_pkg_json: bool,
regex: &Regex,
) -> bool {
if !force_use_pkg_json {
if let Some(deno_json) = &config.deno_json {
if let Some(name) = &deno_json.json.name {
if regex.is_match(name) {
return true;
}
}
}
}
if let Some(package_json) = &config.pkg_json {
if let Some(name) = &package_json.name {
if regex.is_match(name) {
return true;
}
}
}
false
}
let workspace = cli_options.workspace();
for folder in workspace.config_folders() {
if !matches_package(folder.1, force_use_pkg_json, &package_regex) {
if !task_flags.recursive
&& !matches_package(folder.1, force_use_pkg_json, &package_regex)
{
continue;
}
@ -148,12 +137,20 @@ pub async fn execute_script(
// Match tasks in deno.json
for name in tasks_config.task_names() {
if task_regex.is_match(name) && !visited.contains(name) {
let matches_filter = match &task_name_filter {
TaskNameFilter::Exact(n) => *n == name,
TaskNameFilter::Regex(re) => re.is_match(name),
};
if matches_filter && !visited.contains(name) {
matched.insert(name.to_string());
visit_task(&tasks_config, &mut visited, name);
}
}
if matched.is_empty() {
continue;
}
packages_task_info.push(PackageTaskInfo {
matched_tasks: matched
.iter()
@ -195,6 +192,7 @@ pub async fn execute_script(
&mut std::io::stdout(),
&cli_options.start_dir,
&tasks_config,
None,
)?;
return Ok(0);
};
@ -225,28 +223,36 @@ pub async fn execute_script(
concurrency: no_of_concurrent_tasks.into(),
};
let kill_signal = KillSignal::default();
run_future_forwarding_signals(kill_signal.clone(), async {
if task_flags.eval {
return task_runner
.run_deno_task(
&Url::from_directory_path(cli_options.initial_cwd()).unwrap(),
"",
&TaskDefinition {
command: task_flags.task.as_ref().unwrap().to_string(),
command: Some(task_flags.task.as_ref().unwrap().to_string()),
dependencies: vec![],
description: None,
},
kill_signal,
cli_options.argv(),
)
.await;
}
for task_config in &packages_task_configs {
let exit_code = task_runner.run_tasks(task_config).await?;
let exit_code = task_runner
.run_tasks(task_config, &kill_signal, cli_options.argv())
.await?;
if exit_code > 0 {
return Ok(exit_code);
}
}
Ok(0)
})
.await
}
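
Editor's note: `deno task` execution is now wrapped in `run_future_forwarding_signals`, sharing one `deno_task_shell::KillSignal` across every spawned task so a single interrupt can stop all of them. A rough stand-in for the clone-and-share pattern (illustrative only; the real type carries richer signal semantics):

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

#[derive(Clone, Default)]
struct KillSignal(Arc<AtomicBool>);

impl KillSignal {
  fn send(&self) {
    self.0.store(true, Ordering::SeqCst);
  }
  fn is_killed(&self) -> bool {
    self.0.load(Ordering::SeqCst)
  }
}

fn main() {
  let kill_signal = KillSignal::default();
  // Each task receives a clone; in the real CLI this is wired to
  // SIGINT/SIGTERM so one Ctrl-C reaches every running task.
  let for_task = kill_signal.clone();
  kill_signal.send();
  assert!(for_task.is_killed());
}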
struct RunSingleOptions<'a> {
@ -254,6 +260,8 @@ struct RunSingleOptions<'a> {
script: &'a str,
cwd: &'a Path,
custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
kill_signal: KillSignal,
argv: &'a [String],
}
struct TaskRunner<'a> {
@ -269,9 +277,11 @@ impl<'a> TaskRunner<'a> {
pub async fn run_tasks(
&self,
pkg_tasks_config: &PackageTaskInfo,
kill_signal: &KillSignal,
argv: &[String],
) -> Result<i32, deno_core::anyhow::Error> {
match sort_tasks_topo(pkg_tasks_config) {
Ok(sorted) => self.run_tasks_in_parallel(sorted).await,
Ok(sorted) => self.run_tasks_in_parallel(sorted, kill_signal, argv).await,
Err(err) => match err {
TaskError::NotFound(name) => {
if self.task_flags.is_run {
@ -300,12 +310,15 @@ impl<'a> TaskRunner<'a> {
&mut std::io::stderr(),
&self.cli_options.start_dir,
tasks_config,
None,
)
}
async fn run_tasks_in_parallel(
&self,
tasks: Vec<ResolvedTask<'a>>,
kill_signal: &KillSignal,
args: &[String],
) -> Result<i32, deno_core::anyhow::Error> {
struct PendingTasksContext<'a> {
completed: HashSet<usize>,
@ -326,13 +339,22 @@ impl<'a> TaskRunner<'a> {
fn get_next_task<'b>(
&mut self,
runner: &'b TaskRunner<'b>,
kill_signal: &KillSignal,
argv: &'a [String],
) -> Option<
LocalBoxFuture<'b, Result<(i32, &'a ResolvedTask<'a>), AnyError>>,
>
where
'a: 'b,
{
for task in self.tasks.iter() {
let mut tasks_iter = self.tasks.iter().peekable();
while let Some(task) = tasks_iter.next() {
let args = if tasks_iter.peek().is_none() {
argv
} else {
&[]
};
if self.completed.contains(&task.id)
|| self.running.contains(&task.id)
{
@ -348,15 +370,30 @@ impl<'a> TaskRunner<'a> {
}
self.running.insert(task.id);
let kill_signal = kill_signal.clone();
return Some(
async move {
match task.task_or_script {
TaskOrScript::Task(_, def) => {
runner.run_deno_task(task.folder_url, task.name, def).await
runner
.run_deno_task(
task.folder_url,
task.name,
def,
kill_signal,
args,
)
.await
}
TaskOrScript::Script(scripts, _) => {
runner
.run_npm_script(task.folder_url, task.name, scripts)
.run_npm_script(
task.folder_url,
task.name,
scripts,
kill_signal,
args,
)
.await
}
}
@ -379,7 +416,7 @@ impl<'a> TaskRunner<'a> {
while context.has_remaining_tasks() {
while queue.len() < self.concurrency {
if let Some(task) = context.get_next_task(self) {
if let Some(task) = context.get_next_task(self, kill_signal, args) {
queue.push(task);
} else {
break;
@ -408,7 +445,26 @@ impl<'a> TaskRunner<'a> {
dir_url: &Url,
task_name: &str,
definition: &TaskDefinition,
kill_signal: KillSignal,
argv: &'a [String],
) -> Result<i32, deno_core::anyhow::Error> {
let Some(command) = &definition.command else {
log::info!(
"{} {} {}",
colors::green("Task"),
colors::cyan(task_name),
colors::gray("(no command)")
);
return Ok(0);
};
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?;
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
}
let cwd = match &self.task_flags.cwd {
Some(path) => canonicalize_path(&PathBuf::from(path))
.context("failed canonicalizing --cwd")?,
@ -419,12 +475,15 @@ impl<'a> TaskRunner<'a> {
self.npm_resolver,
self.node_resolver,
)?;
self
.run_single(RunSingleOptions {
task_name,
script: &definition.command,
script: command,
cwd: &cwd,
custom_commands,
kill_signal,
argv,
})
.await
}
@ -434,10 +493,15 @@ impl<'a> TaskRunner<'a> {
dir_url: &Url,
task_name: &str,
scripts: &IndexMap<String, String>,
kill_signal: KillSignal,
argv: &[String],
) -> Result<i32, deno_core::anyhow::Error> {
// ensure the npm packages are installed if using a managed resolver
if let Some(npm_resolver) = self.npm_resolver.as_managed() {
npm_resolver.ensure_top_level_package_json_install().await?;
npm_resolver
.cache_packages(crate::npm::PackageCaching::All)
.await?;
}
let cwd = match &self.task_flags.cwd {
@ -457,6 +521,7 @@ impl<'a> TaskRunner<'a> {
self.npm_resolver,
self.node_resolver,
)?;
for task_name in &task_names {
if let Some(script) = scripts.get(task_name) {
let exit_code = self
@ -465,6 +530,8 @@ impl<'a> TaskRunner<'a> {
script,
cwd: &cwd,
custom_commands: custom_commands.clone(),
kill_signal: kill_signal.clone(),
argv,
})
.await?;
if exit_code > 0 {
@ -485,11 +552,13 @@ impl<'a> TaskRunner<'a> {
script,
cwd,
custom_commands,
kill_signal,
argv,
} = opts;
output_task(
opts.task_name,
&task_runner::get_script_with_args(script, self.cli_options.argv()),
&task_runner::get_script_with_args(script, argv),
);
Ok(
@ -500,9 +569,10 @@ impl<'a> TaskRunner<'a> {
env_vars: self.env_vars.clone(),
custom_commands,
init_cwd: self.cli_options.initial_cwd(),
argv: self.cli_options.argv(),
argv,
root_node_modules_dir: self.npm_resolver.root_node_modules_path(),
stdio: None,
kill_signal,
})
.await?
.exit_code,
@ -623,6 +693,32 @@ fn sort_tasks_topo<'a>(
Ok(sorted)
}
fn matches_package(
config: &FolderConfigs,
force_use_pkg_json: bool,
regex: &Regex,
) -> bool {
if !force_use_pkg_json {
if let Some(deno_json) = &config.deno_json {
if let Some(name) = &deno_json.json.name {
if regex.is_match(name) {
return true;
}
}
}
}
if let Some(package_json) = &config.pkg_json {
if let Some(name) = &package_json.name {
if regex.is_match(name) {
return true;
}
}
}
false
}
fn output_task(task_name: &str, script: &str) {
log::info!(
"{} {} {}",
@ -632,12 +728,70 @@ fn output_task(task_name: &str, script: &str) {
);
}
fn print_available_tasks_workspace(
cli_options: &Arc<CliOptions>,
package_regex: &Regex,
filter: &str,
force_use_pkg_json: bool,
recursive: bool,
) -> Result<(), AnyError> {
let workspace = cli_options.workspace();
let mut matched = false;
for folder in workspace.config_folders() {
if !recursive
&& !matches_package(folder.1, force_use_pkg_json, package_regex)
{
continue;
}
matched = true;
let member_dir = workspace.resolve_member_dir(folder.0);
let mut tasks_config = member_dir.to_tasks_config()?;
let mut pkg_name = folder
.1
.deno_json
.as_ref()
.and_then(|deno| deno.json.name.clone())
.or(folder.1.pkg_json.as_ref().and_then(|pkg| pkg.name.clone()));
if force_use_pkg_json {
tasks_config = tasks_config.with_only_pkg_json();
pkg_name = folder.1.pkg_json.as_ref().and_then(|pkg| pkg.name.clone());
}
print_available_tasks(
&mut std::io::stdout(),
&cli_options.start_dir,
&tasks_config,
pkg_name,
)?;
}
if !matched {
log::warn!(
"{}",
colors::red(format!("No package name matched the filter '{}' in available 'deno.json' or 'package.json' files.", filter))
);
}
Ok(())
}
fn print_available_tasks(
writer: &mut dyn std::io::Write,
workspace_dir: &Arc<WorkspaceDirectory>,
tasks_config: &WorkspaceTasksConfig,
pkg_name: Option<String>,
) -> Result<(), std::io::Error> {
writeln!(writer, "{}", colors::green("Available tasks:"))?;
let heading = if let Some(s) = pkg_name {
format!("Available tasks ({}):", colors::cyan(s))
} else {
"Available tasks:".to_string()
};
writeln!(writer, "{}", colors::green(heading))?;
let is_cwd_root_dir = tasks_config.root.is_none();
if tasks_config.is_empty() {
@ -693,7 +847,7 @@ fn print_available_tasks(
is_deno: false,
name: name.to_string(),
task: deno_config::deno_json::TaskDefinition {
command: script.to_string(),
command: Some(script.to_string()),
dependencies: vec![],
description: None,
},
@ -721,22 +875,88 @@ fn print_available_tasks(
)?;
if let Some(description) = &desc.task.description {
let slash_slash = colors::italic_gray("//");
for line in description.lines() {
writeln!(
writer,
" {slash_slash} {}",
colors::italic_gray(description)
colors::italic_gray(strip_ansi_codes_and_escape_control_chars(line))
)?;
}
writeln!(writer, " {}", desc.task.command)?;
}
if let Some(command) = &desc.task.command {
writeln!(
writer,
" {}",
strip_ansi_codes_and_escape_control_chars(command)
)?;
};
if !desc.task.dependencies.is_empty() {
let dependencies = desc
.task
.dependencies
.into_iter()
.map(|d| strip_ansi_codes_and_escape_control_chars(&d))
.collect::<Vec<_>>()
.join(", ");
writeln!(
writer,
" {} {}",
colors::gray("depends on:"),
colors::cyan(desc.task.dependencies.join(", "))
colors::cyan(dependencies)
)?;
}
}
Ok(())
}
fn strip_ansi_codes_and_escape_control_chars(s: &str) -> String {
strip_ansi_codes(s)
.chars()
.map(|c| match c {
'\n' => "\\n".to_string(),
'\r' => "\\r".to_string(),
'\t' => "\\t".to_string(),
c if c.is_control() => format!("\\x{:02x}", c as u8),
c => c.to_string(),
})
.collect()
}
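
Editor's note: the new helper above keeps task names, descriptions, and commands on a single printable line. A usage sketch of the same escaping, minus the ANSI stripping (which comes from console_static_text in the real code):

fn escape_control_chars(s: &str) -> String {
  s.chars()
    .map(|c| match c {
      '\n' => "\\n".to_string(),
      '\r' => "\\r".to_string(),
      '\t' => "\\t".to_string(),
      c if c.is_control() => format!("\\x{:02x}", c as u8),
      c => c.to_string(),
    })
    .collect()
}

fn main() {
  // A multi-line description with a bell character renders as one safe line.
  assert_eq!(escape_control_chars("build\nall\x07"), "build\\nall\\x07");
}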
fn arg_to_task_name_filter(input: &str) -> Result<TaskNameFilter, AnyError> {
if !input.contains("*") {
return Ok(TaskNameFilter::Exact(input));
}
let mut regex_str = regex::escape(input);
regex_str = regex_str.replace("\\*", ".*");
let re = Regex::new(&regex_str)?;
Ok(TaskNameFilter::Regex(re))
}
#[derive(Debug)]
enum TaskNameFilter<'s> {
Exact(&'s str),
Regex(regex::Regex),
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_arg_to_task_name_filter() {
assert!(matches!(
arg_to_task_name_filter("test").unwrap(),
TaskNameFilter::Exact("test")
));
assert!(matches!(
arg_to_task_name_filter("test-").unwrap(),
TaskNameFilter::Exact("test-")
));
assert!(matches!(
arg_to_task_name_filter("test*").unwrap(),
TaskNameFilter::Regex(_)
));
}
}
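
Editor's note: the wildcard-to-regex conversion used by `arg_to_task_name_filter` can be reproduced standalone. Note the resulting pattern is unanchored, so `is_match` succeeds anywhere in the task name (a sketch assuming the regex crate):

use regex::Regex; // regex = "1" in Cargo.toml

fn main() {
  // "build:*" escapes to "build:\*", then the escaped star becomes ".*".
  let pattern = regex::escape("build:*").replace("\\*", ".*");
  let re = Regex::new(&pattern).unwrap();
  assert!(re.is_match("build:wasm"));
  assert!(re.is_match("build:napi"));
  assert!(!re.is_match("bench"));
}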

View file

@ -7,8 +7,7 @@ use crate::args::TestReporterConfig;
use crate::colors;
use crate::display;
use crate::factory::CliFactory;
use crate::file_fetcher::File;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops;
use crate::util::extract::extract_doc_tests;
@ -21,6 +20,7 @@ use crate::worker::CliMainWorkerFactory;
use crate::worker::CoverageCollector;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::File;
use deno_config::glob::FilePatterns;
use deno_config::glob::WalkEntry;
use deno_core::anyhow;
@ -616,7 +616,10 @@ async fn configure_main_worker(
WorkerExecutionMode::Test,
specifier.clone(),
permissions_container,
vec![ops::testing::deno_test::init_ops(worker_sender.sender)],
vec![
ops::testing::deno_test::init_ops(worker_sender.sender),
ops::lint::deno_lint::init_ops(),
],
Stdio {
stdin: StdioPipe::inherit(),
stdout: StdioPipe::file(worker_sender.stdout),
@ -1514,7 +1517,7 @@ fn collect_specifiers_with_test_mode(
/// as well.
async fn fetch_specifiers_with_test_mode(
cli_options: &CliOptions,
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
member_patterns: impl Iterator<Item = FilePatterns>,
doc: &bool,
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
@ -1716,7 +1719,11 @@ pub async fn run_tests_with_watch(
&cli_options.permissions_options(),
)?;
let graph = module_graph_creator
.create_graph(graph_kind, test_modules)
.create_graph(
graph_kind,
test_modules,
crate::graph_util::NpmCachingStrategy::Eager,
)
.await?;
module_graph_creator.graph_valid(&graph)?;
let test_modules = &graph.roots;
@ -1818,7 +1825,7 @@ pub async fn run_tests_with_watch(
/// Extracts doc tests from files specified by the given specifiers.
async fn get_doc_tests(
specifiers_with_mode: &[(Url, TestMode)],
file_fetcher: &FileFetcher,
file_fetcher: &CliFileFetcher,
) -> Result<Vec<File>, AnyError> {
let specifiers_needing_extraction = specifiers_with_mode
.iter()
@ -1843,7 +1850,7 @@ fn get_target_specifiers(
specifiers_with_mode
.into_iter()
.filter_map(|(s, mode)| mode.needs_test_run().then_some(s))
.chain(doc_tests.iter().map(|d| d.specifier.clone()))
.chain(doc_tests.iter().map(|d| d.url.clone()))
.collect()
}

View file

@ -21,6 +21,7 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::unsync::spawn;
use deno_core::url::Url;
use deno_semver::SmallStackString;
use deno_semver::Version;
use once_cell::sync::Lazy;
use std::borrow::Cow;
@ -255,7 +256,7 @@ async fn print_release_notes(
let is_deno_2_rc = new_semver.major == 2
&& new_semver.minor == 0
&& new_semver.patch == 0
&& new_semver.pre.first() == Some(&"rc".to_string());
&& new_semver.pre.first().map(|s| s.as_str()) == Some("rc");
if is_deno_2_rc || is_switching_from_deno1_to_deno2 {
log::info!(
@ -674,7 +675,7 @@ impl RequestedVersion {
);
};
if semver.pre.contains(&"rc".to_string()) {
if semver.pre.contains(&SmallStackString::from_static("rc")) {
(ReleaseChannel::Rc, passed_version)
} else {
(ReleaseChannel::Stable, passed_version)
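
Editor's note: both hunks above adapt to deno_semver storing prerelease components as `SmallStackString` instead of `String` — equality is now checked through `as_str()` or a `SmallStackString` constant rather than by allocating a `String`. A generic sketch of the first comparison:

fn is_rc<S: AsRef<str>>(pre: &[S]) -> bool {
  // Compare by &str so the element type (String, SmallStackString, ...)
  // doesn't matter.
  pre.first().map(|s| s.as_ref()) == Some("rc")
}

fn main() {
  assert!(is_rc(&["rc", "1"]));
  assert!(!is_rc(&["beta"]));
  assert!(!is_rc::<&str>(&[]));
}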

Some files were not shown because too many files have changed in this diff.